From 3d3fd80629c804b0593692318a94dc9f344936fd Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Mon, 26 Feb 2024 23:59:29 +0100 Subject: Add basic DAV server --- Cargo.lock | 14 ++++++++++- Cargo.toml | 29 +++++++++++++---------- src/config.rs | 7 ++++++ src/dav/mod.rs | 75 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ src/main.rs | 4 ++++ src/server.rs | 13 ++++++++++ 6 files changed, 129 insertions(+), 13 deletions(-) create mode 100644 src/dav/mod.rs diff --git a/Cargo.lock b/Cargo.lock index a50d101..c918f48 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -28,7 +28,7 @@ checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" [[package]] name = "aerogramme" -version = "0.2.2" +version = "0.3.0" dependencies = [ "anyhow", "argon2", @@ -46,6 +46,8 @@ dependencies = [ "eml-codec", "futures", "hex", + "http-body-util", + "hyper 1.2.0", "hyper-rustls 0.26.0", "hyper-util", "im", @@ -58,6 +60,7 @@ dependencies = [ "log", "nix", "nom 7.1.3", + "quick-xml", "rand", "rmp-serde", "rpassword", @@ -2691,6 +2694,15 @@ dependencies = [ "prost", ] +[[package]] +name = "quick-xml" +version = "0.31.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1004a344b30a54e2ee58d66a71b32d2db2feb0a31f9a2d302bf0536f15de2a33" +dependencies = [ + "memchr", +] + [[package]] name = "quote" version = "1.0.35" diff --git a/Cargo.toml b/Cargo.toml index 7d2e032..218a0ed 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "aerogramme" -version = "0.2.2" +version = "0.3.0" authors = ["Alex Auvolat ", "Quentin Dufour "] edition = "2021" license = "EUPL-1.2" @@ -18,6 +18,7 @@ backtrace = "0.3" console-subscriber = "0.2" tracing-subscriber = "0.3" tracing = "0.1" +thiserror = "1.0.56" # language extensions lazy_static = "1.4" @@ -32,13 +33,27 @@ chrono = { version = "0.4", default-features = false, features = ["alloc"] } nix = { version = "0.27", features = ["signal"] } clap = { version = "3.1.18", features = ["derive", "env"] } -# serialization & parsing +# email protocols +eml-codec = "0.1.2" +smtp-message = { git = "http://github.com/Alexis211/kannader", branch = "feature/lmtp" } +smtp-server = { git = "http://github.com/Alexis211/kannader", branch = "feature/lmtp" } +imap-codec = { version = "2.0.0", features = ["bounded-static", "ext_condstore_qresync"] } +imap-flow = { git = "https://github.com/duesee/imap-flow.git", branch = "main" } + +# http & web +http-body-util = "0.1" +hyper = "1.2" +hyper-rustls = { version = "0.26", features = ["http2"] } +hyper-util = { version = "0.1", features = ["full"] } + +# serialization, compression & parsing serde = "1.0.137" rmp-serde = "0.15" toml = "0.5" base64 = "0.21" hex = "0.4" nom = "7.1" +quick-xml = "0.31" zstd = { version = "0.9", default-features = false } # cryptography & security @@ -48,8 +63,6 @@ rand = "0.8.5" rustls = "0.22" rustls-pemfile = "2.0" tokio-rustls = "0.25" -hyper-rustls = { version = "0.26", features = ["http2"] } -hyper-util = { version = "0.1", features = ["full"] } rpassword = "7.0" # login @@ -62,14 +75,6 @@ aws-sdk-s3 = "1" aws-smithy-runtime = "1" aws-smithy-runtime-api = "1" -# email protocols -eml-codec = "0.1.2" -smtp-message = { git = "http://github.com/Alexis211/kannader", branch = "feature/lmtp" } -smtp-server = { git = "http://github.com/Alexis211/kannader", branch = "feature/lmtp" } -imap-codec = { version = "2.0.0", features = ["bounded-static", "ext_condstore_qresync"] } -imap-flow = { git = "https://github.com/duesee/imap-flow.git", branch = "main" } 
-thiserror = "1.0.56" - [dev-dependencies] [patch.crates-io] diff --git a/src/config.rs b/src/config.rs index faaa1ba..7de2eac 100644 --- a/src/config.rs +++ b/src/config.rs @@ -10,6 +10,7 @@ use serde::{Deserialize, Serialize}; pub struct CompanionConfig { pub pid: Option, pub imap: ImapUnsecureConfig, + // @FIXME Add DAV #[serde(flatten)] pub users: LoginStaticConfig, @@ -22,6 +23,7 @@ pub struct ProviderConfig { pub imap_unsecure: Option, pub lmtp: Option, pub auth: Option, + pub dav_unsecure: Option, pub users: UserManagement, } @@ -51,6 +53,11 @@ pub struct ImapConfig { pub key: PathBuf, } +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct DavUnsecureConfig { + pub bind_addr: SocketAddr, +} + #[derive(Serialize, Deserialize, Debug, Clone)] pub struct ImapUnsecureConfig { pub bind_addr: SocketAddr, diff --git a/src/dav/mod.rs b/src/dav/mod.rs new file mode 100644 index 0000000..36d154a --- /dev/null +++ b/src/dav/mod.rs @@ -0,0 +1,75 @@ +use std::net::SocketAddr; + +use anyhow::Result; +use hyper::service::service_fn; +use hyper::{Request, Response, body::Bytes}; +use hyper::server::conn::http1 as http; +use hyper_util::rt::TokioIo; +use http_body_util::Full; +use futures::stream::{FuturesUnordered, StreamExt}; +use tokio::net::TcpListener; +use tokio::sync::watch; + +use crate::config::DavUnsecureConfig; +use crate::login::ArcLoginProvider; + +pub struct Server { + bind_addr: SocketAddr, + login_provider: ArcLoginProvider, +} + +pub fn new_unsecure(config: DavUnsecureConfig, login: ArcLoginProvider) -> Server { + Server { + bind_addr: config.bind_addr, + login_provider: login, + } +} + +impl Server { + pub async fn run(self: Self, mut must_exit: watch::Receiver) -> Result<()> { + let tcp = TcpListener::bind(self.bind_addr).await?; + tracing::info!("DAV server listening on {:#}", self.bind_addr); + + let mut connections = FuturesUnordered::new(); + while !*must_exit.borrow() { + let wait_conn_finished = async { + if connections.is_empty() { + futures::future::pending().await + } else { + connections.next().await + } + }; + let (socket, remote_addr) = tokio::select! 
{ + a = tcp.accept() => a?, + _ = wait_conn_finished => continue, + _ = must_exit.changed() => continue, + }; + tracing::info!("DAV: accepted connection from {}", remote_addr); + let stream = TokioIo::new(socket); + let conn = tokio::spawn(async { + //@FIXME should create a generic "public web" server on which "routers" could be + //abitrarily bound + //@FIXME replace with a handler supporting http2 and TLS + match http::Builder::new().serve_connection(stream, service_fn(router)).await { + Err(e) => tracing::warn!(err=?e, "connection failed"), + Ok(()) => tracing::trace!("connection terminated with success"), + } + }); + connections.push(conn); + } + drop(tcp); + + tracing::info!("DAV server shutting down, draining remaining connections..."); + while connections.next().await.is_some() {} + + Ok(()) + } +} + +async fn router(req: Request) -> Result>> { + let url_exploded: Vec<_> = req.uri().path().split(",").collect(); + match url_exploded { + _ => unimplemented!(), + } + Ok(Response::new(Full::new(Bytes::from("Hello World!")))) +} diff --git a/src/main.rs b/src/main.rs index 12a5895..6e3057a 100644 --- a/src/main.rs +++ b/src/main.rs @@ -4,6 +4,7 @@ mod auth; mod bayou; mod config; mod cryptoblob; +mod dav; mod imap; mod k2v_util; mod lmtp; @@ -187,6 +188,9 @@ async fn main() -> Result<()> { imap_unsecure: Some(ImapUnsecureConfig { bind_addr: SocketAddr::new(IpAddr::V6(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1)), 1143), }), + dav_unsecure: Some(DavUnsecureConfig { + bind_addr: SocketAddr::new(IpAddr::V6(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1)), 8087), + }), lmtp: Some(LmtpConfig { bind_addr: SocketAddr::new(IpAddr::V6(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1)), 1025), hostname: "example.tld".to_string(), diff --git a/src/server.rs b/src/server.rs index 9899981..09e91ad 100644 --- a/src/server.rs +++ b/src/server.rs @@ -9,6 +9,7 @@ use tokio::sync::watch; use crate::auth; use crate::config::*; +use crate::dav; use crate::imap; use crate::lmtp::*; use crate::login::ArcLoginProvider; @@ -19,6 +20,7 @@ pub struct Server { imap_unsecure_server: Option, imap_server: Option, auth_server: Option, + dav_unsecure_server: Option, pid_file: Option, } @@ -34,6 +36,7 @@ impl Server { imap_unsecure_server, imap_server: None, auth_server: None, + dav_unsecure_server: None, pid_file: config.pid, }) } @@ -57,11 +60,15 @@ impl Server { let auth_server = config .auth .map(|auth| auth::AuthServer::new(auth, login.clone())); + let dav_unsecure_server = config + .dav_unsecure + .map(|dav_config| dav::new_unsecure(dav_config, login.clone())); Ok(Self { lmtp_server, imap_unsecure_server, imap_server, + dav_unsecure_server, auth_server, pid_file: config.pid, }) @@ -112,6 +119,12 @@ impl Server { None => Ok(()), Some(a) => a.run(exit_signal.clone()).await, } + }, + async { + match self.dav_unsecure_server { + None => Ok(()), + Some(s) => s.run(exit_signal.clone()).await, + } } )?; -- cgit v1.2.3 From ea32a813a7847732e33f42c40a49c5136562b0d4 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Tue, 27 Feb 2024 00:12:01 +0100 Subject: basic router, define URI pattern --- src/dav/mod.rs | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/src/dav/mod.rs b/src/dav/mod.rs index 36d154a..d3347f3 100644 --- a/src/dav/mod.rs +++ b/src/dav/mod.rs @@ -67,8 +67,13 @@ impl Server { } async fn router(req: Request) -> Result>> { - let url_exploded: Vec<_> = req.uri().path().split(",").collect(); - match url_exploded { + let path_segments: Vec<_> = req.uri().path().split("/").filter(|s| *s != "").collect(); + match 
path_segments.as_slice() { + [] => tracing::info!("root"), + [ user ] => tracing::info!(user=user, "user home"), + [ user, coltype ] => tracing::info!(user=user, cat=coltype, "user cat of coll"), + [ user, coltype, colname ] => tracing::info!(user=user, cat=coltype, name=colname, "user coll"), + [ user, coltype, colname, member ] => tracing::info!(user=user, cat=coltype, name=colname, obj=member, "accessing file"), _ => unimplemented!(), } Ok(Response::new(Full::new(Bytes::from("Hello World!")))) -- cgit v1.2.3 From 9a58a4e932156e0207380bb3b0a93a59864b0e1c Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Tue, 27 Feb 2024 01:05:51 +0100 Subject: WIP login --- src/dav/mod.rs | 52 +++++++++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 49 insertions(+), 3 deletions(-) diff --git a/src/dav/mod.rs b/src/dav/mod.rs index d3347f3..709abd5 100644 --- a/src/dav/mod.rs +++ b/src/dav/mod.rs @@ -1,6 +1,7 @@ use std::net::SocketAddr; -use anyhow::Result; +use anyhow::{anyhow, Result}; +use base64::Engine; use hyper::service::service_fn; use hyper::{Request, Response, body::Bytes}; use hyper::server::conn::http1 as http; @@ -46,11 +47,17 @@ impl Server { }; tracing::info!("DAV: accepted connection from {}", remote_addr); let stream = TokioIo::new(socket); - let conn = tokio::spawn(async { + let login = self.login_provider.clone(); + let conn = tokio::spawn(async move { //@FIXME should create a generic "public web" server on which "routers" could be //abitrarily bound //@FIXME replace with a handler supporting http2 and TLS - match http::Builder::new().serve_connection(stream, service_fn(router)).await { + match http::Builder::new().serve_connection(stream, service_fn(|req: Request| { + let login = login.clone(); + async move { + auth(login, req).await + } + })).await { Err(e) => tracing::warn!(err=?e, "connection failed"), Ok(()) => tracing::trace!("connection terminated with success"), } @@ -66,6 +73,45 @@ impl Server { } } +async fn auth( + login: ArcLoginProvider, + req: Request, +) -> Result>> { + + let auth_val = match req.headers().get("Authorization") { + Some(hv) => hv.to_str()?, + None => return Ok(Response::builder() + .status(401) + .body(Full::new(Bytes::from("Missing Authorization field")))?), + }; + + let b64_creds_maybe_padded = match auth_val.split_once(" ") { + Some(("Basic", b64)) => b64, + _ => return Ok(Response::builder() + .status(400) + .body(Full::new(Bytes::from("Unsupported Authorization field")))?), + }; + + // base64urlencoded may have trailing equals, base64urlsafe has not + // theoretically authorization is padded but "be liberal in what you accept" + let b64_creds_clean = b64_creds_maybe_padded.trim_end_matches('='); + + // Decode base64 + let creds = base64::engine::general_purpose::STANDARD_NO_PAD.decode(b64_creds_clean)?; + let str_creds = std::str::from_utf8(&creds)?; + + // Split username and password + let (username, password) = str_creds + .split_once(':') + .ok_or(anyhow!("Missing colon in Authorization, can't split decoded value into a username/password pair"))?; + + // Call login provider + + // Call router with user + + unimplemented!(); +} + async fn router(req: Request) -> Result>> { let path_segments: Vec<_> = req.uri().path().split("/").filter(|s| *s != "").collect(); match path_segments.as_slice() { -- cgit v1.2.3 From 7f35e68bfe21c61f4da9e37f127dd7abb73291fa Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Tue, 27 Feb 2024 18:33:49 +0100 Subject: Refactor --- src/dav/mod.rs | 7 + src/imap/command/anonymous.rs | 2 +- 
src/imap/command/authenticated.rs | 3 +- src/imap/command/mod.rs | 2 +- src/imap/command/selected.rs | 2 +- src/imap/flow.rs | 2 +- src/mail/incoming.rs | 2 +- src/mail/mailbox.rs | 2 +- src/mail/mod.rs | 2 +- src/mail/namespace.rs | 209 ++++++++++++++++ src/mail/user.rs | 500 -------------------------------------- src/main.rs | 1 + src/user.rs | 313 ++++++++++++++++++++++++ 13 files changed, 539 insertions(+), 508 deletions(-) create mode 100644 src/mail/namespace.rs delete mode 100644 src/mail/user.rs create mode 100644 src/user.rs diff --git a/src/dav/mod.rs b/src/dav/mod.rs index 709abd5..ac25f2d 100644 --- a/src/dav/mod.rs +++ b/src/dav/mod.rs @@ -106,6 +106,13 @@ async fn auth( .ok_or(anyhow!("Missing colon in Authorization, can't split decoded value into a username/password pair"))?; // Call login provider + let creds = match login.login(username, password).await { + Ok(c) => c, + Err(e) => return Ok(Response::builder() + .status(401) + .body(Full::new(Bytes::from("Wrong credentials")))?), + }; + // Call router with user diff --git a/src/imap/command/anonymous.rs b/src/imap/command/anonymous.rs index 0582b06..811d1e4 100644 --- a/src/imap/command/anonymous.rs +++ b/src/imap/command/anonymous.rs @@ -9,7 +9,7 @@ use crate::imap::command::anystate; use crate::imap::flow; use crate::imap::response::Response; use crate::login::ArcLoginProvider; -use crate::mail::user::User; +use crate::user::User; //--- dispatching diff --git a/src/imap/command/authenticated.rs b/src/imap/command/authenticated.rs index eb8833d..3d332ec 100644 --- a/src/imap/command/authenticated.rs +++ b/src/imap/command/authenticated.rs @@ -22,8 +22,9 @@ use crate::imap::response::Response; use crate::imap::Body; use crate::mail::uidindex::*; -use crate::mail::user::{User, MAILBOX_HIERARCHY_DELIMITER as MBX_HIER_DELIM_RAW}; +use crate::user::User; use crate::mail::IMF; +use crate::mail::namespace::MAILBOX_HIERARCHY_DELIMITER as MBX_HIER_DELIM_RAW; pub struct AuthenticatedContext<'a> { pub req: &'a Command<'static>, diff --git a/src/imap/command/mod.rs b/src/imap/command/mod.rs index 073040e..f201eb6 100644 --- a/src/imap/command/mod.rs +++ b/src/imap/command/mod.rs @@ -3,7 +3,7 @@ pub mod anystate; pub mod authenticated; pub mod selected; -use crate::mail::user::INBOX; +use crate::mail::namespace::INBOX; use imap_codec::imap_types::mailbox::Mailbox as MailboxCodec; /// Convert an IMAP mailbox name/identifier representation diff --git a/src/imap/command/selected.rs b/src/imap/command/selected.rs index d000905..eedfbd6 100644 --- a/src/imap/command/selected.rs +++ b/src/imap/command/selected.rs @@ -17,7 +17,7 @@ use crate::imap::command::{anystate, authenticated, MailboxName}; use crate::imap::flow; use crate::imap::mailbox_view::{MailboxView, UpdateParameters}; use crate::imap::response::Response; -use crate::mail::user::User; +use crate::user::User; pub struct SelectedContext<'a> { pub req: &'a Command<'static>, diff --git a/src/imap/flow.rs b/src/imap/flow.rs index e372d69..86eb12e 100644 --- a/src/imap/flow.rs +++ b/src/imap/flow.rs @@ -6,7 +6,7 @@ use imap_codec::imap_types::core::Tag; use tokio::sync::Notify; use crate::imap::mailbox_view::MailboxView; -use crate::mail::user::User; +use crate::user::User; #[derive(Debug)] pub enum Error { diff --git a/src/mail/incoming.rs b/src/mail/incoming.rs index 781d8dc..e2ad97d 100644 --- a/src/mail/incoming.rs +++ b/src/mail/incoming.rs @@ -16,7 +16,7 @@ use crate::login::{Credentials, PublicCredentials}; use crate::mail::mailbox::Mailbox; use 
crate::mail::uidindex::ImapUidvalidity; use crate::mail::unique_ident::*; -use crate::mail::user::User; +use crate::user::User; use crate::mail::IMF; use crate::storage; use crate::timestamp::now_msec; diff --git a/src/mail/mailbox.rs b/src/mail/mailbox.rs index 9190883..d1a5473 100644 --- a/src/mail/mailbox.rs +++ b/src/mail/mailbox.rs @@ -17,7 +17,7 @@ pub struct Mailbox { } impl Mailbox { - pub(super) async fn open( + pub(crate) async fn open( creds: &Credentials, id: UniqueIdent, min_uidvalidity: ImapUidvalidity, diff --git a/src/mail/mod.rs b/src/mail/mod.rs index 37578b8..03e85cd 100644 --- a/src/mail/mod.rs +++ b/src/mail/mod.rs @@ -6,7 +6,7 @@ pub mod query; pub mod snapshot; pub mod uidindex; pub mod unique_ident; -pub mod user; +pub mod namespace; // Internet Message Format // aka RFC 822 - RFC 2822 - RFC 5322 diff --git a/src/mail/namespace.rs b/src/mail/namespace.rs new file mode 100644 index 0000000..5e67173 --- /dev/null +++ b/src/mail/namespace.rs @@ -0,0 +1,209 @@ +use std::collections::{BTreeMap, HashMap}; +use std::sync::{Arc, Weak}; + +use anyhow::{anyhow, bail, Result}; +use lazy_static::lazy_static; +use serde::{Deserialize, Serialize}; +use tokio::sync::watch; + +use crate::cryptoblob::{open_deserialize, seal_serialize}; +use crate::login::Credentials; +use crate::mail::incoming::incoming_mail_watch_process; +use crate::mail::mailbox::Mailbox; +use crate::mail::uidindex::ImapUidvalidity; +use crate::mail::unique_ident::{gen_ident, UniqueIdent}; +use crate::storage; +use crate::timestamp::now_msec; + +pub const MAILBOX_HIERARCHY_DELIMITER: char = '.'; + +/// INBOX is the only mailbox that must always exist. +/// It is created automatically when the account is created. +/// IMAP allows the user to rename INBOX to something else, +/// in this case all messages from INBOX are moved to a mailbox +/// with the new name and the INBOX mailbox still exists and is empty. +/// In our implementation, we indeed move the underlying mailbox +/// to the new name (i.e. the new name has the same id as the previous +/// INBOX), and we create a new empty mailbox for INBOX. +pub const INBOX: &str = "INBOX"; + +/// For convenience purpose, we also create some special mailbox +/// that are described in RFC6154 SPECIAL-USE +/// @FIXME maybe it should be a configuration parameter +/// @FIXME maybe we should have a per-mailbox flag mechanism, either an enum or a string, so we +/// track which mailbox is used for what. +/// @FIXME Junk could be useful but we don't have any antispam solution yet so... +/// @FIXME IMAP supports virtual mailbox. \All or \Flagged are intended to be virtual mailboxes. +/// \Trash might be one, or not one. I don't know what we should do there. 
+pub const DRAFTS: &str = "Drafts"; +pub const ARCHIVE: &str = "Archive"; +pub const SENT: &str = "Sent"; +pub const TRASH: &str = "Trash"; + +pub(crate) const MAILBOX_LIST_PK: &str = "mailboxes"; +pub(crate) const MAILBOX_LIST_SK: &str = "list"; + +// ---- User's mailbox list (serialized in K2V) ---- + +#[derive(Serialize, Deserialize)] +pub(crate) struct MailboxList(BTreeMap); + +#[derive(Serialize, Deserialize, Clone, Copy, Debug)] +pub(crate) struct MailboxListEntry { + id_lww: (u64, Option), + uidvalidity: ImapUidvalidity, +} + +impl MailboxListEntry { + fn merge(&mut self, other: &Self) { + // Simple CRDT merge rule + if other.id_lww.0 > self.id_lww.0 + || (other.id_lww.0 == self.id_lww.0 && other.id_lww.1 > self.id_lww.1) + { + self.id_lww = other.id_lww; + } + self.uidvalidity = std::cmp::max(self.uidvalidity, other.uidvalidity); + } +} + +impl MailboxList { + pub(crate) fn new() -> Self { + Self(BTreeMap::new()) + } + + pub(crate) fn merge(&mut self, list2: Self) { + for (k, v) in list2.0.into_iter() { + if let Some(e) = self.0.get_mut(&k) { + e.merge(&v); + } else { + self.0.insert(k, v); + } + } + } + + pub(crate) fn existing_mailbox_names(&self) -> Vec { + self.0 + .iter() + .filter(|(_, v)| v.id_lww.1.is_some()) + .map(|(k, _)| k.to_string()) + .collect() + } + + pub(crate) fn has_mailbox(&self, name: &str) -> bool { + matches!( + self.0.get(name), + Some(MailboxListEntry { + id_lww: (_, Some(_)), + .. + }) + ) + } + + pub(crate) fn get_mailbox(&self, name: &str) -> Option<(ImapUidvalidity, Option)> { + self.0.get(name).map( + |MailboxListEntry { + id_lww: (_, mailbox_id), + uidvalidity, + }| (*uidvalidity, *mailbox_id), + ) + } + + /// Ensures mailbox `name` maps to id `id`. + /// If it already mapped to that, returns None. + /// If a change had to be done, returns Some(new uidvalidity in mailbox). + pub(crate) fn set_mailbox(&mut self, name: &str, id: Option) -> Option { + let (ts, id, uidvalidity) = match self.0.get_mut(name) { + None => { + if id.is_none() { + return None; + } else { + (now_msec(), id, ImapUidvalidity::new(1).unwrap()) + } + } + Some(MailboxListEntry { + id_lww, + uidvalidity, + }) => { + if id_lww.1 == id { + return None; + } else { + ( + std::cmp::max(id_lww.0 + 1, now_msec()), + id, + ImapUidvalidity::new(uidvalidity.get() + 1).unwrap(), + ) + } + } + }; + + self.0.insert( + name.into(), + MailboxListEntry { + id_lww: (ts, id), + uidvalidity, + }, + ); + Some(uidvalidity) + } + + pub(crate) fn update_uidvalidity(&mut self, name: &str, new_uidvalidity: ImapUidvalidity) { + match self.0.get_mut(name) { + None => { + self.0.insert( + name.into(), + MailboxListEntry { + id_lww: (now_msec(), None), + uidvalidity: new_uidvalidity, + }, + ); + } + Some(MailboxListEntry { uidvalidity, .. 
}) => { + *uidvalidity = std::cmp::max(*uidvalidity, new_uidvalidity); + } + } + } + + pub(crate) fn create_mailbox(&mut self, name: &str) -> CreatedMailbox { + if let Some(MailboxListEntry { + id_lww: (_, Some(id)), + uidvalidity, + }) = self.0.get(name) + { + return CreatedMailbox::Existed(*id, *uidvalidity); + } + + let id = gen_ident(); + let uidvalidity = self.set_mailbox(name, Some(id)).unwrap(); + CreatedMailbox::Created(id, uidvalidity) + } + + pub(crate) fn rename_mailbox(&mut self, old_name: &str, new_name: &str) -> Result<()> { + if let Some((uidvalidity, Some(mbid))) = self.get_mailbox(old_name) { + if self.has_mailbox(new_name) { + bail!( + "Cannot rename {} into {}: {} already exists", + old_name, + new_name, + new_name + ); + } + + self.set_mailbox(old_name, None); + self.set_mailbox(new_name, Some(mbid)); + self.update_uidvalidity(new_name, uidvalidity); + Ok(()) + } else { + bail!( + "Cannot rename {} into {}: {} doesn't exist", + old_name, + new_name, + old_name + ); + } + } +} + +pub(crate) enum CreatedMailbox { + Created(UniqueIdent, ImapUidvalidity), + Existed(UniqueIdent, ImapUidvalidity), +} diff --git a/src/mail/user.rs b/src/mail/user.rs deleted file mode 100644 index ad05615..0000000 --- a/src/mail/user.rs +++ /dev/null @@ -1,500 +0,0 @@ -use std::collections::{BTreeMap, HashMap}; -use std::sync::{Arc, Weak}; - -use anyhow::{anyhow, bail, Result}; -use lazy_static::lazy_static; -use serde::{Deserialize, Serialize}; -use tokio::sync::watch; - -use crate::cryptoblob::{open_deserialize, seal_serialize}; -use crate::login::Credentials; -use crate::mail::incoming::incoming_mail_watch_process; -use crate::mail::mailbox::Mailbox; -use crate::mail::uidindex::ImapUidvalidity; -use crate::mail::unique_ident::{gen_ident, UniqueIdent}; -use crate::storage; -use crate::timestamp::now_msec; - -pub const MAILBOX_HIERARCHY_DELIMITER: char = '.'; - -/// INBOX is the only mailbox that must always exist. -/// It is created automatically when the account is created. -/// IMAP allows the user to rename INBOX to something else, -/// in this case all messages from INBOX are moved to a mailbox -/// with the new name and the INBOX mailbox still exists and is empty. -/// In our implementation, we indeed move the underlying mailbox -/// to the new name (i.e. the new name has the same id as the previous -/// INBOX), and we create a new empty mailbox for INBOX. -pub const INBOX: &str = "INBOX"; - -/// For convenience purpose, we also create some special mailbox -/// that are described in RFC6154 SPECIAL-USE -/// @FIXME maybe it should be a configuration parameter -/// @FIXME maybe we should have a per-mailbox flag mechanism, either an enum or a string, so we -/// track which mailbox is used for what. -/// @FIXME Junk could be useful but we don't have any antispam solution yet so... -/// @FIXME IMAP supports virtual mailbox. \All or \Flagged are intended to be virtual mailboxes. -/// \Trash might be one, or not one. I don't know what we should do there. 
-pub const DRAFTS: &str = "Drafts"; -pub const ARCHIVE: &str = "Archive"; -pub const SENT: &str = "Sent"; -pub const TRASH: &str = "Trash"; - -const MAILBOX_LIST_PK: &str = "mailboxes"; -const MAILBOX_LIST_SK: &str = "list"; - -pub struct User { - pub username: String, - pub creds: Credentials, - pub storage: storage::Store, - pub mailboxes: std::sync::Mutex>>, - - tx_inbox_id: watch::Sender>, -} - -impl User { - pub async fn new(username: String, creds: Credentials) -> Result> { - let cache_key = (username.clone(), creds.storage.unique()); - - { - let cache = USER_CACHE.lock().unwrap(); - if let Some(u) = cache.get(&cache_key).and_then(Weak::upgrade) { - return Ok(u); - } - } - - let user = Self::open(username, creds).await?; - - let mut cache = USER_CACHE.lock().unwrap(); - if let Some(concurrent_user) = cache.get(&cache_key).and_then(Weak::upgrade) { - drop(user); - Ok(concurrent_user) - } else { - cache.insert(cache_key, Arc::downgrade(&user)); - Ok(user) - } - } - - /// Lists user's available mailboxes - pub async fn list_mailboxes(&self) -> Result> { - let (list, _ct) = self.load_mailbox_list().await?; - Ok(list.existing_mailbox_names()) - } - - /// Opens an existing mailbox given its IMAP name. - pub async fn open_mailbox(&self, name: &str) -> Result>> { - let (mut list, ct) = self.load_mailbox_list().await?; - - //@FIXME it could be a trace or an opentelemtry trace thing. - // Be careful to not leak sensible data - /* - eprintln!("List of mailboxes:"); - for ent in list.0.iter() { - eprintln!(" - {:?}", ent); - } - */ - - if let Some((uidvalidity, Some(mbid))) = list.get_mailbox(name) { - let mb = self.open_mailbox_by_id(mbid, uidvalidity).await?; - let mb_uidvalidity = mb.current_uid_index().await.uidvalidity; - if mb_uidvalidity > uidvalidity { - list.update_uidvalidity(name, mb_uidvalidity); - self.save_mailbox_list(&list, ct).await?; - } - Ok(Some(mb)) - } else { - Ok(None) - } - } - - /// Check whether mailbox exists - pub async fn has_mailbox(&self, name: &str) -> Result { - let (list, _ct) = self.load_mailbox_list().await?; - Ok(list.has_mailbox(name)) - } - - /// Creates a new mailbox in the user's IMAP namespace. - pub async fn create_mailbox(&self, name: &str) -> Result<()> { - if name.ends_with(MAILBOX_HIERARCHY_DELIMITER) { - bail!("Invalid mailbox name: {}", name); - } - - let (mut list, ct) = self.load_mailbox_list().await?; - match list.create_mailbox(name) { - CreatedMailbox::Created(_, _) => { - self.save_mailbox_list(&list, ct).await?; - Ok(()) - } - CreatedMailbox::Existed(_, _) => Err(anyhow!("Mailbox {} already exists", name)), - } - } - - /// Deletes a mailbox in the user's IMAP namespace. - pub async fn delete_mailbox(&self, name: &str) -> Result<()> { - if name == INBOX { - bail!("Cannot delete INBOX"); - } - - let (mut list, ct) = self.load_mailbox_list().await?; - if list.has_mailbox(name) { - //@TODO: actually delete mailbox contents - list.set_mailbox(name, None); - self.save_mailbox_list(&list, ct).await?; - Ok(()) - } else { - bail!("Mailbox {} does not exist", name); - } - } - - /// Renames a mailbox in the user's IMAP namespace. 
- pub async fn rename_mailbox(&self, old_name: &str, new_name: &str) -> Result<()> { - let (mut list, ct) = self.load_mailbox_list().await?; - - if old_name.ends_with(MAILBOX_HIERARCHY_DELIMITER) { - bail!("Invalid mailbox name: {}", old_name); - } - if new_name.ends_with(MAILBOX_HIERARCHY_DELIMITER) { - bail!("Invalid mailbox name: {}", new_name); - } - - if old_name == INBOX { - list.rename_mailbox(old_name, new_name)?; - if !self.ensure_inbox_exists(&mut list, &ct).await? { - self.save_mailbox_list(&list, ct).await?; - } - } else { - let names = list.existing_mailbox_names(); - - let old_name_w_delim = format!("{}{}", old_name, MAILBOX_HIERARCHY_DELIMITER); - let new_name_w_delim = format!("{}{}", new_name, MAILBOX_HIERARCHY_DELIMITER); - - if names - .iter() - .any(|x| x == new_name || x.starts_with(&new_name_w_delim)) - { - bail!("Mailbox {} already exists", new_name); - } - - for name in names.iter() { - if name == old_name { - list.rename_mailbox(name, new_name)?; - } else if let Some(tail) = name.strip_prefix(&old_name_w_delim) { - let nnew = format!("{}{}", new_name_w_delim, tail); - list.rename_mailbox(name, &nnew)?; - } - } - - self.save_mailbox_list(&list, ct).await?; - } - Ok(()) - } - - // ---- Internal user & mailbox management ---- - - async fn open(username: String, creds: Credentials) -> Result> { - let storage = creds.storage.build().await?; - - let (tx_inbox_id, rx_inbox_id) = watch::channel(None); - - let user = Arc::new(Self { - username, - creds: creds.clone(), - storage, - tx_inbox_id, - mailboxes: std::sync::Mutex::new(HashMap::new()), - }); - - // Ensure INBOX exists (done inside load_mailbox_list) - user.load_mailbox_list().await?; - - tokio::spawn(incoming_mail_watch_process( - Arc::downgrade(&user), - user.creds.clone(), - rx_inbox_id, - )); - - Ok(user) - } - - pub(super) async fn open_mailbox_by_id( - &self, - id: UniqueIdent, - min_uidvalidity: ImapUidvalidity, - ) -> Result> { - { - let cache = self.mailboxes.lock().unwrap(); - if let Some(mb) = cache.get(&id).and_then(Weak::upgrade) { - return Ok(mb); - } - } - - let mb = Arc::new(Mailbox::open(&self.creds, id, min_uidvalidity).await?); - - let mut cache = self.mailboxes.lock().unwrap(); - if let Some(concurrent_mb) = cache.get(&id).and_then(Weak::upgrade) { - drop(mb); // we worked for nothing but at least we didn't starve someone else - Ok(concurrent_mb) - } else { - cache.insert(id, Arc::downgrade(&mb)); - Ok(mb) - } - } - - // ---- Mailbox list management ---- - - async fn load_mailbox_list(&self) -> Result<(MailboxList, Option)> { - let row_ref = storage::RowRef::new(MAILBOX_LIST_PK, MAILBOX_LIST_SK); - let (mut list, row) = match self - .storage - .row_fetch(&storage::Selector::Single(&row_ref)) - .await - { - Err(storage::StorageError::NotFound) => (MailboxList::new(), None), - Err(e) => return Err(e.into()), - Ok(rv) => { - let mut list = MailboxList::new(); - let (row_ref, row_vals) = match rv.into_iter().next() { - Some(row_val) => (row_val.row_ref, row_val.value), - None => (row_ref, vec![]), - }; - - for v in row_vals { - if let storage::Alternative::Value(vbytes) = v { - let list2 = - open_deserialize::(&vbytes, &self.creds.keys.master)?; - list.merge(list2); - } - } - (list, Some(row_ref)) - } - }; - - let is_default_mbx_missing = [DRAFTS, ARCHIVE, SENT, TRASH] - .iter() - .map(|mbx| list.create_mailbox(mbx)) - .fold(false, |acc, r| { - acc || matches!(r, CreatedMailbox::Created(..)) - }); - let is_inbox_missing = self.ensure_inbox_exists(&mut list, &row).await?; - if is_default_mbx_missing 
&& !is_inbox_missing { - // It's the only case where we created some mailboxes and not saved them - // So we save them! - self.save_mailbox_list(&list, row.clone()).await?; - } - - Ok((list, row)) - } - - async fn ensure_inbox_exists( - &self, - list: &mut MailboxList, - ct: &Option, - ) -> Result { - // If INBOX doesn't exist, create a new mailbox with that name - // and save new mailbox list. - // Also, ensure that the mpsc::watch that keeps track of the - // inbox id is up-to-date. - let saved; - let (inbox_id, inbox_uidvalidity) = match list.create_mailbox(INBOX) { - CreatedMailbox::Created(i, v) => { - self.save_mailbox_list(list, ct.clone()).await?; - saved = true; - (i, v) - } - CreatedMailbox::Existed(i, v) => { - saved = false; - (i, v) - } - }; - let inbox_id = Some((inbox_id, inbox_uidvalidity)); - if *self.tx_inbox_id.borrow() != inbox_id { - self.tx_inbox_id.send(inbox_id).unwrap(); - } - - Ok(saved) - } - - async fn save_mailbox_list( - &self, - list: &MailboxList, - ct: Option, - ) -> Result<()> { - let list_blob = seal_serialize(list, &self.creds.keys.master)?; - let rref = ct.unwrap_or(storage::RowRef::new(MAILBOX_LIST_PK, MAILBOX_LIST_SK)); - let row_val = storage::RowVal::new(rref, list_blob); - self.storage.row_insert(vec![row_val]).await?; - Ok(()) - } -} - -// ---- User's mailbox list (serialized in K2V) ---- - -#[derive(Serialize, Deserialize)] -struct MailboxList(BTreeMap); - -#[derive(Serialize, Deserialize, Clone, Copy, Debug)] -struct MailboxListEntry { - id_lww: (u64, Option), - uidvalidity: ImapUidvalidity, -} - -impl MailboxListEntry { - fn merge(&mut self, other: &Self) { - // Simple CRDT merge rule - if other.id_lww.0 > self.id_lww.0 - || (other.id_lww.0 == self.id_lww.0 && other.id_lww.1 > self.id_lww.1) - { - self.id_lww = other.id_lww; - } - self.uidvalidity = std::cmp::max(self.uidvalidity, other.uidvalidity); - } -} - -impl MailboxList { - fn new() -> Self { - Self(BTreeMap::new()) - } - - fn merge(&mut self, list2: Self) { - for (k, v) in list2.0.into_iter() { - if let Some(e) = self.0.get_mut(&k) { - e.merge(&v); - } else { - self.0.insert(k, v); - } - } - } - - fn existing_mailbox_names(&self) -> Vec { - self.0 - .iter() - .filter(|(_, v)| v.id_lww.1.is_some()) - .map(|(k, _)| k.to_string()) - .collect() - } - - fn has_mailbox(&self, name: &str) -> bool { - matches!( - self.0.get(name), - Some(MailboxListEntry { - id_lww: (_, Some(_)), - .. - }) - ) - } - - fn get_mailbox(&self, name: &str) -> Option<(ImapUidvalidity, Option)> { - self.0.get(name).map( - |MailboxListEntry { - id_lww: (_, mailbox_id), - uidvalidity, - }| (*uidvalidity, *mailbox_id), - ) - } - - /// Ensures mailbox `name` maps to id `id`. - /// If it already mapped to that, returns None. - /// If a change had to be done, returns Some(new uidvalidity in mailbox). 
- fn set_mailbox(&mut self, name: &str, id: Option) -> Option { - let (ts, id, uidvalidity) = match self.0.get_mut(name) { - None => { - if id.is_none() { - return None; - } else { - (now_msec(), id, ImapUidvalidity::new(1).unwrap()) - } - } - Some(MailboxListEntry { - id_lww, - uidvalidity, - }) => { - if id_lww.1 == id { - return None; - } else { - ( - std::cmp::max(id_lww.0 + 1, now_msec()), - id, - ImapUidvalidity::new(uidvalidity.get() + 1).unwrap(), - ) - } - } - }; - - self.0.insert( - name.into(), - MailboxListEntry { - id_lww: (ts, id), - uidvalidity, - }, - ); - Some(uidvalidity) - } - - fn update_uidvalidity(&mut self, name: &str, new_uidvalidity: ImapUidvalidity) { - match self.0.get_mut(name) { - None => { - self.0.insert( - name.into(), - MailboxListEntry { - id_lww: (now_msec(), None), - uidvalidity: new_uidvalidity, - }, - ); - } - Some(MailboxListEntry { uidvalidity, .. }) => { - *uidvalidity = std::cmp::max(*uidvalidity, new_uidvalidity); - } - } - } - - fn create_mailbox(&mut self, name: &str) -> CreatedMailbox { - if let Some(MailboxListEntry { - id_lww: (_, Some(id)), - uidvalidity, - }) = self.0.get(name) - { - return CreatedMailbox::Existed(*id, *uidvalidity); - } - - let id = gen_ident(); - let uidvalidity = self.set_mailbox(name, Some(id)).unwrap(); - CreatedMailbox::Created(id, uidvalidity) - } - - fn rename_mailbox(&mut self, old_name: &str, new_name: &str) -> Result<()> { - if let Some((uidvalidity, Some(mbid))) = self.get_mailbox(old_name) { - if self.has_mailbox(new_name) { - bail!( - "Cannot rename {} into {}: {} already exists", - old_name, - new_name, - new_name - ); - } - - self.set_mailbox(old_name, None); - self.set_mailbox(new_name, Some(mbid)); - self.update_uidvalidity(new_name, uidvalidity); - Ok(()) - } else { - bail!( - "Cannot rename {} into {}: {} doesn't exist", - old_name, - new_name, - old_name - ); - } - } -} - -enum CreatedMailbox { - Created(UniqueIdent, ImapUidvalidity), - Existed(UniqueIdent, ImapUidvalidity), -} - -// ---- User cache ---- - -lazy_static! 
{ - static ref USER_CACHE: std::sync::Mutex>> = - std::sync::Mutex::new(HashMap::new()); -} diff --git a/src/main.rs b/src/main.rs index 6e3057a..5f5089f 100644 --- a/src/main.rs +++ b/src/main.rs @@ -13,6 +13,7 @@ mod mail; mod server; mod storage; mod timestamp; +mod user; use std::io::Read; use std::path::PathBuf; diff --git a/src/user.rs b/src/user.rs new file mode 100644 index 0000000..a38b9c1 --- /dev/null +++ b/src/user.rs @@ -0,0 +1,313 @@ +use std::collections::{BTreeMap, HashMap}; +use std::sync::{Arc, Weak}; + +use anyhow::{anyhow, bail, Result}; +use lazy_static::lazy_static; +use serde::{Deserialize, Serialize}; +use tokio::sync::watch; + +use crate::cryptoblob::{open_deserialize, seal_serialize}; +use crate::login::Credentials; +use crate::mail::incoming::incoming_mail_watch_process; +use crate::mail::mailbox::Mailbox; +use crate::mail::uidindex::ImapUidvalidity; +use crate::mail::unique_ident::{gen_ident, UniqueIdent}; +use crate::storage; +use crate::timestamp::now_msec; + +use crate::mail::namespace::{MAILBOX_HIERARCHY_DELIMITER, INBOX, DRAFTS, ARCHIVE, SENT, TRASH, MAILBOX_LIST_PK, MAILBOX_LIST_SK,MailboxList,CreatedMailbox}; + +//@FIXME User should be totally rewriten +//to extract the local mailbox list +//to the mail/namespace.rs file (and mailbox list should be reworded as mail namespace) + +pub struct User { + pub username: String, + pub creds: Credentials, + pub storage: storage::Store, + pub mailboxes: std::sync::Mutex>>, + + tx_inbox_id: watch::Sender>, +} + +impl User { + pub async fn new(username: String, creds: Credentials) -> Result> { + let cache_key = (username.clone(), creds.storage.unique()); + + { + let cache = USER_CACHE.lock().unwrap(); + if let Some(u) = cache.get(&cache_key).and_then(Weak::upgrade) { + return Ok(u); + } + } + + let user = Self::open(username, creds).await?; + + let mut cache = USER_CACHE.lock().unwrap(); + if let Some(concurrent_user) = cache.get(&cache_key).and_then(Weak::upgrade) { + drop(user); + Ok(concurrent_user) + } else { + cache.insert(cache_key, Arc::downgrade(&user)); + Ok(user) + } + } + + /// Lists user's available mailboxes + pub async fn list_mailboxes(&self) -> Result> { + let (list, _ct) = self.load_mailbox_list().await?; + Ok(list.existing_mailbox_names()) + } + + /// Opens an existing mailbox given its IMAP name. + pub async fn open_mailbox(&self, name: &str) -> Result>> { + let (mut list, ct) = self.load_mailbox_list().await?; + + //@FIXME it could be a trace or an opentelemtry trace thing. + // Be careful to not leak sensible data + /* + eprintln!("List of mailboxes:"); + for ent in list.0.iter() { + eprintln!(" - {:?}", ent); + } + */ + + if let Some((uidvalidity, Some(mbid))) = list.get_mailbox(name) { + let mb = self.open_mailbox_by_id(mbid, uidvalidity).await?; + let mb_uidvalidity = mb.current_uid_index().await.uidvalidity; + if mb_uidvalidity > uidvalidity { + list.update_uidvalidity(name, mb_uidvalidity); + self.save_mailbox_list(&list, ct).await?; + } + Ok(Some(mb)) + } else { + Ok(None) + } + } + + /// Check whether mailbox exists + pub async fn has_mailbox(&self, name: &str) -> Result { + let (list, _ct) = self.load_mailbox_list().await?; + Ok(list.has_mailbox(name)) + } + + /// Creates a new mailbox in the user's IMAP namespace. 
+ pub async fn create_mailbox(&self, name: &str) -> Result<()> { + if name.ends_with(MAILBOX_HIERARCHY_DELIMITER) { + bail!("Invalid mailbox name: {}", name); + } + + let (mut list, ct) = self.load_mailbox_list().await?; + match list.create_mailbox(name) { + CreatedMailbox::Created(_, _) => { + self.save_mailbox_list(&list, ct).await?; + Ok(()) + } + CreatedMailbox::Existed(_, _) => Err(anyhow!("Mailbox {} already exists", name)), + } + } + + /// Deletes a mailbox in the user's IMAP namespace. + pub async fn delete_mailbox(&self, name: &str) -> Result<()> { + if name == INBOX { + bail!("Cannot delete INBOX"); + } + + let (mut list, ct) = self.load_mailbox_list().await?; + if list.has_mailbox(name) { + //@TODO: actually delete mailbox contents + list.set_mailbox(name, None); + self.save_mailbox_list(&list, ct).await?; + Ok(()) + } else { + bail!("Mailbox {} does not exist", name); + } + } + + /// Renames a mailbox in the user's IMAP namespace. + pub async fn rename_mailbox(&self, old_name: &str, new_name: &str) -> Result<()> { + let (mut list, ct) = self.load_mailbox_list().await?; + + if old_name.ends_with(MAILBOX_HIERARCHY_DELIMITER) { + bail!("Invalid mailbox name: {}", old_name); + } + if new_name.ends_with(MAILBOX_HIERARCHY_DELIMITER) { + bail!("Invalid mailbox name: {}", new_name); + } + + if old_name == INBOX { + list.rename_mailbox(old_name, new_name)?; + if !self.ensure_inbox_exists(&mut list, &ct).await? { + self.save_mailbox_list(&list, ct).await?; + } + } else { + let names = list.existing_mailbox_names(); + + let old_name_w_delim = format!("{}{}", old_name, MAILBOX_HIERARCHY_DELIMITER); + let new_name_w_delim = format!("{}{}", new_name, MAILBOX_HIERARCHY_DELIMITER); + + if names + .iter() + .any(|x| x == new_name || x.starts_with(&new_name_w_delim)) + { + bail!("Mailbox {} already exists", new_name); + } + + for name in names.iter() { + if name == old_name { + list.rename_mailbox(name, new_name)?; + } else if let Some(tail) = name.strip_prefix(&old_name_w_delim) { + let nnew = format!("{}{}", new_name_w_delim, tail); + list.rename_mailbox(name, &nnew)?; + } + } + + self.save_mailbox_list(&list, ct).await?; + } + Ok(()) + } + + // ---- Internal user & mailbox management ---- + + async fn open(username: String, creds: Credentials) -> Result> { + let storage = creds.storage.build().await?; + + let (tx_inbox_id, rx_inbox_id) = watch::channel(None); + + let user = Arc::new(Self { + username, + creds: creds.clone(), + storage, + tx_inbox_id, + mailboxes: std::sync::Mutex::new(HashMap::new()), + }); + + // Ensure INBOX exists (done inside load_mailbox_list) + user.load_mailbox_list().await?; + + tokio::spawn(incoming_mail_watch_process( + Arc::downgrade(&user), + user.creds.clone(), + rx_inbox_id, + )); + + Ok(user) + } + + pub(super) async fn open_mailbox_by_id( + &self, + id: UniqueIdent, + min_uidvalidity: ImapUidvalidity, + ) -> Result> { + { + let cache = self.mailboxes.lock().unwrap(); + if let Some(mb) = cache.get(&id).and_then(Weak::upgrade) { + return Ok(mb); + } + } + + let mb = Arc::new(Mailbox::open(&self.creds, id, min_uidvalidity).await?); + + let mut cache = self.mailboxes.lock().unwrap(); + if let Some(concurrent_mb) = cache.get(&id).and_then(Weak::upgrade) { + drop(mb); // we worked for nothing but at least we didn't starve someone else + Ok(concurrent_mb) + } else { + cache.insert(id, Arc::downgrade(&mb)); + Ok(mb) + } + } + + // ---- Mailbox list management ---- + + async fn load_mailbox_list(&self) -> Result<(MailboxList, Option)> { + let row_ref = 
storage::RowRef::new(MAILBOX_LIST_PK, MAILBOX_LIST_SK); + let (mut list, row) = match self + .storage + .row_fetch(&storage::Selector::Single(&row_ref)) + .await + { + Err(storage::StorageError::NotFound) => (MailboxList::new(), None), + Err(e) => return Err(e.into()), + Ok(rv) => { + let mut list = MailboxList::new(); + let (row_ref, row_vals) = match rv.into_iter().next() { + Some(row_val) => (row_val.row_ref, row_val.value), + None => (row_ref, vec![]), + }; + + for v in row_vals { + if let storage::Alternative::Value(vbytes) = v { + let list2 = + open_deserialize::(&vbytes, &self.creds.keys.master)?; + list.merge(list2); + } + } + (list, Some(row_ref)) + } + }; + + let is_default_mbx_missing = [DRAFTS, ARCHIVE, SENT, TRASH] + .iter() + .map(|mbx| list.create_mailbox(mbx)) + .fold(false, |acc, r| { + acc || matches!(r, CreatedMailbox::Created(..)) + }); + let is_inbox_missing = self.ensure_inbox_exists(&mut list, &row).await?; + if is_default_mbx_missing && !is_inbox_missing { + // It's the only case where we created some mailboxes and not saved them + // So we save them! + self.save_mailbox_list(&list, row.clone()).await?; + } + + Ok((list, row)) + } + + async fn ensure_inbox_exists( + &self, + list: &mut MailboxList, + ct: &Option, + ) -> Result { + // If INBOX doesn't exist, create a new mailbox with that name + // and save new mailbox list. + // Also, ensure that the mpsc::watch that keeps track of the + // inbox id is up-to-date. + let saved; + let (inbox_id, inbox_uidvalidity) = match list.create_mailbox(INBOX) { + CreatedMailbox::Created(i, v) => { + self.save_mailbox_list(list, ct.clone()).await?; + saved = true; + (i, v) + } + CreatedMailbox::Existed(i, v) => { + saved = false; + (i, v) + } + }; + let inbox_id = Some((inbox_id, inbox_uidvalidity)); + if *self.tx_inbox_id.borrow() != inbox_id { + self.tx_inbox_id.send(inbox_id).unwrap(); + } + + Ok(saved) + } + + async fn save_mailbox_list( + &self, + list: &MailboxList, + ct: Option, + ) -> Result<()> { + let list_blob = seal_serialize(list, &self.creds.keys.master)?; + let rref = ct.unwrap_or(storage::RowRef::new(MAILBOX_LIST_PK, MAILBOX_LIST_SK)); + let row_val = storage::RowVal::new(rref, list_blob); + self.storage.row_insert(vec![row_val]).await?; + Ok(()) + } +} + +// ---- User cache ---- + +lazy_static! 
{ + static ref USER_CACHE: std::sync::Mutex>> = + std::sync::Mutex::new(HashMap::new()); +} -- cgit v1.2.3 From 239df7bd141fe08f06e48cbb14c5f8ddecf8df8f Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Tue, 27 Feb 2024 19:30:51 +0100 Subject: Working on DAV router --- src/dav/mod.rs | 30 +++++++++++++++++++----------- 1 file changed, 19 insertions(+), 11 deletions(-) diff --git a/src/dav/mod.rs b/src/dav/mod.rs index ac25f2d..0fa67a3 100644 --- a/src/dav/mod.rs +++ b/src/dav/mod.rs @@ -13,6 +13,7 @@ use tokio::sync::watch; use crate::config::DavUnsecureConfig; use crate::login::ArcLoginProvider; +use crate::user::User; pub struct Server { bind_addr: SocketAddr, @@ -45,7 +46,7 @@ impl Server { _ = wait_conn_finished => continue, _ = must_exit.changed() => continue, }; - tracing::info!("DAV: accepted connection from {}", remote_addr); + tracing::info!("Accepted connection from {}", remote_addr); let stream = TokioIo::new(socket); let login = self.login_provider.clone(); let conn = tokio::spawn(async move { @@ -66,13 +67,14 @@ impl Server { } drop(tcp); - tracing::info!("DAV server shutting down, draining remaining connections..."); + tracing::info!("Server shutting down, draining remaining connections..."); while connections.next().await.is_some() {} Ok(()) } } +//@FIXME We should not support only BasicAuth async fn auth( login: ArcLoginProvider, req: Request, @@ -113,21 +115,27 @@ async fn auth( .body(Full::new(Bytes::from("Wrong credentials")))?), }; - + // Build a user + let user = User::new(username.into(), creds).await?; + // Call router with user - - unimplemented!(); + router(user, req).await } -async fn router(req: Request) -> Result>> { +async fn router(user: std::sync::Arc, req: Request) -> Result>> { let path_segments: Vec<_> = req.uri().path().split("/").filter(|s| *s != "").collect(); match path_segments.as_slice() { [] => tracing::info!("root"), - [ user ] => tracing::info!(user=user, "user home"), - [ user, coltype ] => tracing::info!(user=user, cat=coltype, "user cat of coll"), - [ user, coltype, colname ] => tracing::info!(user=user, cat=coltype, name=colname, "user coll"), - [ user, coltype, colname, member ] => tracing::info!(user=user, cat=coltype, name=colname, obj=member, "accessing file"), - _ => unimplemented!(), + [ username, ..] 
if *username != user.username => return Ok(Response::builder() + .status(403) + .body(Full::new(Bytes::from("Accessing other user ressources is not allowed")))?), + [ _ ] => tracing::info!(user=username, "user home"), + [ _, "calendar" ] => tracing::info!(user=username, cat=coltype, "user cat of coll"), + [ _, "calendar", colname ] => tracing::info!(user=username, cat=coltype, name=colname, "user coll"), + [ _, "calendar", colname, member ] => tracing::info!(user=username, cat=coltype, name=colname, obj=member, "accessing file"), + _ => return Ok(Response::builder() + .status(404) + .body(Full::new(Bytes::from("Resource not found")))?), } Ok(Response::new(Full::new(Bytes::from("Hello World!")))) } -- cgit v1.2.3 From c10eb33585bbe92f6cfd0f111c989cc8fda4666c Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Wed, 28 Feb 2024 10:20:28 +0100 Subject: WIP DAV types & encoder --- src/dav/encoder.rs | 16 +++++ src/dav/mod.rs | 15 ++-- src/dav/types.rs | 197 +++++++++++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 224 insertions(+), 4 deletions(-) create mode 100644 src/dav/encoder.rs create mode 100644 src/dav/types.rs diff --git a/src/dav/encoder.rs b/src/dav/encoder.rs new file mode 100644 index 0000000..9bc564a --- /dev/null +++ b/src/dav/encoder.rs @@ -0,0 +1,16 @@ + + +pub trait Encode { + fn write(&self, a: &mut u64) -> String; +} + + +#[cfg(test)] +mod tests { + // Note this useful idiom: importing names from outer (for mod tests) scope. + use super::*; + + #[test] + fn test_href() { + } +} diff --git a/src/dav/mod.rs b/src/dav/mod.rs index 0fa67a3..a542bbb 100644 --- a/src/dav/mod.rs +++ b/src/dav/mod.rs @@ -1,3 +1,6 @@ +mod types; +mod encoder; + use std::net::SocketAddr; use anyhow::{anyhow, Result}; @@ -129,13 +132,17 @@ async fn router(user: std::sync::Arc, req: Request [ username, ..] if *username != user.username => return Ok(Response::builder() .status(403) .body(Full::new(Bytes::from("Accessing other user ressources is not allowed")))?), - [ _ ] => tracing::info!(user=username, "user home"), - [ _, "calendar" ] => tracing::info!(user=username, cat=coltype, "user cat of coll"), - [ _, "calendar", colname ] => tracing::info!(user=username, cat=coltype, name=colname, "user coll"), - [ _, "calendar", colname, member ] => tracing::info!(user=username, cat=coltype, name=colname, obj=member, "accessing file"), + [ _ ] => tracing::info!("user home"), + [ _, "calendar" ] => tracing::info!("user calendars"), + [ _, "calendar", colname ] => tracing::info!(name=colname, "selected calendar"), + [ _, "calendar", colname, member ] => tracing::info!(name=colname, obj=member, "selected event"), _ => return Ok(Response::builder() .status(404) .body(Full::new(Bytes::from("Resource not found")))?), } Ok(Response::new(Full::new(Bytes::from("Hello World!")))) } + +async fn collections(user: std::sync::Arc, req: Request) -> Result>> { + unimplemented!(); +} diff --git a/src/dav/types.rs b/src/dav/types.rs new file mode 100644 index 0000000..b7f97f9 --- /dev/null +++ b/src/dav/types.rs @@ -0,0 +1,197 @@ +pub enum Error { + /// Name: lock-token-matches-request-uri + /// + /// Use with: 409 Conflict + /// + /// Purpose: (precondition) -- A request may include a Lock-Token header + /// to identify a lock for the UNLOCK method. However, if the + /// Request-URI does not fall within the scope of the lock identified + /// by the token, the server SHOULD use this error. 
The lock may have + /// a scope that does not include the Request-URI, or the lock could + /// have disappeared, or the token may be invalid. + LockTokenMatchesRequestUri, + + /// Name: lock-token-submitted (precondition) + /// + /// Use with: 423 Locked + /// + /// Purpose: The request could not succeed because a lock token should + /// have been submitted. This element, if present, MUST contain at + /// least one URL of a locked resource that prevented the request. In + /// cases of MOVE, COPY, and DELETE where collection locks are + /// involved, it can be difficult for the client to find out which + /// locked resource made the request fail -- but the server is only + /// responsible for returning one such locked resource. The server + /// MAY return every locked resource that prevented the request from + /// succeeding if it knows them all. + /// + /// + LockTokenSubmitted(Vec), + NoConflictingLock, + NoExternalEntities, + PreservedLiveProperties, + PropfindFiniteDepth, + Calendar(u64), +} + +/// 14.1. activelock XML Element +/// +/// Name: activelock +/// +/// Purpose: Describes a lock on a resource. +/// +pub struct ActiveLock { + lockscope: u64, + locktype: u64, + depth: Depth, + owner: Option, + timeout: Option, +} + +/// allprop XML Element +/// +/// Name: allprop +/// +/// Purpose: Specifies that all names and values of dead properties and +/// the live properties defined by this document existing on the +/// resource are to be returned. +/// +/// +pub struct AllProp{} + +/// collection XML Element +/// +/// Name: collection +/// +/// Purpose: Identifies the associated resource as a collection. The +/// DAV:resourcetype property of a collection resource MUST contain +/// this element. It is normally empty but extensions may add sub- +/// elements. +/// +/// +pub struct Collection{} + +/// depth XML Element +/// +/// Name: depth +/// +/// Purpose: Used for representing depth values in XML content (e.g., +/// in lock information). +/// +/// Value: "0" | "1" | "infinity" +/// +/// +pub enum Depth { + Zero, + One, + Infinity +} + +/// 14.6. exclusive XML Element +/// +/// Name: exclusive +/// +/// Purpose: Specifies an exclusive lock. +/// +/// +pub struct Exclusive {} + +pub struct Href(String); + +pub struct Status(String); + +pub struct ResponseDescription(String); + +pub struct Location(Href); + +/// 14.18. prop XML Element +/// +/// Name: prop +/// +/// Purpose: Contains properties related to a resource. +/// +/// Description: A generic container for properties defined on +/// resources. All elements inside a 'prop' XML element MUST define +/// properties related to the resource, although possible property +/// names are in no way limited to those property names defined in +/// this document or other standards. This element MUST NOT contain +/// text or mixed content. +/// +/// +pub struct Prop { + something: u64, +} + +/// propstat XML Element +/// +/// Name: propstat +/// +/// Purpose: Groups together a prop and status element that is +/// associated with a particular 'href' element. +/// +/// Description: The propstat XML element MUST contain one prop XML +/// element and one status XML element. The contents of the prop XML +/// element MUST only list the names of properties to which the result +/// in the status element applies. The optional precondition/ +/// postcondition element and 'responsedescription' text also apply to +/// the properties named in 'prop'. 
+/// +/// +pub struct PropStat { + prop: Prop, + status: Status, + error: Option, + responsedescription: Option, +} + +/// 14.24. response XML Element +/// +/// Name: response +/// +/// Purpose: Holds a single response describing the effect of a method +/// on resource and/or its properties. +/// +/// Description: The 'href' element contains an HTTP URL pointing to a +/// WebDAV resource when used in the 'response' container. A +/// particular 'href' value MUST NOT appear more than once as the +/// child of a 'response' XML element under a 'multistatus' XML +/// element. This requirement is necessary in order to keep +/// processing costs for a response to linear time. Essentially, this +/// prevents having to search in order to group together all the +/// responses by 'href'. There are, however, no requirements +/// regarding ordering based on 'href' values. The optional +/// precondition/postcondition element and 'responsedescription' text +/// can provide additional information about this resource relative to +/// the request or result. +/// +/// +pub struct Response { + href: Vec, + status: Status, + propstat: Vec, + error: Option, + responsedescription: Option, + location: Option, +} + +/// 14.16. multistatus XML Element +/// +/// Name: multistatus +/// +/// Purpose: Contains multiple response messages. +/// +/// Description: The 'responsedescription' element at the top level is +/// used to provide a general message describing the overarching +/// nature of the response. If this value is available, an +/// application may use it instead of presenting the individual +/// response descriptions contained within the responses. +/// +/// +pub struct Multistatus { + responses: Vec, + responsedescription: Option, +} + + -- cgit v1.2.3 From ffe4d071f6e7c63f585953741e0fa8cb4ad10488 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Wed, 28 Feb 2024 22:00:47 +0100 Subject: Dav XML types --- Cargo.lock | 2 + Cargo.toml | 3 +- src/dav/encoder.rs | 40 ++- src/dav/types.rs | 779 ++++++++++++++++++++++++++++++++++++++++++++++++----- 4 files changed, 759 insertions(+), 65 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c918f48..a4af312 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -46,6 +46,7 @@ dependencies = [ "eml-codec", "futures", "hex", + "http 1.0.0", "http-body-util", "hyper 1.2.0", "hyper-rustls 0.26.0", @@ -2701,6 +2702,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1004a344b30a54e2ee58d66a71b32d2db2feb0a31f9a2d302bf0536f15de2a33" dependencies = [ "memchr", + "tokio", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 218a0ed..e362c07 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -41,6 +41,7 @@ imap-codec = { version = "2.0.0", features = ["bounded-static", "ext_condstore_q imap-flow = { git = "https://github.com/duesee/imap-flow.git", branch = "main" } # http & web +http = "1.0" http-body-util = "0.1" hyper = "1.2" hyper-rustls = { version = "0.26", features = ["http2"] } @@ -53,7 +54,7 @@ toml = "0.5" base64 = "0.21" hex = "0.4" nom = "7.1" -quick-xml = "0.31" +quick-xml = { version = "0.31", features = ["async-tokio"] } zstd = { version = "0.9", default-features = false } # cryptography & security diff --git a/src/dav/encoder.rs b/src/dav/encoder.rs index 9bc564a..28f807a 100644 --- a/src/dav/encoder.rs +++ b/src/dav/encoder.rs @@ -1,16 +1,48 @@ +use std::io::Cursor; +use anyhow::Result; +use quick_xml::events::{Event, BytesEnd, BytesStart, BytesText}; +use quick_xml::writer::{ElementWriter, Writer}; +use quick_xml::name::PrefixDeclaration; 
+use tokio::io::AsyncWrite; +use super::types::*; + +//@FIXME a cleaner way to manager namespace would be great +//but at the same time, the quick-xml library is not cooperating. +//So instead of writing many cursed workarounds - I tried, I am just hardcoding the namespaces... pub trait Encode { - fn write(&self, a: &mut u64) -> String; + async fn write(&self, xml: &mut Writer) -> Result<()>; +} + +impl Encode for Href { + async fn write(&self, xml: &mut Writer) -> Result<()> { + xml.create_element("D:href") + .write_text_content_async(BytesText::new(&self.0)) + .await?; + + Ok(()) + } } #[cfg(test)] mod tests { - // Note this useful idiom: importing names from outer (for mod tests) scope. use super::*; + use tokio::io::AsyncWriteExt; + + /// To run only the unit tests and avoid the behavior ones: + /// cargo test --bin aerogramme + + #[tokio::test] + async fn test_href() { + let mut buffer = Vec::new(); + let mut tokio_buffer = tokio::io::BufWriter::new(&mut buffer); + let mut writer = Writer::new_with_indent(&mut tokio_buffer, b' ', 4); + + Href("/SOGo/dav/so/".into()).write(&mut writer).await.expect("xml serialization"); + tokio_buffer.flush().await.expect("tokio buffer flush"); - #[test] - fn test_href() { + assert_eq!(buffer.as_slice(), &b"/SOGo/dav/so/"[..]); } } diff --git a/src/dav/types.rs b/src/dav/types.rs index b7f97f9..1ff690f 100644 --- a/src/dav/types.rs +++ b/src/dav/types.rs @@ -1,38 +1,6 @@ -pub enum Error { - /// Name: lock-token-matches-request-uri - /// - /// Use with: 409 Conflict - /// - /// Purpose: (precondition) -- A request may include a Lock-Token header - /// to identify a lock for the UNLOCK method. However, if the - /// Request-URI does not fall within the scope of the lock identified - /// by the token, the server SHOULD use this error. The lock may have - /// a scope that does not include the Request-URI, or the lock could - /// have disappeared, or the token may be invalid. - LockTokenMatchesRequestUri, +#![allow(dead_code)] - /// Name: lock-token-submitted (precondition) - /// - /// Use with: 423 Locked - /// - /// Purpose: The request could not succeed because a lock token should - /// have been submitted. This element, if present, MUST contain at - /// least one URL of a locked resource that prevented the request. In - /// cases of MOVE, COPY, and DELETE where collection locks are - /// involved, it can be difficult for the client to find out which - /// locked resource made the request fail -- but the server is only - /// responsible for returning one such locked resource. The server - /// MAY return every locked resource that prevented the request from - /// succeeding if it knows them all. - /// - /// - LockTokenSubmitted(Vec), - NoConflictingLock, - NoExternalEntities, - PreservedLiveProperties, - PropfindFiniteDepth, - Calendar(u64), -} +use chrono::{DateTime,FixedOffset}; /// 14.1. 
activelock XML Element /// @@ -49,7 +17,7 @@ pub struct ActiveLock { timeout: Option, } -/// allprop XML Element +/// 14.2 allprop XML Element /// /// Name: allprop /// @@ -60,7 +28,7 @@ pub struct ActiveLock { /// pub struct AllProp{} -/// collection XML Element +/// 14.3 collection XML Element /// /// Name: collection /// @@ -72,7 +40,7 @@ pub struct AllProp{} /// pub struct Collection{} -/// depth XML Element +/// 14.4 depth XML Element /// /// Name: depth /// @@ -88,6 +56,109 @@ pub enum Depth { Infinity } +/// 14.5 error XML Element +/// +/// Name: error +/// +/// Purpose: Error responses, particularly 403 Forbidden and 409 +/// Conflict, sometimes need more information to indicate what went +/// wrong. In these cases, servers MAY return an XML response body +/// with a document element of 'error', containing child elements +/// identifying particular condition codes. +/// +/// Description: Contains at least one XML element, and MUST NOT +/// contain text or mixed content. Any element that is a child of the +/// 'error' element is considered to be a precondition or +/// postcondition code. Unrecognized elements MUST be ignored. +/// +/// +pub enum Error { + /// Name: lock-token-matches-request-uri + /// + /// Use with: 409 Conflict + /// + /// Purpose: (precondition) -- A request may include a Lock-Token header + /// to identify a lock for the UNLOCK method. However, if the + /// Request-URI does not fall within the scope of the lock identified + /// by the token, the server SHOULD use this error. The lock may have + /// a scope that does not include the Request-URI, or the lock could + /// have disappeared, or the token may be invalid. + LockTokenMatchesRequestUri, + + /// Name: lock-token-submitted (precondition) + /// + /// Use with: 423 Locked + /// + /// Purpose: The request could not succeed because a lock token should + /// have been submitted. This element, if present, MUST contain at + /// least one URL of a locked resource that prevented the request. In + /// cases of MOVE, COPY, and DELETE where collection locks are + /// involved, it can be difficult for the client to find out which + /// locked resource made the request fail -- but the server is only + /// responsible for returning one such locked resource. The server + /// MAY return every locked resource that prevented the request from + /// succeeding if it knows them all. + /// + /// + LockTokenSubmitted(Vec), + + /// Name: no-conflicting-lock (precondition) + /// + /// Use with: Typically 423 Locked + /// + /// Purpose: A LOCK request failed due the presence of an already + /// existing conflicting lock. Note that a lock can be in conflict + /// although the resource to which the request was directed is only + /// indirectly locked. In this case, the precondition code can be + /// used to inform the client about the resource that is the root of + /// the conflicting lock, avoiding a separate lookup of the + /// "lockdiscovery" property. + /// + /// + NoConflictingLock(Vec), + + /// Name: no-external-entities + /// + /// Use with: 403 Forbidden + /// + /// Purpose: (precondition) -- If the server rejects a client request + /// because the request body contains an external entity, the server + /// SHOULD use this error. + NoExternalEntities, + + /// Name: preserved-live-properties + /// + /// Use with: 409 Conflict + /// + /// Purpose: (postcondition) -- The server received an otherwise-valid + /// MOVE or COPY request, but cannot maintain the live properties with + /// the same behavior at the destination. 
It may be that the server + /// only supports some live properties in some parts of the + /// repository, or simply has an internal error. + PreservedLiveProperties, + + /// Name: propfind-finite-depth + /// + /// Use with: 403 Forbidden + /// + /// Purpose: (precondition) -- This server does not allow infinite-depth + /// PROPFIND requests on collections. + PropfindFiniteDepth, + + + /// Name: cannot-modify-protected-property + /// + /// Use with: 403 Forbidden + /// + /// Purpose: (precondition) -- The client attempted to set a protected + /// property in a PROPPATCH (such as DAV:getetag). See also + /// [RFC3253], Section 3.12. + CannotModifyProtectedProperty, + + /// Specific errors + Extensions(T), +} + /// 14.6. exclusive XML Element /// /// Name: exclusive @@ -97,14 +168,180 @@ pub enum Depth { /// pub struct Exclusive {} -pub struct Href(String); +/// 14.7. href XML Element +/// +/// Name: href +/// +/// Purpose: MUST contain a URI or a relative reference. +/// +/// Description: There may be limits on the value of 'href' depending +/// on the context of its use. Refer to the specification text where +/// 'href' is used to see what limitations apply in each case. +/// +/// Value: Simple-ref +/// +/// +pub struct Href(pub String); -pub struct Status(String); -pub struct ResponseDescription(String); +/// 14.8. include XML Element +/// +/// Name: include +/// +/// Purpose: Any child element represents the name of a property to be +/// included in the PROPFIND response. All elements inside an +/// 'include' XML element MUST define properties related to the +/// resource, although possible property names are in no way limited +/// to those property names defined in this document or other +/// standards. This element MUST NOT contain text or mixed content. +/// +/// +pub struct Include(Vec); +/// 14.9. location XML Element +/// +/// Name: location +/// +/// Purpose: HTTP defines the "Location" header (see [RFC2616], Section +/// 14.30) for use with some status codes (such as 201 and the 300 +/// series codes). When these codes are used inside a 'multistatus' +/// element, the 'location' element can be used to provide the +/// accompanying Location header value. +/// +/// Description: Contains a single href element with the same value +/// that would be used in a Location header. +/// +/// pub struct Location(Href); +/// 14.10. lockentry XML Element +/// +/// Name: lockentry +/// +/// Purpose: Defines the types of locks that can be used with the +/// resource. +/// +/// +pub struct LockEntry { + lokscope: LockScope, + locktype: LockType, +} + +/// 14.11. lockinfo XML Element +/// +/// Name: lockinfo +/// +/// Purpose: The 'lockinfo' XML element is used with a LOCK method to +/// specify the type of lock the client wishes to have created. +/// +/// +pub struct LockInfo { + lockscope: LockScope, + locktype: LockType, + owner: Option, +} + +/// 14.12. lockroot XML Element +/// +/// Name: lockroot +/// +/// Purpose: Contains the root URL of the lock, which is the URL +/// through which the resource was addressed in the LOCK request. +/// +/// Description: The href element contains the root of the lock. The +/// server SHOULD include this in all DAV:lockdiscovery property +/// values and the response to LOCK requests. +/// +/// +pub struct LockRoot(Href); + +/// 14.13. lockscope XML Element +/// +/// Name: lockscope +/// +/// Purpose: Specifies whether a lock is an exclusive lock, or a shared +/// lock. +/// +pub enum LockScope { + Exclusive, + Shared +} + +/// 14.14. 
locktoken XML Element +/// +/// Name: locktoken +/// +/// Purpose: The lock token associated with a lock. +/// +/// Description: The href contains a single lock token URI, which +/// refers to the lock. +/// +/// +pub struct LockToken(Href); + +/// 14.15. locktype XML Element +/// +/// Name: locktype +/// +/// Purpose: Specifies the access type of a lock. At present, this +/// specification only defines one lock type, the write lock. +/// +/// +pub enum LockType { + /// 14.30. write XML Element + /// + /// Name: write + /// + /// Purpose: Specifies a write lock. + /// + /// + /// + Write +} + +/// 14.16. multistatus XML Element +/// +/// Name: multistatus +/// +/// Purpose: Contains multiple response messages. +/// +/// Description: The 'responsedescription' element at the top level is +/// used to provide a general message describing the overarching +/// nature of the response. If this value is available, an +/// application may use it instead of presenting the individual +/// response descriptions contained within the responses. +/// +/// +pub struct Multistatus { + responses: Vec>, + responsedescription: Option, +} + +/// 14.17. owner XML Element +/// +/// Name: owner +/// +/// Purpose: Holds client-supplied information about the creator of a +/// lock. +/// +/// Description: Allows a client to provide information sufficient for +/// either directly contacting a principal (such as a telephone number +/// or Email URI), or for discovering the principal (such as the URL +/// of a homepage) who created a lock. The value provided MUST be +/// treated as a dead property in terms of XML Information Item +/// preservation. The server MUST NOT alter the value unless the +/// owner value provided by the client is empty. For a certain amount +/// of interoperability between different client implementations, if +/// clients have URI-formatted contact information for the lock +/// creator suitable for user display, then clients SHOULD put those +/// URIs in 'href' child elements of the 'owner' element. +/// +/// Extensibility: MAY be extended with child elements, mixed content, +/// text content or attributes. +/// +/// +pub struct Owner(String); + /// 14.18. prop XML Element /// /// Name: prop @@ -119,11 +356,52 @@ pub struct Location(Href); /// text or mixed content. /// /// -pub struct Prop { - something: u64, +pub struct Prop(Vec); + +/// 14.19. propertyupdate XML Element +/// +/// Name: propertyupdate +/// +/// Purpose: Contains a request to alter the properties on a resource. +/// +/// Description: This XML element is a container for the information +/// required to modify the properties on the resource. +/// +/// +pub struct PropertyUpdate(Vec); +pub enum PropertyUpdateItem { + Remove(Remove), + Set(Set), +} + +/// 14.20. propfind XML Element +/// +/// Name: propfind +/// +/// Purpose: Specifies the properties to be returned from a PROPFIND +/// method. Four special elements are specified for use with +/// 'propfind': 'prop', 'allprop', 'include', and 'propname'. If +/// 'prop' is used inside 'propfind', it MUST NOT contain property +/// values. +/// +/// +pub enum PropFind { + PropName(PropName), + AllProp(AllProp, Option), + Prop(Prop), } -/// propstat XML Element +/// 14.21. propname XML Element +/// +/// Name: propname +/// +/// Purpose: Specifies that only a list of property names on the +/// resource is to be returned. 
+/// +/// +pub struct PropName {} + +/// 14.22 propstat XML Element /// /// Name: propstat /// @@ -138,13 +416,28 @@ pub struct Prop { /// the properties named in 'prop'. /// /// -pub struct PropStat { +pub struct PropStat { prop: Prop, status: Status, - error: Option, + error: Option>, responsedescription: Option, } +/// 14.23. remove XML Element +/// +/// Name: remove +/// +/// Purpose: Lists the properties to be removed from a resource. +/// +/// Description: Remove instructs that the properties specified in prop +/// should be removed. Specifying the removal of a property that does +/// not exist is not an error. All the XML elements in a 'prop' XML +/// element inside of a 'remove' XML element MUST be empty, as only +/// the names of properties to be removed are required. +/// +/// +pub struct Remove(Prop); + /// 14.24. response XML Element /// /// Name: response @@ -167,31 +460,397 @@ pub struct PropStat { /// /// -pub struct Response { +pub struct Response { href: Vec, status: Status, - propstat: Vec, - error: Option, + propstat: Vec>, + error: Option>, responsedescription: Option, location: Option, } -/// 14.16. multistatus XML Element +/// 14.25. responsedescription XML Element /// -/// Name: multistatus +/// Name: responsedescription /// -/// Purpose: Contains multiple response messages. +/// Purpose: Contains information about a status response within a +/// Multi-Status. /// -/// Description: The 'responsedescription' element at the top level is -/// used to provide a general message describing the overarching -/// nature of the response. If this value is available, an -/// application may use it instead of presenting the individual -/// response descriptions contained within the responses. +/// Description: Provides information suitable to be presented to a +/// user. /// -/// -pub struct Multistatus { - responses: Vec, - responsedescription: Option, +/// +pub struct ResponseDescription(String); + +/// 14.26. set XML Element +/// +/// Name: set +/// +/// Purpose: Lists the property values to be set for a resource. +/// +/// Description: The 'set' element MUST contain only a 'prop' element. +/// The elements contained by the 'prop' element inside the 'set' +/// element MUST specify the name and value of properties that are set +/// on the resource identified by Request-URI. If a property already +/// exists, then its value is replaced. Language tagging information +/// appearing in the scope of the 'prop' element (in the "xml:lang" +/// attribute, if present) MUST be persistently stored along with the +/// property, and MUST be subsequently retrievable using PROPFIND. +/// +/// +pub struct Set(Prop); + +/// 14.27. shared XML Element +/// +/// Name: shared +/// +/// Purpose: Specifies a shared lock. +/// +/// +/// +pub struct Shared {} + + +/// 14.28. status XML Element +/// +/// Name: status +/// +/// Purpose: Holds a single HTTP status-line. +/// +/// Value: status-line (defined in Section 6.1 of [RFC2616]) +/// +/// +//@FIXME: Better typing is possible with an enum for example +pub struct Status(http::status::StatusCode); + +/// 14.29. timeout XML Element +/// +/// Name: timeout +/// +/// Purpose: The number of seconds remaining before a lock expires. +/// +/// Value: TimeType (defined in Section 10.7) +/// +/// +/// +pub struct Timeout(u64); + + +/// 15. DAV Properties +/// +/// For DAV properties, the name of the property is also the same as the +/// name of the XML element that contains its value. 
In the section +/// below, the final line of each section gives the element type +/// declaration using the format defined in [REC-XML]. The "Value" +/// field, where present, specifies further restrictions on the allowable +/// contents of the XML element using BNF (i.e., to further restrict the +/// values of a PCDATA element). +/// +/// A protected property is one that cannot be changed with a PROPPATCH +/// request. There may be other requests that would result in a change +/// to a protected property (as when a LOCK request affects the value of +/// DAV:lockdiscovery). Note that a given property could be protected on +/// one type of resource, but not protected on another type of resource. +/// +/// A computed property is one with a value defined in terms of a +/// computation (based on the content and other properties of that +/// resource, or even of some other resource). A computed property is +/// always a protected property. +/// +/// COPY and MOVE behavior refers to local COPY and MOVE operations. +/// +/// For properties defined based on HTTP GET response headers (DAV:get*), +/// the header value could include LWS as defined in [RFC2616], Section +/// 4.2. Server implementors SHOULD strip LWS from these values before +/// using as WebDAV property values. +pub enum PropertyRequest { + CreationDate, + DisplayName, + GetContentLanguage, + GetContentLength, + GetContentType, + GetEtag, + GetLastModified, + LockDiscovery, + ResourceType, +} +pub enum Property { + /// 15.1. creationdate Property + /// + /// Name: creationdate + /// + /// Purpose: Records the time and date the resource was created. + /// + /// Value: date-time (defined in [RFC3339], see the ABNF in Section + /// 5.6.) + /// + /// Protected: MAY be protected. Some servers allow DAV:creationdate + /// to be changed to reflect the time the document was created if that + /// is more meaningful to the user (rather than the time it was + /// uploaded). Thus, clients SHOULD NOT use this property in + /// synchronization logic (use DAV:getetag instead). + /// + /// COPY/MOVE behavior: This property value SHOULD be kept during a + /// MOVE operation, but is normally re-initialized when a resource is + /// created with a COPY. It should not be set in a COPY. + /// + /// Description: The DAV:creationdate property SHOULD be defined on all + /// DAV compliant resources. If present, it contains a timestamp of + /// the moment when the resource was created. Servers that are + /// incapable of persistently recording the creation date SHOULD + /// instead leave it undefined (i.e. report "Not Found"). + /// + /// + CreationDate(DateTime), + + /// 15.2. displayname Property + /// + /// Name: displayname + /// + /// Purpose: Provides a name for the resource that is suitable for + /// presentation to a user. + /// + /// Value: Any text. + /// + /// Protected: SHOULD NOT be protected. Note that servers implementing + /// [RFC2518] might have made this a protected property as this is a + /// new requirement. + /// + /// COPY/MOVE behavior: This property value SHOULD be preserved in COPY + /// and MOVE operations. + /// + /// Description: Contains a description of the resource that is + /// suitable for presentation to a user. This property is defined on + /// the resource, and hence SHOULD have the same value independent of + /// the Request-URI used to retrieve it (thus, computing this property + /// based on the Request-URI is deprecated). 
While generic clients + /// might display the property value to end users, client UI designers + /// must understand that the method for identifying resources is still + /// the URL. Changes to DAV:displayname do not issue moves or copies + /// to the server, but simply change a piece of meta-data on the + /// individual resource. Two resources can have the same DAV: + /// displayname value even within the same collection. + /// + /// + DisplayName(String), + + + /// 15.3. getcontentlanguage Property + /// + /// Name: getcontentlanguage + /// + /// Purpose: Contains the Content-Language header value (from Section + /// 14.12 of [RFC2616]) as it would be returned by a GET without + /// accept headers. + /// + /// Value: language-tag (language-tag is defined in Section 3.10 of + /// [RFC2616]) + /// + /// Protected: SHOULD NOT be protected, so that clients can reset the + /// language. Note that servers implementing [RFC2518] might have + /// made this a protected property as this is a new requirement. + /// + /// COPY/MOVE behavior: This property value SHOULD be preserved in COPY + /// and MOVE operations. + /// + /// Description: The DAV:getcontentlanguage property MUST be defined on + /// any DAV-compliant resource that returns the Content-Language + /// header on a GET. + /// + /// + GetContentLanguage(String), + + /// 15.4. getcontentlength Property + /// + /// Name: getcontentlength + /// + /// Purpose: Contains the Content-Length header returned by a GET + /// without accept headers. + /// + /// Value: See Section 14.13 of [RFC2616]. + /// + /// Protected: This property is computed, therefore protected. + /// + /// Description: The DAV:getcontentlength property MUST be defined on + /// any DAV-compliant resource that returns the Content-Length header + /// in response to a GET. + /// + /// COPY/MOVE behavior: This property value is dependent on the size of + /// the destination resource, not the value of the property on the + /// source resource. + /// + /// + GetContentLength(u64), + + /// 15.5. getcontenttype Property + /// + /// Name: getcontenttype + /// + /// Purpose: Contains the Content-Type header value (from Section 14.17 + /// of [RFC2616]) as it would be returned by a GET without accept + /// headers. + /// + /// Value: media-type (defined in Section 3.7 of [RFC2616]) + /// + /// Protected: Potentially protected if the server prefers to assign + /// content types on its own (see also discussion in Section 9.7.1). + /// + /// COPY/MOVE behavior: This property value SHOULD be preserved in COPY + /// and MOVE operations. + /// + /// Description: This property MUST be defined on any DAV-compliant + /// resource that returns the Content-Type header in response to a + /// GET. + /// + /// + GetContentType(String), + + /// 15.6. getetag Property + /// + /// Name: getetag + /// + /// Purpose: Contains the ETag header value (from Section 14.19 of + /// [RFC2616]) as it would be returned by a GET without accept + /// headers. + /// + /// Value: entity-tag (defined in Section 3.11 of [RFC2616]) + /// + /// Protected: MUST be protected because this value is created and + /// controlled by the server. + /// + /// COPY/MOVE behavior: This property value is dependent on the final + /// state of the destination resource, not the value of the property + /// on the source resource. Also note the considerations in + /// Section 8.8. + /// + /// Description: The getetag property MUST be defined on any DAV- + /// compliant resource that returns the Etag header. 
Refer to Section + /// 3.11 of RFC 2616 for a complete definition of the semantics of an + /// ETag, and to Section 8.6 for a discussion of ETags in WebDAV. + /// + /// + GetEtag(String), + + /// 15.7. getlastmodified Property + /// + /// Name: getlastmodified + /// + /// Purpose: Contains the Last-Modified header value (from Section + /// 14.29 of [RFC2616]) as it would be returned by a GET method + /// without accept headers. + /// + /// Value: rfc1123-date (defined in Section 3.3.1 of [RFC2616]) + /// + /// Protected: SHOULD be protected because some clients may rely on the + /// value for appropriate caching behavior, or on the value of the + /// Last-Modified header to which this property is linked. + /// + /// COPY/MOVE behavior: This property value is dependent on the last + /// modified date of the destination resource, not the value of the + /// property on the source resource. Note that some server + /// implementations use the file system date modified value for the + /// DAV:getlastmodified value, and this can be preserved in a MOVE + /// even when the HTTP Last-Modified value SHOULD change. Note that + /// since [RFC2616] requires clients to use ETags where provided, a + /// server implementing ETags can count on clients using a much better + /// mechanism than modification dates for offline synchronization or + /// cache control. Also note the considerations in Section 8.8. + /// + /// Description: The last-modified date on a resource SHOULD only + /// reflect changes in the body (the GET responses) of the resource. + /// A change in a property only SHOULD NOT cause the last-modified + /// date to change, because clients MAY rely on the last-modified date + /// to know when to overwrite the existing body. The DAV: + /// getlastmodified property MUST be defined on any DAV-compliant + /// resource that returns the Last-Modified header in response to a + /// GET. + /// + /// + GetLastModified(DateTime), + + /// 15.8. lockdiscovery Property + /// + /// Name: lockdiscovery + /// + /// Purpose: Describes the active locks on a resource + /// + /// Protected: MUST be protected. Clients change the list of locks + /// through LOCK and UNLOCK, not through PROPPATCH. + /// + /// COPY/MOVE behavior: The value of this property depends on the lock + /// state of the destination, not on the locks of the source resource. + /// Recall that locks are not moved in a MOVE operation. + /// + /// Description: Returns a listing of who has a lock, what type of lock + /// he has, the timeout type and the time remaining on the timeout, + /// and the associated lock token. Owner information MAY be omitted + /// if it is considered sensitive. If there are no locks, but the + /// server supports locks, the property will be present but contain + /// zero 'activelock' elements. If there are one or more locks, an + /// 'activelock' element appears for each lock on the resource. This + /// property is NOT lockable with respect to write locks (Section 7). + /// + /// + LockDiscovery(Vec), + + + /// 15.9. resourcetype Property + /// + /// Name: resourcetype + /// + /// Purpose: Specifies the nature of the resource. + /// + /// Protected: SHOULD be protected. Resource type is generally decided + /// through the operation creating the resource (MKCOL vs PUT), not by + /// PROPPATCH. + /// + /// COPY/MOVE behavior: Generally a COPY/MOVE of a resource results in + /// the same type of resource at the destination. + /// + /// Description: MUST be defined on all DAV-compliant resources. 
Each + /// child element identifies a specific type the resource belongs to, + /// such as 'collection', which is the only resource type defined by + /// this specification (see Section 14.3). If the element contains + /// the 'collection' child element plus additional unrecognized + /// elements, it should generally be treated as a collection. If the + /// element contains no recognized child elements, it should be + /// treated as a non-collection resource. The default value is empty. + /// This element MUST NOT contain text or mixed content. Any custom + /// child element is considered to be an identifier for a resource + /// type. + /// + /// Example: (fictional example to show extensibility) + /// + /// + /// + /// + /// + ResourceType(Collection), + + /// 15.10. supportedlock Property + /// + /// Name: supportedlock + /// + /// Purpose: To provide a listing of the lock capabilities supported by + /// the resource. + /// + /// Protected: MUST be protected. Servers, not clients, determine what + /// lock mechanisms are supported. + /// COPY/MOVE behavior: This property value is dependent on the kind of + /// locks supported at the destination, not on the value of the + /// property at the source resource. Servers attempting to COPY to a + /// destination should not attempt to set this property at the + /// destination. + /// + /// Description: Returns a listing of the combinations of scope and + /// access types that may be specified in a lock request on the + /// resource. Note that the actual contents are themselves controlled + /// by access controls, so a server is not required to provide + /// information the client is not authorized to see. This property is + /// NOT lockable with respect to write locks (Section 7). + /// + /// + SupportedLock(Vec), } -- cgit v1.2.3 From 9146537aaf9c8aef504dc3ed050992e97d907edd Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Thu, 29 Feb 2024 10:17:46 +0100 Subject: WIP XML encoder --- src/dav/encoder.rs | 59 +++++++++++++++++++++++++++++++++++++++++++++++++++--- src/dav/types.rs | 7 ++++--- 2 files changed, 60 insertions(+), 6 deletions(-) diff --git a/src/dav/encoder.rs b/src/dav/encoder.rs index 28f807a..552f183 100644 --- a/src/dav/encoder.rs +++ b/src/dav/encoder.rs @@ -1,6 +1,7 @@ use std::io::Cursor; -use anyhow::Result; +use futures::stream::{StreamExt, TryStreamExt}; +use quick_xml::Error; use quick_xml::events::{Event, BytesEnd, BytesStart, BytesText}; use quick_xml::writer::{ElementWriter, Writer}; use quick_xml::name::PrefixDeclaration; @@ -12,19 +13,52 @@ use super::types::*; //So instead of writing many cursed workarounds - I tried, I am just hardcoding the namespaces... 
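(Editor's sketch, not part of the patch.) The comment above is the crux of the namespace handling: quick-xml's writer has no first-class namespace support, so the encoder emits literal "D:"-prefixed element names and declares xmlns:D="DAV:" once on the root. A minimal standalone illustration of that workaround, assuming quick-xml 0.31 with the "async-tokio" feature as enabled in Cargo.toml; it is essentially what the tests below do, isolated to the namespace mechanics.

#[tokio::test]
async fn demo_hardcoded_namespace() {
    use quick_xml::events::{BytesStart, BytesText, Event};
    use quick_xml::writer::Writer;
    use tokio::io::AsyncWriteExt;

    let mut buffer = Vec::new();
    let mut tokio_buffer = tokio::io::BufWriter::new(&mut buffer);
    let mut xml = Writer::new_with_indent(&mut tokio_buffer, b' ', 4);

    // The root element carries the xmlns:D declaration exactly once...
    let mut root = BytesStart::new("D:multistatus");
    root.push_attribute(("xmlns:D", "DAV:"));
    let end = root.to_end();
    xml.write_event_async(Event::Start(root.clone())).await.expect("root start");

    // ...while descendants only repeat the hardcoded "D:" prefix.
    xml.create_element("D:href")
        .write_text_content_async(BytesText::new("/dav/calendar/"))
        .await
        .expect("href element");

    xml.write_event_async(Event::End(end)).await.expect("root end");
    tokio_buffer.flush().await.expect("flush");

    // Expected shape, given the 4-space indentation configured above:
    // <D:multistatus xmlns:D="DAV:">
    //     <D:href>/dav/calendar/</D:href>
    // </D:multistatus>
    assert!(std::str::from_utf8(&buffer)
        .unwrap()
        .starts_with("<D:multistatus xmlns:D=\"DAV:\">"));
}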
pub trait Encode { - async fn write(&self, xml: &mut Writer) -> Result<()>; + async fn write(&self, xml: &mut Writer) -> Result<(), Error>; } impl Encode for Href { - async fn write(&self, xml: &mut Writer) -> Result<()> { + async fn write(&self, xml: &mut Writer) -> Result<(), Error> { xml.create_element("D:href") .write_text_content_async(BytesText::new(&self.0)) .await?; + Ok(()) + } +} +impl Encode for Multistatus { + async fn write(&self, xml: &mut Writer) -> Result<(), Error> { + xml.create_element("D:multistatus") + .with_attribute(("xmlns:D", "DAV:")) + .write_inner_content_async::<_, _, quick_xml::Error>(|inner_xml| async move { + for response in self.responses.iter() { + response.write(inner_xml).await?; + } + + if let Some(description) = &self.responsedescription { + description.write(inner_xml).await?; + } + + Ok(inner_xml) + }) + .await?; Ok(()) } } +impl Encode for Response { + async fn write(&self, xml: &mut Writer) -> Result<(), Error> { + unimplemented!(); + } +} + +impl Encode for ResponseDescription { + async fn write(&self, xml: &mut Writer) -> Result<(), Error> { + xml.create_element("D:responsedescription") + .write_text_content_async(BytesText::new(&self.0)) + .await?; + Ok(()) + } +} #[cfg(test)] mod tests { @@ -45,4 +79,23 @@ mod tests { assert_eq!(buffer.as_slice(), &b"/SOGo/dav/so/"[..]); } + + + #[tokio::test] + async fn test_multistatus() { + let mut buffer = Vec::new(); + let mut tokio_buffer = tokio::io::BufWriter::new(&mut buffer); + let mut writer = Writer::new_with_indent(&mut tokio_buffer, b' ', 4); + + let xml: Multistatus = Multistatus { responses: vec![], responsedescription: Some(ResponseDescription("Hello world".into())) }; + xml.write(&mut writer).await.expect("xml serialization"); + tokio_buffer.flush().await.expect("tokio buffer flush"); + + let expected = r#" + Hello world +"#; + let got = std::str::from_utf8(buffer.as_slice()).unwrap(); + + assert_eq!(got, expected); + } } diff --git a/src/dav/types.rs b/src/dav/types.rs index 1ff690f..7bbea8e 100644 --- a/src/dav/types.rs +++ b/src/dav/types.rs @@ -313,8 +313,8 @@ pub enum LockType { /// /// pub struct Multistatus { - responses: Vec>, - responsedescription: Option, + pub responses: Vec>, + pub responsedescription: Option, } /// 14.17. owner XML Element @@ -480,7 +480,7 @@ pub struct Response { /// user. /// /// -pub struct ResponseDescription(String); +pub struct ResponseDescription(pub String); /// 14.26. set XML Element /// @@ -573,6 +573,7 @@ pub enum PropertyRequest { GetLastModified, LockDiscovery, ResourceType, + SupportedLock, } pub enum Property { /// 15.1. 
creationdate Property -- cgit v1.2.3 From 1e3737a590e2b329afc2b5531cf4ae67fb48a571 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Thu, 29 Feb 2024 20:40:40 +0100 Subject: At least it compiles --- src/dav/calencoder.rs | 100 ++++++++++++++++++++++++ src/dav/caltypes.rs | 41 ++++++++++ src/dav/encoder.rs | 208 ++++++++++++++++++++++++++++++++++++++++++-------- src/dav/mod.rs | 2 + src/dav/types.rs | 64 ++++++++++++---- src/main.rs | 1 + 6 files changed, 369 insertions(+), 47 deletions(-) create mode 100644 src/dav/calencoder.rs create mode 100644 src/dav/caltypes.rs diff --git a/src/dav/calencoder.rs b/src/dav/calencoder.rs new file mode 100644 index 0000000..918083d --- /dev/null +++ b/src/dav/calencoder.rs @@ -0,0 +1,100 @@ +use super::encoder::{QuickWritable, Context}; +use super::caltypes::*; +use super::types::Extension; + +use quick_xml::Error as QError; +use quick_xml::events::{Event, BytesEnd, BytesStart, BytesText}; +use quick_xml::writer::{ElementWriter, Writer}; +use quick_xml::name::PrefixDeclaration; +use tokio::io::AsyncWrite; + +/*pub trait CalWriter: DavWriter { + fn create_cal_element(&mut self, name: &str) -> ElementWriter; +} + +impl<'a, W: AsyncWrite+Unpin> DavWriter for Writer<'a, W, CalExtension> { + fn create_dav_element(&mut self, name: &str) -> ElementWriter { + self.create_ns_element(name, Namespace::Dav) + } + fn child(w: &'a mut QWriter) -> impl DavWriter { + Self::child(w) + } + async fn error(&mut self, err: &Violation) -> Result<(), QError> { + err.write(self).await + } +} + +impl<'a, W: AsyncWrite+Unpin> CalWriter for Writer<'a, W, CalExtension> { + fn create_cal_element(&mut self, name: &str) -> ElementWriter { + self.create_ns_element(name, Namespace::CalDav) + } +}*/ + +pub struct CalCtx { + root: bool +} +impl Context for CalCtx { + fn child(&self) -> Self { + Self { root: false } + } + fn create_dav_element(&self, name: &str) -> BytesStart { + let mut start = BytesStart::new(format!("D:{}", name)); + if self.root { + start.push_attribute(("xmlns:D", "DAV:")); + start.push_attribute(("xmlns:C", "urn:ietf:params:xml:ns:caldav")); + } + start + } + + async fn hook_error(&self, err: &Violation, xml: &mut Writer) -> Result<(), QError> { + err.write(xml, self.child()).await + } +} + +impl QuickWritable for Violation { + async fn write(&self, xml: &mut Writer, _ctx: CalCtx) -> Result<(), QError> { + match self { + Self::SupportedFilter => xml + .create_element("supported-filter") + .write_empty_async().await?, + }; + Ok(()) + } +} + +/* + + + + + + +*/ + +#[cfg(test)] +mod tests { + use super::*; + use crate::dav::types::{Error, Violation as DavViolation}; + use tokio::io::AsyncWriteExt; + + #[tokio::test] + async fn test_violation() { + let mut buffer = Vec::new(); + let mut tokio_buffer = tokio::io::BufWriter::new(&mut buffer); + let mut writer = Writer::new_with_indent(&mut tokio_buffer, b' ', 4); + + let res: Error = Error(vec![ + DavViolation::Extension(Violation::SupportedFilter), + ]); + + res.write(&mut writer, CalCtx{ root: true }).await.expect("xml serialization"); + tokio_buffer.flush().await.expect("tokio buffer flush"); + + let expected = r#" + +"#; + let got = std::str::from_utf8(buffer.as_slice()).unwrap(); + + assert_eq!(got, expected); + } +} diff --git a/src/dav/caltypes.rs b/src/dav/caltypes.rs new file mode 100644 index 0000000..ed8496a --- /dev/null +++ b/src/dav/caltypes.rs @@ -0,0 +1,41 @@ +use super::types::*; + +pub enum Namespace { + Dav, + CalDav, +} + +pub struct CalExtension {} +impl Extension for CalExtension { + type Error = 
Violation; + type Namespace = Namespace; + + fn namespaces() -> &'static [(&'static str, &'static str)] { + return &[ ("D", "DAV:"), ("C", "urn:ietf:params:xml:ns:caldav") ][..] + } + + fn short_ns(ns: Self::Namespace) -> &'static str { + match ns { + Namespace::Dav => "D", + Namespace::CalDav => "C", + } + } +} + +pub enum Violation { + /// (CALDAV:supported-filter): The CALDAV:comp-filter (see + /// Section 9.7.1), CALDAV:prop-filter (see Section 9.7.2), and + /// CALDAV:param-filter (see Section 9.7.3) XML elements used in the + /// CALDAV:filter XML element (see Section 9.7) in the REPORT request + /// only make reference to components, properties, and parameters for + /// which queries are supported by the server, i.e., if the CALDAV: + /// filter element attempts to reference an unsupported component, + /// property, or parameter, this precondition is violated. Servers + /// SHOULD report the CALDAV:comp-filter, CALDAV:prop-filter, or + /// CALDAV:param-filter for which it does not provide support. + /// + /// + SupportedFilter, +} diff --git a/src/dav/encoder.rs b/src/dav/encoder.rs index 552f183..ddef533 100644 --- a/src/dav/encoder.rs +++ b/src/dav/encoder.rs @@ -1,61 +1,205 @@ use std::io::Cursor; -use futures::stream::{StreamExt, TryStreamExt}; -use quick_xml::Error; +use quick_xml::Error as QError; use quick_xml::events::{Event, BytesEnd, BytesStart, BytesText}; use quick_xml::writer::{ElementWriter, Writer}; use quick_xml::name::PrefixDeclaration; use tokio::io::AsyncWrite; use super::types::*; -//@FIXME a cleaner way to manager namespace would be great -//but at the same time, the quick-xml library is not cooperating. -//So instead of writing many cursed workarounds - I tried, I am just hardcoding the namespaces... -pub trait Encode { - async fn write(&self, xml: &mut Writer) -> Result<(), Error>; +//-------------- TRAITS ---------------------- +/*pub trait DavWriter { + fn create_dav_element(&mut self, name: &str) -> ElementWriter; + fn child(w: &mut QWriter) -> impl DavWriter; + async fn error(&mut self, err: &E::Error) -> Result<(), QError>; +}*/ + +/// Basic encode trait to make a type encodable +pub trait QuickWritable> { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError>; +} + +pub trait Context { + fn child(&self) -> Self; + fn create_dav_element(&self, name: &str) -> BytesStart; + async fn hook_error(&self, err: &E::Error, xml: &mut Writer) -> Result<(), QError>; +} + +pub struct NoExtCtx { + root: bool +} +impl Context for NoExtCtx { + fn child(&self) -> Self { + Self { root: false } + } + fn create_dav_element(&self, name: &str) -> BytesStart { + let mut start = BytesStart::new(format!("D:{}", name)); + if self.root { + start.push_attribute(("xmlns:D", "DAV:")); + } + start + } + async fn hook_error(&self, err: &Disabled, xml: &mut Writer) -> Result<(), QError> { + unreachable!(); + } } -impl Encode for Href { - async fn write(&self, xml: &mut Writer) -> Result<(), Error> { - xml.create_element("D:href") + +//--------------------- ENCODING -------------------- + +// --- XML ROOTS +impl> QuickWritable for Multistatus { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + let start = ctx.create_dav_element("multistatus"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + for response in self.responses.iter() { + response.write(xml, ctx.child()).await?; + } + if let Some(description) = &self.responsedescription { + description.write(xml, ctx.child()).await?; + } + + 
xml.write_event_async(Event::End(end)).await?; + Ok(()) + } +} + + +// --- XML inner elements +impl> QuickWritable for Href { + async fn write(&self, xml: &mut Writer, _ctx: C) -> Result<(), QError> { + xml.create_element("href") .write_text_content_async(BytesText::new(&self.0)) .await?; Ok(()) } } -impl Encode for Multistatus { - async fn write(&self, xml: &mut Writer) -> Result<(), Error> { - xml.create_element("D:multistatus") - .with_attribute(("xmlns:D", "DAV:")) - .write_inner_content_async::<_, _, quick_xml::Error>(|inner_xml| async move { - for response in self.responses.iter() { - response.write(inner_xml).await?; +impl> QuickWritable for Response { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + xml.create_element("response") + .write_inner_content_async::<_, _, QError>(|inner_xml| async move { + self.href.write(inner_xml, ctx.child()).await?; + self.status_or_propstat.write(inner_xml, ctx.child()).await?; + if let Some(error) = &self.error { + error.write(inner_xml, ctx.child()).await?; } - - if let Some(description) = &self.responsedescription { - description.write(inner_xml).await?; + if let Some(responsedescription) = &self.responsedescription { + responsedescription.write(inner_xml, ctx.child()).await?; } - + if let Some(location) = &self.location { + location.write(inner_xml, ctx.child()).await?; + } + Ok(inner_xml) }) + .await?; + + Ok(()) + } +} + +impl> QuickWritable for StatusOrPropstat { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + match self { + Self::Status(status) => status.write(xml, ctx.child()).await, + Self::PropStat(propstat_list) => { + for propstat in propstat_list.iter() { + propstat.write(xml, ctx.child()).await?; + } + + Ok(()) + } + } + } +} + +impl> QuickWritable for Status { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + xml.create_element("status") + .write_text_content_async( + BytesText::new(&format!("HTTP/1.1 {} {}", self.0.as_str(), self.0.canonical_reason().unwrap_or("No reason"))) + ) .await?; Ok(()) } } -impl Encode for Response { - async fn write(&self, xml: &mut Writer) -> Result<(), Error> { +impl> QuickWritable for ResponseDescription { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + let start = ctx.create_dav_element("responsedescription"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + xml.write_event_async(Event::Text(BytesText::new(&self.0))).await?; + xml.write_event_async(Event::End(end)).await?; + + Ok(()) + } +} + +impl> QuickWritable for Location { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { unimplemented!(); } } -impl Encode for ResponseDescription { - async fn write(&self, xml: &mut Writer) -> Result<(), Error> { - xml.create_element("D:responsedescription") - .write_text_content_async(BytesText::new(&self.0)) - .await?; +impl> QuickWritable for PropStat { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +impl> QuickWritable for Error { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + xml.create_element("error") + .write_inner_content_async::<_, _, QError>(|inner_xml| async move { + for violation in &self.0 { + violation.write(inner_xml, ctx.child()).await?; + } + + Ok(inner_xml) + }) + .await?; + + Ok(()) + } +} + +impl> QuickWritable for Violation { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + match self { + 
Violation::LockTokenMatchesRequestUri => xml.create_element("lock-token-matches-request-uri").write_empty_async().await?, + Violation::LockTokenSubmitted(hrefs) => xml + .create_element("lock-token-submitted") + .write_inner_content_async::<_, _, QError>(|inner_xml| async move { + for href in hrefs { + href.write(inner_xml, ctx.child()).await?; + } + Ok(inner_xml) + } + ).await?, + Violation::NoConflictingLock(hrefs) => xml + .create_element("no-conflicting-lock") + .write_inner_content_async::<_, _, QError>(|inner_xml| async move { + for href in hrefs { + href.write(inner_xml, ctx.child()).await?; + } + Ok(inner_xml) + } + ).await?, + Violation::NoExternalEntities => xml.create_element("no-external-entities").write_empty_async().await?, + Violation::PreservedLiveProperties => xml.create_element("preserved-live-properties").write_empty_async().await?, + Violation::PropfindFiniteDepth => xml.create_element("propfind-finite-depth").write_empty_async().await?, + Violation::CannotModifyProtectedProperty => xml.create_element("cannot-modify-protected-property").write_empty_async().await?, + Violation::Extension(inner) => { + ctx.hook_error(inner, xml).await?; + xml + }, + }; Ok(()) } } @@ -74,7 +218,8 @@ mod tests { let mut tokio_buffer = tokio::io::BufWriter::new(&mut buffer); let mut writer = Writer::new_with_indent(&mut tokio_buffer, b' ', 4); - Href("/SOGo/dav/so/".into()).write(&mut writer).await.expect("xml serialization"); + let ctx = NoExtCtx{ root: true }; + Href("/SOGo/dav/so/".into()).write(&mut writer, ctx).await.expect("xml serialization"); tokio_buffer.flush().await.expect("tokio buffer flush"); assert_eq!(buffer.as_slice(), &b"/SOGo/dav/so/"[..]); @@ -87,8 +232,9 @@ mod tests { let mut tokio_buffer = tokio::io::BufWriter::new(&mut buffer); let mut writer = Writer::new_with_indent(&mut tokio_buffer, b' ', 4); - let xml: Multistatus = Multistatus { responses: vec![], responsedescription: Some(ResponseDescription("Hello world".into())) }; - xml.write(&mut writer).await.expect("xml serialization"); + let ctx = NoExtCtx{ root: true }; + let xml: Multistatus = Multistatus { responses: vec![], responsedescription: Some(ResponseDescription("Hello world".into())) }; + xml.write(&mut writer, ctx).await.expect("xml serialization"); tokio_buffer.flush().await.expect("tokio buffer flush"); let expected = r#" diff --git a/src/dav/mod.rs b/src/dav/mod.rs index a542bbb..98d6965 100644 --- a/src/dav/mod.rs +++ b/src/dav/mod.rs @@ -1,5 +1,7 @@ mod types; +mod caltypes; mod encoder; +mod calencoder; use std::net::SocketAddr; diff --git a/src/dav/types.rs b/src/dav/types.rs index 7bbea8e..69ddf52 100644 --- a/src/dav/types.rs +++ b/src/dav/types.rs @@ -2,6 +2,34 @@ use chrono::{DateTime,FixedOffset}; +/// Extension utilities +pub struct Disabled(()); +pub trait Extension { + type Error; + type Namespace; + + fn namespaces() -> &'static [(&'static str, &'static str)]; + fn short_ns(ns: Self::Namespace) -> &'static str; +} + +/// No extension +pub struct NoExtension {} +pub enum Namespace { + Dav +} +impl Extension for NoExtension { + type Error = Disabled; + type Namespace = Namespace; + + fn namespaces() -> &'static [(&'static str, &'static str)] { + return &[ ("D", "DAV:") ][..] + } + + fn short_ns(ns: Self::Namespace) -> &'static str { + "D" + } +} + /// 14.1. 
activelock XML Element /// /// Name: activelock @@ -10,11 +38,11 @@ use chrono::{DateTime,FixedOffset}; /// pub struct ActiveLock { - lockscope: u64, - locktype: u64, + lockscope: LockScope, + locktype: LockType, depth: Depth, - owner: Option, - timeout: Option, + owner: Option, + timeout: Option, } /// 14.2 allprop XML Element @@ -72,7 +100,8 @@ pub enum Depth { /// postcondition code. Unrecognized elements MUST be ignored. /// /// -pub enum Error { +pub struct Error(pub Vec>); +pub enum Violation { /// Name: lock-token-matches-request-uri /// /// Use with: 409 Conflict @@ -156,7 +185,7 @@ pub enum Error { CannotModifyProtectedProperty, /// Specific errors - Extensions(T), + Extension(T::Error), } /// 14.6. exclusive XML Element @@ -312,7 +341,7 @@ pub enum LockType { /// response descriptions contained within the responses. /// /// -pub struct Multistatus { +pub struct Multistatus { pub responses: Vec>, pub responsedescription: Option, } @@ -416,7 +445,7 @@ pub struct PropName {} /// the properties named in 'prop'. /// /// -pub struct PropStat { +pub struct PropStat { prop: Prop, status: Status, error: Option>, @@ -460,13 +489,16 @@ pub struct Remove(Prop); /// /// -pub struct Response { - href: Vec, - status: Status, - propstat: Vec>, - error: Option>, - responsedescription: Option, - location: Option, +pub enum StatusOrPropstat { + Status(Status), + PropStat(Vec>), +} +pub struct Response { + pub href: Href, // It's wrong according to the spec, but I don't understand why there is an href* + pub status_or_propstat: StatusOrPropstat, + pub error: Option>, + pub responsedescription: Option, + pub location: Option, } /// 14.25. responsedescription XML Element @@ -521,7 +553,7 @@ pub struct Shared {} /// /// //@FIXME: Better typing is possible with an enum for example -pub struct Status(http::status::StatusCode); +pub struct Status(pub http::status::StatusCode); /// 14.29. 
timeout XML Element /// diff --git a/src/main.rs b/src/main.rs index 5f5089f..4f874b9 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,3 +1,4 @@ +#![feature(type_alias_impl_trait)] #![feature(async_fn_in_trait)] mod auth; -- cgit v1.2.3 From fadadffc927015948d38824ea1d70810392182b9 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Thu, 29 Feb 2024 22:32:07 +0100 Subject: Fixed tests --- src/dav/calencoder.rs | 25 ++++++---- src/dav/encoder.rs | 128 ++++++++++++++++++++++++++------------------------ 2 files changed, 84 insertions(+), 69 deletions(-) diff --git a/src/dav/calencoder.rs b/src/dav/calencoder.rs index 918083d..fbd696d 100644 --- a/src/dav/calencoder.rs +++ b/src/dav/calencoder.rs @@ -38,25 +38,34 @@ impl Context for CalCtx { Self { root: false } } fn create_dav_element(&self, name: &str) -> BytesStart { - let mut start = BytesStart::new(format!("D:{}", name)); + self.create_ns_element("D", name) + } + + async fn hook_error(&self, err: &Violation, xml: &mut Writer) -> Result<(), QError> { + err.write(xml, self.child()).await + } +} +impl CalCtx { + fn create_ns_element(&self, ns: &str, name: &str) -> BytesStart { + let mut start = BytesStart::new(format!("{}:{}", ns, name)); if self.root { start.push_attribute(("xmlns:D", "DAV:")); start.push_attribute(("xmlns:C", "urn:ietf:params:xml:ns:caldav")); } start } - - async fn hook_error(&self, err: &Violation, xml: &mut Writer) -> Result<(), QError> { - err.write(xml, self.child()).await + fn create_cal_element(&self, name: &str) -> BytesStart { + self.create_ns_element("C", name) } } impl QuickWritable for Violation { - async fn write(&self, xml: &mut Writer, _ctx: CalCtx) -> Result<(), QError> { + async fn write(&self, xml: &mut Writer, ctx: CalCtx) -> Result<(), QError> { match self { - Self::SupportedFilter => xml - .create_element("supported-filter") - .write_empty_async().await?, + Self::SupportedFilter => { + let start = ctx.create_cal_element("supported-filter"); + xml.write_event_async(Event::Empty(start)).await?; + }, }; Ok(()) } diff --git a/src/dav/encoder.rs b/src/dav/encoder.rs index ddef533..e77e072 100644 --- a/src/dav/encoder.rs +++ b/src/dav/encoder.rs @@ -9,23 +9,21 @@ use super::types::*; //-------------- TRAITS ---------------------- -/*pub trait DavWriter { - fn create_dav_element(&mut self, name: &str) -> ElementWriter; - fn child(w: &mut QWriter) -> impl DavWriter; - async fn error(&mut self, err: &E::Error) -> Result<(), QError>; -}*/ /// Basic encode trait to make a type encodable pub trait QuickWritable> { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError>; } +/// Encoding context pub trait Context { fn child(&self) -> Self; fn create_dav_element(&self, name: &str) -> BytesStart; async fn hook_error(&self, err: &E::Error, xml: &mut Writer) -> Result<(), QError>; } +/// -------------- NoExtension Encoding Context +/// (Might be tied to the type maybe) pub struct NoExtCtx { root: bool } @@ -70,33 +68,36 @@ impl> QuickWritable for Multistatus { // --- XML inner elements impl> QuickWritable for Href { - async fn write(&self, xml: &mut Writer, _ctx: C) -> Result<(), QError> { - xml.create_element("href") - .write_text_content_async(BytesText::new(&self.0)) - .await?; + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + let start = ctx.create_dav_element("href"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + xml.write_event_async(Event::Text(BytesText::new(&self.0))).await?; + xml.write_event_async(Event::End(end)).await?; + 
Ok(()) } } impl> QuickWritable for Response { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - xml.create_element("response") - .write_inner_content_async::<_, _, QError>(|inner_xml| async move { - self.href.write(inner_xml, ctx.child()).await?; - self.status_or_propstat.write(inner_xml, ctx.child()).await?; - if let Some(error) = &self.error { - error.write(inner_xml, ctx.child()).await?; - } - if let Some(responsedescription) = &self.responsedescription { - responsedescription.write(inner_xml, ctx.child()).await?; - } - if let Some(location) = &self.location { - location.write(inner_xml, ctx.child()).await?; - } + let start = ctx.create_dav_element("href"); + let end = start.to_end(); - Ok(inner_xml) - }) - .await?; + xml.write_event_async(Event::Start(start.clone())).await?; + self.href.write(xml, ctx.child()).await?; + self.status_or_propstat.write(xml, ctx.child()).await?; + if let Some(error) = &self.error { + error.write(xml, ctx.child()).await?; + } + if let Some(responsedescription) = &self.responsedescription { + responsedescription.write(xml, ctx.child()).await?; + } + if let Some(location) = &self.location { + location.write(xml, ctx.child()).await?; + } + xml.write_event_async(Event::End(end)).await?; Ok(()) } @@ -119,11 +120,16 @@ impl> QuickWritable for StatusOrPropstat { impl> QuickWritable for Status { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - xml.create_element("status") - .write_text_content_async( - BytesText::new(&format!("HTTP/1.1 {} {}", self.0.as_str(), self.0.canonical_reason().unwrap_or("No reason"))) - ) - .await?; + let start = ctx.create_dav_element("status"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + + let txt = format!("HTTP/1.1 {} {}", self.0.as_str(), self.0.canonical_reason().unwrap_or("No reason")); + xml.write_event_async(Event::Text(BytesText::new(&txt))).await?; + + xml.write_event_async(Event::End(end)).await?; + Ok(()) } } @@ -155,15 +161,14 @@ impl> QuickWritable for PropStat { impl> QuickWritable for Error { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - xml.create_element("error") - .write_inner_content_async::<_, _, QError>(|inner_xml| async move { - for violation in &self.0 { - violation.write(inner_xml, ctx.child()).await?; - } + let start = ctx.create_dav_element("error"); + let end = start.to_end(); - Ok(inner_xml) - }) - .await?; + xml.write_event_async(Event::Start(start.clone())).await?; + for violation in &self.0 { + violation.write(xml, ctx.child()).await?; + } + xml.write_event_async(Event::End(end)).await?; Ok(()) } @@ -172,32 +177,33 @@ impl> QuickWritable for Error { impl> QuickWritable for Violation { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { match self { - Violation::LockTokenMatchesRequestUri => xml.create_element("lock-token-matches-request-uri").write_empty_async().await?, - Violation::LockTokenSubmitted(hrefs) => xml - .create_element("lock-token-submitted") - .write_inner_content_async::<_, _, QError>(|inner_xml| async move { - for href in hrefs { - href.write(inner_xml, ctx.child()).await?; - } - Ok(inner_xml) + Violation::LockTokenMatchesRequestUri => xml.write_event_async(Event::Empty(ctx.create_dav_element("lock-token-matches-request-uri"))).await?, + Violation::LockTokenSubmitted(hrefs) => { + let start = ctx.create_dav_element("lock-token-submitted"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + for href in 
hrefs { + href.write(xml, ctx.child()).await?; } - ).await?, - Violation::NoConflictingLock(hrefs) => xml - .create_element("no-conflicting-lock") - .write_inner_content_async::<_, _, QError>(|inner_xml| async move { - for href in hrefs { - href.write(inner_xml, ctx.child()).await?; - } - Ok(inner_xml) + xml.write_event_async(Event::End(end)).await?; + }, + Violation::NoConflictingLock(hrefs) => { + let start = ctx.create_dav_element("no-conflicting-lock"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + for href in hrefs { + href.write(xml, ctx.child()).await?; } - ).await?, - Violation::NoExternalEntities => xml.create_element("no-external-entities").write_empty_async().await?, - Violation::PreservedLiveProperties => xml.create_element("preserved-live-properties").write_empty_async().await?, - Violation::PropfindFiniteDepth => xml.create_element("propfind-finite-depth").write_empty_async().await?, - Violation::CannotModifyProtectedProperty => xml.create_element("cannot-modify-protected-property").write_empty_async().await?, + xml.write_event_async(Event::End(end)).await?; + }, + Violation::NoExternalEntities => xml.write_event_async(Event::Empty(ctx.create_dav_element("no-external-entities"))).await?, + Violation::PreservedLiveProperties => xml.write_event_async(Event::Empty(ctx.create_dav_element("preserved-live-properties"))).await?, + Violation::PropfindFiniteDepth => xml.write_event_async(Event::Empty(ctx.create_dav_element("propfind-finite-depth"))).await?, + Violation::CannotModifyProtectedProperty => xml.write_event_async(Event::Empty(ctx.create_dav_element("cannot-modify-protected-property"))).await?, Violation::Extension(inner) => { ctx.hook_error(inner, xml).await?; - xml }, }; Ok(()) @@ -218,7 +224,7 @@ mod tests { let mut tokio_buffer = tokio::io::BufWriter::new(&mut buffer); let mut writer = Writer::new_with_indent(&mut tokio_buffer, b' ', 4); - let ctx = NoExtCtx{ root: true }; + let ctx = NoExtCtx{ root: false }; Href("/SOGo/dav/so/".into()).write(&mut writer, ctx).await.expect("xml serialization"); tokio_buffer.flush().await.expect("tokio buffer flush"); -- cgit v1.2.3 From 33a02ff695c57fe88d394ad4d556bb390934ccd6 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Thu, 29 Feb 2024 23:02:02 +0100 Subject: WIP encoder --- src/dav/calencoder.rs | 22 -------------- src/dav/caltypes.rs | 24 +++++---------- src/dav/encoder.rs | 56 ++++++++++++++++++++++++++++++++-- src/dav/types.rs | 83 ++++++++++++++++++++++++--------------------------- 4 files changed, 101 insertions(+), 84 deletions(-) diff --git a/src/dav/calencoder.rs b/src/dav/calencoder.rs index fbd696d..aabdf36 100644 --- a/src/dav/calencoder.rs +++ b/src/dav/calencoder.rs @@ -8,28 +8,6 @@ use quick_xml::writer::{ElementWriter, Writer}; use quick_xml::name::PrefixDeclaration; use tokio::io::AsyncWrite; -/*pub trait CalWriter: DavWriter { - fn create_cal_element(&mut self, name: &str) -> ElementWriter; -} - -impl<'a, W: AsyncWrite+Unpin> DavWriter for Writer<'a, W, CalExtension> { - fn create_dav_element(&mut self, name: &str) -> ElementWriter { - self.create_ns_element(name, Namespace::Dav) - } - fn child(w: &'a mut QWriter) -> impl DavWriter { - Self::child(w) - } - async fn error(&mut self, err: &Violation) -> Result<(), QError> { - err.write(self).await - } -} - -impl<'a, W: AsyncWrite+Unpin> CalWriter for Writer<'a, W, CalExtension> { - fn create_cal_element(&mut self, name: &str) -> ElementWriter { - self.create_ns_element(name, Namespace::CalDav) - } -}*/ - pub struct 
CalCtx { root: bool } diff --git a/src/dav/caltypes.rs b/src/dav/caltypes.rs index ed8496a..d913f95 100644 --- a/src/dav/caltypes.rs +++ b/src/dav/caltypes.rs @@ -1,25 +1,11 @@ use super::types::*; -pub enum Namespace { - Dav, - CalDav, -} pub struct CalExtension {} impl Extension for CalExtension { type Error = Violation; - type Namespace = Namespace; - - fn namespaces() -> &'static [(&'static str, &'static str)] { - return &[ ("D", "DAV:"), ("C", "urn:ietf:params:xml:ns:caldav") ][..] - } - - fn short_ns(ns: Self::Namespace) -> &'static str { - match ns { - Namespace::Dav => "D", - Namespace::CalDav => "C", - } - } + type Property = Property; + type PropertyRequest = Property; //@FIXME } pub enum Violation { @@ -39,3 +25,9 @@ pub enum Violation { /// param-filter*)> SupportedFilter, } + + +pub enum Property { + CalendarDescription, + CalendarTimezone, +} diff --git a/src/dav/encoder.rs b/src/dav/encoder.rs index e77e072..2c9fdac 100644 --- a/src/dav/encoder.rs +++ b/src/dav/encoder.rs @@ -149,16 +149,68 @@ impl> QuickWritable for ResponseDescription { impl> QuickWritable for Location { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + let start = ctx.create_dav_element("location"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + self.0.write(xml, ctx.child()).await?; + xml.write_event_async(Event::End(end)).await?; + + Ok(()) } } impl> QuickWritable for PropStat { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + let start = ctx.create_dav_element("propstat"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + self.prop.write(xml, ctx.child()).await?; + self.status.write(xml, ctx.child()).await?; + if let Some(error) = &self.error { + error.write(xml, ctx.child()).await?; + } + if let Some(description) = &self.responsedescription { + description.write(xml, ctx.child()).await?; + } + xml.write_event_async(Event::End(end)).await?; + + Ok(()) } } +impl> QuickWritable for Prop { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + let start = ctx.create_dav_element("prop"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + for property in &self.0 { + property.write(xml, ctx.child()).await?; + } + xml.write_event_async(Event::End(end)).await?; + + Ok(()) + } +} + + +impl> QuickWritable for Property { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + use Property::*; + match self { + CreationDate(date) => unimplemented!(), + DisplayName(name) => unimplemented!(), + //@FIXME not finished + _ => unimplemented!(), + }; + Ok(()) + } +} + + + impl> QuickWritable for Error { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { let start = ctx.create_dav_element("error"); diff --git a/src/dav/types.rs b/src/dav/types.rs index 69ddf52..b0c0083 100644 --- a/src/dav/types.rs +++ b/src/dav/types.rs @@ -6,10 +6,8 @@ use chrono::{DateTime,FixedOffset}; pub struct Disabled(()); pub trait Extension { type Error; - type Namespace; - - fn namespaces() -> &'static [(&'static str, &'static str)]; - fn short_ns(ns: Self::Namespace) -> &'static str; + type Property; + type PropertyRequest; } /// No extension @@ -19,16 +17,9 @@ pub enum Namespace { } impl Extension for NoExtension { type Error = Disabled; - type Namespace = Namespace; - - fn namespaces() -> &'static [(&'static str, &'static str)] { - return &[ ("D", "DAV:") ][..] 
- } - - fn short_ns(ns: Self::Namespace) -> &'static str { - "D" - } -} + type Property = Disabled; + type PropertyRequest = Disabled; + } /// 14.1. activelock XML Element /// @@ -38,11 +29,11 @@ impl Extension for NoExtension { /// pub struct ActiveLock { - lockscope: LockScope, - locktype: LockType, - depth: Depth, - owner: Option, - timeout: Option, + pub lockscope: LockScope, + pub locktype: LockType, + pub depth: Depth, + pub owner: Option, + pub timeout: Option, } /// 14.2 allprop XML Element @@ -225,7 +216,7 @@ pub struct Href(pub String); /// standards. This element MUST NOT contain text or mixed content. /// /// -pub struct Include(Vec); +pub struct Include(pub Vec>); /// 14.9. location XML Element /// @@ -241,7 +232,7 @@ pub struct Include(Vec); /// that would be used in a Location header. /// /// -pub struct Location(Href); +pub struct Location(pub Href); /// 14.10. lockentry XML Element /// @@ -252,8 +243,8 @@ pub struct Location(Href); /// /// pub struct LockEntry { - lokscope: LockScope, - locktype: LockType, + pub lokscope: LockScope, + pub locktype: LockType, } /// 14.11. lockinfo XML Element @@ -265,9 +256,9 @@ pub struct LockEntry { /// /// pub struct LockInfo { - lockscope: LockScope, - locktype: LockType, - owner: Option, + pub lockscope: LockScope, + pub locktype: LockType, + pub owner: Option, } /// 14.12. lockroot XML Element @@ -282,7 +273,7 @@ pub struct LockInfo { /// values and the response to LOCK requests. /// /// -pub struct LockRoot(Href); +pub struct LockRoot(pub Href); /// 14.13. lockscope XML Element /// @@ -369,7 +360,7 @@ pub struct Multistatus { /// text content or attributes. /// /// -pub struct Owner(String); +pub struct Owner(pub String); /// 14.18. prop XML Element /// @@ -385,7 +376,7 @@ pub struct Owner(String); /// text or mixed content. /// /// -pub struct Prop(Vec); +pub struct Prop(pub Vec>); /// 14.19. propertyupdate XML Element /// @@ -397,10 +388,10 @@ pub struct Prop(Vec); /// required to modify the properties on the resource. /// /// -pub struct PropertyUpdate(Vec); -pub enum PropertyUpdateItem { - Remove(Remove), - Set(Set), +pub struct PropertyUpdate(Vec>); +pub enum PropertyUpdateItem { + Remove(Remove), + Set(Set), } /// 14.20. propfind XML Element @@ -414,10 +405,10 @@ pub enum PropertyUpdateItem { /// values. /// /// -pub enum PropFind { +pub enum PropFind { PropName(PropName), - AllProp(AllProp, Option), - Prop(Prop), + AllProp(AllProp, Option>), + Prop(Prop), } /// 14.21. propname XML Element @@ -446,10 +437,10 @@ pub struct PropName {} /// /// pub struct PropStat { - prop: Prop, - status: Status, - error: Option>, - responsedescription: Option, + pub prop: Prop, + pub status: Status, + pub error: Option>, + pub responsedescription: Option, } /// 14.23. remove XML Element @@ -465,7 +456,7 @@ pub struct PropStat { /// the names of properties to be removed are required. /// /// -pub struct Remove(Prop); +pub struct Remove(pub Prop); /// 14.24. response XML Element /// @@ -530,7 +521,7 @@ pub struct ResponseDescription(pub String); /// property, and MUST be subsequently retrievable using PROPFIND. /// /// -pub struct Set(Prop); +pub struct Set(pub Prop); /// 14.27. shared XML Element /// @@ -595,7 +586,7 @@ pub struct Timeout(u64); /// the header value could include LWS as defined in [RFC2616], Section /// 4.2. Server implementors SHOULD strip LWS from these values before /// using as WebDAV property values. 
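The types.rs change above swaps the namespace machinery on Extension for plain associated types: the core WebDAV structs take the extension as a type parameter, and a CalDAV (or any other) extension only has to supply its own property vocabulary. A minimal, self-contained sketch of that pattern follows; the names are illustrative rather than the crate's exact definitions (for instance, the real Disabled is an unconstructable struct, and the real Extension also carries an Error associated type):

    #![allow(dead_code)]

    // Uninhabited placeholder: a NoExtension property can never be constructed.
    pub enum Disabled {}

    pub trait Extension {
        type Property;
        type PropertyRequest;
    }

    // Plain WebDAV: both extension points are plugged with the placeholder.
    pub struct NoExtension;
    impl Extension for NoExtension {
        type Property = Disabled;
        type PropertyRequest = Disabled;
    }

    // A CalDAV-flavoured extension supplies its own property enum instead.
    pub struct CalExtension;
    pub enum CalProperty { CalendarDescription, CalendarTimezone }
    impl Extension for CalExtension {
        type Property = CalProperty;
        type PropertyRequest = CalProperty;
    }

    // Core types then carry the extension as a type parameter.
    pub enum Property<E: Extension> {
        DisplayName(String),
        Extension(E::Property),
    }

    fn main() {
        let _core: Property<NoExtension> = Property::DisplayName("inbox".into());
        let _cal: Property<CalExtension> =
            Property::Extension(CalProperty::CalendarDescription);
    }
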
-pub enum PropertyRequest { +pub enum PropertyRequest { CreationDate, DisplayName, GetContentLanguage, @@ -606,8 +597,9 @@ pub enum PropertyRequest { LockDiscovery, ResourceType, SupportedLock, + Extension(T::PropertyRequest), } -pub enum Property { +pub enum Property { /// 15.1. creationdate Property /// /// Name: creationdate @@ -884,6 +876,9 @@ pub enum Property { /// /// SupportedLock(Vec), + + /// Any extension + Extension(T::Property), } -- cgit v1.2.3 From e88e448179239fcc41aa03efcbfb2cc2ab0c922e Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Fri, 1 Mar 2024 08:32:02 +0100 Subject: Simplify code --- src/dav/calencoder.rs | 16 +++++++--------- src/dav/caltypes.rs | 4 +++- src/dav/encoder.rs | 42 +++++++++++++++++++----------------------- src/dav/types.rs | 5 ++--- 4 files changed, 31 insertions(+), 36 deletions(-) diff --git a/src/dav/calencoder.rs b/src/dav/calencoder.rs index aabdf36..378d2bd 100644 --- a/src/dav/calencoder.rs +++ b/src/dav/calencoder.rs @@ -8,10 +8,7 @@ use quick_xml::writer::{ElementWriter, Writer}; use quick_xml::name::PrefixDeclaration; use tokio::io::AsyncWrite; -pub struct CalCtx { - root: bool -} -impl Context for CalCtx { +impl Context for CalExtension { fn child(&self) -> Self { Self { root: false } } @@ -23,7 +20,8 @@ impl Context for CalCtx { err.write(xml, self.child()).await } } -impl CalCtx { + +impl CalExtension { fn create_ns_element(&self, ns: &str, name: &str) -> BytesStart { let mut start = BytesStart::new(format!("{}:{}", ns, name)); if self.root { @@ -37,8 +35,8 @@ impl CalCtx { } } -impl QuickWritable for Violation { - async fn write(&self, xml: &mut Writer, ctx: CalCtx) -> Result<(), QError> { +impl QuickWritable for Violation { + async fn write(&self, xml: &mut Writer, ctx: CalExtension) -> Result<(), QError> { match self { Self::SupportedFilter => { let start = ctx.create_cal_element("supported-filter"); @@ -70,11 +68,11 @@ mod tests { let mut tokio_buffer = tokio::io::BufWriter::new(&mut buffer); let mut writer = Writer::new_with_indent(&mut tokio_buffer, b' ', 4); - let res: Error = Error(vec![ + let res = Error(vec![ DavViolation::Extension(Violation::SupportedFilter), ]); - res.write(&mut writer, CalCtx{ root: true }).await.expect("xml serialization"); + res.write(&mut writer, CalExtension { root: true }).await.expect("xml serialization"); tokio_buffer.flush().await.expect("tokio buffer flush"); let expected = r#" diff --git a/src/dav/caltypes.rs b/src/dav/caltypes.rs index d913f95..c672370 100644 --- a/src/dav/caltypes.rs +++ b/src/dav/caltypes.rs @@ -1,7 +1,9 @@ use super::types::*; -pub struct CalExtension {} +pub struct CalExtension { + pub root: bool +} impl Extension for CalExtension { type Error = Violation; type Property = Property; diff --git a/src/dav/encoder.rs b/src/dav/encoder.rs index 2c9fdac..399a92e 100644 --- a/src/dav/encoder.rs +++ b/src/dav/encoder.rs @@ -11,23 +11,19 @@ use super::types::*; //-------------- TRAITS ---------------------- /// Basic encode trait to make a type encodable -pub trait QuickWritable> { +pub trait QuickWritable { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError>; } /// Encoding context -pub trait Context { +pub trait Context: Extension { fn child(&self) -> Self; fn create_dav_element(&self, name: &str) -> BytesStart; - async fn hook_error(&self, err: &E::Error, xml: &mut Writer) -> Result<(), QError>; + async fn hook_error(&self, err: &Self::Error, xml: &mut Writer) -> Result<(), QError>; } /// -------------- NoExtension Encoding Context -/// (Might be tied to the 
type maybe) -pub struct NoExtCtx { - root: bool -} -impl Context for NoExtCtx { +impl Context for NoExtension { fn child(&self) -> Self { Self { root: false } } @@ -47,7 +43,7 @@ impl Context for NoExtCtx { //--------------------- ENCODING -------------------- // --- XML ROOTS -impl> QuickWritable for Multistatus { +impl QuickWritable for Multistatus { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { let start = ctx.create_dav_element("multistatus"); let end = start.to_end(); @@ -67,7 +63,7 @@ impl> QuickWritable for Multistatus { // --- XML inner elements -impl> QuickWritable for Href { +impl QuickWritable for Href { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { let start = ctx.create_dav_element("href"); let end = start.to_end(); @@ -80,7 +76,7 @@ impl> QuickWritable for Href { } } -impl> QuickWritable for Response { +impl QuickWritable for Response { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { let start = ctx.create_dav_element("href"); let end = start.to_end(); @@ -103,7 +99,7 @@ impl> QuickWritable for Response { } } -impl> QuickWritable for StatusOrPropstat { +impl QuickWritable for StatusOrPropstat { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { match self { Self::Status(status) => status.write(xml, ctx.child()).await, @@ -118,7 +114,7 @@ impl> QuickWritable for StatusOrPropstat { } } -impl> QuickWritable for Status { +impl QuickWritable for Status { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { let start = ctx.create_dav_element("status"); let end = start.to_end(); @@ -134,7 +130,7 @@ impl> QuickWritable for Status { } } -impl> QuickWritable for ResponseDescription { +impl QuickWritable for ResponseDescription { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { let start = ctx.create_dav_element("responsedescription"); let end = start.to_end(); @@ -147,7 +143,7 @@ impl> QuickWritable for ResponseDescription { } } -impl> QuickWritable for Location { +impl QuickWritable for Location { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { let start = ctx.create_dav_element("location"); let end = start.to_end(); @@ -160,7 +156,7 @@ impl> QuickWritable for Location { } } -impl> QuickWritable for PropStat { +impl QuickWritable for PropStat { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { let start = ctx.create_dav_element("propstat"); let end = start.to_end(); @@ -180,7 +176,7 @@ impl> QuickWritable for PropStat { } } -impl> QuickWritable for Prop { +impl QuickWritable for Prop { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { let start = ctx.create_dav_element("prop"); let end = start.to_end(); @@ -196,7 +192,7 @@ impl> QuickWritable for Prop { } -impl> QuickWritable for Property { +impl QuickWritable for Property { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { use Property::*; match self { @@ -211,7 +207,7 @@ impl> QuickWritable for Property { -impl> QuickWritable for Error { +impl QuickWritable for Error { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { let start = ctx.create_dav_element("error"); let end = start.to_end(); @@ -226,7 +222,7 @@ impl> QuickWritable for Error { } } -impl> QuickWritable for Violation { +impl QuickWritable for Violation { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { match self { Violation::LockTokenMatchesRequestUri => 
xml.write_event_async(Event::Empty(ctx.create_dav_element("lock-token-matches-request-uri"))).await?, @@ -276,7 +272,7 @@ mod tests { let mut tokio_buffer = tokio::io::BufWriter::new(&mut buffer); let mut writer = Writer::new_with_indent(&mut tokio_buffer, b' ', 4); - let ctx = NoExtCtx{ root: false }; + let ctx = NoExtension { root: false }; Href("/SOGo/dav/so/".into()).write(&mut writer, ctx).await.expect("xml serialization"); tokio_buffer.flush().await.expect("tokio buffer flush"); @@ -290,8 +286,8 @@ mod tests { let mut tokio_buffer = tokio::io::BufWriter::new(&mut buffer); let mut writer = Writer::new_with_indent(&mut tokio_buffer, b' ', 4); - let ctx = NoExtCtx{ root: true }; - let xml: Multistatus = Multistatus { responses: vec![], responsedescription: Some(ResponseDescription("Hello world".into())) }; + let ctx = NoExtension { root: true }; + let xml = Multistatus { responses: vec![], responsedescription: Some(ResponseDescription("Hello world".into())) }; xml.write(&mut writer, ctx).await.expect("xml serialization"); tokio_buffer.flush().await.expect("tokio buffer flush"); diff --git a/src/dav/types.rs b/src/dav/types.rs index b0c0083..8807658 100644 --- a/src/dav/types.rs +++ b/src/dav/types.rs @@ -11,9 +11,8 @@ pub trait Extension { } /// No extension -pub struct NoExtension {} -pub enum Namespace { - Dav +pub struct NoExtension { + pub root: bool } impl Extension for NoExtension { type Error = Disabled; -- cgit v1.2.3 From 8691c98f44762d518ad0d34ba714180c79a9e506 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Fri, 1 Mar 2024 08:43:37 +0100 Subject: WIP property --- src/dav/caltypes.rs | 1 + src/dav/encoder.rs | 11 +++++++++-- src/dav/types.rs | 9 +++++++-- 3 files changed, 17 insertions(+), 4 deletions(-) diff --git a/src/dav/caltypes.rs b/src/dav/caltypes.rs index c672370..a68936c 100644 --- a/src/dav/caltypes.rs +++ b/src/dav/caltypes.rs @@ -8,6 +8,7 @@ impl Extension for CalExtension { type Error = Violation; type Property = Property; type PropertyRequest = Property; //@FIXME + type ResourceType = Property; //@FIXME } pub enum Violation { diff --git a/src/dav/encoder.rs b/src/dav/encoder.rs index 399a92e..5392b18 100644 --- a/src/dav/encoder.rs +++ b/src/dav/encoder.rs @@ -198,8 +198,15 @@ impl QuickWritable for Property { match self { CreationDate(date) => unimplemented!(), DisplayName(name) => unimplemented!(), - //@FIXME not finished - _ => unimplemented!(), + GetContentLanguage(lang) => unimplemented!(), + GetContentLength(len) => unimplemented!(), + GetContentType(ct) => unimplemented!(), + GetEtag(et) => unimplemented!(), + GetLastModified(dt) => unimplemented!(), + LockDiscovery(locks) => unimplemented!(), + ResourceType(res) => unimplemented!(), + SupportedLock(sup) => unimplemented!(), + Extension(inner) => unimplemented!(), }; Ok(()) } diff --git a/src/dav/types.rs b/src/dav/types.rs index 8807658..59cfcd6 100644 --- a/src/dav/types.rs +++ b/src/dav/types.rs @@ -8,6 +8,7 @@ pub trait Extension { type Error; type Property; type PropertyRequest; + type ResourceType; } /// No extension @@ -18,6 +19,7 @@ impl Extension for NoExtension { type Error = Disabled; type Property = Disabled; type PropertyRequest = Disabled; + type ResourceType = Disabled; } /// 14.1. activelock XML Element @@ -849,7 +851,7 @@ pub enum Property { /// /// /// - ResourceType(Collection), + ResourceType(Vec>), /// 15.10. 
supportedlock Property /// @@ -880,4 +882,7 @@ pub enum Property { Extension(T::Property), } - +pub enum ResourceType { + Collection, + Extension(T::ResourceType), +} -- cgit v1.2.3 From 929a185f371a7f369c4437d47b5c9e9f414678cf Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Fri, 1 Mar 2024 10:12:19 +0100 Subject: Add a property hook --- src/dav/calencoder.rs | 20 ++++--- src/dav/encoder.rs | 146 ++++++++++++++++++++++++++++++++++++++++++++++---- 2 files changed, 147 insertions(+), 19 deletions(-) diff --git a/src/dav/calencoder.rs b/src/dav/calencoder.rs index 378d2bd..14b7903 100644 --- a/src/dav/calencoder.rs +++ b/src/dav/calencoder.rs @@ -19,6 +19,10 @@ impl Context for CalExtension { async fn hook_error(&self, err: &Violation, xml: &mut Writer) -> Result<(), QError> { err.write(xml, self.child()).await } + + async fn hook_property(&self, prop: &Self::Property, xml: &mut Writer) -> Result<(), QError> { + prop.write(xml, self.child()).await + } } impl CalExtension { @@ -47,14 +51,14 @@ impl QuickWritable for Violation { } } -/* - - - - - - -*/ + +impl QuickWritable for Property { + async fn write(&self, xml: &mut Writer, ctx: CalExtension) -> Result<(), QError> { + unimplemented!(); + } +} + + #[cfg(test)] mod tests { diff --git a/src/dav/encoder.rs b/src/dav/encoder.rs index 5392b18..332c13a 100644 --- a/src/dav/encoder.rs +++ b/src/dav/encoder.rs @@ -20,6 +20,7 @@ pub trait Context: Extension { fn child(&self) -> Self; fn create_dav_element(&self, name: &str) -> BytesStart; async fn hook_error(&self, err: &Self::Error, xml: &mut Writer) -> Result<(), QError>; + async fn hook_property(&self, prop: &Self::Property, xml: &mut Writer) -> Result<(), QError>; } /// -------------- NoExtension Encoding Context @@ -37,6 +38,9 @@ impl Context for NoExtension { async fn hook_error(&self, err: &Disabled, xml: &mut Writer) -> Result<(), QError> { unreachable!(); } + async fn hook_property(&self, prop: &Disabled, xml: &mut Writer) -> Result<(), QError> { + unreachable!(); + } } @@ -196,23 +200,143 @@ impl QuickWritable for Property { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { use Property::*; match self { - CreationDate(date) => unimplemented!(), - DisplayName(name) => unimplemented!(), - GetContentLanguage(lang) => unimplemented!(), - GetContentLength(len) => unimplemented!(), - GetContentType(ct) => unimplemented!(), - GetEtag(et) => unimplemented!(), - GetLastModified(dt) => unimplemented!(), - LockDiscovery(locks) => unimplemented!(), - ResourceType(res) => unimplemented!(), - SupportedLock(sup) => unimplemented!(), - Extension(inner) => unimplemented!(), + CreationDate(date) => { + // 1997-12-01T17:42:21-08:00 + let start = ctx.create_dav_element("creationdate"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + xml.write_event_async(Event::Text(BytesText::new(&date.to_rfc3339()))).await?; + xml.write_event_async(Event::End(end)).await?; + }, + DisplayName(name) => { + // Example collection + let start = ctx.create_dav_element("displayname"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + xml.write_event_async(Event::Text(BytesText::new(name))).await?; + xml.write_event_async(Event::End(end)).await?; + }, + GetContentLanguage(lang) => { + let start = ctx.create_dav_element("getcontentlanguage"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + xml.write_event_async(Event::Text(BytesText::new(lang))).await?; + 
xml.write_event_async(Event::End(end)).await?; + }, + GetContentLength(len) => { + // 4525 + let start = ctx.create_dav_element("getcontentlength"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + xml.write_event_async(Event::Text(BytesText::new(&len.to_string()))).await?; + xml.write_event_async(Event::End(end)).await?; + }, + GetContentType(ct) => { + // text/html + let start = ctx.create_dav_element("getcontenttype"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + xml.write_event_async(Event::Text(BytesText::new(&ct))).await?; + xml.write_event_async(Event::End(end)).await?; + }, + GetEtag(et) => { + // "zzyzx" + let start = ctx.create_dav_element("getetag"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + xml.write_event_async(Event::Text(BytesText::new(et))).await?; + xml.write_event_async(Event::End(end)).await?; + }, + GetLastModified(date) => { + // Mon, 12 Jan 1998 09:25:56 GMT + let start = ctx.create_dav_element("getlastmodified"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + xml.write_event_async(Event::Text(BytesText::new(&date.to_rfc3339()))).await?; + xml.write_event_async(Event::End(end)).await?; + }, + LockDiscovery(many_locks) => { + // ... + let start = ctx.create_dav_element("lockdiscovery"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + for lock in many_locks.iter() { + lock.write(xml, ctx.child()).await?; + } + xml.write_event_async(Event::End(end)).await?; + }, + ResourceType(many_types) => { + // + + // + + // + // + // + // + + let start = ctx.create_dav_element("resourcetype"); + if many_types.is_empty() { + xml.write_event_async(Event::Empty(start)).await?; + } else { + let end = start.to_end(); + xml.write_event_async(Event::Start(start.clone())).await?; + for restype in many_types.iter() { + restype.write(xml, ctx.child()).await?; + } + xml.write_event_async(Event::End(end)).await?; + } + }, + SupportedLock(many_entries) => { + // + + // ... 
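Every arm above follows the same quick-xml recipe: build a BytesStart for the element, derive the matching end tag with to_end(), then emit Start, the text or child events, and End through the async writer. A stripped-down, runnable version of that recipe, outside the Context/Extension machinery, is sketched below. It assumes quick-xml 0.31 built with its "async-tokio" feature and tokio with the macros and rt-multi-thread features; the element name is hardcoded here, whereas the real encoder obtains it (and the namespace handling) from ctx.create_dav_element().

    use quick_xml::events::{BytesStart, BytesText, Event};
    use quick_xml::writer::Writer;
    use tokio::io::AsyncWriteExt;

    #[tokio::main]
    async fn main() -> Result<(), Box<dyn std::error::Error>> {
        // Same harness as the unit tests in this patch series: an in-memory
        // buffer wrapped in a tokio BufWriter, driving an async quick-xml Writer.
        let mut buffer = Vec::new();
        let mut tokio_buffer = tokio::io::BufWriter::new(&mut buffer);
        let mut xml = Writer::new(&mut tokio_buffer);

        // Build the start tag once and derive the matching end tag from it.
        let start = BytesStart::new("D:displayname");
        let end = start.to_end();

        xml.write_event_async(Event::Start(start.clone())).await?;
        xml.write_event_async(Event::Text(BytesText::new("Example collection"))).await?;
        xml.write_event_async(Event::End(end)).await?;

        tokio_buffer.flush().await?;
        assert_eq!(
            std::str::from_utf8(buffer.as_slice())?,
            "<D:displayname>Example collection</D:displayname>"
        );
        Ok(())
    }
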
+ + let start = ctx.create_dav_element("supportedlock"); + if many_entries.is_empty() { + xml.write_event_async(Event::Empty(start)).await?; + } else { + let end = start.to_end(); + xml.write_event_async(Event::Start(start.clone())).await?; + for entry in many_entries.iter() { + entry.write(xml, ctx.child()).await?; + } + xml.write_event_async(Event::End(end)).await?; + } + }, + Extension(inner) => { + ctx.hook_property(inner, xml).await?; + }, }; Ok(()) } } +impl QuickWritable for ActiveLock { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +impl QuickWritable for ResourceType { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} +impl QuickWritable for LockEntry { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} impl QuickWritable for Error { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { -- cgit v1.2.3 From c52a6591512825d297b0636cc228ee0915974404 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Fri, 1 Mar 2024 10:29:16 +0100 Subject: hook resource type --- src/dav/calencoder.rs | 12 +++++++++++- src/dav/caltypes.rs | 6 +++++- src/dav/encoder.rs | 14 +++++++++++--- 3 files changed, 27 insertions(+), 5 deletions(-) diff --git a/src/dav/calencoder.rs b/src/dav/calencoder.rs index 14b7903..fc380ac 100644 --- a/src/dav/calencoder.rs +++ b/src/dav/calencoder.rs @@ -23,6 +23,10 @@ impl Context for CalExtension { async fn hook_property(&self, prop: &Self::Property, xml: &mut Writer) -> Result<(), QError> { prop.write(xml, self.child()).await } + + async fn hook_resourcetype(&self, restype: &Self::ResourceType, xml: &mut Writer) -> Result<(), QError> { + restype.write(xml, self.child()).await + } } impl CalExtension { @@ -58,7 +62,13 @@ impl QuickWritable for Property { } } - +impl QuickWritable for ResourceType { + async fn write(&self, xml: &mut Writer, ctx: CalExtension) -> Result<(), QError> { + match self { + Self::Calendar => xml.write_event_async(Event::Empty(ctx.create_dav_element("calendar"))).await, + } + } +} #[cfg(test)] mod tests { diff --git a/src/dav/caltypes.rs b/src/dav/caltypes.rs index a68936c..9e4cb47 100644 --- a/src/dav/caltypes.rs +++ b/src/dav/caltypes.rs @@ -8,7 +8,7 @@ impl Extension for CalExtension { type Error = Violation; type Property = Property; type PropertyRequest = Property; //@FIXME - type ResourceType = Property; //@FIXME + type ResourceType = ResourceType; } pub enum Violation { @@ -34,3 +34,7 @@ pub enum Property { CalendarDescription, CalendarTimezone, } + +pub enum ResourceType { + Calendar, +} diff --git a/src/dav/encoder.rs b/src/dav/encoder.rs index 332c13a..72d815b 100644 --- a/src/dav/encoder.rs +++ b/src/dav/encoder.rs @@ -21,6 +21,7 @@ pub trait Context: Extension { fn create_dav_element(&self, name: &str) -> BytesStart; async fn hook_error(&self, err: &Self::Error, xml: &mut Writer) -> Result<(), QError>; async fn hook_property(&self, prop: &Self::Property, xml: &mut Writer) -> Result<(), QError>; + async fn hook_resourcetype(&self, prop: &Self::ResourceType, xml: &mut Writer) -> Result<(), QError>; } /// -------------- NoExtension Encoding Context @@ -41,6 +42,9 @@ impl Context for NoExtension { async fn hook_property(&self, prop: &Disabled, xml: &mut Writer) -> Result<(), QError> { unreachable!(); } + async fn hook_resourcetype(&self, restype: &Disabled, xml: &mut Writer) -> Result<(), QError> { + unreachable!(); + } } @@ -320,13 +324,17 @@ impl QuickWritable for 
Property { } } -impl QuickWritable for ActiveLock { +impl QuickWritable for ResourceType { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + match self { + Self::Collection => xml.write_event_async(Event::Empty(ctx.create_dav_element("collection"))).await?, + Self::Extension(inner) => ctx.hook_resourcetype(inner, xml).await?, + }; + Ok(()) } } -impl QuickWritable for ResourceType { +impl QuickWritable for ActiveLock { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { unimplemented!(); } -- cgit v1.2.3 From cd48825275a99ccc8ccdadde2169cfd5b7dad15f Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Fri, 1 Mar 2024 10:56:05 +0100 Subject: WIP DAV encoder --- src/dav/encoder.rs | 133 +++++++++++++++++++++++++++++++++++++++++++++-------- src/dav/types.rs | 2 + 2 files changed, 115 insertions(+), 20 deletions(-) diff --git a/src/dav/encoder.rs b/src/dav/encoder.rs index 72d815b..8534db1 100644 --- a/src/dav/encoder.rs +++ b/src/dav/encoder.rs @@ -51,6 +51,16 @@ impl Context for NoExtension { //--------------------- ENCODING -------------------- // --- XML ROOTS + +/// PROPFIND REQUEST +impl QuickWritable for PropFind { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +/// PROPFIND RESPONSE, PROPPATCH RESPONSE, COPY RESPONSE, MOVE RESPONSE +/// DELETE RESPONSE, impl QuickWritable for Multistatus { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { let start = ctx.create_dav_element("multistatus"); @@ -69,6 +79,28 @@ impl QuickWritable for Multistatus { } } +/// LOCK REQUEST +impl QuickWritable for LockInfo { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +/// SOME LOCK RESPONSES +impl QuickWritable for Prop { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + let start = ctx.create_dav_element("prop"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + for property in &self.0 { + property.write(xml, ctx.child()).await?; + } + xml.write_event_async(Event::End(end)).await?; + + Ok(()) + } +} // --- XML inner elements impl QuickWritable for Href { @@ -184,22 +216,6 @@ impl QuickWritable for PropStat { } } -impl QuickWritable for Prop { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let start = ctx.create_dav_element("prop"); - let end = start.to_end(); - - xml.write_event_async(Event::Start(start.clone())).await?; - for property in &self.0 { - property.write(xml, ctx.child()).await?; - } - xml.write_event_async(Event::End(end)).await?; - - Ok(()) - } -} - - impl QuickWritable for Property { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { use Property::*; @@ -327,14 +343,89 @@ impl QuickWritable for Property { impl QuickWritable for ResourceType { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { match self { - Self::Collection => xml.write_event_async(Event::Empty(ctx.create_dav_element("collection"))).await?, - Self::Extension(inner) => ctx.hook_resourcetype(inner, xml).await?, - }; - Ok(()) + Self::Collection => xml.write_event_async(Event::Empty(ctx.create_dav_element("collection"))).await, + Self::Extension(inner) => ctx.hook_resourcetype(inner, xml).await, + } } } impl QuickWritable for ActiveLock { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + // + // + // + // infinity + // + // http://example.org/~ejw/contact.html + // + // 
Second-604800 + // + // urn:uuid:e71d4fae-5dec-22d6-fea5-00a0c91e6be4 + // + // + // http://example.com/workspace/webdav/proposal.doc + // + // + let start = ctx.create_dav_element("activelock"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + self.locktype.write(xml, ctx.child()).await?; + self.lockscope.write(xml, ctx.child()).await?; + self.depth.write(xml, ctx.child()).await?; + if let Some(owner) = &self.owner { + owner.write(xml, ctx.child()).await?; + } + if let Some(timeout) = &self.timeout { + timeout.write(xml, ctx.child()).await?; + } + if let Some(locktoken) = &self.locktoken { + locktoken.write(xml, ctx.child()).await?; + } + self.lockroot.write(xml, ctx.child()).await?; + xml.write_event_async(Event::End(end)).await?; + + Ok(()) + } +} + +impl QuickWritable for LockType { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +impl QuickWritable for LockScope { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +impl QuickWritable for Owner { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +impl QuickWritable for Depth { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +impl QuickWritable for Timeout { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +impl QuickWritable for LockToken { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +impl QuickWritable for LockRoot { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { unimplemented!(); } @@ -342,6 +433,8 @@ impl QuickWritable for ActiveLock { impl QuickWritable for LockEntry { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + let start = ctx.create_dav_element("lockentry"); + let end = start.to_end(); unimplemented!(); } } diff --git a/src/dav/types.rs b/src/dav/types.rs index 59cfcd6..50f88e3 100644 --- a/src/dav/types.rs +++ b/src/dav/types.rs @@ -35,6 +35,8 @@ pub struct ActiveLock { pub depth: Depth, pub owner: Option, pub timeout: Option, + pub locktoken: Option, + pub lockroot: LockRoot, } /// 14.2 allprop XML Element -- cgit v1.2.3 From 8d7c8713b69883632cad84521d604cb9eb9a40d4 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Fri, 1 Mar 2024 13:21:19 +0100 Subject: Finalized encode ActiveLock --- src/dav/encoder.rs | 75 ++++++++++++++++++++++++++++++++++++++++++++++++------ src/dav/types.rs | 30 +++++++++++++++++++--- 2 files changed, 93 insertions(+), 12 deletions(-) diff --git a/src/dav/encoder.rs b/src/dav/encoder.rs index 8534db1..72d9b91 100644 --- a/src/dav/encoder.rs +++ b/src/dav/encoder.rs @@ -391,43 +391,98 @@ impl QuickWritable for ActiveLock { impl QuickWritable for LockType { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + let start = ctx.create_dav_element("locktype"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + match self { + Self::Write => xml.write_event_async(Event::Empty(ctx.create_dav_element("write"))).await?, + }; + xml.write_event_async(Event::End(end)).await } } impl QuickWritable for LockScope { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + let start = ctx.create_dav_element("lockscope"); + let end = start.to_end(); + + 
xml.write_event_async(Event::Start(start.clone())).await?; + match self { + Self::Exclusive => xml.write_event_async(Event::Empty(ctx.create_dav_element("exclusive"))).await?, + Self::Shared => xml.write_event_async(Event::Empty(ctx.create_dav_element("shared"))).await?, + }; + xml.write_event_async(Event::End(end)).await } } impl QuickWritable for Owner { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + let start = ctx.create_dav_element("owner"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + if let Some(txt) = &self.txt { + xml.write_event_async(Event::Text(BytesText::new(&txt))).await?; + } + if let Some(href) = &self.url { + href.write(xml, ctx.child()).await?; + } + xml.write_event_async(Event::End(end)).await } } impl QuickWritable for Depth { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + let start = ctx.create_dav_element("depth"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + match self { + Self::Zero => xml.write_event_async(Event::Text(BytesText::new("0"))).await?, + Self::One => xml.write_event_async(Event::Text(BytesText::new("1"))).await?, + Self::Infinity => xml.write_event_async(Event::Text(BytesText::new("infinity"))).await?, + }; + xml.write_event_async(Event::End(end)).await } } impl QuickWritable for Timeout { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + let start = ctx.create_dav_element("timeout"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + match self { + Self::Seconds(count) => { + let txt = format!("Second-{}", count); + xml.write_event_async(Event::Text(BytesText::new(&txt))).await? + }, + Self::Infinite => xml.write_event_async(Event::Text(BytesText::new("Infinite"))).await? + }; + xml.write_event_async(Event::End(end)).await } } impl QuickWritable for LockToken { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + let start = ctx.create_dav_element("locktoken"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + self.0.write(xml, ctx.child()).await?; + xml.write_event_async(Event::End(end)).await } } impl QuickWritable for LockRoot { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + let start = ctx.create_dav_element("lockroot"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + self.0.write(xml, ctx.child()).await?; + xml.write_event_async(Event::End(end)).await } } @@ -435,7 +490,11 @@ impl QuickWritable for LockEntry { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { let start = ctx.create_dav_element("lockentry"); let end = start.to_end(); - unimplemented!(); + + xml.write_event_async(Event::Start(start.clone())).await?; + self.lockscope.write(xml, ctx.child()).await?; + self.locktype.write(xml, ctx.child()).await?; + xml.write_event_async(Event::End(end)).await } } diff --git a/src/dav/types.rs b/src/dav/types.rs index 50f88e3..7f22385 100644 --- a/src/dav/types.rs +++ b/src/dav/types.rs @@ -246,7 +246,7 @@ pub struct Location(pub Href); /// /// pub struct LockEntry { - pub lokscope: LockScope, + pub lockscope: LockScope, pub locktype: LockType, } @@ -300,7 +300,7 @@ pub enum LockScope { /// refers to the lock. 
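The Timeout writer added above emits the RFC 4918 TimeType grammar: the literal "Infinite", or "Second-" followed by a decimal count of seconds (at most 2^32-1, hence the u32). A small stand-alone sketch of that mapping, including a parse direction that the patch itself does not need (the parse helper is hypothetical, not part of the crate):

    // Both directions of the RFC 4918 TimeType value ("Second-604800", "Infinite").
    #[derive(Debug, PartialEq)]
    enum Timeout {
        Seconds(u32),
        Infinite,
    }

    impl Timeout {
        // What the encoder emits as the element's text content.
        fn to_value(&self) -> String {
            match self {
                Timeout::Seconds(count) => format!("Second-{}", count),
                Timeout::Infinite => "Infinite".to_string(),
            }
        }

        // Hypothetical helper: reading the same value back.
        fn parse(s: &str) -> Option<Self> {
            if s == "Infinite" {
                return Some(Timeout::Infinite);
            }
            s.strip_prefix("Second-")?.parse().ok().map(Timeout::Seconds)
        }
    }

    fn main() {
        assert_eq!(Timeout::Seconds(604800).to_value(), "Second-604800");
        assert_eq!(Timeout::parse("Second-3600"), Some(Timeout::Seconds(3600)));
        assert_eq!(Timeout::parse("Infinite"), Some(Timeout::Infinite));
    }
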
/// /// -pub struct LockToken(Href); +pub struct LockToken(pub Href); /// 14.15. locktype XML Element /// @@ -363,7 +363,11 @@ pub struct Multistatus { /// text content or attributes. /// /// -pub struct Owner(pub String); +//@FIXME might need support for an extension +pub struct Owner { + pub txt: Option, + pub url: Option, +} /// 14.18. prop XML Element /// @@ -559,7 +563,25 @@ pub struct Status(pub http::status::StatusCode); /// /// /// -pub struct Timeout(u64); +/// +/// TimeOut = "Timeout" ":" 1#TimeType +/// TimeType = ("Second-" DAVTimeOutVal | "Infinite") +/// ; No LWS allowed within TimeType +/// DAVTimeOutVal = 1*DIGIT +/// +/// Clients MAY include Timeout request headers in their LOCK requests. +/// However, the server is not required to honor or even consider these +/// requests. Clients MUST NOT submit a Timeout request header with any +/// method other than a LOCK method. +/// +/// The "Second" TimeType specifies the number of seconds that will +/// elapse between granting of the lock at the server, and the automatic +/// removal of the lock. The timeout value for TimeType "Second" MUST +/// NOT be greater than 2^32-1. +pub enum Timeout { + Seconds(u32), + Infinite, +} /// 15. DAV Properties -- cgit v1.2.3 From 2b30c97084a63afa53278e35dbbcc620192c8c3f Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Fri, 1 Mar 2024 14:28:36 +0100 Subject: fully serialize webdav core? --- src/dav/calencoder.rs | 10 +++++++ src/dav/caltypes.rs | 4 +++ src/dav/encoder.rs | 74 +++++++++++++++++++++++++++++++++++++++++++++++++-- src/dav/types.rs | 52 +++++++++++++++++++----------------- src/main.rs | 1 + 5 files changed, 115 insertions(+), 26 deletions(-) diff --git a/src/dav/calencoder.rs b/src/dav/calencoder.rs index fc380ac..c7708eb 100644 --- a/src/dav/calencoder.rs +++ b/src/dav/calencoder.rs @@ -27,6 +27,10 @@ impl Context for CalExtension { async fn hook_resourcetype(&self, restype: &Self::ResourceType, xml: &mut Writer) -> Result<(), QError> { restype.write(xml, self.child()).await } + + async fn hook_propertyrequest(&self, propreq: &Self::PropertyRequest, xml: &mut Writer) -> Result<(), QError> { + propreq.write(xml, self.child()).await + } } impl CalExtension { @@ -62,6 +66,12 @@ impl QuickWritable for Property { } } +impl QuickWritable for PropertyRequest { + async fn write(&self, xml: &mut Writer, ctx: CalExtension) -> Result<(), QError> { + unimplemented!(); + } +} + impl QuickWritable for ResourceType { async fn write(&self, xml: &mut Writer, ctx: CalExtension) -> Result<(), QError> { match self { diff --git a/src/dav/caltypes.rs b/src/dav/caltypes.rs index 9e4cb47..55f4f93 100644 --- a/src/dav/caltypes.rs +++ b/src/dav/caltypes.rs @@ -29,6 +29,10 @@ pub enum Violation { SupportedFilter, } +pub enum PropertyRequest { + CalendarDescription, + CalendarTimezone, +} pub enum Property { CalendarDescription, diff --git a/src/dav/encoder.rs b/src/dav/encoder.rs index 72d9b91..0ad8949 100644 --- a/src/dav/encoder.rs +++ b/src/dav/encoder.rs @@ -21,6 +21,7 @@ pub trait Context: Extension { fn create_dav_element(&self, name: &str) -> BytesStart; async fn hook_error(&self, err: &Self::Error, xml: &mut Writer) -> Result<(), QError>; async fn hook_property(&self, prop: &Self::Property, xml: &mut Writer) -> Result<(), QError>; + async fn hook_propertyrequest(&self, prop: &Self::PropertyRequest, xml: &mut Writer) -> Result<(), QError>; async fn hook_resourcetype(&self, prop: &Self::ResourceType, xml: &mut Writer) -> Result<(), QError>; } @@ -42,6 +43,9 @@ impl Context for NoExtension { async fn 
hook_property(&self, prop: &Disabled, xml: &mut Writer) -> Result<(), QError> { unreachable!(); } + async fn hook_propertyrequest(&self, prop: &Disabled, xml: &mut Writer) -> Result<(), QError> { + unreachable!(); + } async fn hook_resourcetype(&self, restype: &Disabled, xml: &mut Writer) -> Result<(), QError> { unreachable!(); } @@ -55,7 +59,30 @@ impl Context for NoExtension { /// PROPFIND REQUEST impl QuickWritable for PropFind { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + let start = ctx.create_dav_element("propfind"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + match self { + Self::PropName => xml.write_event_async(Event::Empty(ctx.create_dav_element("propname"))).await?, + Self::AllProp(maybe_include) => { + xml.write_event_async(Event::Empty(ctx.create_dav_element("allprop"))).await?; + if let Some(include) = maybe_include { + include.write(xml, ctx.child()).await?; + } + }, + Self::Prop(many_propreq) => { + let start = ctx.create_dav_element("prop"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + for propreq in many_propreq.iter() { + propreq.write(xml, ctx.child()).await?; + } + xml.write_event_async(Event::End(end)).await?; + }, + } + xml.write_event_async(Event::End(end)).await } } @@ -82,7 +109,16 @@ impl QuickWritable for Multistatus { /// LOCK REQUEST impl QuickWritable for LockInfo { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + let start = ctx.create_dav_element("lockinfo"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + self.lockscope.write(xml, ctx.child()).await?; + self.locktype.write(xml, ctx.child()).await?; + if let Some(owner) = &self.owner { + owner.write(xml, ctx.child()).await?; + } + xml.write_event_async(Event::End(end)).await } } @@ -349,6 +385,40 @@ impl QuickWritable for ResourceType { } } +impl QuickWritable for Include { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + let start = ctx.create_dav_element("include"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + for prop in self.0.iter() { + prop.write(xml, ctx.child()).await?; + } + xml.write_event_async(Event::End(end)).await + } +} + +impl QuickWritable for PropertyRequest { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + use PropertyRequest::*; + let mut atom = (async |c| xml.write_event_async(Event::Empty(ctx.create_dav_element(c))).await); + + match self { + CreationDate => atom("creationdate").await, + DisplayName => atom("displayname").await, + GetContentLanguage => atom("getcontentlanguage").await, + GetContentLength => atom("getcontentlength").await, + GetContentType => atom("getcontenttype").await, + GetEtag => atom("getetag").await, + GetLastModified => atom("getlastmodified").await, + LockDiscovery => atom("lockdiscovery").await, + ResourceType => atom("resourcetype").await, + SupportedLock => atom("supportedlock").await, + Extension(inner) => ctx.hook_propertyrequest(inner, xml).await, + } + } +} + impl QuickWritable for ActiveLock { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { // diff --git a/src/dav/types.rs b/src/dav/types.rs index 7f22385..95fe749 100644 --- a/src/dav/types.rs +++ b/src/dav/types.rs @@ -39,17 +39,6 @@ pub struct ActiveLock { pub lockroot: LockRoot, } -/// 14.2 allprop XML Element -/// -/// Name: allprop -/// -/// 
Purpose: Specifies that all names and values of dead properties and -/// the live properties defined by this document existing on the -/// resource are to be returned. -/// -/// -pub struct AllProp{} - /// 14.3 collection XML Element /// /// Name: collection @@ -219,7 +208,7 @@ pub struct Href(pub String); /// standards. This element MUST NOT contain text or mixed content. /// /// -pub struct Include(pub Vec>); +pub struct Include(pub Vec>); /// 14.9. location XML Element /// @@ -401,6 +390,29 @@ pub enum PropertyUpdateItem { Set(Set), } +/// 14.2 allprop XML Element +/// +/// Name: allprop +/// +/// Purpose: Specifies that all names and values of dead properties and +/// the live properties defined by this document existing on the +/// resource are to be returned. +/// +/// +/// +/// --- +/// +/// 14.21. propname XML Element +/// +/// Name: propname +/// +/// Purpose: Specifies that only a list of property names on the +/// resource is to be returned. +/// +/// +/// +/// --- +/// /// 14.20. propfind XML Element /// /// Name: propfind @@ -413,20 +425,12 @@ pub enum PropertyUpdateItem { /// /// pub enum PropFind { - PropName(PropName), - AllProp(AllProp, Option>), - Prop(Prop), + PropName, + AllProp(Option>), + Prop(Vec>), } -/// 14.21. propname XML Element -/// -/// Name: propname -/// -/// Purpose: Specifies that only a list of property names on the -/// resource is to be returned. -/// -/// -pub struct PropName {} + /// 14.22 propstat XML Element /// diff --git a/src/main.rs b/src/main.rs index 4f874b9..c9ce42d 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,5 +1,6 @@ #![feature(type_alias_impl_trait)] #![feature(async_fn_in_trait)] +#![feature(async_closure)] mod auth; mod bayou; -- cgit v1.2.3 From c15f8856a83341b2a1ba692b8317004c2f7542f5 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Fri, 1 Mar 2024 15:32:40 +0100 Subject: propname tests --- src/dav/encoder.rs | 183 ++++++++++++++++++++++++++++++++++++++++++++++------- src/dav/types.rs | 5 +- 2 files changed, 165 insertions(+), 23 deletions(-) diff --git a/src/dav/encoder.rs b/src/dav/encoder.rs index 0ad8949..6475da8 100644 --- a/src/dav/encoder.rs +++ b/src/dav/encoder.rs @@ -61,6 +61,7 @@ impl QuickWritable for PropFind { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { let start = ctx.create_dav_element("propfind"); let end = start.to_end(); + let ctx = ctx.child(); xml.write_event_async(Event::Start(start.clone())).await?; match self { @@ -129,9 +130,18 @@ impl QuickWritable for Prop { let end = start.to_end(); xml.write_event_async(Event::Start(start.clone())).await?; - for property in &self.0 { - property.write(xml, ctx.child()).await?; - } + match self { + Self::Name(many_names) => { + for propname in many_names { + propname.write(xml, ctx.child()).await?; + } + }, + Self::Value(many_values) => { + for propval in many_values { + propval.write(xml, ctx.child()).await?; + } + } + }; xml.write_event_async(Event::End(end)).await?; Ok(()) @@ -154,7 +164,7 @@ impl QuickWritable for Href { impl QuickWritable for Response { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let start = ctx.create_dav_element("href"); + let start = ctx.create_dav_element("response"); let end = start.to_end(); xml.write_event_async(Event::Start(start.clone())).await?; @@ -463,6 +473,7 @@ impl QuickWritable for LockType { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { let start = ctx.create_dav_element("locktype"); let end = start.to_end(); + let ctx = ctx.child(); 
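The #![feature(async_closure)] gate added to main.rs above was introduced alongside the atom helper in the PropertyRequest encoder, a nightly async closure that captures the writer and emits one empty DAV element per enum variant. For reference, here is a stable-Rust sketch of the same idea using a plain async fn instead of a closure; it assumes quick-xml 0.31 with the "async-tokio" feature, and it hardcodes the "D:" prefix that the real code derives from the encoding context.

    use quick_xml::events::{BytesStart, Event};
    use quick_xml::writer::Writer;
    use tokio::io::{AsyncWrite, AsyncWriteExt};

    enum PropertyRequest {
        CreationDate,
        DisplayName,
        ResourceType,
    }

    // Stable replacement for the nightly `async |name| ...` closure: one empty
    // element (<D:creationdate/> and friends) per requested property name.
    async fn atom<W: AsyncWrite + Unpin>(
        xml: &mut Writer<W>,
        name: &str,
    ) -> Result<(), quick_xml::Error> {
        xml.write_event_async(Event::Empty(BytesStart::new(format!("D:{}", name)))).await
    }

    async fn write_request<W: AsyncWrite + Unpin>(
        xml: &mut Writer<W>,
        req: &PropertyRequest,
    ) -> Result<(), quick_xml::Error> {
        match req {
            PropertyRequest::CreationDate => atom(xml, "creationdate").await,
            PropertyRequest::DisplayName => atom(xml, "displayname").await,
            PropertyRequest::ResourceType => atom(xml, "resourcetype").await,
        }
    }

    #[tokio::main]
    async fn main() -> Result<(), Box<dyn std::error::Error>> {
        let mut buffer = Vec::new();
        let mut tokio_buffer = tokio::io::BufWriter::new(&mut buffer);
        let mut xml = Writer::new(&mut tokio_buffer);

        for req in [
            PropertyRequest::CreationDate,
            PropertyRequest::DisplayName,
            PropertyRequest::ResourceType,
        ] {
            write_request(&mut xml, &req).await?;
        }

        tokio_buffer.flush().await?;
        assert_eq!(
            std::str::from_utf8(buffer.as_slice())?,
            "<D:creationdate/><D:displayname/><D:resourcetype/>"
        );
        Ok(())
    }
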
xml.write_event_async(Event::Start(start.clone())).await?; match self { @@ -476,6 +487,7 @@ impl QuickWritable for LockScope { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { let start = ctx.create_dav_element("lockscope"); let end = start.to_end(); + let ctx = ctx.child(); xml.write_event_async(Event::Start(start.clone())).await?; match self { @@ -626,37 +638,164 @@ mod tests { /// To run only the unit tests and avoid the behavior ones: /// cargo test --bin aerogramme - - #[tokio::test] - async fn test_href() { + + async fn serialize>(ctx: C, elem: &Q) -> String { let mut buffer = Vec::new(); let mut tokio_buffer = tokio::io::BufWriter::new(&mut buffer); let mut writer = Writer::new_with_indent(&mut tokio_buffer, b' ', 4); - - let ctx = NoExtension { root: false }; - Href("/SOGo/dav/so/".into()).write(&mut writer, ctx).await.expect("xml serialization"); + elem.write(&mut writer, ctx).await.expect("xml serialization"); tokio_buffer.flush().await.expect("tokio buffer flush"); + let got = std::str::from_utf8(buffer.as_slice()).unwrap(); - assert_eq!(buffer.as_slice(), &b"/SOGo/dav/so/"[..]); + return got.into() } - #[tokio::test] - async fn test_multistatus() { - let mut buffer = Vec::new(); - let mut tokio_buffer = tokio::io::BufWriter::new(&mut buffer); - let mut writer = Writer::new_with_indent(&mut tokio_buffer, b' ', 4); + async fn basic_href() { - let ctx = NoExtension { root: true }; - let xml = Multistatus { responses: vec![], responsedescription: Some(ResponseDescription("Hello world".into())) }; - xml.write(&mut writer, ctx).await.expect("xml serialization"); - tokio_buffer.flush().await.expect("tokio buffer flush"); + let got = serialize( + NoExtension { root: false }, + &Href("/SOGo/dav/so/".into()) + ).await; + let expected = "/SOGo/dav/so/"; + + assert_eq!(&got, expected); + } + + + #[tokio::test] + async fn basic_multistatus() { + let got = serialize( + NoExtension { root: true }, + &Multistatus { + responses: vec![], + responsedescription: Some(ResponseDescription("Hello world".into())) + }, + ).await; let expected = r#" Hello world "#; - let got = std::str::from_utf8(buffer.as_slice()).unwrap(); - assert_eq!(got, expected); + assert_eq!(&got, expected); + } + + + #[tokio::test] + async fn rfc_error_delete_locked() { + let got = serialize( + NoExtension { root: true }, + &Error(vec![ + Violation::LockTokenSubmitted(vec![ + Href("/locked/".into()) + ]) + ]), + ).await; + + let expected = r#" + + /locked/ + +"#; + + assert_eq!(&got, expected); + } + + #[tokio::test] + async fn rfc_propname_req() { + let got = serialize( + NoExtension { root: true }, + &PropFind::PropName, + ).await; + + let expected = r#" + +"#; + + assert_eq!(&got, expected); + } + + #[tokio::test] + async fn rfc_propname_res() { + let got = serialize( + NoExtension { root: true }, + &Multistatus { + responses: vec![ + Response { + href: Href("http://www.example.com/container/".into()), + status_or_propstat: StatusOrPropstat::PropStat(vec![PropStat { + prop: Prop::Name(vec![ + PropertyRequest::CreationDate, + PropertyRequest::DisplayName, + PropertyRequest::ResourceType, + PropertyRequest::SupportedLock, + ]), + status: Status(http::status::StatusCode::OK), + error: None, + responsedescription: None, + }]), + error: None, + responsedescription: None, + location: None, + }, + Response { + href: Href("http://www.example.com/container/front.html".into()), + status_or_propstat: StatusOrPropstat::PropStat(vec![PropStat { + prop: Prop::Name(vec![ + PropertyRequest::CreationDate, + 
PropertyRequest::DisplayName, + PropertyRequest::GetContentLength, + PropertyRequest::GetContentType, + PropertyRequest::GetEtag, + PropertyRequest::GetLastModified, + PropertyRequest::ResourceType, + PropertyRequest::SupportedLock, + ]), + status: Status(http::status::StatusCode::OK), + error: None, + responsedescription: None, + }]), + error: None, + responsedescription: None, + location: None, + }, + ], + responsedescription: None, + }, + ).await; + + let expected = r#" + + http://www.example.com/container/ + + + + + + + + HTTP/1.1 200 OK + + + + http://www.example.com/container/front.html + + + + + + + + + + + + HTTP/1.1 200 OK + + +"#; + + + assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); } } diff --git a/src/dav/types.rs b/src/dav/types.rs index 95fe749..55e9e86 100644 --- a/src/dav/types.rs +++ b/src/dav/types.rs @@ -372,7 +372,10 @@ pub struct Owner { /// text or mixed content. /// /// -pub struct Prop(pub Vec>); +pub enum Prop { + Name(Vec>), + Value(Vec>), +} /// 14.19. propertyupdate XML Element /// -- cgit v1.2.3 From 0cadcbea98357d4b3a3b12fbba87da975677deca Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Fri, 1 Mar 2024 16:24:39 +0100 Subject: Test Allprop --- src/dav/encoder.rs | 163 ++++++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 162 insertions(+), 1 deletion(-) diff --git a/src/dav/encoder.rs b/src/dav/encoder.rs index 6475da8..a16f498 100644 --- a/src/dav/encoder.rs +++ b/src/dav/encoder.rs @@ -325,7 +325,7 @@ impl QuickWritable for Property { let end = start.to_end(); xml.write_event_async(Event::Start(start.clone())).await?; - xml.write_event_async(Event::Text(BytesText::new(&date.to_rfc3339()))).await?; + xml.write_event_async(Event::Text(BytesText::new(&date.to_rfc2822()))).await?; xml.write_event_async(Event::End(end)).await?; }, LockDiscovery(many_locks) => { @@ -796,6 +796,167 @@ mod tests { "#; + assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + } + + #[tokio::test] + async fn rfc_allprop_req() { + let got = serialize( + NoExtension { root: true }, + &PropFind::AllProp(None), + ).await; + + let expected = r#" + +"#; + + assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + } + + #[tokio::test] + async fn rfc_allprop_res() { + use chrono::{DateTime,FixedOffset,TimeZone}; + let got = serialize( + NoExtension { root: true }, + &Multistatus { + responses: vec![ + Response { + href: Href("/container/".into()), + status_or_propstat: StatusOrPropstat::PropStat(vec![PropStat { + prop: Prop::Value(vec![ + Property::CreationDate(FixedOffset::west_opt(8 * 3600) + .unwrap() + .with_ymd_and_hms(1997, 12, 1, 17, 42, 21) + .unwrap()), + Property::DisplayName("Example collection".into()), + Property::ResourceType(vec![ResourceType::Collection]), + Property::SupportedLock(vec![ + LockEntry { + lockscope: LockScope::Exclusive, + locktype: LockType::Write, + }, + LockEntry { + lockscope: LockScope::Shared, + locktype: LockType::Write, + }, + ]), + ]), + status: Status(http::status::StatusCode::OK), + error: None, + responsedescription: None, + }]), + error: None, + responsedescription: None, + location: None, + }, + Response { + href: Href("/container/front.html".into()), + status_or_propstat: StatusOrPropstat::PropStat(vec![PropStat { + prop: Prop::Value(vec![ + Property::CreationDate(FixedOffset::west_opt(8 * 3600) + .unwrap() + .with_ymd_and_hms(1997, 12, 1, 18, 27, 21) + .unwrap()), + Property::DisplayName("Example HTML resource".into()), + Property::GetContentLength(4525), + 
Property::GetContentType("text/html".into()), + Property::GetEtag(r#""zzyzx""#.into()), + Property::GetLastModified(FixedOffset::east_opt(0) + .unwrap() + .with_ymd_and_hms(1998, 1, 12, 9, 25, 56) + .unwrap()), + Property::ResourceType(vec![]), + Property::SupportedLock(vec![ + LockEntry { + lockscope: LockScope::Exclusive, + locktype: LockType::Write, + }, + LockEntry { + lockscope: LockScope::Shared, + locktype: LockType::Write, + }, + ]), + ]), + status: Status(http::status::StatusCode::OK), + error: None, + responsedescription: None, + }]), + error: None, + responsedescription: None, + location: None, + }, + ], + responsedescription: None, + } + ).await; + + let expected = r#" + + /container/ + + + 1997-12-01T17:42:21-08:00 + Example collection + + + + + + + + + + + + + + + + + + + + + + + HTTP/1.1 200 OK + + + + /container/front.html + + + 1997-12-01T18:27:21-08:00 + Example HTML resource + 4525 + text/html + "zzyzx" + Mon, 12 Jan 1998 09:25:56 +0000 + + + + + + + + + + + + + + + + + + + + + HTTP/1.1 200 OK + + +"#; + assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); } } -- cgit v1.2.3 From 77e2f8abbb2d70413ba571baf405f10572299fd9 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Fri, 1 Mar 2024 16:37:27 +0100 Subject: test include --- src/dav/encoder.rs | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/src/dav/encoder.rs b/src/dav/encoder.rs index a16f498..9e907dc 100644 --- a/src/dav/encoder.rs +++ b/src/dav/encoder.rs @@ -959,4 +959,26 @@ mod tests { assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); } + + + #[tokio::test] + async fn rfc_allprop_include() { + let got = serialize( + NoExtension { root: true }, + &PropFind::AllProp(Some(Include(vec![ + PropertyRequest::DisplayName, + PropertyRequest::ResourceType, + ]))), + ).await; + + let expected = r#" + + + + + +"#; + + assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + } } -- cgit v1.2.3 From dee970afe5db1886b3e1816703ef841acbd07463 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Fri, 1 Mar 2024 17:17:51 +0100 Subject: type refactor on --- src/dav/encoder.rs | 105 ++++++++++++++++++++++++++++++++++++----------------- src/dav/types.rs | 18 +++++---- 2 files changed, 81 insertions(+), 42 deletions(-) diff --git a/src/dav/encoder.rs b/src/dav/encoder.rs index 9e907dc..4a9bcf4 100644 --- a/src/dav/encoder.rs +++ b/src/dav/encoder.rs @@ -72,21 +72,20 @@ impl QuickWritable for PropFind { include.write(xml, ctx.child()).await?; } }, - Self::Prop(many_propreq) => { - let start = ctx.create_dav_element("prop"); - let end = start.to_end(); - - xml.write_event_async(Event::Start(start.clone())).await?; - for propreq in many_propreq.iter() { - propreq.write(xml, ctx.child()).await?; - } - xml.write_event_async(Event::End(end)).await?; - }, + Self::Prop(propname) => propname.write(xml, ctx.child()).await?, } xml.write_event_async(Event::End(end)).await } } +/// PROPPATCH REQUEST +impl QuickWritable for PropertyUpdate { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + + /// PROPFIND RESPONSE, PROPPATCH RESPONSE, COPY RESPONSE, MOVE RESPONSE /// DELETE RESPONSE, impl QuickWritable for Multistatus { @@ -124,31 +123,43 @@ impl QuickWritable for LockInfo { } /// SOME LOCK RESPONSES -impl QuickWritable for Prop { +impl QuickWritable for PropValue { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { let start = ctx.create_dav_element("prop"); let end = start.to_end(); 
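The PROPFIND request tests above compare the encoder output against the bodies RFC 4918 specifies, where propname and allprop are empty elements and only prop and include carry children. As a reference point, here is a runnable sketch that hand-writes the propname request body with the same quick-xml event calls; the xmlns:D attribute stands in for what create_dav_element() attaches on the root element when ctx.root is true, and the usual quick-xml 0.31 "async-tokio" assumption applies.

    use quick_xml::events::{BytesStart, Event};
    use quick_xml::writer::Writer;
    use tokio::io::AsyncWriteExt;

    #[tokio::main]
    async fn main() -> Result<(), Box<dyn std::error::Error>> {
        let mut buffer = Vec::new();
        let mut tokio_buffer = tokio::io::BufWriter::new(&mut buffer);
        let mut xml = Writer::new(&mut tokio_buffer);

        // Root element: declare the DAV: namespace once, like the root context does.
        let mut start = BytesStart::new("D:propfind");
        start.push_attribute(("xmlns:D", "DAV:"));
        let end = start.to_end();

        xml.write_event_async(Event::Start(start.clone())).await?;
        // A child with no content collapses to a single Empty event, i.e. <D:propname/>.
        xml.write_event_async(Event::Empty(BytesStart::new("D:propname"))).await?;
        xml.write_event_async(Event::End(end)).await?;

        tokio_buffer.flush().await?;
        assert_eq!(
            std::str::from_utf8(buffer.as_slice())?,
            r#"<D:propfind xmlns:D="DAV:"><D:propname/></D:propfind>"#
        );
        Ok(())
    }
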
xml.write_event_async(Event::Start(start.clone())).await?; + for propval in &self.0 { + propval.write(xml, ctx.child()).await?; + } + xml.write_event_async(Event::End(end)).await + } +} + +// --- XML inner elements +impl QuickWritable for AnyProp { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { match self { - Self::Name(many_names) => { - for propname in many_names { - propname.write(xml, ctx.child()).await?; - } - }, - Self::Value(many_values) => { - for propval in many_values { - propval.write(xml, ctx.child()).await?; - } - } - }; - xml.write_event_async(Event::End(end)).await?; + Self::Name(propname) => propname.write(xml, ctx).await, + Self::Value(propval) => propval.write(xml, ctx).await, + } + } +} - Ok(()) +impl QuickWritable for PropName { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + let start = ctx.create_dav_element("prop"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + for propname in &self.0 { + propname.write(xml, ctx.child()).await?; + } + xml.write_event_async(Event::End(end)).await } } -// --- XML inner elements + impl QuickWritable for Href { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { let start = ctx.create_dav_element("href"); @@ -411,7 +422,7 @@ impl QuickWritable for Include { impl QuickWritable for PropertyRequest { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { use PropertyRequest::*; - let mut atom = (async |c| xml.write_event_async(Event::Empty(ctx.create_dav_element(c))).await); + let mut atom = async |c| xml.write_event_async(Event::Empty(ctx.create_dav_element(c))).await; match self { CreationDate => atom("creationdate").await, @@ -724,12 +735,12 @@ mod tests { Response { href: Href("http://www.example.com/container/".into()), status_or_propstat: StatusOrPropstat::PropStat(vec![PropStat { - prop: Prop::Name(vec![ + prop: AnyProp::Name(PropName(vec![ PropertyRequest::CreationDate, PropertyRequest::DisplayName, PropertyRequest::ResourceType, PropertyRequest::SupportedLock, - ]), + ])), status: Status(http::status::StatusCode::OK), error: None, responsedescription: None, @@ -741,7 +752,7 @@ mod tests { Response { href: Href("http://www.example.com/container/front.html".into()), status_or_propstat: StatusOrPropstat::PropStat(vec![PropStat { - prop: Prop::Name(vec![ + prop: AnyProp::Name(PropName(vec![ PropertyRequest::CreationDate, PropertyRequest::DisplayName, PropertyRequest::GetContentLength, @@ -750,7 +761,7 @@ mod tests { PropertyRequest::GetLastModified, PropertyRequest::ResourceType, PropertyRequest::SupportedLock, - ]), + ])), status: Status(http::status::StatusCode::OK), error: None, responsedescription: None, @@ -823,7 +834,7 @@ mod tests { Response { href: Href("/container/".into()), status_or_propstat: StatusOrPropstat::PropStat(vec![PropStat { - prop: Prop::Value(vec![ + prop: AnyProp::Value(PropValue(vec![ Property::CreationDate(FixedOffset::west_opt(8 * 3600) .unwrap() .with_ymd_and_hms(1997, 12, 1, 17, 42, 21) @@ -840,7 +851,7 @@ mod tests { locktype: LockType::Write, }, ]), - ]), + ])), status: Status(http::status::StatusCode::OK), error: None, responsedescription: None, @@ -852,7 +863,7 @@ mod tests { Response { href: Href("/container/front.html".into()), status_or_propstat: StatusOrPropstat::PropStat(vec![PropStat { - prop: Prop::Value(vec![ + prop: AnyProp::Value(PropValue(vec![ Property::CreationDate(FixedOffset::west_opt(8 * 3600) .unwrap() .with_ymd_and_hms(1997, 12, 1, 18, 27, 21) @@ 
-876,7 +887,7 @@ mod tests { locktype: LockType::Write, }, ]), - ]), + ])), status: Status(http::status::StatusCode::OK), error: None, responsedescription: None, @@ -981,4 +992,30 @@ mod tests { assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); } + + #[tokio::test] + async fn rfc_propertyupdate() { + let got = serialize( + NoExtension { root: true }, + &PropertyUpdate(vec![ + PropertyUpdateItem::Set(Set(PropValue(vec![ ]))), + PropertyUpdateItem::Remove(Remove(PropName(vec![]))), + ]), + ).await; + + let expected = r#" + + + fr-FR + + + + + + + +"#; + + assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + } } diff --git a/src/dav/types.rs b/src/dav/types.rs index 55e9e86..4487de7 100644 --- a/src/dav/types.rs +++ b/src/dav/types.rs @@ -372,10 +372,12 @@ pub struct Owner { /// text or mixed content. /// /// -pub enum Prop { - Name(Vec>), - Value(Vec>), +pub enum AnyProp { + Name(PropName), + Value(PropValue), } +pub struct PropName(pub Vec>); +pub struct PropValue(pub Vec>); /// 14.19. propertyupdate XML Element /// @@ -387,7 +389,7 @@ pub enum Prop { /// required to modify the properties on the resource. /// /// -pub struct PropertyUpdate(Vec>); +pub struct PropertyUpdate(pub Vec>); pub enum PropertyUpdateItem { Remove(Remove), Set(Set), @@ -430,7 +432,7 @@ pub enum PropertyUpdateItem { pub enum PropFind { PropName, AllProp(Option>), - Prop(Vec>), + Prop(PropName), } @@ -451,7 +453,7 @@ pub enum PropFind { /// /// pub struct PropStat { - pub prop: Prop, + pub prop: AnyProp, pub status: Status, pub error: Option>, pub responsedescription: Option, @@ -470,7 +472,7 @@ pub struct PropStat { /// the names of properties to be removed are required. /// /// -pub struct Remove(pub Prop); +pub struct Remove(pub PropName); /// 14.24. response XML Element /// @@ -535,7 +537,7 @@ pub struct ResponseDescription(pub String); /// property, and MUST be subsequently retrievable using PROPFIND. /// /// -pub struct Set(pub Prop); +pub struct Set(pub PropValue); /// 14.27. 
shared XML Element /// -- cgit v1.2.3 From 4490afb1bfef7af4afe8ada9c99f9bf7925ad40e Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Fri, 1 Mar 2024 18:20:51 +0100 Subject: Implement propertyupdate --- src/dav/encoder.rs | 47 ++++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 44 insertions(+), 3 deletions(-) diff --git a/src/dav/encoder.rs b/src/dav/encoder.rs index 4a9bcf4..41e93f3 100644 --- a/src/dav/encoder.rs +++ b/src/dav/encoder.rs @@ -81,7 +81,14 @@ impl QuickWritable for PropFind { /// PROPPATCH REQUEST impl QuickWritable for PropertyUpdate { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + let start = ctx.create_dav_element("propertyupdate"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + for update in self.0.iter() { + update.write(xml, ctx.child()).await?; + } + xml.write_event_async(Event::End(end)).await } } @@ -137,6 +144,36 @@ impl QuickWritable for PropValue { } // --- XML inner elements +impl QuickWritable for PropertyUpdateItem { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + match self { + Self::Set(set) => set.write(xml, ctx).await, + Self::Remove(rm) => rm.write(xml, ctx).await, + } + } +} + +impl QuickWritable for Set { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + let start = ctx.create_dav_element("set"); + let end = start.to_end(); + xml.write_event_async(Event::Start(start.clone())).await?; + self.0.write(xml, ctx.child()).await?; + xml.write_event_async(Event::End(end)).await + } +} + +impl QuickWritable for Remove { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + let start = ctx.create_dav_element("remove"); + let end = start.to_end(); + xml.write_event_async(Event::Start(start.clone())).await?; + self.0.write(xml, ctx.child()).await?; + xml.write_event_async(Event::End(end)).await + } +} + + impl QuickWritable for AnyProp { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { match self { @@ -998,8 +1035,12 @@ mod tests { let got = serialize( NoExtension { root: true }, &PropertyUpdate(vec![ - PropertyUpdateItem::Set(Set(PropValue(vec![ ]))), - PropertyUpdateItem::Remove(Remove(PropName(vec![]))), + PropertyUpdateItem::Set(Set(PropValue(vec![ + Property::GetContentLanguage("fr-FR".into()), + ]))), + PropertyUpdateItem::Remove(Remove(PropName(vec![ + PropertyRequest::DisplayName, + ]))), ]), ).await; -- cgit v1.2.3 From 9200b449415abc08a7db21d146f380a8999eda14 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Fri, 1 Mar 2024 18:33:46 +0100 Subject: Fix some logic on locked --- src/dav/encoder.rs | 35 +++++++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/src/dav/encoder.rs b/src/dav/encoder.rs index 41e93f3..6bccc78 100644 --- a/src/dav/encoder.rs +++ b/src/dav/encoder.rs @@ -647,6 +647,9 @@ impl QuickWritable for Violation { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { match self { Violation::LockTokenMatchesRequestUri => xml.write_event_async(Event::Empty(ctx.create_dav_element("lock-token-matches-request-uri"))).await?, + Violation::LockTokenSubmitted(hrefs) if hrefs.is_empty() => { + xml.write_event_async(Event::Empty(ctx.create_dav_element("lock-token-submitted"))).await? 
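                // An empty href list is serialized as one self-closing
                // <D:lock-token-submitted/> element instead of a start/end pair.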
+ }, Violation::LockTokenSubmitted(hrefs) => { let start = ctx.create_dav_element("lock-token-submitted"); let end = start.to_end(); @@ -657,6 +660,9 @@ impl QuickWritable for Violation { } xml.write_event_async(Event::End(end)).await?; }, + Violation::NoConflictingLock(hrefs) if hrefs.is_empty() => { + xml.write_event_async(Event::Empty(ctx.create_dav_element("no-conflicting-lock"))).await? + }, Violation::NoConflictingLock(hrefs) => { let start = ctx.create_dav_element("no-conflicting-lock"); let end = start.to_end(); @@ -1059,4 +1065,33 @@ mod tests { assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); } + + #[tokio::test] + async fn rfc_delete_locked2() { + let got = serialize( + NoExtension { root: true }, + &Multistatus { + responses: vec![Response { + href: Href("http://www.example.com/container/resource3".into()), + status_or_propstat: StatusOrPropstat::Status(Status(http::status::StatusCode::from_u16(423).unwrap())), + error: Some(Error(vec![Violation::LockTokenSubmitted(vec![])])), + responsedescription: None, + location: None, + }], + responsedescription: None, + }, + ).await; + + let expected = r#" + + http://www.example.com/container/resource3 + HTTP/1.1 423 Locked + + + + +"#; + + assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + } } -- cgit v1.2.3 From 8b948916e7a5aa01e913abe97a8b01a14d39a7a3 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Fri, 1 Mar 2024 18:50:06 +0100 Subject: simple lock tests --- src/dav/encoder.rs | 78 ++++++++++++++++++++++++++++++++++++++++++++++++++---- src/dav/types.rs | 6 ++--- 2 files changed, 76 insertions(+), 8 deletions(-) diff --git a/src/dav/encoder.rs b/src/dav/encoder.rs index 6bccc78..f842734 100644 --- a/src/dav/encoder.rs +++ b/src/dav/encoder.rs @@ -552,11 +552,9 @@ impl QuickWritable for Owner { let end = start.to_end(); xml.write_event_async(Event::Start(start.clone())).await?; - if let Some(txt) = &self.txt { - xml.write_event_async(Event::Text(BytesText::new(&txt))).await?; - } - if let Some(href) = &self.url { - href.write(xml, ctx.child()).await?; + match self { + Self::Txt(txt) => xml.write_event_async(Event::Text(BytesText::new(&txt))).await?, + Self::Href(href) => href.write(xml, ctx.child()).await?, } xml.write_event_async(Event::End(end)).await } @@ -1094,4 +1092,74 @@ mod tests { assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); } + + #[tokio::test] + async fn rfc_simple_lock_request() { + let got = serialize( + NoExtension { root: true }, + &LockInfo { + lockscope: LockScope::Exclusive, + locktype: LockType::Write, + owner: Some(Owner::Href(Href("http://example.org/~ejw/contact.html".into()))), + }, + ).await; + + let expected = r#" + + + + + + + + http://example.org/~ejw/contact.html + +"#; + + assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + } + + #[tokio::test] + async fn rfc_simple_lock_response() { + let got = serialize( + NoExtension { root: true }, + &PropValue(vec![ + Property::LockDiscovery(vec![ActiveLock { + lockscope: LockScope::Exclusive, + locktype: LockType::Write, + depth: Depth::Infinity, + owner: Some(Owner::Href(Href("http://example.org/~ejw/contact.html".into()))), + timeout: Some(Timeout::Seconds(604800)), + locktoken: Some(LockToken(Href("urn:uuid:e71d4fae-5dec-22d6-fea5-00a0c91e6be4".into()))), + lockroot: LockRoot(Href("http://example.com/workspace/webdav/proposal.doc".into())), + }]), + ]), + ).await; + + let expected = r#" + + + + + + + + + infinity + + http://example.org/~ejw/contact.html + + 
Second-604800 + + urn:uuid:e71d4fae-5dec-22d6-fea5-00a0c91e6be4 + + + http://example.com/workspace/webdav/proposal.doc + + + +"#; + + assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + } } diff --git a/src/dav/types.rs b/src/dav/types.rs index 4487de7..ffb08d4 100644 --- a/src/dav/types.rs +++ b/src/dav/types.rs @@ -353,9 +353,9 @@ pub struct Multistatus { /// /// //@FIXME might need support for an extension -pub struct Owner { - pub txt: Option, - pub url: Option, +pub enum Owner { + Txt(String), + Href(Href), } /// 14.18. prop XML Element -- cgit v1.2.3 From 6688dcc3833f8edbe69f6d6dbf9ae8407d8d62f8 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Sat, 2 Mar 2024 10:08:51 +0100 Subject: WIP CalDAV types --- src/dav/calencoder.rs | 9 +- src/dav/caltypes.rs | 688 +++++++++++++++++++++++++++++++++++++++++++++++++- src/dav/mod.rs | 2 + src/dav/types.rs | 2 - 4 files changed, 684 insertions(+), 17 deletions(-) diff --git a/src/dav/calencoder.rs b/src/dav/calencoder.rs index c7708eb..815946e 100644 --- a/src/dav/calencoder.rs +++ b/src/dav/calencoder.rs @@ -50,10 +50,11 @@ impl CalExtension { impl QuickWritable for Violation { async fn write(&self, xml: &mut Writer, ctx: CalExtension) -> Result<(), QError> { match self { - Self::SupportedFilter => { - let start = ctx.create_cal_element("supported-filter"); + Self::ResourceMustBeNull => { + let start = ctx.create_cal_element("resource-must-be-null"); xml.write_event_async(Event::Empty(start)).await?; }, + _ => unimplemented!(), }; Ok(()) } @@ -93,14 +94,14 @@ mod tests { let mut writer = Writer::new_with_indent(&mut tokio_buffer, b' ', 4); let res = Error(vec![ - DavViolation::Extension(Violation::SupportedFilter), + DavViolation::Extension(Violation::ResourceMustBeNull), ]); res.write(&mut writer, CalExtension { root: true }).await.expect("xml serialization"); tokio_buffer.flush().await.expect("tokio buffer flush"); let expected = r#" - + "#; let got = std::str::from_utf8(buffer.as_slice()).unwrap(); diff --git a/src/dav/caltypes.rs b/src/dav/caltypes.rs index 55f4f93..db9484a 100644 --- a/src/dav/caltypes.rs +++ b/src/dav/caltypes.rs @@ -1,17 +1,588 @@ -use super::types::*; +use chrono::{DateTime,Utc}; +use super::types as Dav; + +//@FIXME for now, we skip the ACL part pub struct CalExtension { pub root: bool } -impl Extension for CalExtension { +impl Dav::Extension for CalExtension { type Error = Violation; type Property = Property; type PropertyRequest = Property; //@FIXME type ResourceType = ResourceType; } +// ----- Root elements ----- + +/// If a request body is included, it MUST be a CALDAV:mkcalendar XML +/// element. Instruction processing MUST occur in the order +/// instructions are received (i.e., from top to bottom). +/// Instructions MUST either all be executed or none executed. Thus, +/// if any error occurs during processing, all executed instructions +/// MUST be undone and a proper error result returned. Instruction +/// processing details can be found in the definition of the DAV:set +/// instruction in Section 12.13.2 of [RFC2518]. +/// +/// +pub struct MkCalendar(Dav::Set); + + +/// If a response body for a successful request is included, it MUST +/// be a CALDAV:mkcalendar-response XML element. +/// +/// +pub struct MkCalendarResponse(()); + +/// Name: calendar-query +/// +/// Namespace: urn:ietf:params:xml:ns:caldav +/// +/// Purpose: Defines a report for querying calendar object resources. +/// +/// Description: See Section 7.8. 
+/// +/// Definition: +/// +/// +pub struct CalendarQuery { + selector: Option>, + filter: Filter, + timezone: Option, +} + +// ----- Hooks ----- +pub enum ResourceType { + Calendar, +} + +pub enum PropertyRequest { + CalendarDescription, + CalendarTimezone, +} +pub enum Property { + /// Name: calendar-description + /// + /// Namespace: urn:ietf:params:xml:ns:caldav + /// + /// Purpose: Provides a human-readable description of the calendar + /// collection. + /// + /// Conformance: This property MAY be defined on any calendar + /// collection. If defined, it MAY be protected and SHOULD NOT be + /// returned by a PROPFIND DAV:allprop request (as defined in Section + /// 12.14.1 of [RFC2518]). An xml:lang attribute indicating the human + /// language of the description SHOULD be set for this property by + /// clients or through server provisioning. Servers MUST return any + /// xml:lang attribute if set for the property. + /// + /// Description: If present, the property contains a description of the + /// calendar collection that is suitable for presentation to a user. + /// If not present, the client should assume no description for the + /// calendar collection. + /// + /// Definition: + /// + /// + /// PCDATA value: string + /// + /// Example: + /// + /// Calendrier de Mathilde Desruisseaux + CalendarDescription(String), + + /// 5.2.2. CALDAV:calendar-timezone Property + /// + /// Name: calendar-timezone + /// + /// Namespace: urn:ietf:params:xml:ns:caldav + /// + /// Purpose: Specifies a time zone on a calendar collection. + /// + /// Conformance: This property SHOULD be defined on all calendar + /// collections. If defined, it SHOULD NOT be returned by a PROPFIND + /// DAV:allprop request (as defined in Section 12.14.1 of [RFC2518]). + /// + /// Description: The CALDAV:calendar-timezone property is used to + /// specify the time zone the server should rely on to resolve "date" + /// values and "date with local time" values (i.e., floating time) to + /// "date with UTC time" values. The server will require this + /// information to determine if a calendar component scheduled with + /// "date" values or "date with local time" values overlaps a CALDAV: + /// time-range specified in a CALDAV:calendar-query REPORT. The + /// server will also require this information to compute the proper + /// FREEBUSY time period as "date with UTC time" in the VFREEBUSY + /// component returned in a response to a CALDAV:free-busy-query + /// REPORT request that takes into account calendar components + /// scheduled with "date" values or "date with local time" values. In + /// the absence of this property, the server MAY rely on the time zone + /// of their choice. + /// + /// Note: The iCalendar data embedded within the CALDAV:calendar- + /// timezone XML element MUST follow the standard XML character data + /// encoding rules, including use of <, >, & etc. entity + /// encoding or the use of a construct. In the + /// later case, the iCalendar data cannot contain the character + /// sequence "]]>", which is the end delimiter for the CDATA section. + /// + /// Definition: + /// + /// + /// PCDATA value: an iCalendar object with exactly one VTIMEZONE component. 
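// A minimal sketch, not part of this patch: once the @FIXME on
// CalExtension::PropertyRequest is resolved (a later commit in this series
// does exactly that), a PROPFIND asking for the two collection properties
// documented here can reuse the generic request types from src/dav/types.rs.
// Extension type parameters are elided, as in the rest of this excerpt.
fn example_propfind_body() -> Dav::PropFind {
    Dav::PropFind::Prop(Dav::PropName(vec![
        PropertyRequest::CalendarDescription,
        PropertyRequest::CalendarTimezone,
    ]))
}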
+ /// + /// Example: + /// + /// BEGIN:VCALENDAR + /// PRODID:-//Example Corp.//CalDAV Client//EN + /// VERSION:2.0 + /// BEGIN:VTIMEZONE + /// TZID:US-Eastern + /// LAST-MODIFIED:19870101T000000Z + /// BEGIN:STANDARD + /// DTSTART:19671029T020000 + /// RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10 + /// TZOFFSETFROM:-0400 + /// TZOFFSETTO:-0500 + /// TZNAME:Eastern Standard Time (US & Canada) + /// END:STANDARD + /// BEGIN:DAYLIGHT + /// DTSTART:19870405T020000 + /// RRULE:FREQ=YEARLY;BYDAY=1SU;BYMONTH=4 + /// TZOFFSETFROM:-0500 + /// TZOFFSETTO:-0400 + /// TZNAME:Eastern Daylight Time (US & Canada) + /// END:DAYLIGHT + /// END:VTIMEZONE + /// END:VCALENDAR + /// + //@FIXME we might want to put a buffer here or an iCal parsed object + CalendarTimezone(String), + + /// Name: supported-calendar-component-set + /// + /// Namespace: urn:ietf:params:xml:ns:caldav + /// + /// Purpose: Specifies the calendar component types (e.g., VEVENT, + /// VTODO, etc.) that calendar object resources can contain in the + /// calendar collection. + /// + /// Conformance: This property MAY be defined on any calendar + /// collection. If defined, it MUST be protected and SHOULD NOT be + /// returned by a PROPFIND DAV:allprop request (as defined in Section + /// 12.14.1 of [RFC2518]). + /// + /// Description: The CALDAV:supported-calendar-component-set property is + /// used to specify restrictions on the calendar component types that + /// calendar object resources may contain in a calendar collection. + /// Any attempt by the client to store calendar object resources with + /// component types not listed in this property, if it exists, MUST + /// result in an error, with the CALDAV:supported-calendar-component + /// precondition (Section 5.3.2.1) being violated. Since this + /// property is protected, it cannot be changed by clients using a + /// PROPPATCH request. However, clients can initialize the value of + /// this property when creating a new calendar collection with + /// MKCALENDAR. The empty-element tag MUST + /// only be specified if support for calendar object resources that + /// only contain VTIMEZONE components is provided or desired. Support + /// for VTIMEZONE components in calendar object resources that contain + /// VEVENT or VTODO components is always assumed. In the absence of + /// this property, the server MUST accept all component types, and the + /// client can assume that all component types are accepted. + /// + /// Definition: + /// + /// + /// + /// Example: + /// + /// + /// + /// + /// + SupportedCalendarComponentSet(Vec), + + /// Name: supported-calendar-data + /// + /// Namespace: urn:ietf:params:xml:ns:caldav + /// + /// Purpose: Specifies what media types are allowed for calendar object + /// resources in a calendar collection. + /// + /// Conformance: This property MAY be defined on any calendar + /// collection. If defined, it MUST be protected and SHOULD NOT be + /// returned by a PROPFIND DAV:allprop request (as defined in Section + /// 12.14.1 of [RFC2518]). + /// + /// Description: The CALDAV:supported-calendar-data property is used to + /// specify the media type supported for the calendar object resources + /// contained in a given calendar collection (e.g., iCalendar version + /// 2.0). Any attempt by the client to store calendar object + /// resources with a media type not listed in this property MUST + /// result in an error, with the CALDAV:supported-calendar-data + /// precondition (Section 5.3.2.1) being violated. 
In the absence of + /// this property, the server MUST only accept data with the media + /// type "text/calendar" and iCalendar version 2.0, and clients can + /// assume that the server will only accept this data. + /// + /// Definition: + /// + /// + /// + /// Example: + /// + /// + /// + /// + /// + /// ----- + /// + /// + /// + /// when nested in the CALDAV:supported-calendar-data property + /// to specify a supported media type for calendar object + /// resources; + SupportedCalendarData(Vec), + + /// Name: max-resource-size + /// + /// Namespace: urn:ietf:params:xml:ns:caldav + /// + /// Purpose: Provides a numeric value indicating the maximum size of a + /// resource in octets that the server is willing to accept when a + /// calendar object resource is stored in a calendar collection. + /// + /// Conformance: This property MAY be defined on any calendar + /// collection. If defined, it MUST be protected and SHOULD NOT be + /// returned by a PROPFIND DAV:allprop request (as defined in Section + /// 12.14.1 of [RFC2518]). + /// + /// Description: The CALDAV:max-resource-size is used to specify a + /// numeric value that represents the maximum size in octets that the + /// server is willing to accept when a calendar object resource is + /// stored in a calendar collection. Any attempt to store a calendar + /// object resource exceeding this size MUST result in an error, with + /// the CALDAV:max-resource-size precondition (Section 5.3.2.1) being + /// violated. In the absence of this property, the client can assume + /// that the server will allow storing a resource of any reasonable + /// size. + /// + /// Definition: + /// + /// + /// PCDATA value: a numeric value (positive integer) + /// + /// Example: + /// + /// + /// 102400 + /// + MaxResourceSize(u64), + + /// CALDAV:min-date-time Property + /// + /// Name: min-date-time + /// + /// Namespace: urn:ietf:params:xml:ns:caldav + /// + /// Purpose: Provides a DATE-TIME value indicating the earliest date and + /// time (in UTC) that the server is willing to accept for any DATE or + /// DATE-TIME value in a calendar object resource stored in a calendar + /// collection. + /// + /// Conformance: This property MAY be defined on any calendar + /// collection. If defined, it MUST be protected and SHOULD NOT be + /// returned by a PROPFIND DAV:allprop request (as defined in Section + /// 12.14.1 of [RFC2518]). + /// + /// Description: The CALDAV:min-date-time is used to specify an + /// iCalendar DATE-TIME value in UTC that indicates the earliest + /// inclusive date that the server is willing to accept for any + /// explicit DATE or DATE-TIME value in a calendar object resource + /// stored in a calendar collection. Any attempt to store a calendar + /// object resource using a DATE or DATE-TIME value earlier than this + /// value MUST result in an error, with the CALDAV:min-date-time + /// precondition (Section 5.3.2.1) being violated. Note that servers + /// MUST accept recurring components that specify instances beyond + /// this limit, provided none of those instances have been overridden. + /// In that case, the server MAY simply ignore those instances outside + /// of the acceptable range when processing reports on the calendar + /// object resource. In the absence of this property, the client can + /// assume any valid iCalendar date may be used at least up to the + /// CALDAV:max-date-time value, if that is defined. 
+ /// + /// Definition: + /// + /// + /// PCDATA value: an iCalendar format DATE-TIME value in UTC + /// + /// Example: + /// + /// + /// 19000101T000000Z + /// + MinDateTime(DateTime), + + /// CALDAV:max-date-time Property + /// + /// Name: max-date-time + /// + /// Namespace: urn:ietf:params:xml:ns:caldav + /// + /// Purpose: Provides a DATE-TIME value indicating the latest date and + /// time (in UTC) that the server is willing to accept for any DATE or + /// DATE-TIME value in a calendar object resource stored in a calendar + /// collection. + /// + /// Conformance: This property MAY be defined on any calendar + /// collection. If defined, it MUST be protected and SHOULD NOT be + /// returned by a PROPFIND DAV:allprop request (as defined in Section + /// 12.14.1 of [RFC2518]). + /// + /// Description: The CALDAV:max-date-time is used to specify an + /// iCalendar DATE-TIME value in UTC that indicates the inclusive + /// latest date that the server is willing to accept for any date or + /// time value in a calendar object resource stored in a calendar + /// collection. Any attempt to store a calendar object resource using + /// a DATE or DATE-TIME value later than this value MUST result in an + /// error, with the CALDAV:max-date-time precondition + /// (Section 5.3.2.1) being violated. Note that servers MUST accept + /// recurring components that specify instances beyond this limit, + /// provided none of those instances have been overridden. In that + /// case, the server MAY simply ignore those instances outside of the + /// acceptable range when processing reports on the calendar object + /// resource. In the absence of this property, the client can assume + /// any valid iCalendar date may be used at least down to the CALDAV: + /// min-date-time value, if that is defined. + /// + /// Definition: + /// + /// + /// PCDATA value: an iCalendar format DATE-TIME value in UTC + /// + /// Example: + /// + /// + /// 20491231T235959Z + /// + MaxDateTime(DateTime), + + /// CALDAV:max-instances Property + /// + /// Name: max-instances + /// + /// Namespace: urn:ietf:params:xml:ns:caldav + /// + /// Purpose: Provides a numeric value indicating the maximum number of + /// recurrence instances that a calendar object resource stored in a + /// calendar collection can generate. + /// + /// Conformance: This property MAY be defined on any calendar + /// collection. If defined, it MUST be protected and SHOULD NOT be + /// returned by a PROPFIND DAV:allprop request (as defined in Section + /// 12.14.1 of [RFC2518]). + /// + /// Description: The CALDAV:max-instances is used to specify a numeric + /// value that indicates the maximum number of recurrence instances + /// that a calendar object resource stored in a calendar collection + /// can generate. Any attempt to store a calendar object resource + /// with a recurrence pattern that generates more instances than this + /// value MUST result in an error, with the CALDAV:max-instances + /// precondition (Section 5.3.2.1) being violated. In the absence of + /// this property, the client can assume that the server has no limits + /// on the number of recurrence instances it can handle or expand. 
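// A minimal sketch, not part of this patch: the two date-bound properties
// above are plain UTC timestamps, so with chrono they can be built directly
// from the RFC's example values 19000101T000000Z and 20491231T235959Z
// (DateTime's Utc parameter is elided elsewhere in this excerpt).
fn example_date_bounds() -> Vec<Property> {
    use chrono::{TimeZone, Utc};
    vec![
        Property::MinDateTime(Utc.with_ymd_and_hms(1900, 1, 1, 0, 0, 0).unwrap()),
        Property::MaxDateTime(Utc.with_ymd_and_hms(2049, 12, 31, 23, 59, 59).unwrap()),
    ]
}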
+ /// + /// Definition: + /// + /// + /// PCDATA value: a numeric value (integer greater than zero) + /// + /// Example: + /// + /// + /// 100 + /// + MaxInstances(u64), + + /// CALDAV:max-attendees-per-instance Property + /// + /// Name: max-attendees-per-instance + /// + /// Namespace: urn:ietf:params:xml:ns:caldav + /// + /// Purpose: Provides a numeric value indicating the maximum number of + /// ATTENDEE properties in any instance of a calendar object resource + /// stored in a calendar collection. + /// + /// Conformance: This property MAY be defined on any calendar + /// collection. If defined, it MUST be protected and SHOULD NOT be + /// returned by a PROPFIND DAV:allprop request (as defined in Section + /// 12.14.1 of [RFC2518]). + /// + /// Description: The CALDAV:max-attendees-per-instance is used to + /// specify a numeric value that indicates the maximum number of + /// iCalendar ATTENDEE properties on any one instance of a calendar + /// object resource stored in a calendar collection. Any attempt to + /// store a calendar object resource with more ATTENDEE properties per + /// instance than this value MUST result in an error, with the CALDAV: + /// max-attendees-per-instance precondition (Section 5.3.2.1) being + /// violated. In the absence of this property, the client can assume + /// that the server can handle any number of ATTENDEE properties in a + /// calendar component. + /// + /// Definition: + /// + /// + /// PCDATA value: a numeric value (integer greater than zero) + /// + /// Example: + /// + /// + /// 25 + /// + MaxAttendeesPerInstance(u64), + + /// Name: supported-collation-set + /// + /// Namespace: urn:ietf:params:xml:ns:caldav + /// + /// Purpose: Identifies the set of collations supported by the server + /// for text matching operations. + /// + /// Conformance: This property MUST be defined on any resource that + /// supports a report that does text matching. If defined, it MUST be + /// protected and SHOULD NOT be returned by a PROPFIND DAV:allprop + /// request (as defined in Section 12.14.1 of [RFC2518]). + /// + /// Description: The CALDAV:supported-collation-set property contains + /// zero or more CALDAV:supported-collation elements, which specify + /// the collection identifiers of the collations supported by the + /// server. + /// + /// Definition: + /// + /// + /// + /// + /// Example: + /// + /// + /// i;ascii-casemap + /// i;octet + /// + SupportedCollationSet(Vec), +} + pub enum Violation { + /// (DAV:resource-must-be-null): A resource MUST NOT exist at the + /// Request-URI; + ResourceMustBeNull, + + /// (CALDAV:calendar-collection-location-ok): The Request-URI MUST + /// identify a location where a calendar collection can be created; + CalendarCollectionLocationOk, + + /// (CALDAV:valid-calendar-data): The time zone specified in CALDAV: + /// calendar-timezone property MUST be a valid iCalendar object + /// containing a single valid VTIMEZONE component. + ValidCalendarData, + + /// (DAV:needs-privilege): The DAV:bind privilege MUST be granted to + /// the current user on the parent collection of the Request-URI. + NeedsPrivilege, + + /// (CALDAV:initialize-calendar-collection): A new calendar collection + /// exists at the Request-URI. The DAV:resourcetype of the calendar + /// collection MUST contain both DAV:collection and CALDAV:calendar + /// XML elements. 
+ InitializeCalendarCollection, + + /// (CALDAV:supported-calendar-data): The resource submitted in the + /// PUT request, or targeted by a COPY or MOVE request, MUST be a + /// supported media type (i.e., iCalendar) for calendar object + /// resources; + SupportedCalendarData, + + /// (CALDAV:valid-calendar-object-resource): The resource submitted in + /// the PUT request, or targeted by a COPY or MOVE request, MUST obey + /// all restrictions specified in Section 4.1 (e.g., calendar object + /// resources MUST NOT contain more than one type of calendar + /// component, calendar object resources MUST NOT specify the + /// iCalendar METHOD property, etc.); + ValidCalendarObjectResource, + + /// (CALDAV:supported-calendar-component): The resource submitted in + /// the PUT request, or targeted by a COPY or MOVE request, MUST + /// contain a type of calendar component that is supported in the + /// targeted calendar collection; + SupportedCalendarComponent, + + /// (CALDAV:no-uid-conflict): The resource submitted in the PUT + /// request, or targeted by a COPY or MOVE request, MUST NOT specify + /// an iCalendar UID property value already in use in the targeted + /// calendar collection or overwrite an existing calendar object + /// resource with one that has a different UID property value. + /// Servers SHOULD report the URL of the resource that is already + /// making use of the same UID property value in the DAV:href element; + /// + /// + NoUidConflict(Dav::Href), + + /// (CALDAV:max-resource-size): The resource submitted in the PUT + /// request, or targeted by a COPY or MOVE request, MUST have an octet + /// size less than or equal to the value of the CALDAV:max-resource- + /// size property value (Section 5.2.5) on the calendar collection + /// where the resource will be stored; + MaxResourceSize, + + /// (CALDAV:min-date-time): The resource submitted in the PUT request, + /// or targeted by a COPY or MOVE request, MUST have all of its + /// iCalendar DATE or DATE-TIME property values (for each recurring + /// instance) greater than or equal to the value of the CALDAV:min- + /// date-time property value (Section 5.2.6) on the calendar + /// collection where the resource will be stored; + MinDateTime, + + /// (CALDAV:max-date-time): The resource submitted in the PUT request, + /// or targeted by a COPY or MOVE request, MUST have all of its + /// iCalendar DATE or DATE-TIME property values (for each recurring + /// instance) less than the value of the CALDAV:max-date-time property + /// value (Section 5.2.7) on the calendar collection where the + /// resource will be stored; + MaxDateTime, + + /// (CALDAV:max-instances): The resource submitted in the PUT request, + /// or targeted by a COPY or MOVE request, MUST generate a number of + /// recurring instances less than or equal to the value of the CALDAV: + /// max-instances property value (Section 5.2.8) on the calendar + /// collection where the resource will be stored; + MaxInstances, + + /// (CALDAV:max-attendees-per-instance): The resource submitted in the + /// PUT request, or targeted by a COPY or MOVE request, MUST have a + /// number of ATTENDEE properties on any one instance less than or + /// equal to the value of the CALDAV:max-attendees-per-instance + /// property value (Section 5.2.9) on the calendar collection where + /// the resource will be stored; + MaxAttendeesPerInstance, + + /// The CALDAV:filter XML element (see + /// Section 9.7) specified in the REPORT request MUST be valid. 
For + /// instance, a CALDAV:filter cannot nest a + /// element in a element, and a CALDAV:filter + /// cannot nest a element in a + /// element. + ValidFilter, + /// (CALDAV:supported-filter): The CALDAV:comp-filter (see /// Section 9.7.1), CALDAV:prop-filter (see Section 9.7.2), and /// CALDAV:param-filter (see Section 9.7.3) XML elements used in the @@ -26,19 +597,114 @@ pub enum Violation { /// - SupportedFilter, + SupportedFilter { + comp: Vec, + prop: Vec, + param: Vec, + }, + + /// (DAV:number-of-matches-within-limits): The number of matching + /// calendar object resources must fall within server-specific, + /// predefined limits. For example, this condition might be triggered + /// if a search specification would cause the return of an extremely + /// large number of responses. + NumberOfMatchesWithinLimits, } -pub enum PropertyRequest { - CalendarDescription, - CalendarTimezone, +// -------- Inner XML elements --------- + +/// Some of the reports defined in this section do text matches of +/// character strings provided by the client and are compared to stored +/// calendar data. Since iCalendar data is, by default, encoded in the +/// UTF-8 charset and may include characters outside the US-ASCII charset +/// range in some property and parameter values, there is a need to +/// ensure that text matching follows well-defined rules. +/// +/// To deal with this, this specification makes use of the IANA Collation +/// Registry defined in [RFC4790] to specify collations that may be used +/// to carry out the text comparison operations with a well-defined rule. +/// +/// The comparisons used in CalDAV are all "substring" matches, as per +/// [RFC4790], Section 4.2. Collations supported by the server MUST +/// support "substring" match operations. +/// +/// CalDAV servers are REQUIRED to support the "i;ascii-casemap" and +/// "i;octet" collations, as described in [RFC4790], and MAY support +/// other collations. +/// +/// Servers MUST advertise the set of collations that they support via +/// the CALDAV:supported-collation-set property defined on any resource +/// that supports reports that use collations. +/// +/// Clients MUST only use collations from the list advertised by the +/// server. +/// +/// In the absence of a collation explicitly specified by the client, or +/// if the client specifies the "default" collation identifier (as +/// defined in [RFC4790], Section 3.1), the server MUST default to using +/// "i;ascii-casemap" as the collation. +/// +/// Wildcards (as defined in [RFC4790], Section 3.2) MUST NOT be used in +/// the collation identifier. +/// +/// If the client chooses a collation not supported by the server, the +/// server MUST respond with a CALDAV:supported-collation precondition +/// error response. +pub struct SupportedCollation(String); + +/// calendar-data specialization for Property +pub struct CalendarDataSupport { + content_type: String, + version: String, } -pub enum Property { - CalendarDescription, - CalendarTimezone, +pub enum CalendarSelector { + AllProp, + PropName, + Prop(Dav::PropName), } -pub enum ResourceType { - Calendar, +pub struct CompFilter {} + +pub struct ParamFilter {} + +pub struct PropFilter {} + +/// Name: timezone +/// +/// Namespace: urn:ietf:params:xml:ns:caldav +/// +/// Purpose: Specifies the time zone component to use when determining +/// the results of a report. 
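// A minimal sketch, not part of this patch: these CalDAV preconditions are
// meant to surface through the Extension hook of the generic DAV error type,
// exactly as the calencoder test above already does for resource-must-be-null
// (type parameters elided, as in the rest of this excerpt).
fn example_mkcalendar_precondition_error() -> Dav::Error {
    Dav::Error(vec![
        Dav::Violation::Extension(Violation::ResourceMustBeNull),
        Dav::Violation::Extension(Violation::CalendarCollectionLocationOk),
    ])
}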
+/// +/// Description: The CALDAV:timezone XML element specifies that for a +/// given calendaring REPORT request, the server MUST rely on the +/// specified VTIMEZONE component instead of the CALDAV:calendar- +/// timezone property of the calendar collection, in which the +/// calendar object resource is contained to resolve "date" values and +/// "date with local time" values (i.e., floating time) to "date with +/// UTC time" values. The server will require this information to +/// determine if a calendar component scheduled with "date" values or +/// "date with local time" values intersects a CALDAV:time-range +/// specified in a CALDAV:calendar-query REPORT. +/// +/// Note: The iCalendar data embedded within the CALDAV:timezone XML +/// element MUST follow the standard XML character data encoding +/// rules, including use of <, >, & etc. entity encoding or +/// the use of a construct. In the later case, the +/// +/// iCalendar data cannot contain the character sequence "]]>", which +/// is the end delimiter for the CDATA section. +/// +/// Definition: +/// +/// +/// PCDATA value: an iCalendar object with exactly one VTIMEZONE +pub struct TimeZone(String); + +pub struct Filter {} + +pub enum Component { + VEvent, + VTodo, } diff --git a/src/dav/mod.rs b/src/dav/mod.rs index 98d6965..4044895 100644 --- a/src/dav/mod.rs +++ b/src/dav/mod.rs @@ -1,5 +1,7 @@ mod types; mod caltypes; +mod acltypes; +mod versioningtypes; mod encoder; mod calencoder; diff --git a/src/dav/types.rs b/src/dav/types.rs index ffb08d4..7e3eb1c 100644 --- a/src/dav/types.rs +++ b/src/dav/types.rs @@ -435,8 +435,6 @@ pub enum PropFind { Prop(PropName), } - - /// 14.22 propstat XML Element /// /// Name: propstat -- cgit v1.2.3 From 4d325a2f7b088276f805b29dfa9817b6f6783b69 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Sat, 2 Mar 2024 15:52:26 +0100 Subject: CalDAV many types --- src/dav/acltypes.rs | 4 + src/dav/caltypes.rs | 636 ++++++++++++++++++++++++++++++++++++++++++++- src/dav/versioningtypes.rs | 3 + 3 files changed, 632 insertions(+), 11 deletions(-) create mode 100644 src/dav/acltypes.rs create mode 100644 src/dav/versioningtypes.rs diff --git a/src/dav/acltypes.rs b/src/dav/acltypes.rs new file mode 100644 index 0000000..f356813 --- /dev/null +++ b/src/dav/acltypes.rs @@ -0,0 +1,4 @@ +//@FIXME required for a full DAV implementation +// See section 6. of the CalDAV RFC +// It seems mainly required for free-busy that I will not implement now. +// It can also be used for discovering main calendar, not sure it is used. diff --git a/src/dav/caltypes.rs b/src/dav/caltypes.rs index db9484a..6f6bd3f 100644 --- a/src/dav/caltypes.rs +++ b/src/dav/caltypes.rs @@ -1,8 +1,11 @@ -use chrono::{DateTime,Utc}; +#![allow(dead_code)] +use chrono::{DateTime,Utc}; use super::types as Dav; -//@FIXME for now, we skip the ACL part +//@FIXME ACL part is missing, required +//@FIXME Versioning part is missing, required +//@FIXME WebDAV sync (rfc6578) is missing, optional pub struct CalExtension { pub root: bool @@ -10,7 +13,7 @@ pub struct CalExtension { impl Dav::Extension for CalExtension { type Error = Violation; type Property = Property; - type PropertyRequest = Property; //@FIXME + type PropertyRequest = PropertyRequest; type ResourceType = ResourceType; } @@ -33,7 +36,16 @@ pub struct MkCalendar(Dav::Set); /// be a CALDAV:mkcalendar-response XML element. 
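// A minimal sketch, not part of this patch: an MKCALENDAR body combines the
// generic DAV:set machinery with CalDAV properties, e.g. seeding the
// collection's timezone at creation time. The iCalendar payload is abridged
// and type parameters are elided, as in the rest of this excerpt.
fn example_mkcalendar_body() -> MkCalendar {
    MkCalendar(Dav::Set(Dav::PropValue(vec![Property::CalendarTimezone(
        "BEGIN:VCALENDAR\r\nBEGIN:VTIMEZONE\r\nTZID:US-Eastern\r\nEND:VTIMEZONE\r\nEND:VCALENDAR\r\n".to_string(),
    )])))
}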
/// /// -pub struct MkCalendarResponse(()); +/// +/// ---- +/// +/// ANY is not satisfying, so looking at RFC5689 +/// https://www.rfc-editor.org/rfc/rfc5689.html#section-5.2 +/// +/// Definition: +/// +/// +pub struct MkCalendarResponse(Vec>); /// Name: calendar-query /// @@ -59,9 +71,19 @@ pub enum ResourceType { Calendar, } +/// Check the matching Property object for documentation pub enum PropertyRequest { CalendarDescription, CalendarTimezone, + SupportedCalendarComponentSet, + SupportedCalendarData, + MaxResourceSize, + MinDateTime, + MaxDateTime, + MaxInstances, + MaxAttendeesPerInstance, + SupportedCollationSet, + CalendarData(CalendarDataRequest), } pub enum Property { /// Name: calendar-description @@ -206,7 +228,7 @@ pub enum Property { /// /// /// - SupportedCalendarComponentSet(Vec), + SupportedCalendarComponentSet(Vec), /// Name: supported-calendar-data /// @@ -249,7 +271,7 @@ pub enum Property { /// when nested in the CALDAV:supported-calendar-data property /// to specify a supported media type for calendar object /// resources; - SupportedCalendarData(Vec), + SupportedCalendarData(Vec), /// Name: max-resource-size /// @@ -480,6 +502,59 @@ pub enum Property { /// i;octet /// SupportedCollationSet(Vec), + + /// Name: calendar-data + /// + /// Namespace: urn:ietf:params:xml:ns:caldav + /// + /// Purpose: Specified one of the following: + /// + /// 1. A supported media type for calendar object resources when + /// nested in the CALDAV:supported-calendar-data property; + /// + /// 2. The parts of a calendar object resource should be returned by + /// a calendaring report; + /// + /// 3. The content of a calendar object resource in a response to a + /// calendaring report. + /// + /// Description: When nested in the CALDAV:supported-calendar-data + /// property, the CALDAV:calendar-data XML element specifies a media + /// type supported by the CalDAV server for calendar object resources. + /// + /// When used in a calendaring REPORT request, the CALDAV:calendar- + /// data XML element specifies which parts of calendar object + /// resources need to be returned in the response. If the CALDAV: + /// calendar-data XML element doesn't contain any CALDAV:comp element, + /// calendar object resources will be returned in their entirety. + /// + /// Finally, when used in a calendaring REPORT response, the CALDAV: + /// calendar-data XML element specifies the content of a calendar + /// object resource. Given that XML parsers normalize the two- + /// character sequence CRLF (US-ASCII decimal 13 and US-ASCII decimal + /// 10) to a single LF character (US-ASCII decimal 10), the CR + /// character (US-ASCII decimal 13) MAY be omitted in calendar object + /// resources specified in the CALDAV:calendar-data XML element. + /// Furthermore, calendar object resources specified in the CALDAV: + /// calendar-data XML element MAY be invalid per their media type + /// specification if the CALDAV:calendar-data XML element part of the + /// calendaring REPORT request did not specify required properties + /// (e.g., UID, DTSTAMP, etc.), or specified a CALDAV:prop XML element + /// with the "novalue" attribute set to "yes". + /// + /// Note: The CALDAV:calendar-data XML element is specified in requests + /// and responses inside the DAV:prop XML element as if it were a + /// WebDAV property. However, the CALDAV:calendar-data XML element is + /// not a WebDAV property and, as such, is not returned in PROPFIND + /// responses, nor used in PROPPATCH requests. 
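// A minimal sketch, not part of this patch: the three roles described above
// map onto three distinct types declared just below -- CalendarDataRequest
// (what a REPORT asks for), CalendarDataPayload (what a REPORT response
// carries), and CalendarDataEmpty (the supported-calendar-data advertisement).
// A response payload is simply the raw iCalendar text:
fn example_report_payload(ics: String) -> Property {
    Property::CalendarData(CalendarDataPayload(ics))
}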
+ /// + /// Note: The iCalendar data embedded within the CALDAV:calendar-data + /// XML element MUST follow the standard XML character data encoding + /// rules, including use of <, >, & etc. entity encoding or + /// the use of a construct. In the later case, the + /// iCalendar data cannot contain the character sequence "]]>", which + /// is the end delimiter for the CDATA section. + CalendarData(CalendarDataPayload), } pub enum Violation { @@ -650,25 +725,522 @@ pub enum Violation { /// If the client chooses a collation not supported by the server, the /// server MUST respond with a CALDAV:supported-collation precondition /// error response. -pub struct SupportedCollation(String); +pub struct SupportedCollation(Collation); +#[derive(Default)] +pub enum Collation { + #[default] + AsciiCaseMap, + Octet, + Unknown(String), +} + + +/// +/// PCDATA value: iCalendar object +/// +/// when nested in the DAV:prop XML element in a calendaring +/// REPORT response to specify the content of a returned +/// calendar object resource. +pub struct CalendarDataPayload(String); + +/// +/// +/// when nested in the DAV:prop XML element in a calendaring +/// REPORT request to specify which parts of calendar object +/// resources should be returned in the response; +pub struct CalendarDataRequest { + comp: Option, + reccurence: Option, + limit_freebusy_set: Option, +} /// calendar-data specialization for Property +/// +/// +/// +/// when nested in the CALDAV:supported-calendar-data property +/// to specify a supported media type for calendar object +/// resources; +pub struct CalendarDataEmpty(Option); + +/// +/// content-type value: a MIME media type +/// version value: a version string +/// attributes can be used on all three variants of the +/// CALDAV:calendar-data XML element. pub struct CalendarDataSupport { content_type: String, version: String, } +/// Name: comp +/// +/// Namespace: urn:ietf:params:xml:ns:caldav +/// +/// Purpose: Defines which component types to return. +/// +/// Description: The name value is a calendar component name (e.g., +/// VEVENT). +/// +/// Definition: +/// +/// +/// +/// name value: a calendar component name +/// +/// Note: The CALDAV:prop and CALDAV:allprop elements have the same name +/// as the DAV:prop and DAV:allprop elements defined in [RFC2518]. +/// However, the CALDAV:prop and CALDAV:allprop elements are defined +/// in the "urn:ietf:params:xml:ns:caldav" namespace instead of the +/// "DAV:" namespace. +pub struct Comp { + name: Component, + prop_kind: PropKind, + comp_kind: CompKind, +} + +/// For SupportedCalendarComponentSet +/// +/// Definition: +/// +/// +/// +/// Example: +/// +/// +/// +/// +/// +pub struct CompSupport(Component); + +/// Name: allcomp +/// +/// Namespace: urn:ietf:params:xml:ns:caldav +/// +/// Purpose: Specifies that all components shall be returned. +/// +/// Description: The CALDAV:allcomp XML element can be used when the +/// client wants all types of components returned by a calendaring +/// REPORT request. +/// +/// Definition: +/// +/// +pub enum CompKind { + AllComp, + Comp(Vec), +} + +/// Name: allprop +/// +/// Namespace: urn:ietf:params:xml:ns:caldav +/// +/// Purpose: Specifies that all properties shall be returned. +/// +/// Description: The CALDAV:allprop XML element can be used when the +/// client wants all properties of components returned by a +/// calendaring REPORT request. +/// +/// Definition: +/// +/// +/// +/// Note: The CALDAV:allprop element has the same name as the DAV: +/// allprop element defined in [RFC2518]. 
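// A minimal sketch, not part of this patch: mapping a client-supplied
// collation identifier onto the Collation enum above, following the rules
// quoted earlier (absent or "default" falls back to i;ascii-casemap). The
// helper name is invented; a caller would reject Unknown with the
// CALDAV:supported-collation precondition error.
fn collation_from_identifier(id: Option<&str>) -> Collation {
    match id {
        None | Some("default") | Some("i;ascii-casemap") => Collation::AsciiCaseMap,
        Some("i;octet") => Collation::Octet,
        Some(other) => Collation::Unknown(other.to_string()),
    }
}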
However, the CALDAV:allprop +/// element is defined in the "urn:ietf:params:xml:ns:caldav" +/// namespace instead of the "DAV:" namespace. +pub enum PropKind { + AllProp, + Prop(Vec), +} + +/// Name: prop +/// +/// Namespace: urn:ietf:params:xml:ns:caldav +/// +/// Purpose: Defines which properties to return in the response. +/// +/// Description: The "name" attribute specifies the name of the calendar +/// property to return (e.g., ATTENDEE). The "novalue" attribute can +/// be used by clients to request that the actual value of the +/// property not be returned (if the "novalue" attribute is set to +/// "yes"). In that case, the server will return just the iCalendar +/// property name and any iCalendar parameters and a trailing ":" +/// without the subsequent value data. +/// +/// Definition: +/// +/// +/// name value: a calendar property name +/// novalue value: "yes" or "no" +/// +/// Note: The CALDAV:prop element has the same name as the DAV:prop +/// element defined in [RFC2518]. However, the CALDAV:prop element is +/// defined in the "urn:ietf:params:xml:ns:caldav" namespace instead +/// of the "DAV:" namespace. +pub struct CalProp { + name: ComponentProperty, + novalue: bool, +} + +pub enum RecurrenceModifier { + Expand(Expand), + LimitRecurrenceSet(LimitRecurrenceSet), +} + +/// Name: expand +/// +/// Namespace: urn:ietf:params:xml:ns:caldav +/// +/// Purpose: Forces the server to expand recurring components into +/// individual recurrence instances. +/// +/// Description: The CALDAV:expand XML element specifies that for a +/// given calendaring REPORT request, the server MUST expand the +/// recurrence set into calendar components that define exactly one +/// recurrence instance, and MUST return only those whose scheduled +/// time intersect a specified time range. +/// +/// The "start" attribute specifies the inclusive start of the time +/// range, and the "end" attribute specifies the non-inclusive end of +/// the time range. Both attributes are specified as date with UTC +/// time value. The value of the "end" attribute MUST be greater than +/// the value of the "start" attribute. +/// +/// The server MUST use the same logic as defined for CALDAV:time- +/// range to determine if a recurrence instance intersects the +/// specified time range. +/// +/// Recurring components, other than the initial instance, MUST +/// include a RECURRENCE-ID property indicating which instance they +/// refer to. +/// +/// The returned calendar components MUST NOT use recurrence +/// properties (i.e., EXDATE, EXRULE, RDATE, and RRULE) and MUST NOT +/// have reference to or include VTIMEZONE components. Date and local +/// time with reference to time zone information MUST be converted +/// into date with UTC time. +/// +/// Definition: +/// +/// +/// +/// start value: an iCalendar "date with UTC time" +/// end value: an iCalendar "date with UTC time" +pub struct Expand(DateTime, DateTime); + +/// CALDAV:limit-recurrence-set XML Element +/// +/// Name: limit-recurrence-set +/// +/// Namespace: urn:ietf:params:xml:ns:caldav +/// +/// Purpose: Specifies a time range to limit the set of "overridden +/// components" returned by the server. +/// +/// Description: The CALDAV:limit-recurrence-set XML element specifies +/// that for a given calendaring REPORT request, the server MUST +/// return, in addition to the "master component", only the +/// "overridden components" that impact a specified time range. 
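// A minimal sketch, not part of this patch: both recurrence modifiers reduce
// to a [start, end) window of "date with UTC time" values, i.e. a pair of
// chrono DateTime<Utc> (the Utc parameter is elided elsewhere in this file);
// the window below is an arbitrary example.
fn example_expand_modifier() -> RecurrenceModifier {
    use chrono::{TimeZone, Utc};
    RecurrenceModifier::Expand(Expand(
        Utc.with_ymd_and_hms(2006, 1, 4, 0, 0, 0).unwrap(),
        Utc.with_ymd_and_hms(2006, 1, 5, 0, 0, 0).unwrap(),
    ))
}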
An +/// overridden component impacts a time range if its current start and +/// end times overlap the time range, or if the original start and end +/// times -- the ones that would have been used if the instance were +/// not overridden -- overlap the time range. +/// +/// The "start" attribute specifies the inclusive start of the time +/// range, and the "end" attribute specifies the non-inclusive end of +/// the time range. Both attributes are specified as date with UTC +/// time value. The value of the "end" attribute MUST be greater than +/// the value of the "start" attribute. +/// +/// The server MUST use the same logic as defined for CALDAV:time- +/// range to determine if the current or original scheduled time of an +/// "overridden" recurrence instance intersects the specified time +/// range. +/// +/// Overridden components that have a RANGE parameter on their +/// RECURRENCE-ID property may specify one or more instances in the +/// recurrence set, and some of those instances may fall within the +/// specified time range or may have originally fallen within the +/// specified time range prior to being overridden. If that is the +/// case, the overridden component MUST be included in the results, as +/// it has a direct impact on the interpretation of instances within +/// the specified time range. +/// +/// Definition: +/// +/// +/// +/// start value: an iCalendar "date with UTC time" +/// end value: an iCalendar "date with UTC time" +pub struct LimitRecurrenceSet(DateTime, DateTime); + +/// Name: limit-freebusy-set +/// +/// Namespace: urn:ietf:params:xml:ns:caldav +/// +/// Purpose: Specifies a time range to limit the set of FREEBUSY values +/// returned by the server. +/// +/// Description: The CALDAV:limit-freebusy-set XML element specifies +/// that for a given calendaring REPORT request, the server MUST only +/// return the FREEBUSY property values of a VFREEBUSY component that +/// intersects a specified time range. +/// +/// The "start" attribute specifies the inclusive start of the time +/// range, and the "end" attribute specifies the non-inclusive end of +/// the time range. Both attributes are specified as "date with UTC +/// time" value. The value of the "end" attribute MUST be greater +/// than the value of the "start" attribute. +/// +/// The server MUST use the same logic as defined for CALDAV:time- +/// range to determine if a FREEBUSY property value intersects the +/// specified time range. +/// +/// Definition: +/// +/// +/// start value: an iCalendar "date with UTC time" +/// end value: an iCalendar "date with UTC time" +pub struct LimitFreebusySet(DateTime, DateTime); + + pub enum CalendarSelector { AllProp, PropName, Prop(Dav::PropName), } -pub struct CompFilter {} +/// Name: comp-filter +/// +/// Namespace: urn:ietf:params:xml:ns:caldav +/// +/// Purpose: Specifies search criteria on calendar components. +/// +/// Description: The CALDAV:comp-filter XML element specifies a query +/// targeted at the calendar object (i.e., VCALENDAR) or at a specific +/// calendar component type (e.g., VEVENT). The scope of the +/// CALDAV:comp-filter XML element is the calendar object when used as +/// a child of the CALDAV:filter XML element. The scope of the +/// CALDAV:comp-filter XML element is the enclosing calendar component +/// when used as a child of another CALDAV:comp-filter XML element. 
A +/// CALDAV:comp-filter is said to match if: +/// +/// * The CALDAV:comp-filter XML element is empty and the calendar +/// object or calendar component type specified by the "name" +/// attribute exists in the current scope; +/// +/// or: +/// +/// * The CALDAV:comp-filter XML element contains a CALDAV:is-not- +/// defined XML element and the calendar object or calendar +/// component type specified by the "name" attribute does not exist +/// in the current scope; +/// +/// or: +/// +/// * The CALDAV:comp-filter XML element contains a CALDAV:time-range +/// XML element and at least one recurrence instance in the +/// targeted calendar component is scheduled to overlap the +/// specified time range, and all specified CALDAV:prop-filter and +/// CALDAV:comp-filter child XML elements also match the targeted +/// calendar component; +/// +/// or: +/// +/// * The CALDAV:comp-filter XML element only contains CALDAV:prop- +/// filter and CALDAV:comp-filter child XML elements that all match +/// the targeted calendar component. +/// +/// Definition: +/// +/// +/// +/// name value: a calendar object or calendar component +/// type (e.g., VEVENT) +pub struct CompFilter { + name: Component, + inner: CompFilterInner, +} +pub enum CompFilterInner { + // Option 1 + Empty, + // Option 2 + IsNotDefined, + // Options 3 & 4 + Matches(CompFilterMatch), +} +pub struct CompFilterMatch { + time_range: Option, + prop_filter: Vec, + comp_filter: Vec, +} + +/// Name: prop-filter +/// +/// Namespace: urn:ietf:params:xml:ns:caldav +/// +/// Purpose: Specifies search criteria on calendar properties. +/// +/// Description: The CALDAV:prop-filter XML element specifies a query +/// targeted at a specific calendar property (e.g., CATEGORIES) in the +/// scope of the enclosing calendar component. A calendar property is +/// said to match a CALDAV:prop-filter if: +/// +/// * The CALDAV:prop-filter XML element is empty and a property of +/// the type specified by the "name" attribute exists in the +/// enclosing calendar component; +/// +/// or: +/// +/// * The CALDAV:prop-filter XML element contains a CALDAV:is-not- +/// defined XML element and no property of the type specified by +/// the "name" attribute exists in the enclosing calendar +/// component; +/// +/// or: +/// +/// * The CALDAV:prop-filter XML element contains a CALDAV:time-range +/// XML element and the property value overlaps the specified time +/// range, and all specified CALDAV:param-filter child XML elements +/// also match the targeted property; +/// +/// or: +/// +/// * The CALDAV:prop-filter XML element contains a CALDAV:text-match +/// XML element and the property value matches it, and all +/// specified CALDAV:param-filter child XML elements also match the +/// targeted property; +/// +/// Definition: +/// +/// +/// +/// +/// name value: a calendar property name (e.g., ATTENDEE) +pub struct PropFilter { + name: Component, + inner: PropFilterInner, +} +pub enum PropFilterInner { + // Option 1 + Empty, + // Option 2 + IsNotDefined, + // Options 3 & 4 + Match(PropFilterMatch), +} +pub struct PropFilterMatch { + time_range: Option, + time_or_text: Option, + param_filter: Vec, +} +pub enum TimeOrText { + Time(TimeRange), + Text(TextMatch), +} + +/// Name: text-match +/// +/// Namespace: urn:ietf:params:xml:ns:caldav +/// +/// Purpose: Specifies a substring match on a property or parameter +/// value. 
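// A minimal sketch, not part of this patch: the classic calendar-query filter,
// "all VEVENTs overlapping a window", nests one comp-filter inside another as
// described above. Component and TimeRange are the types declared further down
// this file; chrono's Utc parameter is elided elsewhere in this excerpt.
fn example_event_window_filter() -> CompFilter {
    use chrono::{TimeZone, Utc};
    CompFilter {
        name: Component::VCalendar,
        inner: CompFilterInner::Matches(CompFilterMatch {
            time_range: None,
            prop_filter: vec![],
            comp_filter: vec![CompFilter {
                name: Component::VEvent,
                inner: CompFilterInner::Matches(CompFilterMatch {
                    time_range: Some(TimeRange::FullRange(
                        Utc.with_ymd_and_hms(2006, 1, 4, 0, 0, 0).unwrap(),
                        Utc.with_ymd_and_hms(2006, 1, 5, 0, 0, 0).unwrap(),
                    )),
                    prop_filter: vec![],
                    comp_filter: vec![],
                }),
            }],
        }),
    }
}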
+/// +/// Description: The CALDAV:text-match XML element specifies text used +/// for a substring match against the property or parameter value +/// specified in a calendaring REPORT request. +/// +/// The "collation" attribute is used to select the collation that the +/// server MUST use for character string matching. In the absence of +/// this attribute, the server MUST use the "i;ascii-casemap" +/// collation. +/// +/// The "negate-condition" attribute is used to indicate that this +/// test returns a match if the text matches when the attribute value +/// is set to "no", or return a match if the text does not match, if +/// the attribute value is set to "yes". For example, this can be +/// used to match components with a STATUS property not set to +/// CANCELLED. +/// +/// Definition: +/// +/// PCDATA value: string +/// +pub struct TextMatch { + collation: Option, + negate_condition: bool, + text: String, +} + +/// Name: param-filter +/// +/// Namespace: urn:ietf:params:xml:ns:caldav +/// +/// Purpose: Limits the search to specific parameter values. +/// +/// Description: The CALDAV:param-filter XML element specifies a query +/// targeted at a specific calendar property parameter (e.g., +/// PARTSTAT) in the scope of the calendar property on which it is +/// defined. A calendar property parameter is said to match a CALDAV: +/// param-filter if: +/// +/// * The CALDAV:param-filter XML element is empty and a parameter of +/// the type specified by the "name" attribute exists on the +/// calendar property being examined; +/// +/// or: +/// +/// * The CALDAV:param-filter XML element contains a CALDAV:is-not- +/// defined XML element and no parameter of the type specified by +/// the "name" attribute exists on the calendar property being +/// examined; +/// +/// Definition: +/// +/// +/// +/// +/// name value: a property parameter name (e.g., PARTSTAT) +pub struct ParamFilter { + name: PropertyParameter, + inner: Option, +} +pub enum ParamFilterMatch { + IsNotDefined, + Match(TextMatch), +} + +/// CALDAV:is-not-defined XML Element +/// +/// Name: is-not-defined +/// +/// Namespace: urn:ietf:params:xml:ns:caldav +/// +/// Purpose: Specifies that a match should occur if the enclosing +/// component, property, or parameter does not exist. +/// +/// Description: The CALDAV:is-not-defined XML element specifies that a +/// match occurs if the enclosing component, property, or parameter +/// value specified in a calendaring REPORT request does not exist in +/// the calendar data being tested. +/// +/// Definition: +/// +/* CURRENTLY INLINED */ -pub struct ParamFilter {} -pub struct PropFilter {} /// Name: timezone /// @@ -702,9 +1274,51 @@ pub struct PropFilter {} /// PCDATA value: an iCalendar object with exactly one VTIMEZONE pub struct TimeZone(String); -pub struct Filter {} +/// Name: filter +/// +/// Namespace: urn:ietf:params:xml:ns:caldav +/// +/// Purpose: Specifies a filter to limit the set of calendar components +/// returned by the server. +/// +/// Description: The CALDAV:filter XML element specifies the search +/// filter used to limit the calendar components returned by a +/// calendaring REPORT request. +/// +/// Definition: +/// +pub struct Filter(CompFilter); +/// Known components pub enum Component { + VCalendar, + VJournal, + VFreeBusy, VEvent, VTodo, + VAlarm, + Unknown(String), +} + +/// name="VERSION", name="SUMMARY", etc. +/// Can be set on different objects: VCalendar, VEvent, etc. 
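
// Editor's note (illustrative sketch, not part of the patch): putting the
// filter types above together, the example quoted in the text-match
// description -- "match components with a STATUS property not set to
// CANCELLED" -- could be expressed inside this module as below. Field names
// follow this commit (a later commit in the series renames *Inner to *Rules,
// turns negate_condition into an Option and makes the fields pub); note that
// prop-filter names are typed as Component at this point, so STATUS goes
// through the Unknown escape hatch.
let _status_not_cancelled = Filter(CompFilter {
    name: Component::VCalendar,
    inner: CompFilterInner::Matches(CompFilterMatch {
        time_range: None,
        prop_filter: vec![],
        comp_filter: vec![CompFilter {
            name: Component::VEvent,
            inner: CompFilterInner::Matches(CompFilterMatch {
                time_range: None,
                prop_filter: vec![PropFilter {
                    name: Component::Unknown("STATUS".into()),
                    inner: PropFilterInner::Match(PropFilterMatch {
                        time_range: None,
                        time_or_text: Some(TimeOrText::Text(TextMatch {
                            collation: None,
                            negate_condition: true,
                            text: "CANCELLED".into(),
                        })),
                        param_filter: vec![],
                    }),
                }],
                comp_filter: vec![],
            }),
        }],
    }),
});
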
+/// Might be replaced by an enum later +pub struct ComponentProperty(String); + +/// like PARSTAT +pub struct PropertyParameter(String); + +/// Name: time-range +/// +/// Definition: +/// +/// +/// +/// start value: an iCalendar "date with UTC time" +/// end value: an iCalendar "date with UTC time" +pub enum TimeRange { + OnlyStart(DateTime), + OnlyEnd(DateTime), + FullRange(DateTime, DateTime), } diff --git a/src/dav/versioningtypes.rs b/src/dav/versioningtypes.rs new file mode 100644 index 0000000..6c1c204 --- /dev/null +++ b/src/dav/versioningtypes.rs @@ -0,0 +1,3 @@ +//@FIXME required for a full DAV implementation +// See section 7.1 of the CalDAV RFC +// It seems it's mainly due to the fact that the REPORT method is re-used. -- cgit v1.2.3 From f1861e3f1265a47513779bd251298d034a9011a0 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Sat, 2 Mar 2024 16:10:41 +0100 Subject: Finalize caldav types iteration --- src/dav/caltypes.rs | 47 +++++++++++++++++++++++++++++++++++++++++++---- 1 file changed, 43 insertions(+), 4 deletions(-) diff --git a/src/dav/caltypes.rs b/src/dav/caltypes.rs index 6f6bd3f..a08f550 100644 --- a/src/dav/caltypes.rs +++ b/src/dav/caltypes.rs @@ -3,9 +3,13 @@ use chrono::{DateTime,Utc}; use super::types as Dav; -//@FIXME ACL part is missing, required -//@FIXME Versioning part is missing, required +//@FIXME ACL (rfc3744) is missing, required +//@FIXME Versioning (rfc3253) is missing, required //@FIXME WebDAV sync (rfc6578) is missing, optional +// For reference, SabreDAV guide gives high-level & real-world overview: +// https://sabre.io/dav/building-a-caldav-client/ +// For reference, non-official extensions documented by SabreDAV: +// https://github.com/apple/ccs-calendarserver/tree/master/doc/Extensions pub struct CalExtension { pub root: bool @@ -66,6 +70,38 @@ pub struct CalendarQuery { timezone: Option, } +/// Name: calendar-multiget +/// +/// Namespace: urn:ietf:params:xml:ns:caldav +/// +/// Purpose: CalDAV report used to retrieve specific calendar object +/// resources. +/// +/// Description: See Section 7.9. +/// +/// Definition: +/// +/// +pub struct CalendarMultiget { + selector: Option>, + href: Vec, +} + +/// Name: free-busy-query +/// +/// Namespace: urn:ietf:params:xml:ns:caldav +/// +/// Purpose: CalDAV report used to generate a VFREEBUSY to determine +/// busy time over a specific time range. +/// +/// Description: See Section 7.10. +/// +/// Definition: +/// +pub struct FreeBusyQuery(TimeRange); + // ----- Hooks ----- pub enum ResourceType { Calendar, @@ -116,7 +152,10 @@ pub enum Property { /// Calendrier de Mathilde Desruisseaux - CalendarDescription(String), + CalendarDescription { + lang: Option, + text: String, + }, /// 5.2.2. 
CALDAV:calendar-timezone Property /// @@ -1011,7 +1050,7 @@ pub struct LimitRecurrenceSet(DateTime, DateTime); /// end value: an iCalendar "date with UTC time" pub struct LimitFreebusySet(DateTime, DateTime); - +/// Used by CalendarQuery & CalendarMultiget pub enum CalendarSelector { AllProp, PropName, -- cgit v1.2.3 From 9514af8f525a3c70075274f5c4ca95ac03eecedf Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Sat, 2 Mar 2024 16:52:52 +0100 Subject: Calendar skeleton --- src/dav/calencoder.rs | 266 +++++++++++++++++++++++++++++++++++++++++++++++--- src/dav/caltypes.rs | 4 + 2 files changed, 257 insertions(+), 13 deletions(-) diff --git a/src/dav/calencoder.rs b/src/dav/calencoder.rs index 815946e..a2d9a74 100644 --- a/src/dav/calencoder.rs +++ b/src/dav/calencoder.rs @@ -8,6 +8,12 @@ use quick_xml::writer::{ElementWriter, Writer}; use quick_xml::name::PrefixDeclaration; use tokio::io::AsyncWrite; +// =============== Calendar Trait =========================== +pub trait CalContext: Context { + fn create_cal_element(&self, name: &str) -> BytesStart; +} + +// =============== CalDAV Extension Setup =================== impl Context for CalExtension { fn child(&self) -> Self { Self { root: false } @@ -33,6 +39,12 @@ impl Context for CalExtension { } } +impl CalContext for CalExtension { + fn create_cal_element(&self, name: &str) -> BytesStart { + self.create_ns_element("C", name) + } +} + impl CalExtension { fn create_ns_element(&self, ns: &str, name: &str) -> BytesStart { let mut start = BytesStart::new(format!("{}:{}", ns, name)); @@ -42,13 +54,67 @@ impl CalExtension { } start } - fn create_cal_element(&self, name: &str) -> BytesStart { - self.create_ns_element("C", name) +} + +// ==================== Calendar Types Serialization ========================= + +// -------------------- MKCALENDAR METHOD ------------------------------------ +impl QuickWritable for MkCalendar { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +impl QuickWritable for MkCalendarResponse { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +// ----------------------- REPORT METHOD ------------------------------------- + +impl QuickWritable for CalendarQuery { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +impl QuickWritable for CalendarMultiget { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +impl QuickWritable for FreeBusyQuery { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +// -------------------------- DAV::prop -------------------------------------- +impl QuickWritable for Property { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} +impl QuickWritable for PropertyRequest { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +// ---------------------- DAV::resourcetype ---------------------------------- +impl QuickWritable for ResourceType { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + match self { + Self::Calendar => xml.write_event_async(Event::Empty(ctx.create_dav_element("calendar"))).await, + } } } -impl QuickWritable for Violation { - async fn write(&self, xml: &mut Writer, ctx: CalExtension) -> Result<(), QError> { +// --------------------------- DAV::error ------------------------------------ +impl 
QuickWritable for Violation { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { match self { Self::ResourceMustBeNull => { let start = ctx.create_cal_element("resource-must-be-null"); @@ -61,26 +127,200 @@ impl QuickWritable for Violation { } -impl QuickWritable for Property { - async fn write(&self, xml: &mut Writer, ctx: CalExtension) -> Result<(), QError> { +// ---------------------------- Inner XML ------------------------------------ +impl QuickWritable for SupportedCollation { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { unimplemented!(); } } -impl QuickWritable for PropertyRequest { - async fn write(&self, xml: &mut Writer, ctx: CalExtension) -> Result<(), QError> { +impl QuickWritable for Collation { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { unimplemented!(); } } -impl QuickWritable for ResourceType { - async fn write(&self, xml: &mut Writer, ctx: CalExtension) -> Result<(), QError> { - match self { - Self::Calendar => xml.write_event_async(Event::Empty(ctx.create_dav_element("calendar"))).await, - } +impl QuickWritable for CalendarDataPayload { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +impl QuickWritable for CalendarDataRequest { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +impl QuickWritable for CalendarDataEmpty { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +impl QuickWritable for CalendarDataSupport { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +impl QuickWritable for Comp { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +impl QuickWritable for CompSupport { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +impl QuickWritable for CompKind { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +impl QuickWritable for PropKind { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +impl QuickWritable for CalProp { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +impl QuickWritable for RecurrenceModifier { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +impl QuickWritable for Expand { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +impl QuickWritable for LimitRecurrenceSet { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +impl QuickWritable for LimitFreebusySet { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +impl QuickWritable for CalendarSelector { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +impl QuickWritable for CompFilter { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +impl QuickWritable for CompFilterInner { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +impl QuickWritable for CompFilterMatch { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +impl 
QuickWritable for PropFilter { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +impl QuickWritable for PropFilterInner { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +impl QuickWritable for PropFilterMatch { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); } } +impl QuickWritable for TimeOrText { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +impl QuickWritable for TextMatch { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +impl QuickWritable for ParamFilter { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +impl QuickWritable for ParamFilterMatch { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +impl QuickWritable for TimeZone { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +impl QuickWritable for Filter { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +impl QuickWritable for Component { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +impl QuickWritable for ComponentProperty { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +impl QuickWritable for PropertyParameter { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + +impl QuickWritable for TimeRange { + async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + unimplemented!(); + } +} + + #[cfg(test)] mod tests { use super::*; diff --git a/src/dav/caltypes.rs b/src/dav/caltypes.rs index a08f550..3b65fe2 100644 --- a/src/dav/caltypes.rs +++ b/src/dav/caltypes.rs @@ -23,6 +23,8 @@ impl Dav::Extension for CalExtension { // ----- Root elements ----- +// --- (MKCALENDAR PART) --- + /// If a request body is included, it MUST be a CALDAV:mkcalendar XML /// element. Instruction processing MUST occur in the order /// instructions are received (i.e., from top to bottom). 
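
// Editor's note (illustrative sketch, not part of the patch): once these
// skeleton impls are filled in, they are driven exactly like the existing
// test_violation test -- build a quick-xml async Writer over any AsyncWrite,
// then hand it to write() together with a CalExtension context
// (tokio::io::AsyncWriteExt must be in scope for flush):
let res = Error(vec![DavViolation::Extension(Violation::ResourceMustBeNull)]);
let mut buffer = Vec::new();
let mut tokio_buffer = tokio::io::BufWriter::new(&mut buffer);
let mut writer = Writer::new_with_indent(&mut tokio_buffer, b' ', 4);
res.write(&mut writer, CalExtension { root: true }).await.expect("xml serialization");
tokio_buffer.flush().await.expect("tokio buffer flush");
let xml = std::str::from_utf8(buffer.as_slice()).expect("valid utf-8");
// With root set to true, the outermost element is the one that declares the
// DAV: and CalDAV namespaces (see create_ns_element above); child() contexts
// carry root: false.
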
@@ -51,6 +53,8 @@ pub struct MkCalendar(Dav::Set); /// pub struct MkCalendarResponse(Vec>); +// --- (REPORT PART) --- + /// Name: calendar-query /// /// Namespace: urn:ietf:params:xml:ns:caldav -- cgit v1.2.3 From 2b2e3c032cb67668395c7301962669bc927bd854 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Sat, 2 Mar 2024 18:19:03 +0100 Subject: Encode Calendar Properties --- src/dav/calencoder.rs | 165 +++++++++++++++++++++++++++++++++++++++++++++++--- src/dav/caltypes.rs | 22 ++++--- 2 files changed, 169 insertions(+), 18 deletions(-) diff --git a/src/dav/calencoder.rs b/src/dav/calencoder.rs index a2d9a74..5cd40ef 100644 --- a/src/dav/calencoder.rs +++ b/src/dav/calencoder.rs @@ -8,6 +8,8 @@ use quick_xml::writer::{ElementWriter, Writer}; use quick_xml::name::PrefixDeclaration; use tokio::io::AsyncWrite; +const ICAL_DATETIME_FMT: &str = "%Y%m%dT%H%M%SZ"; + // =============== Calendar Trait =========================== pub trait CalContext: Context { fn create_cal_element(&self, name: &str) -> BytesStart; @@ -61,13 +63,25 @@ impl CalExtension { // -------------------- MKCALENDAR METHOD ------------------------------------ impl QuickWritable for MkCalendar { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + let start = ctx.create_cal_element("mkcalendar"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + self.0.write(xml, ctx.child()).await?; + xml.write_event_async(Event::End(end)).await } } impl QuickWritable for MkCalendarResponse { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + let start = ctx.create_cal_element("mkcalendar-response"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + for propstat in self.0.iter() { + propstat.write(xml, ctx.child()).await?; + } + xml.write_event_async(Event::End(end)).await } } @@ -75,31 +89,164 @@ impl QuickWritable for MkCalendarResponse { impl QuickWritable for CalendarQuery { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + let start = ctx.create_cal_element("calendar-query"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + if let Some(selector) = &self.selector { + selector.write(xml, ctx.child()).await?; + } + self.filter.write(xml, ctx.child()).await?; + if let Some(tz) = &self.timezone { + tz.write(xml, ctx.child()).await?; + } + xml.write_event_async(Event::End(end)).await } } impl QuickWritable for CalendarMultiget { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + let start = ctx.create_cal_element("calendar-multiget"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + if let Some(selector) = &self.selector { + selector.write(xml, ctx.child()).await?; + } + for href in self.href.iter() { + href.write(xml, ctx.child()).await?; + } + xml.write_event_async(Event::End(end)).await } } impl QuickWritable for FreeBusyQuery { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + let start = ctx.create_cal_element("free-busy-query"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + self.0.write(xml, ctx.child()).await?; + xml.write_event_async(Event::End(end)).await } } // -------------------------- DAV::prop -------------------------------------- -impl QuickWritable for Property { +impl QuickWritable for 
PropertyRequest { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + let mut atom = async |c| xml.write_event_async(Event::Empty(ctx.create_cal_element(c))).await; + + match self { + Self::CalendarDescription => atom("calendar-description").await, + Self::CalendarTimezone => atom("calendar-timezone").await, + Self::SupportedCalendarComponentSet => atom("supported-calendar-component-set").await, + Self::SupportedCalendarData => atom("supported-calendar-data").await, + Self::MaxResourceSize => atom("max-resource-size").await, + Self::MinDateTime => atom("min-date-time").await, + Self::MaxDateTime => atom("max-date-time").await, + Self::MaxInstances => atom("max-instances").await, + Self::MaxAttendeesPerInstance => atom("max-attendees-per-instance").await, + Self::SupportedCollationSet => atom("supported-collation-set").await, + Self::CalendarData(req) => req.write(xml, ctx).await, + } } } -impl QuickWritable for PropertyRequest { +impl QuickWritable for Property { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + match self { + Self::CalendarDescription { lang, text } => { + let mut start = ctx.create_cal_element("calendar-description"); + if let Some(the_lang) = lang { + start.push_attribute(("xml:lang", the_lang.as_str())); + } + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + xml.write_event_async(Event::Text(BytesText::new(text))).await?; + xml.write_event_async(Event::End(end)).await + }, + Self::CalendarTimezone(payload) => { + let start = ctx.create_cal_element("calendar-timezone"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + xml.write_event_async(Event::Text(BytesText::new(payload))).await?; + xml.write_event_async(Event::End(end)).await + }, + Self::SupportedCalendarComponentSet(many_comp) => { + let start = ctx.create_cal_element("supported-calendar-component-set"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + for comp in many_comp.iter() { + comp.write(xml, ctx.child()).await?; + } + xml.write_event_async(Event::End(end)).await + }, + Self::SupportedCalendarData(many_mime) => { + let start = ctx.create_cal_element("supported-calendar-data"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + for mime in many_mime.iter() { + mime.write(xml, ctx.child()).await?; + } + xml.write_event_async(Event::End(end)).await + }, + Self::MaxResourceSize(bytes) => { + let start = ctx.create_cal_element("max-resource-size"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + xml.write_event_async(Event::Text(BytesText::new(bytes.to_string().as_str()))).await?; + xml.write_event_async(Event::End(end)).await + }, + Self::MinDateTime(dt) => { + let start = ctx.create_cal_element("min-date-time"); + let end = start.to_end(); + + let dtstr = format!("{}", dt.format(ICAL_DATETIME_FMT)); + xml.write_event_async(Event::Start(start.clone())).await?; + xml.write_event_async(Event::Text(BytesText::new(dtstr.as_str()))).await?; + xml.write_event_async(Event::End(end)).await + }, + Self::MaxDateTime(dt) => { + let start = ctx.create_cal_element("max-date-time"); + let end = start.to_end(); + + let dtstr = format!("{}", dt.format(ICAL_DATETIME_FMT)); + xml.write_event_async(Event::Start(start.clone())).await?; + xml.write_event_async(Event::Text(BytesText::new(dtstr.as_str()))).await?; + 
xml.write_event_async(Event::End(end)).await + }, + Self::MaxInstances(count) => { + let start = ctx.create_cal_element("max-instances"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + xml.write_event_async(Event::Text(BytesText::new(count.to_string().as_str()))).await?; + xml.write_event_async(Event::End(end)).await + }, + Self::MaxAttendeesPerInstance(count) => { + let start = ctx.create_cal_element("max-attendees-per-instance"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + xml.write_event_async(Event::Text(BytesText::new(count.to_string().as_str()))).await?; + xml.write_event_async(Event::End(end)).await + }, + Self::SupportedCollationSet(many_collations) => { + let start = ctx.create_cal_element("supported-collation-set"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + for collation in many_collations.iter() { + collation.write(xml, ctx.child()).await?; + } + xml.write_event_async(Event::End(end)).await + }, + Self::CalendarData(inner) => inner.write(xml, ctx).await, + } } } diff --git a/src/dav/caltypes.rs b/src/dav/caltypes.rs index 3b65fe2..c803c0c 100644 --- a/src/dav/caltypes.rs +++ b/src/dav/caltypes.rs @@ -35,7 +35,7 @@ impl Dav::Extension for CalExtension { /// instruction in Section 12.13.2 of [RFC2518]. /// /// -pub struct MkCalendar(Dav::Set); +pub struct MkCalendar(pub Dav::Set); /// If a response body for a successful request is included, it MUST @@ -51,7 +51,7 @@ pub struct MkCalendar(Dav::Set); /// Definition: /// /// -pub struct MkCalendarResponse(Vec>); +pub struct MkCalendarResponse(pub Vec>); // --- (REPORT PART) --- @@ -69,9 +69,9 @@ pub struct MkCalendarResponse(Vec>); /// DAV:propname | /// DAV:prop)?, filter, timezone?)> pub struct CalendarQuery { - selector: Option>, - filter: Filter, - timezone: Option, + pub selector: Option>, + pub filter: Filter, + pub timezone: Option, } /// Name: calendar-multiget @@ -89,8 +89,8 @@ pub struct CalendarQuery { /// DAV:propname | /// DAV:prop)?, DAV:href+)> pub struct CalendarMultiget { - selector: Option>, - href: Vec, + pub selector: Option>, + pub href: Vec, } /// Name: free-busy-query @@ -104,7 +104,7 @@ pub struct CalendarMultiget { /// /// Definition: /// -pub struct FreeBusyQuery(TimeRange); +pub struct FreeBusyQuery(pub TimeRange); // ----- Hooks ----- pub enum ResourceType { @@ -784,7 +784,10 @@ pub enum Collation { /// when nested in the DAV:prop XML element in a calendaring /// REPORT response to specify the content of a returned /// calendar object resource. 
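
// Editor's note (illustrative, not part of the patch): the ICAL_DATETIME_FMT
// constant this commit adds to calencoder.rs renders chrono DateTime<Utc>
// values in the iCalendar "date with UTC time" form used by min-date-time,
// max-date-time and the time-range style attributes, e.g.:
use chrono::{TimeZone, Utc};
let dt = Utc.with_ymd_and_hms(2006, 1, 4, 0, 0, 0).unwrap();
assert_eq!(dt.format("%Y%m%dT%H%M%SZ").to_string(), "20060104T000000Z");
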
-pub struct CalendarDataPayload(String); +pub struct CalendarDataPayload { + pub mime: Option, + pub payload: String, +} /// , comp: Option, reccurence: Option, limit_freebusy_set: Option, -- cgit v1.2.3 From dba0dcdc4122ff73c94d733376a77e98cabd7478 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Sat, 2 Mar 2024 18:35:11 +0100 Subject: Serialize CalDAV errors --- src/dav/calencoder.rs | 56 ++++++++++++++++++++++++++++++++++++++++++++------- src/dav/caltypes.rs | 8 +++++--- 2 files changed, 54 insertions(+), 10 deletions(-) diff --git a/src/dav/calencoder.rs b/src/dav/calencoder.rs index 5cd40ef..1016a20 100644 --- a/src/dav/calencoder.rs +++ b/src/dav/calencoder.rs @@ -262,14 +262,56 @@ impl QuickWritable for ResourceType { // --------------------------- DAV::error ------------------------------------ impl QuickWritable for Violation { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { + let mut atom = async |c| xml.write_event_async(Event::Empty(ctx.create_cal_element(c))).await; + match self { - Self::ResourceMustBeNull => { - let start = ctx.create_cal_element("resource-must-be-null"); - xml.write_event_async(Event::Empty(start)).await?; - }, - _ => unimplemented!(), - }; - Ok(()) + //@FIXME + // DAV elements, should not be here but in RFC3744 on ACLs + // (we do not use atom as this error is in the DAV namespace, not the caldav one) + Self::NeedPrivileges => xml.write_event_async(Event::Empty(ctx.create_dav_element("need-privileges"))).await, + + // Regular CalDAV errors + Self::ResourceMustBeNull => atom("resource-must-be-null").await, + Self::CalendarCollectionLocationOk => atom("calendar-collection-location-ok").await, + Self::ValidCalendarData => atom("valid-calendar-data").await, + Self::InitializeCalendarCollection => atom("initialize-calendar-collection").await, + Self::SupportedCalendarData => atom("supported-calendar-data").await, + Self::ValidCalendarObjectResource => atom("valid-calendar-object-resource").await, + Self::SupportedCalendarComponent => atom("supported-calendar-component").await, + Self::ValidCalendarObjectResource => atom("valid-calendar-object-resource").await, + Self::SupportedCalendarComponent => atom("SupportedCalendarComponent").await, + Self::NoUidConflict(href) => { + let start = ctx.create_cal_element("no-uid-conflict"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + href.write(xml, ctx.child()).await?; + xml.write_event_async(Event::End(end)).await + }, + Self::MaxResourceSize => atom("max-resource-size").await, + Self::MinDateTime => atom("min-date-time").await, + Self::MaxDateTime => atom("max-date-time").await, + Self::MaxInstances => atom("max-instances").await, + Self::MaxAttendeesPerInstance => atom("max-attendees-per-instance").await, + Self::ValidFilter => atom("valid-filter").await, + Self::SupportedFilter { comp, prop, param } => { + let start = ctx.create_cal_element("supported-filter"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + for comp_item in comp.iter() { + comp_item.write(xml, ctx.child()).await?; + } + for prop_item in prop.iter() { + prop_item.write(xml, ctx.child()).await?; + } + for param_item in param.iter() { + param_item.write(xml, ctx.child()).await?; + } + xml.write_event_async(Event::End(end)).await + }, + Self::NumberOfMatchesWithinLimits => atom("number-of-matches-within-limits").await, + } } } diff --git a/src/dav/caltypes.rs b/src/dav/caltypes.rs index c803c0c..29dc02f 100644 --- a/src/dav/caltypes.rs 
+++ b/src/dav/caltypes.rs @@ -614,9 +614,11 @@ pub enum Violation { /// containing a single valid VTIMEZONE component. ValidCalendarData, - /// (DAV:needs-privilege): The DAV:bind privilege MUST be granted to + ///@FIXME should not be here but in RFC3744 + /// !!! ERRATA 1002 !!! + /// (DAV:need-privileges): The DAV:bind privilege MUST be granted to /// the current user on the parent collection of the Request-URI. - NeedsPrivilege, + NeedPrivileges, /// (CALDAV:initialize-calendar-collection): A new calendar collection /// exists at the Request-URI. The DAV:resourcetype of the calendar @@ -693,7 +695,7 @@ pub enum Violation { /// the resource will be stored; MaxAttendeesPerInstance, - /// The CALDAV:filter XML element (see + /// (CALDAV:valid-filter): The CALDAV:filter XML element (see /// Section 9.7) specified in the REPORT request MUST be valid. For /// instance, a CALDAV:filter cannot nest a /// element in a element, and a CALDAV:filter -- cgit v1.2.3 From 61ee5f153b01c8a0927f0e4547c8c655ede912ed Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Sat, 2 Mar 2024 19:01:20 +0100 Subject: Serialize calendar-data --- src/dav/calencoder.rs | 58 +++++++++++++++++++++++++++++++++++++++++---------- src/dav/caltypes.rs | 16 +++++++------- 2 files changed, 55 insertions(+), 19 deletions(-) diff --git a/src/dav/calencoder.rs b/src/dav/calencoder.rs index 1016a20..73db4fa 100644 --- a/src/dav/calencoder.rs +++ b/src/dav/calencoder.rs @@ -319,37 +319,73 @@ impl QuickWritable for Violation { // ---------------------------- Inner XML ------------------------------------ impl QuickWritable for SupportedCollation { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + let start = ctx.create_cal_element("supported-collation"); + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + self.0.write(xml, ctx.child()).await?; + xml.write_event_async(Event::End(end)).await + } } impl QuickWritable for Collation { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + let col = match self { + Self::AsciiCaseMap => "i;ascii-casemap", + Self::Octet => "i;octet", + Self::Unknown(v) => v.as_str(), + }; + + xml.write_event_async(Event::Text(BytesText::new(col))).await } } impl QuickWritable for CalendarDataPayload { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + let mut start = ctx.create_cal_element("calendar-data"); + if let Some(mime) = &self.mime { + start.push_attribute(("content-type", mime.content_type.as_str())); + start.push_attribute(("version", mime.version.as_str())); + } + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + xml.write_event_async(Event::Text(BytesText::new(self.payload.as_str()))).await?; + xml.write_event_async(Event::End(end)).await } } impl QuickWritable for CalendarDataRequest { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + let mut start = ctx.create_cal_element("calendar-data"); + if let Some(mime) = &self.mime { + start.push_attribute(("content-type", mime.content_type.as_str())); + start.push_attribute(("version", mime.version.as_str())); + } + let end = start.to_end(); + xml.write_event_async(Event::Start(start.clone())).await?; + if let Some(comp) = &self.comp { + comp.write(xml, ctx.child()).await?; + } + if let Some(recurrence) = &self.recurrence { + recurrence.write(xml, ctx.child()).await?; + } + if let Some(freebusy) = 
&self.limit_freebusy_set { + freebusy.write(xml, ctx.child()).await?; + } + xml.write_event_async(Event::End(end)).await } } impl QuickWritable for CalendarDataEmpty { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); - } -} - -impl QuickWritable for CalendarDataSupport { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + let mut empty = ctx.create_cal_element("calendar-data"); + if let Some(mime) = &self.0 { + empty.push_attribute(("content-type", mime.content_type.as_str())); + empty.push_attribute(("version", mime.version.as_str())); + } + xml.write_event_async(Event::Empty(empty)).await } } diff --git a/src/dav/caltypes.rs b/src/dav/caltypes.rs index 29dc02f..97d75c5 100644 --- a/src/dav/caltypes.rs +++ b/src/dav/caltypes.rs @@ -770,7 +770,7 @@ pub enum Violation { /// If the client chooses a collation not supported by the server, the /// server MUST respond with a CALDAV:supported-collation precondition /// error response. -pub struct SupportedCollation(Collation); +pub struct SupportedCollation(pub Collation); #[derive(Default)] pub enum Collation { #[default] @@ -799,10 +799,10 @@ pub struct CalendarDataPayload { /// REPORT request to specify which parts of calendar object /// resources should be returned in the response; pub struct CalendarDataRequest { - mime: Option, - comp: Option, - reccurence: Option, - limit_freebusy_set: Option, + pub mime: Option, + pub comp: Option, + pub recurrence: Option, + pub limit_freebusy_set: Option, } /// calendar-data specialization for Property @@ -812,7 +812,7 @@ pub struct CalendarDataRequest { /// when nested in the CALDAV:supported-calendar-data property /// to specify a supported media type for calendar object /// resources; -pub struct CalendarDataEmpty(Option); +pub struct CalendarDataEmpty(pub Option); /// @@ -821,8 +821,8 @@ pub struct CalendarDataEmpty(Option); /// attributes can be used on all three variants of the /// CALDAV:calendar-data XML element. 
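
// Editor's note (illustrative sketch, not part of the patch): the
// content-type / version pair carried by CalendarDataSupport is what the
// encoder above turns into attributes on the calendar-data element, so
// advertising plain iCalendar 2.0 support would be built as below and is
// expected to serialize along the lines of
//     <C:calendar-data content-type="text/calendar" version="2.0"/>
// (the prefix and namespace declarations depend on the Context in use).
let _supported = CalendarDataEmpty(Some(CalendarDataSupport {
    content_type: "text/calendar".into(),
    version: "2.0".into(),
}));
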
pub struct CalendarDataSupport { - content_type: String, - version: String, + pub content_type: String, + pub version: String, } /// Name: comp -- cgit v1.2.3 From 17142bd687ac2af7c325c7c3617937c56a4ca58d Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Sat, 2 Mar 2024 23:01:56 +0100 Subject: WIP encoding --- src/dav/calencoder.rs | 55 ++++++++++++++++++++++++++++++++++++++++++--------- src/dav/caltypes.rs | 29 +++++++++++++++++++-------- src/dav/encoder.rs | 8 ++++---- 3 files changed, 71 insertions(+), 21 deletions(-) diff --git a/src/dav/calencoder.rs b/src/dav/calencoder.rs index 73db4fa..a701fe1 100644 --- a/src/dav/calencoder.rs +++ b/src/dav/calencoder.rs @@ -278,8 +278,6 @@ impl QuickWritable for Violation { Self::SupportedCalendarData => atom("supported-calendar-data").await, Self::ValidCalendarObjectResource => atom("valid-calendar-object-resource").await, Self::SupportedCalendarComponent => atom("supported-calendar-component").await, - Self::ValidCalendarObjectResource => atom("valid-calendar-object-resource").await, - Self::SupportedCalendarComponent => atom("SupportedCalendarComponent").await, Self::NoUidConflict(href) => { let start = ctx.create_cal_element("no-uid-conflict"); let end = start.to_end(); @@ -391,43 +389,82 @@ impl QuickWritable for CalendarDataEmpty { impl QuickWritable for Comp { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + let mut start = ctx.create_cal_element("calendar-data"); + start.push_attribute(("name", self.name.as_str())); + let end = start.to_end(); + xml.write_event_async(Event::Start(start.clone())).await?; + self.prop_kind.write(xml, ctx.child()).await?; + self.comp_kind.write(xml, ctx.child()).await?; + xml.write_event_async(Event::End(end)).await } } impl QuickWritable for CompSupport { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + let mut empty = ctx.create_cal_element("comp"); + empty.push_attribute(("name", self.0.as_str())); + xml.write_event_async(Event::Empty(empty)).await } } impl QuickWritable for CompKind { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + match self { + Self::AllComp => xml.write_event_async(Event::Empty(ctx.create_cal_element("allcomp"))).await, + Self::Comp(many_comp) => { + for comp in many_comp.iter() { + // Required: recursion in an async fn requires boxing + // rustc --explain E0733 + Box::pin(comp.write(xml, ctx.child())).await?; + } + Ok(()) + } + } } } impl QuickWritable for PropKind { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + match self { + Self::AllProp => xml.write_event_async(Event::Empty(ctx.create_cal_element("allprop"))).await, + Self::Prop(many_prop) => { + for prop in many_prop.iter() { + prop.write(xml, ctx.child()).await?; + } + Ok(()) + } + } } } impl QuickWritable for CalProp { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + let mut empty = ctx.create_cal_element("prop"); + empty.push_attribute(("name", self.name.0.as_str())); + match self.novalue { + None => (), + Some(true) => empty.push_attribute(("novalue", "yes")), + Some(false) => empty.push_attribute(("novalue", "no")), + } + xml.write_event_async(Event::Empty(empty)).await } } impl QuickWritable for RecurrenceModifier { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + match self { + Self::Expand(exp) => exp.write(xml, ctx).await, + 
Self::LimitRecurrenceSet(lrs) => lrs.write(xml, ctx).await, + } } } impl QuickWritable for Expand { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + let mut empty = ctx.create_cal_element("expand"); + empty.push_attribute(("start", format!("{}", self.0.format(ICAL_DATETIME_FMT)).as_str())); + empty.push_attribute(("end", format!("{}", self.1.format(ICAL_DATETIME_FMT)).as_str())); + xml.write_event_async(Event::Empty(empty)).await } } diff --git a/src/dav/caltypes.rs b/src/dav/caltypes.rs index 97d75c5..5668201 100644 --- a/src/dav/caltypes.rs +++ b/src/dav/caltypes.rs @@ -846,9 +846,9 @@ pub struct CalendarDataSupport { /// in the "urn:ietf:params:xml:ns:caldav" namespace instead of the /// "DAV:" namespace. pub struct Comp { - name: Component, - prop_kind: PropKind, - comp_kind: CompKind, + pub name: Component, + pub prop_kind: PropKind, + pub comp_kind: CompKind, } /// For SupportedCalendarComponentSet @@ -864,7 +864,7 @@ pub struct Comp { /// /// /// -pub struct CompSupport(Component); +pub struct CompSupport(pub Component); /// Name: allcomp /// @@ -932,8 +932,8 @@ pub enum PropKind { /// defined in the "urn:ietf:params:xml:ns:caldav" namespace instead /// of the "DAV:" namespace. pub struct CalProp { - name: ComponentProperty, - novalue: bool, + pub name: ComponentProperty, + pub novalue: Option, } pub enum RecurrenceModifier { @@ -981,7 +981,7 @@ pub enum RecurrenceModifier { /// end CDATA #REQUIRED> /// start value: an iCalendar "date with UTC time" /// end value: an iCalendar "date with UTC time" -pub struct Expand(DateTime, DateTime); +pub struct Expand(pub DateTime, pub DateTime); /// CALDAV:limit-recurrence-set XML Element /// @@ -1348,11 +1348,24 @@ pub enum Component { VAlarm, Unknown(String), } +impl Component { + pub fn as_str<'a>(&'a self) -> &'a str { + match self { + Self::VCalendar => "VCALENDAR", + Self::VJournal => "VJOURNAL", + Self::VFreeBusy => "VFREEBUSY", + Self::VEvent => "VEVENT", + Self::VTodo => "VTODO", + Self::VAlarm => "VALARM", + Self::Unknown(c) => c, + } + } +} /// name="VERSION", name="SUMMARY", etc. /// Can be set on different objects: VCalendar, VEvent, etc. 
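
// Editor's note (illustrative sketch, not part of the patch): the Expand
// encoder above emits an empty element whose start/end attributes use the
// same "date with UTC time" formatting, so expanding recurrences over a
// one-day window is expected to serialize along the lines of
//     <C:expand start="20060104T000000Z" end="20060105T000000Z"/>
use chrono::{TimeZone, Utc};
let _one_day = Expand(
    Utc.with_ymd_and_hms(2006, 1, 4, 0, 0, 0).unwrap(),
    Utc.with_ymd_and_hms(2006, 1, 5, 0, 0, 0).unwrap(),
);
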
/// Might be replaced by an enum later -pub struct ComponentProperty(String); +pub struct ComponentProperty(pub String); /// like PARSTAT pub struct PropertyParameter(String); diff --git a/src/dav/encoder.rs b/src/dav/encoder.rs index f842734..7778d61 100644 --- a/src/dav/encoder.rs +++ b/src/dav/encoder.rs @@ -37,16 +37,16 @@ impl Context for NoExtension { } start } - async fn hook_error(&self, err: &Disabled, xml: &mut Writer) -> Result<(), QError> { + async fn hook_error(&self, _err: &Disabled, _xml: &mut Writer) -> Result<(), QError> { unreachable!(); } - async fn hook_property(&self, prop: &Disabled, xml: &mut Writer) -> Result<(), QError> { + async fn hook_property(&self, _prop: &Disabled, _xml: &mut Writer) -> Result<(), QError> { unreachable!(); } - async fn hook_propertyrequest(&self, prop: &Disabled, xml: &mut Writer) -> Result<(), QError> { + async fn hook_propertyrequest(&self, _prop: &Disabled, _xml: &mut Writer) -> Result<(), QError> { unreachable!(); } - async fn hook_resourcetype(&self, restype: &Disabled, xml: &mut Writer) -> Result<(), QError> { + async fn hook_resourcetype(&self, _restype: &Disabled, _xml: &mut Writer) -> Result<(), QError> { unreachable!(); } } -- cgit v1.2.3 From 99f8085e475f8d55ab365c71d5fbd48cf3dc94c6 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Sun, 3 Mar 2024 10:50:32 +0100 Subject: Serialize another caldav filter --- src/dav/calencoder.rs | 87 ++++++++++++++++++++++++++++++++++++++++++++------- src/dav/caltypes.rs | 40 +++++++++++------------ 2 files changed, 94 insertions(+), 33 deletions(-) diff --git a/src/dav/calencoder.rs b/src/dav/calencoder.rs index a701fe1..ca314f1 100644 --- a/src/dav/calencoder.rs +++ b/src/dav/calencoder.rs @@ -470,61 +470,124 @@ impl QuickWritable for Expand { impl QuickWritable for LimitRecurrenceSet { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + let mut empty = ctx.create_cal_element("limit-recurrence-set"); + empty.push_attribute(("start", format!("{}", self.0.format(ICAL_DATETIME_FMT)).as_str())); + empty.push_attribute(("end", format!("{}", self.1.format(ICAL_DATETIME_FMT)).as_str())); + xml.write_event_async(Event::Empty(empty)).await } } impl QuickWritable for LimitFreebusySet { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + let mut empty = ctx.create_cal_element("limit-freebusy-set"); + empty.push_attribute(("start", format!("{}", self.0.format(ICAL_DATETIME_FMT)).as_str())); + empty.push_attribute(("end", format!("{}", self.1.format(ICAL_DATETIME_FMT)).as_str())); + xml.write_event_async(Event::Empty(empty)).await } } impl QuickWritable for CalendarSelector { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + match self { + Self::AllProp => xml.write_event_async(Event::Empty(ctx.create_dav_element("allprop"))).await, + Self::PropName => xml.write_event_async(Event::Empty(ctx.create_dav_element("propname"))).await, + Self::Prop(prop) => prop.write(xml, ctx).await, + } } } impl QuickWritable for CompFilter { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + let mut start = ctx.create_cal_element("comp-filter"); + start.push_attribute(("name", self.name.as_str())); + + match &self.additional_rules { + None => xml.write_event_async(Event::Empty(start)).await, + Some(rules) => { + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + rules.write(xml, ctx.child()).await?; + 
xml.write_event_async(Event::End(end)).await + } + } } } -impl QuickWritable for CompFilterInner { +impl QuickWritable for CompFilterRules { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + match self { + Self::IsNotDefined => xml.write_event_async(Event::Empty(ctx.create_dav_element("is-not-defined"))).await, + Self::Matches(cfm) => cfm.write(xml, ctx).await, + } } } impl QuickWritable for CompFilterMatch { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + if let Some(time_range) = &self.time_range { + time_range.write(xml, ctx.child()).await?; + } + + for prop_item in self.prop_filter.iter() { + prop_item.write(xml, ctx.child()).await?; + } + for comp_item in self.comp_filter.iter() { + // Required: recursion in an async fn requires boxing + // rustc --explain E0733 + Box::pin(comp_item.write(xml, ctx.child())).await?; + } + Ok(()) } } impl QuickWritable for PropFilter { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + let mut start = ctx.create_cal_element("prop-filter"); + start.push_attribute(("name", self.name.as_str())); + + match &self.additional_rules { + None => xml.write_event_async(Event::Empty(start)).await, + Some(rules) => { + let end = start.to_end(); + xml.write_event_async(Event::Start(start.clone())).await?; + rules.write(xml, ctx.child()).await?; + xml.write_event_async(Event::End(end)).await + } + } } } -impl QuickWritable for PropFilterInner { +impl QuickWritable for PropFilterRules { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + match self { + Self::IsNotDefined => xml.write_event_async(Event::Empty(ctx.create_dav_element("is-not-defined"))).await, + Self::Match(prop_match) => prop_match.write(xml, ctx).await, + } } } impl QuickWritable for PropFilterMatch { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + if let Some(time_range) = &self.time_range { + time_range.write(xml, ctx.child()).await?; + } + if let Some(time_or_text) = &self.time_or_text { + time_or_text.write(xml, ctx.child()).await?; + } + for param_item in self.param_filter.iter() { + param_item.write(xml, ctx.child()).await?; + } + Ok(()) } } impl QuickWritable for TimeOrText { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + match self { + Self::Time(time) => time.write(xml, ctx).await, + Self::Text(txt) => txt.write(xml, ctx).await, + } } } diff --git a/src/dav/caltypes.rs b/src/dav/caltypes.rs index 5668201..e2ba490 100644 --- a/src/dav/caltypes.rs +++ b/src/dav/caltypes.rs @@ -1028,7 +1028,7 @@ pub struct Expand(pub DateTime, pub DateTime); /// end CDATA #REQUIRED> /// start value: an iCalendar "date with UTC time" /// end value: an iCalendar "date with UTC time" -pub struct LimitRecurrenceSet(DateTime, DateTime); +pub struct LimitRecurrenceSet(pub DateTime, pub DateTime); /// Name: limit-freebusy-set /// @@ -1058,7 +1058,7 @@ pub struct LimitRecurrenceSet(DateTime, DateTime); /// end CDATA #REQUIRED> /// start value: an iCalendar "date with UTC time" /// end value: an iCalendar "date with UTC time" -pub struct LimitFreebusySet(DateTime, DateTime); +pub struct LimitFreebusySet(pub DateTime, pub DateTime); /// Used by CalendarQuery & CalendarMultiget pub enum CalendarSelector { @@ -1116,21 +1116,20 @@ pub enum CalendarSelector { /// name value: a calendar object or calendar component /// type (e.g., VEVENT) pub struct CompFilter { - 
name: Component, - inner: CompFilterInner, + pub name: Component, + // Option 1 = None, Option 2, 3, 4 = Some + pub additional_rules: Option, } -pub enum CompFilterInner { - // Option 1 - Empty, +pub enum CompFilterRules { // Option 2 IsNotDefined, // Options 3 & 4 Matches(CompFilterMatch), } pub struct CompFilterMatch { - time_range: Option, - prop_filter: Vec, - comp_filter: Vec, + pub time_range: Option, + pub prop_filter: Vec, + pub comp_filter: Vec, } /// Name: prop-filter @@ -1178,21 +1177,20 @@ pub struct CompFilterMatch { /// /// name value: a calendar property name (e.g., ATTENDEE) pub struct PropFilter { - name: Component, - inner: PropFilterInner, + pub name: Component, + // None = Option 1, Some() = Option 2, 3 & 4 + pub additional_rules: Option, } -pub enum PropFilterInner { - // Option 1 - Empty, +pub enum PropFilterRules { // Option 2 IsNotDefined, // Options 3 & 4 Match(PropFilterMatch), } pub struct PropFilterMatch { - time_range: Option, - time_or_text: Option, - param_filter: Vec, + pub time_range: Option, + pub time_or_text: Option, + pub param_filter: Vec, } pub enum TimeOrText { Time(TimeRange), @@ -1228,9 +1226,9 @@ pub enum TimeOrText { /// pub struct TextMatch { - collation: Option, - negate_condition: bool, - text: String, + pub collation: Option, + pub negate_condition: Option, + pub text: String, } /// Name: param-filter -- cgit v1.2.3 From 433e1f97f6d83bc11134df36de03e47b9393582b Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Sun, 3 Mar 2024 11:00:10 +0100 Subject: Param-filter encoding --- src/dav/calencoder.rs | 33 ++++++++++++++++++++++++++++++--- src/dav/caltypes.rs | 20 +++++++++++++++++--- 2 files changed, 47 insertions(+), 6 deletions(-) diff --git a/src/dav/calencoder.rs b/src/dav/calencoder.rs index ca314f1..503d48b 100644 --- a/src/dav/calencoder.rs +++ b/src/dav/calencoder.rs @@ -593,19 +593,46 @@ impl QuickWritable for TimeOrText { impl QuickWritable for TextMatch { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + let mut start = ctx.create_cal_element("text-match"); + if let Some(collation) = &self.collation { + start.push_attribute(("collation", collation.as_str())); + } + match self.negate_condition { + None => (), + Some(true) => start.push_attribute(("negate-condition", "yes")), + Some(false) => start.push_attribute(("negate-condition", "no")), + } + let end = start.to_end(); + + xml.write_event_async(Event::Start(start.clone())).await?; + xml.write_event_async(Event::Text(BytesText::new(self.text.as_str()))).await?; + xml.write_event_async(Event::End(end)).await } } impl QuickWritable for ParamFilter { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + let mut start = ctx.create_cal_element("param-filter"); + start.push_attribute(("name", self.name.as_str())); + + match &self.additional_rules { + None => xml.write_event_async(Event::Empty(start)).await, + Some(rules) => { + let end = start.to_end(); + xml.write_event_async(Event::Start(start.clone())).await?; + rules.write(xml, ctx.child()).await?; + xml.write_event_async(Event::End(end)).await + } + } } } impl QuickWritable for ParamFilterMatch { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + match self { + Self::IsNotDefined => xml.write_event_async(Event::Empty(ctx.create_dav_element("is-not-defined"))).await, + Self::Match(tm) => tm.write(xml, ctx).await, + } } } diff --git a/src/dav/caltypes.rs b/src/dav/caltypes.rs index e2ba490..5d30600 100644 --- 
a/src/dav/caltypes.rs +++ b/src/dav/caltypes.rs @@ -778,6 +778,15 @@ pub enum Collation { Octet, Unknown(String), } +impl Collation { + pub fn as_str<'a>(&'a self) -> &'a str { + match self { + Self::AsciiCaseMap => "i;ascii-casemap", + Self::Octet => "i;octet", + Self::Unknown(c) => c.as_str(), + } + } +} /// @@ -1261,8 +1270,8 @@ pub struct TextMatch { /// /// name value: a property parameter name (e.g., PARTSTAT) pub struct ParamFilter { - name: PropertyParameter, - inner: Option, + pub name: PropertyParameter, + pub additional_rules: Option, } pub enum ParamFilterMatch { IsNotDefined, @@ -1366,7 +1375,12 @@ impl Component { pub struct ComponentProperty(pub String); /// like PARSTAT -pub struct PropertyParameter(String); +pub struct PropertyParameter(pub String); +impl PropertyParameter { + pub fn as_str<'a>(&'a self) -> &'a str { + self.0.as_str() + } +} /// Name: time-range /// -- cgit v1.2.3 From 463be750e1b053f7a93eff9e91fc52e962ef3f18 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Sun, 3 Mar 2024 11:08:00 +0100 Subject: CalEncoder should be fully implemented now --- src/dav/calencoder.rs | 41 ++++++++++++++++--------------- src/dav/caltypes.rs | 68 ++++++++++++++++++++++++++------------------------- 2 files changed, 56 insertions(+), 53 deletions(-) diff --git a/src/dav/calencoder.rs b/src/dav/calencoder.rs index 503d48b..05d0454 100644 --- a/src/dav/calencoder.rs +++ b/src/dav/calencoder.rs @@ -638,37 +638,38 @@ impl QuickWritable for ParamFilterMatch { impl QuickWritable for TimeZone { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); - } -} - -impl QuickWritable for Filter { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); - } -} + let mut start = ctx.create_cal_element("timezone"); + let end = start.to_end(); -impl QuickWritable for Component { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + xml.write_event_async(Event::Start(start.clone())).await?; + xml.write_event_async(Event::Text(BytesText::new(self.0.as_str()))).await?; + xml.write_event_async(Event::End(end)).await } } -impl QuickWritable for ComponentProperty { +impl QuickWritable for Filter { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); - } -} + let mut start = ctx.create_cal_element("filter"); + let end = start.to_end(); -impl QuickWritable for PropertyParameter { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + xml.write_event_async(Event::Start(start.clone())).await?; + self.0.write(xml, ctx.child()).await?; + xml.write_event_async(Event::End(end)).await } } impl QuickWritable for TimeRange { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - unimplemented!(); + let mut empty = ctx.create_cal_element("time-range"); + match self { + Self::OnlyStart(start) => empty.push_attribute(("start", format!("{}", start.format(ICAL_DATETIME_FMT)).as_str())), + Self::OnlyEnd(end) => empty.push_attribute(("end", format!("{}", end.format(ICAL_DATETIME_FMT)).as_str())), + Self::FullRange(start, end) => { + empty.push_attribute(("start", format!("{}", start.format(ICAL_DATETIME_FMT)).as_str())); + empty.push_attribute(("end", format!("{}", end.format(ICAL_DATETIME_FMT)).as_str())); + } + } + xml.write_event_async(Event::Empty(empty)).await } } diff --git a/src/dav/caltypes.rs b/src/dav/caltypes.rs index 5d30600..9f4dfd6 100644 --- a/src/dav/caltypes.rs +++ b/src/dav/caltypes.rs @@ 
-771,23 +771,6 @@ pub enum Violation { /// server MUST respond with a CALDAV:supported-collation precondition /// error response. pub struct SupportedCollation(pub Collation); -#[derive(Default)] -pub enum Collation { - #[default] - AsciiCaseMap, - Octet, - Unknown(String), -} -impl Collation { - pub fn as_str<'a>(&'a self) -> &'a str { - match self { - Self::AsciiCaseMap => "i;ascii-casemap", - Self::Octet => "i;octet", - Self::Unknown(c) => c.as_str(), - } - } -} - /// /// PCDATA value: iCalendar object @@ -1328,7 +1311,7 @@ pub enum ParamFilterMatch { /// /// /// PCDATA value: an iCalendar object with exactly one VTIMEZONE -pub struct TimeZone(String); +pub struct TimeZone(pub String); /// Name: filter /// @@ -1343,7 +1326,24 @@ pub struct TimeZone(String); /// /// Definition: /// -pub struct Filter(CompFilter); +pub struct Filter(pub CompFilter); + +/// Name: time-range +/// +/// Definition: +/// +/// +/// +/// start value: an iCalendar "date with UTC time" +/// end value: an iCalendar "date with UTC time" +pub enum TimeRange { + OnlyStart(DateTime), + OnlyEnd(DateTime), + FullRange(DateTime, DateTime), +} + +// ----------------------- ENUM ATTRIBUTES --------------------- /// Known components pub enum Component { @@ -1355,7 +1355,7 @@ pub enum Component { VAlarm, Unknown(String), } -impl Component { +impl Component { pub fn as_str<'a>(&'a self) -> &'a str { match self { Self::VCalendar => "VCALENDAR", @@ -1382,17 +1382,19 @@ impl PropertyParameter { } } -/// Name: time-range -/// -/// Definition: -/// -/// -/// -/// start value: an iCalendar "date with UTC time" -/// end value: an iCalendar "date with UTC time" -pub enum TimeRange { - OnlyStart(DateTime), - OnlyEnd(DateTime), - FullRange(DateTime, DateTime), +#[derive(Default)] +pub enum Collation { + #[default] + AsciiCaseMap, + Octet, + Unknown(String), +} +impl Collation { + pub fn as_str<'a>(&'a self) -> &'a str { + match self { + Self::AsciiCaseMap => "i;ascii-casemap", + Self::Octet => "i;octet", + Self::Unknown(c) => c.as_str(), + } + } } -- cgit v1.2.3 From 4276090314207f36a7608976e7271682157ae41d Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Sun, 3 Mar 2024 11:26:32 +0100 Subject: WIP testing --- src/dav/calencoder.rs | 75 ++++++++++++++++++++++++++++++++++++++++++++------- 1 file changed, 65 insertions(+), 10 deletions(-) diff --git a/src/dav/calencoder.rs b/src/dav/calencoder.rs index 05d0454..f5f4924 100644 --- a/src/dav/calencoder.rs +++ b/src/dav/calencoder.rs @@ -677,27 +677,82 @@ impl QuickWritable for TimeRange { #[cfg(test)] mod tests { use super::*; - use crate::dav::types::{Error, Violation as DavViolation}; + use crate::dav::types as dav; use tokio::io::AsyncWriteExt; - #[tokio::test] - async fn test_violation() { + async fn serialize>(ctx: C, elem: &Q) -> String { let mut buffer = Vec::new(); let mut tokio_buffer = tokio::io::BufWriter::new(&mut buffer); let mut writer = Writer::new_with_indent(&mut tokio_buffer, b' ', 4); + elem.write(&mut writer, ctx).await.expect("xml serialization"); + tokio_buffer.flush().await.expect("tokio buffer flush"); + let got = std::str::from_utf8(buffer.as_slice()).unwrap(); - let res = Error(vec![ - DavViolation::Extension(Violation::ResourceMustBeNull), - ]); + return got.into() + } - res.write(&mut writer, CalExtension { root: true }).await.expect("xml serialization"); - tokio_buffer.flush().await.expect("tokio buffer flush"); + #[tokio::test] + async fn basic_violation() { + let got = serialize( + CalExtension { root: true }, + &dav::Error(vec![ + 
dav::Violation::Extension(Violation::ResourceMustBeNull), + ]) + ).await; let expected = r#" "#; - let got = std::str::from_utf8(buffer.as_slice()).unwrap(); - assert_eq!(got, expected); + assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + } + + #[tokio::test] + async fn rfc_calendar_query1() { + let got = serialize( + CalExtension { root: true }, + &CalendarQuery { + selector: Some(CalendarSelector::Prop(dav::PropName(vec![ + ]))), + filter: Filter(CompFilter { + name: Component::VCalendar, + additional_rules: None, + }), + timezone: None, + } + ).await; + + let expected = r#" + + + + + + + + + + + + + + + + + + + + + + + + + + + + +"#; + + assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); } } -- cgit v1.2.3 From e127ebeaa9fc19312a03676ab24a39ac9bc02def Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Sun, 3 Mar 2024 13:07:22 +0100 Subject: Still testing CalDAV --- src/dav/calencoder.rs | 31 ++++++++++++++++++++++++++++++- src/dav/caltypes.rs | 2 ++ 2 files changed, 32 insertions(+), 1 deletion(-) diff --git a/src/dav/calencoder.rs b/src/dav/calencoder.rs index f5f4924..9711f5f 100644 --- a/src/dav/calencoder.rs +++ b/src/dav/calencoder.rs @@ -389,7 +389,7 @@ impl QuickWritable for CalendarDataEmpty { impl QuickWritable for Comp { async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let mut start = ctx.create_cal_element("calendar-data"); + let mut start = ctx.create_cal_element("comp"); start.push_attribute(("name", self.name.as_str())); let end = start.to_end(); xml.write_event_async(Event::Start(start.clone())).await?; @@ -713,6 +713,35 @@ mod tests { CalExtension { root: true }, &CalendarQuery { selector: Some(CalendarSelector::Prop(dav::PropName(vec![ + dav::PropertyRequest::GetEtag, + dav::PropertyRequest::Extension(PropertyRequest::CalendarData(CalendarDataRequest { + mime: None, + comp: Some(Comp { + name: Component::VCalendar, + prop_kind: PropKind::Prop(vec![ + CalProp { + name: ComponentProperty("VERSION".into()), + novalue: None, + } + ]), + comp_kind: CompKind::Comp(vec![ + Comp { + name: Component::VEvent, + comp_kind: CompKind::Comp(vec![]), + prop_kind: PropKind::Prop(vec![ + CalProp { name: ComponentProperty("SUMMARY".into()), novalue: None }, + ]), + }, + Comp { + name: Component::VTimeZone, + prop_kind: PropKind::Prop(vec![]), + comp_kind: CompKind::Comp(vec![]), + } + ]), + }), + recurrence: None, + limit_freebusy_set: None, + })), ]))), filter: Filter(CompFilter { name: Component::VCalendar, diff --git a/src/dav/caltypes.rs b/src/dav/caltypes.rs index 9f4dfd6..d2e6169 100644 --- a/src/dav/caltypes.rs +++ b/src/dav/caltypes.rs @@ -1353,6 +1353,7 @@ pub enum Component { VEvent, VTodo, VAlarm, + VTimeZone, Unknown(String), } impl Component { @@ -1364,6 +1365,7 @@ impl Component { Self::VEvent => "VEVENT", Self::VTodo => "VTODO", Self::VAlarm => "VALARM", + Self::VTimeZone => "VTIMEZONE", Self::Unknown(c) => c, } } -- cgit v1.2.3 From 352814aec9c44129c2fe917fa8dfa34adb3e8c78 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Mon, 4 Mar 2024 09:02:24 +0100 Subject: caldav encoding test passing --- src/dav/calencoder.rs | 86 +++++++++++++++++++++++++++++++++++---------------- src/dav/caltypes.rs | 3 ++ 2 files changed, 63 insertions(+), 26 deletions(-) diff --git a/src/dav/calencoder.rs b/src/dav/calencoder.rs index 9711f5f..12a6939 100644 --- a/src/dav/calencoder.rs +++ b/src/dav/calencoder.rs @@ -391,11 +391,16 @@ impl QuickWritable for Comp { async fn write(&self, xml: &mut Writer, ctx: C) -> 
Result<(), QError> { let mut start = ctx.create_cal_element("comp"); start.push_attribute(("name", self.name.as_str())); - let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; - self.prop_kind.write(xml, ctx.child()).await?; - self.comp_kind.write(xml, ctx.child()).await?; - xml.write_event_async(Event::End(end)).await + match &self.additional_rules { + None => xml.write_event_async(Event::Empty(start)).await, + Some(rules) => { + let end = start.to_end(); + xml.write_event_async(Event::Start(start.clone())).await?; + rules.prop_kind.write(xml, ctx.child()).await?; + rules.comp_kind.write(xml, ctx.child()).await?; + xml.write_event_async(Event::End(end)).await + }, + } } } @@ -679,6 +684,7 @@ mod tests { use super::*; use crate::dav::types as dav; use tokio::io::AsyncWriteExt; + use chrono::{Utc,TimeZone,DateTime}; async fn serialize>(ctx: C, elem: &Q) -> String { let mut buffer = Vec::new(); @@ -718,26 +724,38 @@ mod tests { mime: None, comp: Some(Comp { name: Component::VCalendar, - prop_kind: PropKind::Prop(vec![ - CalProp { - name: ComponentProperty("VERSION".into()), - novalue: None, - } - ]), - comp_kind: CompKind::Comp(vec![ - Comp { - name: Component::VEvent, - comp_kind: CompKind::Comp(vec![]), - prop_kind: PropKind::Prop(vec![ - CalProp { name: ComponentProperty("SUMMARY".into()), novalue: None }, - ]), - }, - Comp { - name: Component::VTimeZone, - prop_kind: PropKind::Prop(vec![]), - comp_kind: CompKind::Comp(vec![]), - } - ]), + additional_rules: Some(CompInner { + prop_kind: PropKind::Prop(vec![ + CalProp { + name: ComponentProperty("VERSION".into()), + novalue: None, + } + ]), + comp_kind: CompKind::Comp(vec![ + Comp { + name: Component::VEvent, + additional_rules: Some(CompInner { + prop_kind: PropKind::Prop(vec![ + CalProp { name: ComponentProperty("SUMMARY".into()), novalue: None }, + CalProp { name: ComponentProperty("UID".into()), novalue: None }, + CalProp { name: ComponentProperty("DTSTART".into()), novalue: None }, + CalProp { name: ComponentProperty("DTEND".into()), novalue: None }, + CalProp { name: ComponentProperty("DURATION".into()), novalue: None }, + CalProp { name: ComponentProperty("RRULE".into()), novalue: None }, + CalProp { name: ComponentProperty("RDATE".into()), novalue: None }, + CalProp { name: ComponentProperty("EXRULE".into()), novalue: None }, + CalProp { name: ComponentProperty("EXDATE".into()), novalue: None }, + CalProp { name: ComponentProperty("RECURRENCE-ID".into()), novalue: None }, + ]), + comp_kind: CompKind::Comp(vec![]), + }), + }, + Comp { + name: Component::VTimeZone, + additional_rules: None, + } + ]), + }), }), recurrence: None, limit_freebusy_set: None, @@ -745,7 +763,23 @@ mod tests { ]))), filter: Filter(CompFilter { name: Component::VCalendar, - additional_rules: None, + additional_rules: Some(CompFilterRules::Matches(CompFilterMatch { + time_range: None, + prop_filter: vec![], + comp_filter: vec![ + CompFilter { + name: Component::VEvent, + additional_rules: Some(CompFilterRules::Matches(CompFilterMatch { + time_range: Some(TimeRange::FullRange( + Utc.with_ymd_and_hms(2006,1,4,0,0,0).unwrap(), + Utc.with_ymd_and_hms(2006,1,5,0,0,0).unwrap(), + )), + prop_filter: vec![], + comp_filter: vec![], + })), + }, + ], + })), }), timezone: None, } diff --git a/src/dav/caltypes.rs b/src/dav/caltypes.rs index d2e6169..7131ec2 100644 --- a/src/dav/caltypes.rs +++ b/src/dav/caltypes.rs @@ -839,6 +839,9 @@ pub struct CalendarDataSupport { /// "DAV:" namespace. 
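/// Leaving `additional_rules` at `None` lets the encoder emit a self-closing
/// `<C:comp name="..."/>` element (see the matching `Event::Empty` branch in calencoder.rs).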
pub struct Comp { pub name: Component, + pub additional_rules: Option, +} +pub struct CompInner { pub prop_kind: PropKind, pub comp_kind: CompKind, } -- cgit v1.2.3 From 4d3d1c8c19f8819e4cf14a6062ef1c4cc91b8749 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Mon, 4 Mar 2024 09:29:03 +0100 Subject: Add new caldav test from RFC --- src/dav/calencoder.rs | 76 ++++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 75 insertions(+), 1 deletion(-) diff --git a/src/dav/calencoder.rs b/src/dav/calencoder.rs index 12a6939..db7ece4 100644 --- a/src/dav/calencoder.rs +++ b/src/dav/calencoder.rs @@ -714,7 +714,7 @@ mod tests { } #[tokio::test] - async fn rfc_calendar_query1() { + async fn rfc_calendar_query1_req() { let got = serialize( CalExtension { root: true }, &CalendarQuery { @@ -816,6 +816,80 @@ mod tests { "#; + assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + } + + #[tokio::test] + async fn rfc_calendar_query1_res() { + let got = serialize( + CalExtension { root: true }, + &dav::Multistatus { + responses: vec![ + dav::Response { + href: dav::Href("http://cal.example.com/bernard/work/abcd2.ics".into()), + status_or_propstat: dav::StatusOrPropstat::PropStat(vec![dav::PropStat { + prop: dav::AnyProp::Value(dav::PropValue(vec![ + dav::Property::GetEtag("\"fffff-abcd2\"".into()), + dav::Property::Extension(Property::CalendarData(CalendarDataPayload { + mime: None, + payload: "PLACEHOLDER".into() + })), + ])), + status: dav::Status(http::status::StatusCode::OK), + error: None, + responsedescription: None, + }]), + location: None, + error: None, + responsedescription: None, + }, + dav::Response { + href: dav::Href("http://cal.example.com/bernard/work/abcd3.ics".into()), + status_or_propstat: dav::StatusOrPropstat::PropStat(vec![dav::PropStat { + prop: dav::AnyProp::Value(dav::PropValue(vec![ + dav::Property::GetEtag("\"fffff-abcd3\"".into()), + dav::Property::Extension(Property::CalendarData(CalendarDataPayload{ + mime: None, + payload: "PLACEHOLDER".into(), + })), + ])), + status: dav::Status(http::status::StatusCode::OK), + error: None, + responsedescription: None, + }]), + location: None, + error: None, + responsedescription: None, + }, + ], + responsedescription: None, + }, + ).await; + + let expected = r#" + + http://cal.example.com/bernard/work/abcd2.ics + + + "fffff-abcd2" + PLACEHOLDER + + HTTP/1.1 200 OK + + + + http://cal.example.com/bernard/work/abcd3.ics + + + "fffff-abcd3" + PLACEHOLDER + + HTTP/1.1 200 OK + + +"#; + + assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); } } -- cgit v1.2.3 From ad25912a0fc85dab97207b101aa7a5b6efca71b3 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Mon, 4 Mar 2024 13:36:41 +0100 Subject: Before refactoring the reader --- src/dav/decoder.rs | 140 +++++++++++++++++++++++++++++++++++++++++++++++++++++ src/dav/mod.rs | 1 + 2 files changed, 141 insertions(+) create mode 100644 src/dav/decoder.rs diff --git a/src/dav/decoder.rs b/src/dav/decoder.rs new file mode 100644 index 0000000..5996a05 --- /dev/null +++ b/src/dav/decoder.rs @@ -0,0 +1,140 @@ +use std::borrow::Cow; + +use im::HashMap; +use quick_xml::events::{BytesStart, BytesText}; +use quick_xml::events::attributes::AttrError; +use quick_xml::name::PrefixDeclaration; +use quick_xml::reader::Reader; +use tokio::io::AsyncBufRead; + +use super::types::*; + +pub enum ParsingError { + NamespacePrefixAlreadyUsed, + WrongToken, + QuickXml(quick_xml::Error) +} +impl From for ParsingError { + fn from(value: AttrError) -> Self { + 
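// Fold attribute errors into the quick_xml error type so callers only have to
// handle a single error variant.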
Self::QuickXml(value.into()) + } +} +impl From for ParsingError { + fn from(value: quick_xml::Error) -> Self { + Self::QuickXml(value) + } +} + +const DAV_URN: &[u8] = b"DAV:"; +const CALDAV_URN: &[u8] = b"urn:ietf:params:xml:ns:caldav"; +const CARDDAV_URN: &[u8] = b"urn:ietf:params:xml:ns:carddav"; +const XML_URN: &[u8] = b"xml"; + +#[derive(PartialEq, Clone)] +pub enum XmlNamespace { + None, + Dav, + CalDav, + CardDav, + Xml, + Unknown(Vec), +} +impl From<&[u8]> for XmlNamespace { + fn from(value: &[u8]) -> Self { + match value { + [] => Self::None, + DAV_URN => Self::Dav, + CALDAV_URN => Self::CalDav, + CARDDAV_URN => Self::CardDav, + XML_URN => Self::Xml, + v => Self::Unknown(v.into()), + } + } +} + +/// Context must stay cheap to clone +/// as we are cloning it from one fonction to another +#[derive(Clone)] +pub struct Context<'a, E: Extension + Clone> { + pub aliases: HashMap<&'a [u8], XmlNamespace>, + phantom: std::marker::PhantomData, +} +impl<'a, E: Extension + Clone> Context<'a, E> { + /// External buffer + pub fn new() -> Self { + Self { + aliases: HashMap::new(), + phantom: std::marker::PhantomData + } + } + + pub fn ns_scan(&mut self, token: &'a BytesStart<'a>) -> Result<(XmlNamespace, &[u8]), ParsingError> { + // Register namespace aliases from attributes (aka namespace bindings) + for attr_res in token.attributes() { + let attr = attr_res?; + match attr.key.as_namespace_binding() { + None => (), + Some(PrefixDeclaration::Named(prefix)) => self.ns_alias(attr.value.as_ref(), prefix.as_ref())?, + Some(PrefixDeclaration::Default) => self.ns_default(attr.value.as_ref())?, + } + } + + // Decompose tag name + let (key, maybe_prefix) = token.name().decompose(); + let ns = self.ns_resolve(maybe_prefix.map(|p| p.into_inner()).unwrap_or(&b""[..])); + + Ok((ns, key.into_inner())) + } + + fn ns_default(&mut self, fqns: &[u8]) -> Result<(), ParsingError> { + self.ns_alias(fqns, &b""[..]) + } + + fn ns_alias(&mut self, fqns: &[u8], alias: &'a [u8]) -> Result<(), ParsingError> { + let parsed_ns = XmlNamespace::from(fqns); + if let Some(reg_fqns) = self.aliases.get(alias) { + if *reg_fqns != parsed_ns { + return Err(ParsingError::NamespacePrefixAlreadyUsed) + } + } + self.aliases.insert(alias, parsed_ns); + Ok(()) + } + + // If the namespace is not found in the alias table (binding table) + // we suppose it's a fully qualified namespace (fqns) + fn ns_resolve(&self, prefix: &[u8]) -> XmlNamespace { + match self.aliases.get(prefix) { + Some(fqns) => fqns.clone(), + None => XmlNamespace::from(prefix), + } + } +} + +trait DavReader<'a> { + async fn doctype(&self) -> Result<(), ParsingError>; + async fn tag(&self) -> Result, ParsingError>; + async fn txt(&self) -> Result, ParsingError>; +} +/*impl<'a, I: AsyncBufRead+Unpin> DavReader<'a> for Reader { + async fn doctype(&self) -> Result<(), ParsingError> { + } + async fn tag(&self) -> Result, ParsingError> { + } + async fn txt(&self) -> Result, ParsingError> { + } +}*/ + +pub async fn propfind( + xml: &mut Reader, + ctx: Context<'_, E>, + buf: &mut Vec, +) -> Result, ParsingError> { + let local = ctx.clone(); + + match xml.read_event_into_async(buf).await? 
{ + _ => unimplemented!(), + } + + unimplemented!(); +} diff --git a/src/dav/mod.rs b/src/dav/mod.rs index 4044895..835544b 100644 --- a/src/dav/mod.rs +++ b/src/dav/mod.rs @@ -4,6 +4,7 @@ mod acltypes; mod versioningtypes; mod encoder; mod calencoder; +mod decoder; use std::net::SocketAddr; -- cgit v1.2.3 From c9edf6c37c2b2758f0407caa48d7434af7b5a659 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Mon, 4 Mar 2024 17:55:48 +0100 Subject: beginning... --- src/dav/decoder.rs | 173 +++++++++++++++++++++++++---------------------------- src/dav/types.rs | 3 +- 2 files changed, 82 insertions(+), 94 deletions(-) diff --git a/src/dav/decoder.rs b/src/dav/decoder.rs index 5996a05..f6cbd27 100644 --- a/src/dav/decoder.rs +++ b/src/dav/decoder.rs @@ -1,17 +1,18 @@ use std::borrow::Cow; -use im::HashMap; -use quick_xml::events::{BytesStart, BytesText}; +use quick_xml::events::{Event, BytesStart, BytesDecl, BytesText}; use quick_xml::events::attributes::AttrError; -use quick_xml::name::PrefixDeclaration; -use quick_xml::reader::Reader; +use quick_xml::name::{Namespace, QName, PrefixDeclaration, ResolveResult, ResolveResult::*}; +use quick_xml::reader::NsReader; use tokio::io::AsyncBufRead; use super::types::*; +#[derive(Debug)] pub enum ParsingError { NamespacePrefixAlreadyUsed, WrongToken, + TagNotFound, QuickXml(quick_xml::Error) } impl From for ParsingError { @@ -29,112 +30,98 @@ const DAV_URN: &[u8] = b"DAV:"; const CALDAV_URN: &[u8] = b"urn:ietf:params:xml:ns:caldav"; const CARDDAV_URN: &[u8] = b"urn:ietf:params:xml:ns:carddav"; const XML_URN: &[u8] = b"xml"; +const DAV_NS: ResolveResult = Bound(Namespace(DAV_URN)); -#[derive(PartialEq, Clone)] -pub enum XmlNamespace { - None, - Dav, - CalDav, - CardDav, - Xml, - Unknown(Vec), +pub struct PeekRead { + evt: Event<'static>, + rdr: NsReader, + buf: Vec, } -impl From<&[u8]> for XmlNamespace { - fn from(value: &[u8]) -> Self { - match value { - [] => Self::None, - DAV_URN => Self::Dav, - CALDAV_URN => Self::CalDav, - CARDDAV_URN => Self::CardDav, - XML_URN => Self::Xml, - v => Self::Unknown(v.into()), - } +impl PeekRead { + async fn new(mut rdr: NsReader) -> Result { + let mut buf: Vec = vec![]; + let evt = rdr.read_event_into_async(&mut buf).await?.into_owned(); + buf.clear(); + Ok(Self { evt, rdr, buf }) } -} -/// Context must stay cheap to clone -/// as we are cloning it from one fonction to another -#[derive(Clone)] -pub struct Context<'a, E: Extension + Clone> { - pub aliases: HashMap<&'a [u8], XmlNamespace>, - phantom: std::marker::PhantomData, -} -impl<'a, E: Extension + Clone> Context<'a, E> { - /// External buffer - pub fn new() -> Self { - Self { - aliases: HashMap::new(), - phantom: std::marker::PhantomData - } + fn peek(&self) -> &Event<'static> { + &self.evt } - - pub fn ns_scan(&mut self, token: &'a BytesStart<'a>) -> Result<(XmlNamespace, &[u8]), ParsingError> { - // Register namespace aliases from attributes (aka namespace bindings) - for attr_res in token.attributes() { - let attr = attr_res?; - match attr.key.as_namespace_binding() { - None => (), - Some(PrefixDeclaration::Named(prefix)) => self.ns_alias(attr.value.as_ref(), prefix.as_ref())?, - Some(PrefixDeclaration::Default) => self.ns_default(attr.value.as_ref())?, - } + // skip tag, some tags can't be skipped like end, text, cdata + async fn skip(&mut self) -> Result<(), ParsingError> { + match &self.evt { + Event::Start(b) => { + let _span = self.rdr.read_to_end_into_async(b.to_end().name(), &mut self.buf).await?; + self.next().await + }, + Event::Empty(_) | 
Event::Comment(_) | Event::PI(_) | Event::Decl(_) | Event::DocType(_) => self.next().await, + _ => return Err(ParsingError::WrongToken), } - - // Decompose tag name - let (key, maybe_prefix) = token.name().decompose(); - let ns = self.ns_resolve(maybe_prefix.map(|p| p.into_inner()).unwrap_or(&b""[..])); - - Ok((ns, key.into_inner())) } - fn ns_default(&mut self, fqns: &[u8]) -> Result<(), ParsingError> { - self.ns_alias(fqns, &b""[..]) + // read one more tag + async fn next(&mut self) -> Result<(), ParsingError> { + let evt = self.rdr.read_event_into_async(&mut self.buf).await?.into_owned(); + self.buf.clear(); + self.evt = evt; + Ok(()) } +} + +pub trait QReadable: Sized { + async fn read(xml: &mut PeekRead) -> Result; +} - fn ns_alias(&mut self, fqns: &[u8], alias: &'a [u8]) -> Result<(), ParsingError> { - let parsed_ns = XmlNamespace::from(fqns); - if let Some(reg_fqns) = self.aliases.get(alias) { - if *reg_fqns != parsed_ns { - return Err(ParsingError::NamespacePrefixAlreadyUsed) +impl QReadable for PropFind { + async fn read(xml: &mut PeekRead) -> Result, ParsingError> { + + // Find propfind + loop { + match xml.peek() { + Event::Start(b) if b.local_name().into_inner() == &b"propfind"[..] => break, + _ => xml.skip().await?, } } - self.aliases.insert(alias, parsed_ns); - Ok(()) - } - - // If the namespace is not found in the alias table (binding table) - // we suppose it's a fully qualified namespace (fqns) - fn ns_resolve(&self, prefix: &[u8]) -> XmlNamespace { - match self.aliases.get(prefix) { - Some(fqns) => fqns.clone(), - None => XmlNamespace::from(prefix), + xml.next().await?; + + // Find any tag + let propfind = loop { + match xml.peek() { + Event::Start(b) | Event::Empty(b) if b.local_name().into_inner() == &b"allprop"[..] => { + unimplemented!() + }, + Event::Start(b) if b.local_name().into_inner() == &b"prop"[..] => { + unimplemented!(); + }, + Event::Empty(b) if b.local_name().into_inner() == &b"propname"[..] => break PropFind::PropName, + _ => xml.skip().await?, + } + }; + xml.next().await?; + + // Close tag + loop { + match xml.peek() { + Event::End(b) if b.local_name().into_inner() == &b"propfind"[..] => break, + _ => xml.skip().await?, + } } + + Ok(propfind) } } -trait DavReader<'a> { - async fn doctype(&self) -> Result<(), ParsingError>; - async fn tag(&self) -> Result, ParsingError>; - async fn txt(&self) -> Result, ParsingError>; -} -/*impl<'a, I: AsyncBufRead+Unpin> DavReader<'a> for Reader { - async fn doctype(&self) -> Result<(), ParsingError> { - } - async fn tag(&self) -> Result, ParsingError> { - } - async fn txt(&self) -> Result, ParsingError> { - } -}*/ +#[cfg(test)] +mod tests { + use super::*; -pub async fn propfind( - xml: &mut Reader, - ctx: Context<'_, E>, - buf: &mut Vec, -) -> Result, ParsingError> { - let local = ctx.clone(); + #[tokio::test] + async fn basic_propfind() { + let src = r#""#; - match xml.read_event_into_async(buf).await? { - _ => unimplemented!(), + let mut rdr = PeekRead::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); + let got = PropFind::::read(&mut rdr).await.unwrap(); + assert!(matches!(got, PropFind::PropName)); } - - unimplemented!(); } diff --git a/src/dav/types.rs b/src/dav/types.rs index 7e3eb1c..a1b1c7f 100644 --- a/src/dav/types.rs +++ b/src/dav/types.rs @@ -20,7 +20,8 @@ impl Extension for NoExtension { type Property = Disabled; type PropertyRequest = Disabled; type ResourceType = Disabled; - } +} + /// 14.1. 
activelock XML Element /// -- cgit v1.2.3 From b7a990ecdb0346e12cfeb0de34565f51af61ec80 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Mon, 4 Mar 2024 22:27:37 +0100 Subject: Decoder is starting to work --- src/dav/decoder.rs | 166 ++++++++++++++++++++++++++++++++++++++++++----------- src/main.rs | 1 + 2 files changed, 132 insertions(+), 35 deletions(-) diff --git a/src/dav/decoder.rs b/src/dav/decoder.rs index f6cbd27..719ea8c 100644 --- a/src/dav/decoder.rs +++ b/src/dav/decoder.rs @@ -1,4 +1,5 @@ use std::borrow::Cow; +use std::future::Future; use quick_xml::events::{Event, BytesStart, BytesDecl, BytesText}; use quick_xml::events::attributes::AttrError; @@ -13,7 +14,8 @@ pub enum ParsingError { NamespacePrefixAlreadyUsed, WrongToken, TagNotFound, - QuickXml(quick_xml::Error) + QuickXml(quick_xml::Error), + Eof } impl From for ParsingError { fn from(value: AttrError) -> Self { @@ -29,15 +31,16 @@ impl From for ParsingError { const DAV_URN: &[u8] = b"DAV:"; const CALDAV_URN: &[u8] = b"urn:ietf:params:xml:ns:caldav"; const CARDDAV_URN: &[u8] = b"urn:ietf:params:xml:ns:carddav"; -const XML_URN: &[u8] = b"xml"; -const DAV_NS: ResolveResult = Bound(Namespace(DAV_URN)); +//const XML_URN: &[u8] = b"xml"; -pub struct PeekRead { +trait Reader = AsyncBufRead+Unpin+'static; + +pub struct PeekRead { evt: Event<'static>, rdr: NsReader, buf: Vec, } -impl PeekRead { +impl PeekRead { async fn new(mut rdr: NsReader) -> Result { let mut buf: Vec = vec![]; let evt = rdr.read_event_into_async(&mut buf).await?.into_owned(); @@ -48,67 +51,154 @@ impl PeekRead { fn peek(&self) -> &Event<'static> { &self.evt } - // skip tag, some tags can't be skipped like end, text, cdata - async fn skip(&mut self) -> Result<(), ParsingError> { + + /// skip tag. Can't skip end, can't skip eof. 
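/// A `Start` tag is skipped by consuming its whole subtree with
/// `read_to_end_into_async`; `End` and `Eof` are surfaced as errors so the
/// caller can react to closing tags; any other event is simply consumed.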
+ async fn skip(&mut self) -> Result, ParsingError> { match &self.evt { Event::Start(b) => { let _span = self.rdr.read_to_end_into_async(b.to_end().name(), &mut self.buf).await?; self.next().await }, - Event::Empty(_) | Event::Comment(_) | Event::PI(_) | Event::Decl(_) | Event::DocType(_) => self.next().await, - _ => return Err(ParsingError::WrongToken), + Event::End(_) => Err(ParsingError::WrongToken), + Event::Eof => Err(ParsingError::Eof), + _ => self.next().await, } } - // read one more tag - async fn next(&mut self) -> Result<(), ParsingError> { + /// read one more tag + async fn next(&mut self) -> Result, ParsingError> { let evt = self.rdr.read_event_into_async(&mut self.buf).await?.into_owned(); self.buf.clear(); - self.evt = evt; - Ok(()) + let old_evt = std::mem::replace(&mut self.evt, evt); + Ok(old_evt) + } + + + /// check if this is the desired tag + fn is_tag(&self, ns: &[u8], key: &str) -> bool { + let qname = match self.peek() { + Event::Start(bs) | Event::Empty(bs) => bs.name(), + Event::End(be) => be.name(), + _ => return false, + }; + + let (extr_ns, local) = self.rdr.resolve_element(qname); + + if local.into_inner() != key.as_bytes() { + return false + } + + match extr_ns { + ResolveResult::Bound(v) => v.into_inner() == ns, + _ => false, + } + } + + /// find start tag + async fn tag_start(&mut self, ns: &[u8], key: &str) -> Result, ParsingError> { + loop { + match self.peek() { + Event::Start(b) if self.is_tag(ns, key) => break, + _ => { self.skip().await?; }, + } + } + self.next().await + } + + // find stop tag + async fn tag_stop(&mut self, ns: &[u8], key: &str) -> Result, ParsingError> { + loop { + match self.peek() { + Event::End(b) if self.is_tag(ns, key) => break, + _ => { self.skip().await?; }, + } + } + self.next().await } } -pub trait QReadable: Sized { +pub trait QReadable: Sized { async fn read(xml: &mut PeekRead) -> Result; } -impl QReadable for PropFind { +impl QReadable for PropFind { async fn read(xml: &mut PeekRead) -> Result, ParsingError> { - // Find propfind - loop { - match xml.peek() { - Event::Start(b) if b.local_name().into_inner() == &b"propfind"[..] => break, - _ => xml.skip().await?, - } - } - xml.next().await?; + xml.tag_start(DAV_URN, "propfind").await?; // Find any tag - let propfind = loop { + let propfind: PropFind = loop { match xml.peek() { - Event::Start(b) | Event::Empty(b) if b.local_name().into_inner() == &b"allprop"[..] => { - unimplemented!() + Event::Start(_) if xml.is_tag(DAV_URN, "allprop") => { + xml.tag_start(DAV_URN, "allprop").await?; + let r = PropFind::AllProp(Some(Include::read(xml).await?)); + xml.tag_stop(DAV_URN, "allprop").await?; + break r }, - Event::Start(b) if b.local_name().into_inner() == &b"prop"[..] => { - unimplemented!(); + Event::Start(_) if xml.is_tag(DAV_URN, "prop") => { + xml.tag_start(DAV_URN, "prop").await?; + let r = PropFind::Prop(PropName::read(xml).await?); + xml.tag_stop(DAV_URN, "prop").await?; + break r }, - Event::Empty(b) if b.local_name().into_inner() == &b"propname"[..] 
=> break PropFind::PropName, - _ => xml.skip().await?, + Event::Empty(_) if xml.is_tag(DAV_URN, "allprop") => { + xml.next().await?; + break PropFind::AllProp(None) + }, + Event::Empty(_) if xml.is_tag(DAV_URN, "propname") => { + xml.next().await?; + break PropFind::PropName + }, + _ => { xml.skip().await?; }, } }; - xml.next().await?; // Close tag + xml.tag_stop(DAV_URN, "propfind").await?; + + Ok(propfind) + } +} + + +impl QReadable for Include { + async fn read(xml: &mut PeekRead) -> Result, ParsingError> { + xml.tag_start(DAV_URN, "include").await?; + let mut acc: Vec> = Vec::new(); loop { match xml.peek() { - Event::End(b) if b.local_name().into_inner() == &b"propfind"[..] => break, - _ => xml.skip().await?, + Event::Start(_) => acc.push(PropertyRequest::read(xml).await?), + Event::End(_) if xml.is_tag(DAV_URN, "include") => break, + _ => { xml.skip().await?; }, } } + xml.tag_stop(DAV_URN, "include").await?; + Ok(Include(acc)) + } +} - Ok(propfind) +impl QReadable for PropName { + async fn read(xml: &mut PeekRead) -> Result, ParsingError> { + xml.tag_start(DAV_URN, "prop").await?; + let mut acc: Vec> = Vec::new(); + loop { + match xml.peek() { + Event::Start(_) => acc.push(PropertyRequest::read(xml).await?), + Event::End(_) if xml.is_tag(DAV_URN, "prop") => break, + _ => { xml.skip().await?; }, + } + } + xml.tag_stop(DAV_URN, "prop").await?; + Ok(PropName(acc)) + } +} + +impl QReadable for PropertyRequest { + async fn read(xml: &mut PeekRead) -> Result, ParsingError> { + /*match xml.peek() { + + }*/ + unimplemented!(); } } @@ -118,7 +208,13 @@ mod tests { #[tokio::test] async fn basic_propfind() { - let src = r#""#; + let src = r#" + + + + + +"#; let mut rdr = PeekRead::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); let got = PropFind::::read(&mut rdr).await.unwrap(); diff --git a/src/main.rs b/src/main.rs index c9ce42d..e098d44 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,6 +1,7 @@ #![feature(type_alias_impl_trait)] #![feature(async_fn_in_trait)] #![feature(async_closure)] +#![feature(trait_alias)] mod auth; mod bayou; -- cgit v1.2.3 From 8e5d8a8aaa7bde2357ca70542a88f744005d58ba Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Tue, 5 Mar 2024 16:07:47 +0100 Subject: Refactor encoder+decoder WIP (compile) --- src/dav/calencoder.rs | 2 + src/dav/caltypes.rs | 32 +-- src/dav/decoder.rs | 132 ++++++++--- src/dav/encoder.rs | 616 +++++++++++++++++++++++-------------------------- src/dav/error.rs | 20 ++ src/dav/mod.rs | 3 + src/dav/realization.rs | 41 ++++ src/dav/types.rs | 138 ++++++----- src/dav/xml.rs | 128 ++++++++++ 9 files changed, 677 insertions(+), 435 deletions(-) create mode 100644 src/dav/error.rs create mode 100644 src/dav/realization.rs create mode 100644 src/dav/xml.rs diff --git a/src/dav/calencoder.rs b/src/dav/calencoder.rs index db7ece4..80b8656 100644 --- a/src/dav/calencoder.rs +++ b/src/dav/calencoder.rs @@ -1,3 +1,4 @@ +/* use super::encoder::{QuickWritable, Context}; use super::caltypes::*; use super::types::Extension; @@ -893,3 +894,4 @@ mod tests { assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); } } +*/ diff --git a/src/dav/caltypes.rs b/src/dav/caltypes.rs index 7131ec2..c8177e9 100644 --- a/src/dav/caltypes.rs +++ b/src/dav/caltypes.rs @@ -1,7 +1,8 @@ #![allow(dead_code)] +/* use chrono::{DateTime,Utc}; -use super::types as Dav; +use super::types as dav; //@FIXME ACL (rfc3744) is missing, required //@FIXME Versioning (rfc3253) is missing, required @@ -11,15 +12,6 @@ use super::types as Dav; // For reference, 
non-official extensions documented by SabreDAV: // https://github.com/apple/ccs-calendarserver/tree/master/doc/Extensions -pub struct CalExtension { - pub root: bool -} -impl Dav::Extension for CalExtension { - type Error = Violation; - type Property = Property; - type PropertyRequest = PropertyRequest; - type ResourceType = ResourceType; -} // ----- Root elements ----- @@ -35,7 +27,7 @@ impl Dav::Extension for CalExtension { /// instruction in Section 12.13.2 of [RFC2518]. /// /// -pub struct MkCalendar(pub Dav::Set); +pub struct MkCalendar(pub dav::Set); /// If a response body for a successful request is included, it MUST @@ -51,7 +43,7 @@ pub struct MkCalendar(pub Dav::Set); /// Definition: /// /// -pub struct MkCalendarResponse(pub Vec>); +pub struct MkCalendarResponse(pub Vec>); // --- (REPORT PART) --- @@ -68,8 +60,8 @@ pub struct MkCalendarResponse(pub Vec>); /// -pub struct CalendarQuery { - pub selector: Option>, +pub struct CalendarQuery { + pub selector: Option>, pub filter: Filter, pub timezone: Option, } @@ -88,9 +80,9 @@ pub struct CalendarQuery { /// -pub struct CalendarMultiget { - pub selector: Option>, - pub href: Vec, +pub struct CalendarMultiget { + pub selector: Option>, + pub href: Vec, } /// Name: free-busy-query @@ -1056,10 +1048,10 @@ pub struct LimitRecurrenceSet(pub DateTime, pub DateTime); pub struct LimitFreebusySet(pub DateTime, pub DateTime); /// Used by CalendarQuery & CalendarMultiget -pub enum CalendarSelector { +pub enum CalendarSelector { AllProp, PropName, - Prop(Dav::PropName), + Prop(dav::PropName), } /// Name: comp-filter @@ -1402,4 +1394,4 @@ impl Collation { Self::Unknown(c) => c.as_str(), } } -} +}*/ diff --git a/src/dav/decoder.rs b/src/dav/decoder.rs index 719ea8c..1756464 100644 --- a/src/dav/decoder.rs +++ b/src/dav/decoder.rs @@ -8,33 +8,32 @@ use quick_xml::reader::NsReader; use tokio::io::AsyncBufRead; use super::types::*; +use super::error::*; -#[derive(Debug)] -pub enum ParsingError { - NamespacePrefixAlreadyUsed, - WrongToken, - TagNotFound, - QuickXml(quick_xml::Error), - Eof +/* +// --- Traits ---- + +trait Reader = AsyncBufRead+Unpin+'static; + +trait Decodable: Extension { + async fn decode_propreq(xml: &mut PeekRead) -> Result, ParsingError>; } -impl From for ParsingError { - fn from(value: AttrError) -> Self { - Self::QuickXml(value.into()) +impl Decodable for NoExtension { + async fn decode_propreq(xml: &mut PeekRead) -> Result, ParsingError> { + Ok(None) } } -impl From for ParsingError { - fn from(value: quick_xml::Error) -> Self { - Self::QuickXml(value) - } + +pub trait QReadable: Sized { + async fn read(xml: &mut PeekRead) -> Result; } +// --- Peek read with namespaces + const DAV_URN: &[u8] = b"DAV:"; const CALDAV_URN: &[u8] = b"urn:ietf:params:xml:ns:caldav"; const CARDDAV_URN: &[u8] = b"urn:ietf:params:xml:ns:carddav"; //const XML_URN: &[u8] = b"xml"; - -trait Reader = AsyncBufRead+Unpin+'static; - pub struct PeekRead { evt: Event<'static>, rdr: NsReader, @@ -117,11 +116,9 @@ impl PeekRead { } } -pub trait QReadable: Sized { - async fn read(xml: &mut PeekRead) -> Result; -} +// ----- Decode ---- -impl QReadable for PropFind { +impl QReadable for PropFind { async fn read(xml: &mut PeekRead) -> Result, ParsingError> { // Find propfind xml.tag_start(DAV_URN, "propfind").await?; @@ -161,13 +158,13 @@ impl QReadable for PropFind { } -impl QReadable for Include { +impl QReadable for Include { async fn read(xml: &mut PeekRead) -> Result, ParsingError> { xml.tag_start(DAV_URN, "include").await?; let mut acc: Vec> = Vec::new(); loop 
{ match xml.peek() { - Event::Start(_) => acc.push(PropertyRequest::read(xml).await?), + Event::Start(_) | Event::Empty(_) => acc.push(PropertyRequest::read(xml).await?), Event::End(_) if xml.is_tag(DAV_URN, "include") => break, _ => { xml.skip().await?; }, } @@ -177,13 +174,13 @@ impl QReadable for Include { } } -impl QReadable for PropName { +impl QReadable for PropName { async fn read(xml: &mut PeekRead) -> Result, ParsingError> { xml.tag_start(DAV_URN, "prop").await?; let mut acc: Vec> = Vec::new(); loop { match xml.peek() { - Event::Start(_) => acc.push(PropertyRequest::read(xml).await?), + Event::Start(_) | Event::Empty(_) => acc.push(PropertyRequest::read(xml).await?), Event::End(_) if xml.is_tag(DAV_URN, "prop") => break, _ => { xml.skip().await?; }, } @@ -193,12 +190,52 @@ impl QReadable for PropName { } } -impl QReadable for PropertyRequest { +impl QReadable for PropertyRequest { async fn read(xml: &mut PeekRead) -> Result, ParsingError> { - /*match xml.peek() { - - }*/ - unimplemented!(); + loop { + let (need_close, bs) = match xml.peek() { + Event::Start(b) => (true, b), + Event::Empty(b) => (false, b), + _ => { + xml.skip().await?; + continue + }, + }; + + let mut maybe_res = None; + + // Option 1: a pure DAV property + let (ns, loc) = xml.rdr.resolve_element(bs.name()); + if matches!(ns, Bound(Namespace(ns)) if ns == DAV_URN) { + maybe_res = match loc.into_inner() { + b"creationdate" => Some(PropertyRequest::CreationDate), + b"displayname" => Some(PropertyRequest::DisplayName), + b"getcontentlanguage" => Some(PropertyRequest::GetContentLanguage), + b"getcontentlength" => Some(PropertyRequest::GetContentLength), + b"getetag" => Some(PropertyRequest::GetEtag), + b"getlastmodified" => Some(PropertyRequest::GetLastModified), + b"lockdiscovery" => Some(PropertyRequest::LockDiscovery), + b"resourcetype" => Some(PropertyRequest::ResourceType), + b"supportedlock" => Some(PropertyRequest::SupportedLock), + _ => None, + }; + } + + // Option 2: an extension property + if maybe_res.is_none() { + maybe_res = E::decode_propreq(xml).await?.map(PropertyRequest::Extension); + } + + // In any cases, we must close the opened tag + if need_close { + xml.skip().await?; + } + + // Return if something is found - otherwise loop + if let Some(res) = maybe_res { + return Ok(res) + } + } } } @@ -207,7 +244,7 @@ mod tests { use super::*; #[tokio::test] - async fn basic_propfind() { + async fn basic_propfind_propname() { let src = r#" @@ -220,4 +257,37 @@ mod tests { let got = PropFind::::read(&mut rdr).await.unwrap(); assert!(matches!(got, PropFind::PropName)); } +/* + #[tokio::test] + async fn basic_propfind_prop() { + let src = r#" + + + + + + + + + + + + + +"#; + + let mut rdr = PeekRead::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); + let got = PropFind::::read(&mut rdr).await.unwrap(); + assert_eq!(got, PropFind::Prop(PropName(vec![ + PropertyRequest::DisplayName, + PropertyRequest::GetContentLength, + PropertyRequest::GetContentType, + PropertyRequest::GetEtag, + PropertyRequest::GetLastModified, + PropertyRequest::ResourceType, + PropertyRequest::SupportedLock, + ]))); + } + */ } +*/ diff --git a/src/dav/encoder.rs b/src/dav/encoder.rs index 7778d61..745c396 100644 --- a/src/dav/encoder.rs +++ b/src/dav/encoder.rs @@ -2,390 +2,339 @@ use std::io::Cursor; use quick_xml::Error as QError; use quick_xml::events::{Event, BytesEnd, BytesStart, BytesText}; -use quick_xml::writer::{ElementWriter, Writer}; +use quick_xml::writer::ElementWriter; use quick_xml::name::PrefixDeclaration; use 
tokio::io::AsyncWrite; use super::types::*; +use super::xml::{Writer,QWrite,IWrite}; -//-------------- TRAITS ---------------------- - -/// Basic encode trait to make a type encodable -pub trait QuickWritable { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError>; -} - -/// Encoding context -pub trait Context: Extension { - fn child(&self) -> Self; - fn create_dav_element(&self, name: &str) -> BytesStart; - async fn hook_error(&self, err: &Self::Error, xml: &mut Writer) -> Result<(), QError>; - async fn hook_property(&self, prop: &Self::Property, xml: &mut Writer) -> Result<(), QError>; - async fn hook_propertyrequest(&self, prop: &Self::PropertyRequest, xml: &mut Writer) -> Result<(), QError>; - async fn hook_resourcetype(&self, prop: &Self::ResourceType, xml: &mut Writer) -> Result<(), QError>; -} - -/// -------------- NoExtension Encoding Context -impl Context for NoExtension { - fn child(&self) -> Self { - Self { root: false } - } - fn create_dav_element(&self, name: &str) -> BytesStart { - let mut start = BytesStart::new(format!("D:{}", name)); - if self.root { - start.push_attribute(("xmlns:D", "DAV:")); - } - start - } - async fn hook_error(&self, _err: &Disabled, _xml: &mut Writer) -> Result<(), QError> { - unreachable!(); - } - async fn hook_property(&self, _prop: &Disabled, _xml: &mut Writer) -> Result<(), QError> { - unreachable!(); - } - async fn hook_propertyrequest(&self, _prop: &Disabled, _xml: &mut Writer) -> Result<(), QError> { - unreachable!(); - } - async fn hook_resourcetype(&self, _restype: &Disabled, _xml: &mut Writer) -> Result<(), QError> { - unreachable!(); - } -} - - -//--------------------- ENCODING -------------------- - // --- XML ROOTS /// PROPFIND REQUEST -impl QuickWritable for PropFind { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let start = ctx.create_dav_element("propfind"); +impl QWrite for PropFind { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("propfind"); let end = start.to_end(); - let ctx = ctx.child(); - xml.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Start(start.clone())).await?; match self { - Self::PropName => xml.write_event_async(Event::Empty(ctx.create_dav_element("propname"))).await?, + Self::PropName => { + let empty_propname = xml.create_dav_element("propname"); + xml.q.write_event_async(Event::Empty(empty_propname)).await? 
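// <D:propname/> carries no children, so a single empty-element event is enough.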
+ }, Self::AllProp(maybe_include) => { - xml.write_event_async(Event::Empty(ctx.create_dav_element("allprop"))).await?; + let empty_allprop = xml.create_dav_element("allprop"); + xml.q.write_event_async(Event::Empty(empty_allprop)).await?; if let Some(include) = maybe_include { - include.write(xml, ctx.child()).await?; + include.qwrite(xml).await?; } }, - Self::Prop(propname) => propname.write(xml, ctx.child()).await?, + Self::Prop(propname) => propname.qwrite(xml).await?, } - xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::End(end)).await } } /// PROPPATCH REQUEST -impl QuickWritable for PropertyUpdate { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let start = ctx.create_dav_element("propertyupdate"); +impl QWrite for PropertyUpdate { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("propertyupdate"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Start(start.clone())).await?; for update in self.0.iter() { - update.write(xml, ctx.child()).await?; + update.qwrite(xml).await?; } - xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::End(end)).await } } /// PROPFIND RESPONSE, PROPPATCH RESPONSE, COPY RESPONSE, MOVE RESPONSE /// DELETE RESPONSE, -impl QuickWritable for Multistatus { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let start = ctx.create_dav_element("multistatus"); +impl QWrite for Multistatus { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("multistatus"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Start(start.clone())).await?; for response in self.responses.iter() { - response.write(xml, ctx.child()).await?; + response.qwrite(xml).await?; } if let Some(description) = &self.responsedescription { - description.write(xml, ctx.child()).await?; + description.qwrite(xml).await?; } - xml.write_event_async(Event::End(end)).await?; + xml.q.write_event_async(Event::End(end)).await?; Ok(()) } } /// LOCK REQUEST -impl QuickWritable for LockInfo { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let start = ctx.create_dav_element("lockinfo"); +impl QWrite for LockInfo { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("lockinfo"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; - self.lockscope.write(xml, ctx.child()).await?; - self.locktype.write(xml, ctx.child()).await?; + xml.q.write_event_async(Event::Start(start.clone())).await?; + self.lockscope.qwrite(xml).await?; + self.locktype.qwrite(xml).await?; if let Some(owner) = &self.owner { - owner.write(xml, ctx.child()).await?; + owner.qwrite(xml).await?; } - xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::End(end)).await } } /// SOME LOCK RESPONSES -impl QuickWritable for PropValue { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let start = ctx.create_dav_element("prop"); +impl QWrite for PropValue { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("prop"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Start(start.clone())).await?; for propval in &self.0 { - 
propval.write(xml, ctx.child()).await?; + propval.qwrite(xml).await?; } - xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::End(end)).await } } // --- XML inner elements -impl QuickWritable for PropertyUpdateItem { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { +impl QWrite for PropertyUpdateItem { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { match self { - Self::Set(set) => set.write(xml, ctx).await, - Self::Remove(rm) => rm.write(xml, ctx).await, + Self::Set(set) => set.qwrite(xml).await, + Self::Remove(rm) => rm.qwrite(xml).await, } } } -impl QuickWritable for Set { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let start = ctx.create_dav_element("set"); +impl QWrite for Set { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("set"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; - self.0.write(xml, ctx.child()).await?; - xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::Start(start.clone())).await?; + self.0.qwrite(xml).await?; + xml.q.write_event_async(Event::End(end)).await } } -impl QuickWritable for Remove { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let start = ctx.create_dav_element("remove"); +impl QWrite for Remove { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("remove"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; - self.0.write(xml, ctx.child()).await?; - xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::Start(start.clone())).await?; + self.0.qwrite(xml).await?; + xml.q.write_event_async(Event::End(end)).await } } -impl QuickWritable for AnyProp { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { +impl QWrite for AnyProp { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { match self { - Self::Name(propname) => propname.write(xml, ctx).await, - Self::Value(propval) => propval.write(xml, ctx).await, + Self::Name(propname) => propname.qwrite(xml).await, + Self::Value(propval) => propval.qwrite(xml).await, } } } -impl QuickWritable for PropName { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let start = ctx.create_dav_element("prop"); +impl QWrite for PropName { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("prop"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Start(start.clone())).await?; for propname in &self.0 { - propname.write(xml, ctx.child()).await?; + propname.qwrite(xml).await?; } - xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::End(end)).await } } -impl QuickWritable for Href { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let start = ctx.create_dav_element("href"); +impl QWrite for Href { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("href"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; - xml.write_event_async(Event::Text(BytesText::new(&self.0))).await?; - xml.write_event_async(Event::End(end)).await?; - - Ok(()) + xml.q.write_event_async(Event::Start(start.clone())).await?; + 
xml.q.write_event_async(Event::Text(BytesText::new(&self.0))).await?; + xml.q.write_event_async(Event::End(end)).await } } -impl QuickWritable for Response { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let start = ctx.create_dav_element("response"); +impl QWrite for Response { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("response"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; - self.href.write(xml, ctx.child()).await?; - self.status_or_propstat.write(xml, ctx.child()).await?; + xml.q.write_event_async(Event::Start(start.clone())).await?; + self.href.qwrite(xml).await?; + self.status_or_propstat.qwrite(xml).await?; if let Some(error) = &self.error { - error.write(xml, ctx.child()).await?; + error.qwrite(xml).await?; } if let Some(responsedescription) = &self.responsedescription { - responsedescription.write(xml, ctx.child()).await?; + responsedescription.qwrite(xml).await?; } if let Some(location) = &self.location { - location.write(xml, ctx.child()).await?; + location.qwrite(xml).await?; } - xml.write_event_async(Event::End(end)).await?; - - Ok(()) + xml.q.write_event_async(Event::End(end)).await } } -impl QuickWritable for StatusOrPropstat { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { +impl QWrite for StatusOrPropstat { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { match self { - Self::Status(status) => status.write(xml, ctx.child()).await, + Self::Status(status) => status.qwrite(xml).await, Self::PropStat(propstat_list) => { for propstat in propstat_list.iter() { - propstat.write(xml, ctx.child()).await?; + propstat.qwrite(xml).await?; } - Ok(()) } } } } -impl QuickWritable for Status { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let start = ctx.create_dav_element("status"); +impl QWrite for Status { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("status"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Start(start.clone())).await?; let txt = format!("HTTP/1.1 {} {}", self.0.as_str(), self.0.canonical_reason().unwrap_or("No reason")); - xml.write_event_async(Event::Text(BytesText::new(&txt))).await?; + xml.q.write_event_async(Event::Text(BytesText::new(&txt))).await?; - xml.write_event_async(Event::End(end)).await?; + xml.q.write_event_async(Event::End(end)).await?; Ok(()) } } -impl QuickWritable for ResponseDescription { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let start = ctx.create_dav_element("responsedescription"); +impl QWrite for ResponseDescription { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("responsedescription"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; - xml.write_event_async(Event::Text(BytesText::new(&self.0))).await?; - xml.write_event_async(Event::End(end)).await?; - - Ok(()) + xml.q.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Text(BytesText::new(&self.0))).await?; + xml.q.write_event_async(Event::End(end)).await } } -impl QuickWritable for Location { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let start = ctx.create_dav_element("location"); +impl QWrite for Location { + async fn qwrite(&self, xml: &mut Writer) -> 
Result<(), QError> { + let start = xml.create_dav_element("location"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; - self.0.write(xml, ctx.child()).await?; - xml.write_event_async(Event::End(end)).await?; - - Ok(()) + xml.q.write_event_async(Event::Start(start.clone())).await?; + self.0.qwrite(xml).await?; + xml.q.write_event_async(Event::End(end)).await } } -impl QuickWritable for PropStat { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let start = ctx.create_dav_element("propstat"); +impl QWrite for PropStat { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("propstat"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; - self.prop.write(xml, ctx.child()).await?; - self.status.write(xml, ctx.child()).await?; + xml.q.write_event_async(Event::Start(start.clone())).await?; + self.prop.qwrite(xml).await?; + self.status.qwrite(xml).await?; if let Some(error) = &self.error { - error.write(xml, ctx.child()).await?; + error.qwrite(xml).await?; } if let Some(description) = &self.responsedescription { - description.write(xml, ctx.child()).await?; + description.qwrite(xml).await?; } - xml.write_event_async(Event::End(end)).await?; + xml.q.write_event_async(Event::End(end)).await?; Ok(()) } } -impl QuickWritable for Property { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { +impl QWrite for Property { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { use Property::*; match self { CreationDate(date) => { // 1997-12-01T17:42:21-08:00 - let start = ctx.create_dav_element("creationdate"); + let start = xml.create_dav_element("creationdate"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; - xml.write_event_async(Event::Text(BytesText::new(&date.to_rfc3339()))).await?; - xml.write_event_async(Event::End(end)).await?; + xml.q.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Text(BytesText::new(&date.to_rfc3339()))).await?; + xml.q.write_event_async(Event::End(end)).await?; }, DisplayName(name) => { // Example collection - let start = ctx.create_dav_element("displayname"); + let start = xml.create_dav_element("displayname"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; - xml.write_event_async(Event::Text(BytesText::new(name))).await?; - xml.write_event_async(Event::End(end)).await?; + xml.q.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Text(BytesText::new(name))).await?; + xml.q.write_event_async(Event::End(end)).await?; }, GetContentLanguage(lang) => { - let start = ctx.create_dav_element("getcontentlanguage"); + let start = xml.create_dav_element("getcontentlanguage"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; - xml.write_event_async(Event::Text(BytesText::new(lang))).await?; - xml.write_event_async(Event::End(end)).await?; + xml.q.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Text(BytesText::new(lang))).await?; + xml.q.write_event_async(Event::End(end)).await?; }, GetContentLength(len) => { // 4525 - let start = ctx.create_dav_element("getcontentlength"); + let start = xml.create_dav_element("getcontentlength"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; - 
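// The length is written as a decimal text node, e.g. <D:getcontentlength>4525</D:getcontentlength>.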
xml.write_event_async(Event::Text(BytesText::new(&len.to_string()))).await?; - xml.write_event_async(Event::End(end)).await?; + xml.q.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Text(BytesText::new(&len.to_string()))).await?; + xml.q.write_event_async(Event::End(end)).await?; }, GetContentType(ct) => { // text/html - let start = ctx.create_dav_element("getcontenttype"); + let start = xml.create_dav_element("getcontenttype"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; - xml.write_event_async(Event::Text(BytesText::new(&ct))).await?; - xml.write_event_async(Event::End(end)).await?; + xml.q.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Text(BytesText::new(&ct))).await?; + xml.q.write_event_async(Event::End(end)).await?; }, GetEtag(et) => { // "zzyzx" - let start = ctx.create_dav_element("getetag"); + let start = xml.create_dav_element("getetag"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; - xml.write_event_async(Event::Text(BytesText::new(et))).await?; - xml.write_event_async(Event::End(end)).await?; + xml.q.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Text(BytesText::new(et))).await?; + xml.q.write_event_async(Event::End(end)).await?; }, GetLastModified(date) => { // Mon, 12 Jan 1998 09:25:56 GMT - let start = ctx.create_dav_element("getlastmodified"); + let start = xml.create_dav_element("getlastmodified"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; - xml.write_event_async(Event::Text(BytesText::new(&date.to_rfc2822()))).await?; - xml.write_event_async(Event::End(end)).await?; + xml.q.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Text(BytesText::new(&date.to_rfc2822()))).await?; + xml.q.write_event_async(Event::End(end)).await?; }, LockDiscovery(many_locks) => { // ... - let start = ctx.create_dav_element("lockdiscovery"); + let start = xml.create_dav_element("lockdiscovery"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Start(start.clone())).await?; for lock in many_locks.iter() { - lock.write(xml, ctx.child()).await?; + lock.qwrite(xml).await?; } - xml.write_event_async(Event::End(end)).await?; + xml.q.write_event_async(Event::End(end)).await?; }, ResourceType(many_types) => { // @@ -397,16 +346,16 @@ impl QuickWritable for Property { // // - let start = ctx.create_dav_element("resourcetype"); + let start = xml.create_dav_element("resourcetype"); if many_types.is_empty() { - xml.write_event_async(Event::Empty(start)).await?; + xml.q.write_event_async(Event::Empty(start)).await?; } else { let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Start(start.clone())).await?; for restype in many_types.iter() { - restype.write(xml, ctx.child()).await?; + restype.qwrite(xml).await?; } - xml.write_event_async(Event::End(end)).await?; + xml.q.write_event_async(Event::End(end)).await?; } }, SupportedLock(many_entries) => { @@ -414,52 +363,56 @@ impl QuickWritable for Property { // ... 
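// An empty entry list is rendered as a self-closing <D:supportedlock/>.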
- let start = ctx.create_dav_element("supportedlock"); + let start = xml.create_dav_element("supportedlock"); if many_entries.is_empty() { - xml.write_event_async(Event::Empty(start)).await?; + xml.q.write_event_async(Event::Empty(start)).await?; } else { let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Start(start.clone())).await?; for entry in many_entries.iter() { - entry.write(xml, ctx.child()).await?; + entry.qwrite(xml).await?; } - xml.write_event_async(Event::End(end)).await?; + xml.q.write_event_async(Event::End(end)).await?; } }, - Extension(inner) => { - ctx.hook_property(inner, xml).await?; - }, + Extension(inner) => inner.qwrite(xml).await?, }; Ok(()) } } -impl QuickWritable for ResourceType { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { +impl QWrite for ResourceType { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { match self { - Self::Collection => xml.write_event_async(Event::Empty(ctx.create_dav_element("collection"))).await, - Self::Extension(inner) => ctx.hook_resourcetype(inner, xml).await, + Self::Collection => { + let empty_collection = xml.create_dav_element("collection"); + xml.q.write_event_async(Event::Empty(empty_collection)).await + }, + Self::Extension(inner) => inner.qwrite(xml).await, } } } -impl QuickWritable for Include { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let start = ctx.create_dav_element("include"); +impl QWrite for Include { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("include"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Start(start.clone())).await?; for prop in self.0.iter() { - prop.write(xml, ctx.child()).await?; + prop.qwrite(xml).await?; } - xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::End(end)).await } } -impl QuickWritable for PropertyRequest { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { +impl QWrite for PropertyRequest { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { use PropertyRequest::*; - let mut atom = async |c| xml.write_event_async(Event::Empty(ctx.create_dav_element(c))).await; + let mut atom = async |c| { + let empty_tag = xml.create_dav_element(c); + xml.q.write_event_async(Event::Empty(empty_tag)).await + }; match self { CreationDate => atom("creationdate").await, @@ -472,13 +425,13 @@ impl QuickWritable for PropertyRequest { LockDiscovery => atom("lockdiscovery").await, ResourceType => atom("resourcetype").await, SupportedLock => atom("supportedlock").await, - Extension(inner) => ctx.hook_propertyrequest(inner, xml).await, + Extension(inner) => inner.qwrite(xml).await, } } } -impl QuickWritable for ActiveLock { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { +impl QWrite for ActiveLock { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { // // // @@ -494,192 +447,193 @@ impl QuickWritable for ActiveLock { // http://example.com/workspace/webdav/proposal.doc // // - let start = ctx.create_dav_element("activelock"); + let start = xml.create_dav_element("activelock"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; - self.locktype.write(xml, ctx.child()).await?; - self.lockscope.write(xml, ctx.child()).await?; - self.depth.write(xml, ctx.child()).await?; + 
xml.q.write_event_async(Event::Start(start.clone())).await?; + self.locktype.qwrite(xml).await?; + self.lockscope.qwrite(xml).await?; + self.depth.qwrite(xml).await?; if let Some(owner) = &self.owner { - owner.write(xml, ctx.child()).await?; + owner.qwrite(xml).await?; } if let Some(timeout) = &self.timeout { - timeout.write(xml, ctx.child()).await?; + timeout.qwrite(xml).await?; } if let Some(locktoken) = &self.locktoken { - locktoken.write(xml, ctx.child()).await?; + locktoken.qwrite(xml).await?; } - self.lockroot.write(xml, ctx.child()).await?; - xml.write_event_async(Event::End(end)).await?; - - Ok(()) + self.lockroot.qwrite(xml).await?; + xml.q.write_event_async(Event::End(end)).await } } -impl QuickWritable for LockType { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let start = ctx.create_dav_element("locktype"); +impl QWrite for LockType { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("locktype"); let end = start.to_end(); - let ctx = ctx.child(); - xml.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Start(start.clone())).await?; match self { - Self::Write => xml.write_event_async(Event::Empty(ctx.create_dav_element("write"))).await?, + Self::Write => { + let empty_write = xml.create_dav_element("write"); + xml.q.write_event_async(Event::Empty(empty_write)).await? + }, }; - xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::End(end)).await } } -impl QuickWritable for LockScope { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let start = ctx.create_dav_element("lockscope"); +impl QWrite for LockScope { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("lockscope"); let end = start.to_end(); - let ctx = ctx.child(); - xml.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Start(start.clone())).await?; match self { - Self::Exclusive => xml.write_event_async(Event::Empty(ctx.create_dav_element("exclusive"))).await?, - Self::Shared => xml.write_event_async(Event::Empty(ctx.create_dav_element("shared"))).await?, + Self::Exclusive => { + let empty_tag = xml.create_dav_element("exclusive"); + xml.q.write_event_async(Event::Empty(empty_tag)).await? + }, + Self::Shared => { + let empty_tag = xml.create_dav_element("shared"); + xml.q.write_event_async(Event::Empty(empty_tag)).await? 
+ }, }; - xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::End(end)).await } } -impl QuickWritable for Owner { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let start = ctx.create_dav_element("owner"); +impl QWrite for Owner { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("owner"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Start(start.clone())).await?; match self { - Self::Txt(txt) => xml.write_event_async(Event::Text(BytesText::new(&txt))).await?, - Self::Href(href) => href.write(xml, ctx.child()).await?, + Self::Txt(txt) => xml.q.write_event_async(Event::Text(BytesText::new(&txt))).await?, + Self::Href(href) => href.qwrite(xml).await?, } - xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::End(end)).await } } -impl QuickWritable for Depth { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let start = ctx.create_dav_element("depth"); +impl QWrite for Depth { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("depth"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Start(start.clone())).await?; match self { - Self::Zero => xml.write_event_async(Event::Text(BytesText::new("0"))).await?, - Self::One => xml.write_event_async(Event::Text(BytesText::new("1"))).await?, - Self::Infinity => xml.write_event_async(Event::Text(BytesText::new("infinity"))).await?, + Self::Zero => xml.q.write_event_async(Event::Text(BytesText::new("0"))).await?, + Self::One => xml.q.write_event_async(Event::Text(BytesText::new("1"))).await?, + Self::Infinity => xml.q.write_event_async(Event::Text(BytesText::new("infinity"))).await?, }; - xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::End(end)).await } } -impl QuickWritable for Timeout { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let start = ctx.create_dav_element("timeout"); +impl QWrite for Timeout { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("timeout"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Start(start.clone())).await?; match self { Self::Seconds(count) => { let txt = format!("Second-{}", count); - xml.write_event_async(Event::Text(BytesText::new(&txt))).await? + xml.q.write_event_async(Event::Text(BytesText::new(&txt))).await? }, - Self::Infinite => xml.write_event_async(Event::Text(BytesText::new("Infinite"))).await? + Self::Infinite => xml.q.write_event_async(Event::Text(BytesText::new("Infinite"))).await? 
}; - xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::End(end)).await } } -impl QuickWritable for LockToken { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let start = ctx.create_dav_element("locktoken"); +impl QWrite for LockToken { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("locktoken"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; - self.0.write(xml, ctx.child()).await?; - xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::Start(start.clone())).await?; + self.0.qwrite(xml).await?; + xml.q.write_event_async(Event::End(end)).await } } -impl QuickWritable for LockRoot { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let start = ctx.create_dav_element("lockroot"); +impl QWrite for LockRoot { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("lockroot"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; - self.0.write(xml, ctx.child()).await?; - xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::Start(start.clone())).await?; + self.0.qwrite(xml).await?; + xml.q.write_event_async(Event::End(end)).await } } -impl QuickWritable for LockEntry { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let start = ctx.create_dav_element("lockentry"); +impl QWrite for LockEntry { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("lockentry"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; - self.lockscope.write(xml, ctx.child()).await?; - self.locktype.write(xml, ctx.child()).await?; - xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::Start(start.clone())).await?; + self.lockscope.qwrite(xml).await?; + self.locktype.qwrite(xml).await?; + xml.q.write_event_async(Event::End(end)).await } } -impl QuickWritable for Error { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let start = ctx.create_dav_element("error"); +impl QWrite for Error { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("error"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Start(start.clone())).await?; for violation in &self.0 { - violation.write(xml, ctx.child()).await?; + violation.qwrite(xml).await?; } - xml.write_event_async(Event::End(end)).await?; - - Ok(()) + xml.q.write_event_async(Event::End(end)).await } } -impl QuickWritable for Violation { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { +impl QWrite for Violation { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let mut atom = async |c| { + let empty_tag = xml.create_dav_element(c); + xml.q.write_event_async(Event::Empty(empty_tag)).await + }; + match self { - Violation::LockTokenMatchesRequestUri => xml.write_event_async(Event::Empty(ctx.create_dav_element("lock-token-matches-request-uri"))).await?, - Violation::LockTokenSubmitted(hrefs) if hrefs.is_empty() => { - xml.write_event_async(Event::Empty(ctx.create_dav_element("lock-token-submitted"))).await? 
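The `atom` helper used in PropertyRequest and Violation is an async closure over the borrowed writer, still an unstable feature at the time of this patch (so this presumably builds on a nightly toolchain); it only factors out the "empty namespaced element" case. An equivalent spelled as a free function, assuming this module's Writer and IWrite are in scope (the function name is illustrative):

    use quick_xml::events::Event;

    // Same effect as `atom("resourcetype").await`: emit <D:resourcetype/>.
    async fn empty_dav_element(
        xml: &mut Writer<impl IWrite>,
        name: &str,
    ) -> Result<(), quick_xml::Error> {
        let empty_tag = xml.create_dav_element(name);
        xml.q.write_event_async(Event::Empty(empty_tag)).await
    }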
- }, + Violation::LockTokenMatchesRequestUri => atom("lock-token-matches-request-uri").await, + Violation::LockTokenSubmitted(hrefs) if hrefs.is_empty() => atom("lock-token-submitted").await, Violation::LockTokenSubmitted(hrefs) => { - let start = ctx.create_dav_element("lock-token-submitted"); + let start = xml.create_dav_element("lock-token-submitted"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Start(start.clone())).await?; for href in hrefs { - href.write(xml, ctx.child()).await?; + href.qwrite(xml).await?; } - xml.write_event_async(Event::End(end)).await?; - }, - Violation::NoConflictingLock(hrefs) if hrefs.is_empty() => { - xml.write_event_async(Event::Empty(ctx.create_dav_element("no-conflicting-lock"))).await? + xml.q.write_event_async(Event::End(end)).await }, + Violation::NoConflictingLock(hrefs) if hrefs.is_empty() => atom("no-conflicting-lock").await, Violation::NoConflictingLock(hrefs) => { - let start = ctx.create_dav_element("no-conflicting-lock"); + let start = xml.create_dav_element("no-conflicting-lock"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Start(start.clone())).await?; for href in hrefs { - href.write(xml, ctx.child()).await?; + href.qwrite(xml).await?; } - xml.write_event_async(Event::End(end)).await?; - }, - Violation::NoExternalEntities => xml.write_event_async(Event::Empty(ctx.create_dav_element("no-external-entities"))).await?, - Violation::PreservedLiveProperties => xml.write_event_async(Event::Empty(ctx.create_dav_element("preserved-live-properties"))).await?, - Violation::PropfindFiniteDepth => xml.write_event_async(Event::Empty(ctx.create_dav_element("propfind-finite-depth"))).await?, - Violation::CannotModifyProtectedProperty => xml.write_event_async(Event::Empty(ctx.create_dav_element("cannot-modify-protected-property"))).await?, - Violation::Extension(inner) => { - ctx.hook_error(inner, xml).await?; + xml.q.write_event_async(Event::End(end)).await }, - }; - Ok(()) + Violation::NoExternalEntities => atom("no-external-entities").await, + Violation::PreservedLiveProperties => atom("preserved-live-properties").await, + Violation::PropfindFiniteDepth => atom("propfind-finite-depth").await, + Violation::CannotModifyProtectedProperty => atom("cannot-modify-protected-property").await, + Violation::Extension(inner) => inner.qwrite(xml).await, + } } } diff --git a/src/dav/error.rs b/src/dav/error.rs new file mode 100644 index 0000000..1db2895 --- /dev/null +++ b/src/dav/error.rs @@ -0,0 +1,20 @@ +use quick_xml::events::attributes::AttrError; + +#[derive(Debug)] +pub enum ParsingError { + NamespacePrefixAlreadyUsed, + WrongToken, + TagNotFound, + QuickXml(quick_xml::Error), + Eof +} +impl From for ParsingError { + fn from(value: AttrError) -> Self { + Self::QuickXml(value.into()) + } +} +impl From for ParsingError { + fn from(value: quick_xml::Error) -> Self { + Self::QuickXml(value) + } +} diff --git a/src/dav/mod.rs b/src/dav/mod.rs index 835544b..abc46e7 100644 --- a/src/dav/mod.rs +++ b/src/dav/mod.rs @@ -1,3 +1,5 @@ +mod error; +mod xml; mod types; mod caltypes; mod acltypes; @@ -5,6 +7,7 @@ mod versioningtypes; mod encoder; mod calencoder; mod decoder; +mod realization; use std::net::SocketAddr; diff --git a/src/dav/realization.rs b/src/dav/realization.rs new file mode 100644 index 0000000..22c9cfc --- /dev/null +++ b/src/dav/realization.rs @@ -0,0 +1,41 @@ +use super::types as dav; +use super::caltypes as 
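The two From impls in the new error.rs are there so that `?` works uniformly in the upcoming decoder: quick-xml already converts AttrError into quick_xml::Error, and both end up wrapped in ParsingError::QuickXml. A sketch of the intended call-site shape; the helper function is hypothetical, only the error plumbing matters:

    // Hypothetical helper: both quick_xml::Error and AttrError bubble up through
    // `?` and land in ParsingError::QuickXml thanks to the From impls above.
    fn count_attributes(bs: &quick_xml::events::BytesStart) -> Result<usize, ParsingError> {
        let mut seen = 0;
        for attr in bs.attributes() {
            let _attr = attr?; // each item is Result<Attribute, AttrError>
            seen += 1;
        }
        Ok(seen)
    }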
cal; +use super::xml; +use super::error; + +#[derive(Debug, PartialEq)] +pub struct Disabled(()); +impl xml::QRead for Disabled { + async fn qread(&self, xml: &mut xml::Reader) -> Result, error::ParsingError> { + unreachable!(); + } +} +impl xml::QWrite for Disabled { + async fn qwrite(&self, xml: &mut xml::Writer) -> Result<(), quick_xml::Error> { + unreachable!(); + } +} + +/// The base WebDAV +/// +/// Any extension is kooh is disabled through an object we can't build +/// due to a private inner element. +pub struct Core {} +impl dav::Extension for Core { + type Error = Disabled; + type Property = Disabled; + type PropertyRequest = Disabled; + type ResourceType = Disabled; +} + +/* +// WebDAV with the base Calendar implementation (RFC4791) +pub struct CalendarMin {} +impl dav::Extension for CalendarMin +{ + type Error = cal::Violation; + type Property = cal::Property; + type PropertyRequest = cal::PropertyRequest; + type ResourceType = cal::ResourceType; +} +*/ diff --git a/src/dav/types.rs b/src/dav/types.rs index a1b1c7f..f2eae3a 100644 --- a/src/dav/types.rs +++ b/src/dav/types.rs @@ -1,28 +1,20 @@ #![allow(dead_code)] +use std::fmt::Debug; use chrono::{DateTime,FixedOffset}; +use super::xml; +use super::error; -/// Extension utilities -pub struct Disabled(()); +/// It's how we implement a DAV extension +/// (That's the dark magic part...) +pub trait ExtensionItem = xml::QRead + xml::QWrite + Debug + PartialEq; pub trait Extension { - type Error; - type Property; - type PropertyRequest; - type ResourceType; + type Error: ExtensionItem; + type Property: ExtensionItem; + type PropertyRequest: ExtensionItem; + type ResourceType: ExtensionItem; } -/// No extension -pub struct NoExtension { - pub root: bool -} -impl Extension for NoExtension { - type Error = Disabled; - type Property = Disabled; - type PropertyRequest = Disabled; - type ResourceType = Disabled; -} - - /// 14.1. activelock XML Element /// /// Name: activelock @@ -30,6 +22,7 @@ impl Extension for NoExtension { /// Purpose: Describes a lock on a resource. /// +#[derive(Debug, PartialEq)] pub struct ActiveLock { pub lockscope: LockScope, pub locktype: LockType, @@ -50,6 +43,7 @@ pub struct ActiveLock { /// elements. /// /// +#[derive(Debug, PartialEq)] pub struct Collection{} /// 14.4 depth XML Element @@ -62,6 +56,7 @@ pub struct Collection{} /// Value: "0" | "1" | "infinity" /// /// +#[derive(Debug, PartialEq)] pub enum Depth { Zero, One, @@ -84,8 +79,10 @@ pub enum Depth { /// postcondition code. Unrecognized elements MUST be ignored. /// /// -pub struct Error(pub Vec>); -pub enum Violation { +#[derive(Debug, PartialEq)] +pub struct Error(pub Vec>); +#[derive(Debug, PartialEq)] +pub enum Violation { /// Name: lock-token-matches-request-uri /// /// Use with: 409 Conflict @@ -169,7 +166,7 @@ pub enum Violation { CannotModifyProtectedProperty, /// Specific errors - Extension(T::Error), + Extension(E::Error), } /// 14.6. exclusive XML Element @@ -179,6 +176,7 @@ pub enum Violation { /// Purpose: Specifies an exclusive lock. /// /// +#[derive(Debug, PartialEq)] pub struct Exclusive {} /// 14.7. href XML Element @@ -194,6 +192,7 @@ pub struct Exclusive {} /// Value: Simple-ref /// /// +#[derive(Debug, PartialEq)] pub struct Href(pub String); @@ -209,7 +208,8 @@ pub struct Href(pub String); /// standards. This element MUST NOT contain text or mixed content. /// /// -pub struct Include(pub Vec>); +#[derive(Debug, PartialEq)] +pub struct Include(pub Vec>); /// 14.9. 
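With Core plugged in, every generic DAV type still has its Extension(...) variant, but that variant now needs a Disabled value, and Disabled's field is private to this module, so callers simply cannot take the extension branch. A small illustration, assuming `use super::types as dav` and `use super::realization::Core` on the caller's side:

    // Only base-WebDAV variants can be built; dav::Violation::Extension(..)
    // would require a `Disabled`, which has no public constructor.
    fn delete_locked_error() -> dav::Error<Core> {
        dav::Error(vec![dav::Violation::LockTokenSubmitted(vec![
            dav::Href("/locked/".into()),
        ])])
    }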
location XML Element /// @@ -225,6 +225,7 @@ pub struct Include(pub Vec>); /// that would be used in a Location header. /// /// +#[derive(Debug, PartialEq)] pub struct Location(pub Href); /// 14.10. lockentry XML Element @@ -235,6 +236,7 @@ pub struct Location(pub Href); /// resource. /// /// +#[derive(Debug, PartialEq)] pub struct LockEntry { pub lockscope: LockScope, pub locktype: LockType, @@ -248,6 +250,7 @@ pub struct LockEntry { /// specify the type of lock the client wishes to have created. /// /// +#[derive(Debug, PartialEq)] pub struct LockInfo { pub lockscope: LockScope, pub locktype: LockType, @@ -266,6 +269,7 @@ pub struct LockInfo { /// values and the response to LOCK requests. /// /// +#[derive(Debug, PartialEq)] pub struct LockRoot(pub Href); /// 14.13. lockscope XML Element @@ -275,6 +279,7 @@ pub struct LockRoot(pub Href); /// Purpose: Specifies whether a lock is an exclusive lock, or a shared /// lock. /// +#[derive(Debug, PartialEq)] pub enum LockScope { Exclusive, Shared @@ -290,6 +295,7 @@ pub enum LockScope { /// refers to the lock. /// /// +#[derive(Debug, PartialEq)] pub struct LockToken(pub Href); /// 14.15. locktype XML Element @@ -300,6 +306,7 @@ pub struct LockToken(pub Href); /// specification only defines one lock type, the write lock. /// /// +#[derive(Debug, PartialEq)] pub enum LockType { /// 14.30. write XML Element /// @@ -325,8 +332,9 @@ pub enum LockType { /// response descriptions contained within the responses. /// /// -pub struct Multistatus { - pub responses: Vec>, +#[derive(Debug, PartialEq)] +pub struct Multistatus { + pub responses: Vec>, pub responsedescription: Option, } @@ -354,6 +362,7 @@ pub struct Multistatus { /// /// //@FIXME might need support for an extension +#[derive(Debug, PartialEq)] pub enum Owner { Txt(String), Href(Href), @@ -373,12 +382,17 @@ pub enum Owner { /// text or mixed content. /// /// -pub enum AnyProp { - Name(PropName), - Value(PropValue), +#[derive(Debug, PartialEq)] +pub enum AnyProp { + Name(PropName), + Value(PropValue), } -pub struct PropName(pub Vec>); -pub struct PropValue(pub Vec>); + +#[derive(Debug, PartialEq)] +pub struct PropName(pub Vec>); + +#[derive(Debug, PartialEq)] +pub struct PropValue(pub Vec>); /// 14.19. propertyupdate XML Element /// @@ -390,10 +404,13 @@ pub struct PropValue(pub Vec>); /// required to modify the properties on the resource. /// /// -pub struct PropertyUpdate(pub Vec>); -pub enum PropertyUpdateItem { - Remove(Remove), - Set(Set), +#[derive(Debug, PartialEq)] +pub struct PropertyUpdate(pub Vec>); + +#[derive(Debug, PartialEq)] +pub enum PropertyUpdateItem { + Remove(Remove), + Set(Set), } /// 14.2 allprop XML Element @@ -430,10 +447,11 @@ pub enum PropertyUpdateItem { /// values. /// /// -pub enum PropFind { +#[derive(Debug, PartialEq)] +pub enum PropFind { PropName, - AllProp(Option>), - Prop(PropName), + AllProp(Option>), + Prop(PropName), } /// 14.22 propstat XML Element @@ -451,10 +469,11 @@ pub enum PropFind { /// the properties named in 'prop'. /// /// -pub struct PropStat { - pub prop: AnyProp, +#[derive(Debug, PartialEq)] +pub struct PropStat { + pub prop: AnyProp, pub status: Status, - pub error: Option>, + pub error: Option>, pub responsedescription: Option, } @@ -471,7 +490,8 @@ pub struct PropStat { /// the names of properties to be removed are required. /// /// -pub struct Remove(pub PropName); +#[derive(Debug, PartialEq)] +pub struct Remove(pub PropName); /// 14.24. 
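Every container in this file now threads the extension parameter through to its children, so a concrete value is written with a turbofish on the outermost type and inference does the rest, exactly as the restored encoder tests do later in this series. For instance, a hypothetical PROPFIND "prop" body asking for two live properties, assuming `use super::realization::Core` (defined above in this patch):

    fn display_and_type() -> PropFind<Core> {
        PropFind::Prop(PropName(vec![
            PropertyRequest::DisplayName,
            PropertyRequest::ResourceType,
        ]))
    }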
response XML Element /// @@ -495,14 +515,17 @@ pub struct Remove(pub PropName); /// /// -pub enum StatusOrPropstat { +#[derive(Debug, PartialEq)] +pub enum StatusOrPropstat { Status(Status), - PropStat(Vec>), + PropStat(Vec>), } -pub struct Response { + +#[derive(Debug, PartialEq)] +pub struct Response { pub href: Href, // It's wrong according to the spec, but I don't understand why there is an href* - pub status_or_propstat: StatusOrPropstat, - pub error: Option>, + pub status_or_propstat: StatusOrPropstat, + pub error: Option>, pub responsedescription: Option, pub location: Option, } @@ -518,6 +541,7 @@ pub struct Response { /// user. /// /// +#[derive(Debug, PartialEq)] pub struct ResponseDescription(pub String); /// 14.26. set XML Element @@ -536,7 +560,8 @@ pub struct ResponseDescription(pub String); /// property, and MUST be subsequently retrievable using PROPFIND. /// /// -pub struct Set(pub PropValue); +#[derive(Debug, PartialEq)] +pub struct Set(pub PropValue); /// 14.27. shared XML Element /// @@ -546,6 +571,7 @@ pub struct Set(pub PropValue); /// /// /// +#[derive(Debug, PartialEq)] pub struct Shared {} @@ -559,6 +585,7 @@ pub struct Shared {} /// /// //@FIXME: Better typing is possible with an enum for example +#[derive(Debug, PartialEq)] pub struct Status(pub http::status::StatusCode); /// 14.29. timeout XML Element @@ -586,6 +613,7 @@ pub struct Status(pub http::status::StatusCode); /// elapse between granting of the lock at the server, and the automatic /// removal of the lock. The timeout value for TimeType "Second" MUST /// NOT be greater than 2^32-1. +#[derive(Debug, PartialEq)] pub enum Timeout { Seconds(u32), Infinite, @@ -619,7 +647,8 @@ pub enum Timeout { /// the header value could include LWS as defined in [RFC2616], Section /// 4.2. Server implementors SHOULD strip LWS from these values before /// using as WebDAV property values. -pub enum PropertyRequest { +#[derive(Debug, PartialEq)] +pub enum PropertyRequest { CreationDate, DisplayName, GetContentLanguage, @@ -630,9 +659,11 @@ pub enum PropertyRequest { LockDiscovery, ResourceType, SupportedLock, - Extension(T::PropertyRequest), + Extension(E::PropertyRequest), } -pub enum Property { + +#[derive(Debug, PartialEq)] +pub enum Property { /// 15.1. creationdate Property /// /// Name: creationdate @@ -883,7 +914,7 @@ pub enum Property { /// /// /// - ResourceType(Vec>), + ResourceType(Vec>), /// 15.10. 
supportedlock Property /// @@ -911,10 +942,11 @@ pub enum Property { SupportedLock(Vec), /// Any extension - Extension(T::Property), + Extension(E::Property), } -pub enum ResourceType { +#[derive(Debug, PartialEq)] +pub enum ResourceType { Collection, - Extension(T::ResourceType), + Extension(E::ResourceType), } diff --git a/src/dav/xml.rs b/src/dav/xml.rs new file mode 100644 index 0000000..777f99e --- /dev/null +++ b/src/dav/xml.rs @@ -0,0 +1,128 @@ +use std::collections::HashMap; +use tokio::io::{AsyncWrite, AsyncBufRead}; +use quick_xml::events::{Event, BytesEnd, BytesStart, BytesText}; +use quick_xml::name::{Namespace, QName, PrefixDeclaration, ResolveResult, ResolveResult::*}; +use quick_xml::reader::NsReader; + +use super::error::ParsingError; + +// Async traits +pub trait IWrite = AsyncWrite + Unpin; +pub trait IRead = AsyncBufRead + Unpin + 'static; + +// Serialization/Deserialization traits +pub trait QWrite { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), quick_xml::Error>; +} +pub trait QRead { + async fn qread(&self, xml: &mut Reader) -> Result, ParsingError>; +} + +/// Transform a Rust object into an XML stream of characters +pub struct Writer { + pub q: quick_xml::writer::Writer, + root: bool, +} +impl Writer { + pub fn create_dav_element(&mut self, name: &str) -> BytesStart<'static> { + self.create_ns_element("D", name) + } + pub fn create_cal_element(&mut self, name: &str) -> BytesStart<'static> { + self.create_ns_element("C", name) + } + + fn create_ns_element(&mut self, ns: &str, name: &str) -> BytesStart<'static> { + let mut start = BytesStart::new(format!("{}:{}", ns, name)); + //@FIXME not what we want + if self.root { + start.push_attribute(("xmlns:D", "DAV:")); + start.push_attribute(("xmlns:C", "urn:ietf:params:xml:ns:caldav")); + self.root = false; + } + start + } +} + +/// Transform an XML stream of characters into a Rust object +pub struct Reader { + evt: Event<'static>, + rdr: NsReader, + buf: Vec, +} +impl Reader { + async fn new(mut rdr: NsReader) -> Result { + let mut buf: Vec = vec![]; + let evt = rdr.read_event_into_async(&mut buf).await?.into_owned(); + buf.clear(); + Ok(Self { evt, rdr, buf }) + } + + fn peek(&self) -> &Event<'static> { + &self.evt + } + + /// skip tag. Can't skip end, can't skip eof. 
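In the source, Writer and Reader are generic over the IWrite / IRead aliases (trait aliases are themselves unstable, another hint this builds on nightly). To give a feel for how QWrite is meant to be implemented against the wrapper, here is a sketch for an invented single-text element; the real implementations are the ones rewritten in encoder.rs and calencoder.rs:

    use quick_xml::events::{BytesText, Event};

    // Invented example type, not part of this patch.
    struct Nickname(String);

    impl QWrite for Nickname {
        async fn qwrite(&self, xml: &mut Writer<impl IWrite>) -> Result<(), quick_xml::Error> {
            let start = xml.create_dav_element("nickname");
            let end = start.to_end();
            xml.q.write_event_async(Event::Start(start.clone())).await?;
            xml.q.write_event_async(Event::Text(BytesText::new(&self.0))).await?;
            xml.q.write_event_async(Event::End(end)).await
        }
    }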
+ async fn skip(&mut self) -> Result, ParsingError> { + match &self.evt { + Event::Start(b) => { + let _span = self.rdr.read_to_end_into_async(b.to_end().name(), &mut self.buf).await?; + self.next().await + }, + Event::End(_) => Err(ParsingError::WrongToken), + Event::Eof => Err(ParsingError::Eof), + _ => self.next().await, + } + } + + /// read one more tag + async fn next(&mut self) -> Result, ParsingError> { + let evt = self.rdr.read_event_into_async(&mut self.buf).await?.into_owned(); + self.buf.clear(); + let old_evt = std::mem::replace(&mut self.evt, evt); + Ok(old_evt) + } + + + /// check if this is the desired tag + fn is_tag(&self, ns: &[u8], key: &str) -> bool { + let qname = match self.peek() { + Event::Start(bs) | Event::Empty(bs) => bs.name(), + Event::End(be) => be.name(), + _ => return false, + }; + + let (extr_ns, local) = self.rdr.resolve_element(qname); + + if local.into_inner() != key.as_bytes() { + return false + } + + match extr_ns { + ResolveResult::Bound(v) => v.into_inner() == ns, + _ => false, + } + } + + /// find start tag + async fn tag_start(&mut self, ns: &[u8], key: &str) -> Result, ParsingError> { + loop { + match self.peek() { + Event::Start(b) if self.is_tag(ns, key) => break, + _ => { self.skip().await?; }, + } + } + self.next().await + } + + // find stop tag + async fn tag_stop(&mut self, ns: &[u8], key: &str) -> Result, ParsingError> { + loop { + match self.peek() { + Event::End(b) if self.is_tag(ns, key) => break, + _ => { self.skip().await?; }, + } + } + self.next().await + } +} + -- cgit v1.2.3 From f376e88c7327657e005be1ff48b8a9210d15d954 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Tue, 5 Mar 2024 16:26:15 +0100 Subject: Restored WebDAV encoder tests --- src/dav/encoder.rs | 54 ++++++++++++++++++++++-------------------------------- src/dav/xml.rs | 11 ++++------- 2 files changed, 26 insertions(+), 39 deletions(-) diff --git a/src/dav/encoder.rs b/src/dav/encoder.rs index 745c396..c0a5332 100644 --- a/src/dav/encoder.rs +++ b/src/dav/encoder.rs @@ -640,16 +640,20 @@ impl QWrite for Violation { #[cfg(test)] mod tests { use super::*; + use crate::dav::realization::Core; use tokio::io::AsyncWriteExt; /// To run only the unit tests and avoid the behavior ones: /// cargo test --bin aerogramme - async fn serialize>(ctx: C, elem: &Q) -> String { + async fn serialize(elem: &impl QWrite) -> String { let mut buffer = Vec::new(); let mut tokio_buffer = tokio::io::BufWriter::new(&mut buffer); - let mut writer = Writer::new_with_indent(&mut tokio_buffer, b' ', 4); - elem.write(&mut writer, ctx).await.expect("xml serialization"); + let q = quick_xml::writer::Writer::new_with_indent(&mut tokio_buffer, b' ', 4); + let ns_to_apply = vec![ ("xmlns:D".into(), "DAV:".into()) ]; + let mut writer = Writer { q, ns_to_apply }; + + elem.qwrite(&mut writer).await.expect("xml serialization"); tokio_buffer.flush().await.expect("tokio buffer flush"); let got = std::str::from_utf8(buffer.as_slice()).unwrap(); @@ -660,20 +664,17 @@ mod tests { async fn basic_href() { let got = serialize( - NoExtension { root: false }, &Href("/SOGo/dav/so/".into()) ).await; - let expected = "/SOGo/dav/so/"; + let expected = r#"/SOGo/dav/so/"#; - assert_eq!(&got, expected); + assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); } - #[tokio::test] async fn basic_multistatus() { let got = serialize( - NoExtension { root: true }, - &Multistatus { + &Multistatus:: { responses: vec![], responsedescription: Some(ResponseDescription("Hello world".into())) }, @@ -683,15 +684,14 
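The Reader added in xml.rs keeps one event of lookahead in `evt` plus a reusable buffer, and tag_start / tag_stop mean "scan forward, skipping whole unwanted subtrees, until the requested tag in the given namespace is current". A rough usage sketch, staying inside that module since these helpers are not public; it leans on tokio's AsyncBufRead impl for byte slices:

    async fn find_propfind() -> Result<(), ParsingError> {
        let doc = br#"<?xml version="1.0"?><D:propfind xmlns:D="DAV:"><D:propname/></D:propfind>"#;
        let mut rdr = Reader::new(NsReader::from_reader(&doc[..])).await?;

        // Lands just past <D:propfind>, skipping the XML declaration.
        let _start = rdr.tag_start(b"DAV:", "propfind").await?;
        // ... per-field decoding would go here in a QRead implementation ...
        let _end = rdr.tag_stop(b"DAV:", "propfind").await?;
        Ok(())
    }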
@@ mod tests { Hello world "#; - assert_eq!(&got, expected); + assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); } #[tokio::test] async fn rfc_error_delete_locked() { let got = serialize( - NoExtension { root: true }, - &Error(vec![ + &Error::(vec![ Violation::LockTokenSubmitted(vec![ Href("/locked/".into()) ]) @@ -704,28 +704,26 @@ mod tests { "#; - assert_eq!(&got, expected); + assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); } #[tokio::test] async fn rfc_propname_req() { let got = serialize( - NoExtension { root: true }, - &PropFind::PropName, + &PropFind::::PropName, ).await; let expected = r#" "#; - assert_eq!(&got, expected); + assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); } #[tokio::test] async fn rfc_propname_res() { let got = serialize( - NoExtension { root: true }, - &Multistatus { + &Multistatus:: { responses: vec![ Response { href: Href("http://www.example.com/container/".into()), @@ -808,8 +806,7 @@ mod tests { #[tokio::test] async fn rfc_allprop_req() { let got = serialize( - NoExtension { root: true }, - &PropFind::AllProp(None), + &PropFind::::AllProp(None), ).await; let expected = r#" @@ -823,8 +820,7 @@ mod tests { async fn rfc_allprop_res() { use chrono::{DateTime,FixedOffset,TimeZone}; let got = serialize( - NoExtension { root: true }, - &Multistatus { + &Multistatus:: { responses: vec![ Response { href: Href("/container/".into()), @@ -966,12 +962,10 @@ mod tests { assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); } - #[tokio::test] async fn rfc_allprop_include() { let got = serialize( - NoExtension { root: true }, - &PropFind::AllProp(Some(Include(vec![ + &PropFind::::AllProp(Some(Include(vec![ PropertyRequest::DisplayName, PropertyRequest::ResourceType, ]))), @@ -991,8 +985,7 @@ mod tests { #[tokio::test] async fn rfc_propertyupdate() { let got = serialize( - NoExtension { root: true }, - &PropertyUpdate(vec![ + &PropertyUpdate::(vec![ PropertyUpdateItem::Set(Set(PropValue(vec![ Property::GetContentLanguage("fr-FR".into()), ]))), @@ -1021,8 +1014,7 @@ mod tests { #[tokio::test] async fn rfc_delete_locked2() { let got = serialize( - NoExtension { root: true }, - &Multistatus { + &Multistatus:: { responses: vec![Response { href: Href("http://www.example.com/container/resource3".into()), status_or_propstat: StatusOrPropstat::Status(Status(http::status::StatusCode::from_u16(423).unwrap())), @@ -1050,7 +1042,6 @@ mod tests { #[tokio::test] async fn rfc_simple_lock_request() { let got = serialize( - NoExtension { root: true }, &LockInfo { lockscope: LockScope::Exclusive, locktype: LockType::Write, @@ -1076,8 +1067,7 @@ mod tests { #[tokio::test] async fn rfc_simple_lock_response() { let got = serialize( - NoExtension { root: true }, - &PropValue(vec![ + &PropValue::(vec![ Property::LockDiscovery(vec![ActiveLock { lockscope: LockScope::Exclusive, locktype: LockType::Write, diff --git a/src/dav/xml.rs b/src/dav/xml.rs index 777f99e..495c9a5 100644 --- a/src/dav/xml.rs +++ b/src/dav/xml.rs @@ -1,4 +1,3 @@ -use std::collections::HashMap; use tokio::io::{AsyncWrite, AsyncBufRead}; use quick_xml::events::{Event, BytesEnd, BytesStart, BytesText}; use quick_xml::name::{Namespace, QName, PrefixDeclaration, ResolveResult, ResolveResult::*}; @@ -21,7 +20,7 @@ pub trait QRead { /// Transform a Rust object into an XML stream of characters pub struct Writer { pub q: quick_xml::writer::Writer, - root: bool, + pub ns_to_apply: Vec<(String, String)>, } impl Writer { pub fn 
create_dav_element(&mut self, name: &str) -> BytesStart<'static> { @@ -33,11 +32,9 @@ impl Writer { fn create_ns_element(&mut self, ns: &str, name: &str) -> BytesStart<'static> { let mut start = BytesStart::new(format!("{}:{}", ns, name)); - //@FIXME not what we want - if self.root { - start.push_attribute(("xmlns:D", "DAV:")); - start.push_attribute(("xmlns:C", "urn:ietf:params:xml:ns:caldav")); - self.root = false; + if !self.ns_to_apply.is_empty() { + start.extend_attributes(self.ns_to_apply.iter().map(|(k, n)| (k.as_str(), n.as_str()))); + self.ns_to_apply.clear() } start } -- cgit v1.2.3 From 8fec92a0868b2a2fd31edbf12d6896a974913111 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Tue, 5 Mar 2024 18:02:43 +0100 Subject: Re-enable calendar encoder --- src/dav/calencoder.rs | 569 ++++++++++++++++++++++++-------------------------- src/dav/caltypes.rs | 5 +- 2 files changed, 278 insertions(+), 296 deletions(-) diff --git a/src/dav/calencoder.rs b/src/dav/calencoder.rs index 80b8656..01cecc3 100644 --- a/src/dav/calencoder.rs +++ b/src/dav/calencoder.rs @@ -1,141 +1,94 @@ -/* -use super::encoder::{QuickWritable, Context}; -use super::caltypes::*; -use super::types::Extension; - use quick_xml::Error as QError; use quick_xml::events::{Event, BytesEnd, BytesStart, BytesText}; -use quick_xml::writer::{ElementWriter, Writer}; use quick_xml::name::PrefixDeclaration; use tokio::io::AsyncWrite; -const ICAL_DATETIME_FMT: &str = "%Y%m%dT%H%M%SZ"; - -// =============== Calendar Trait =========================== -pub trait CalContext: Context { - fn create_cal_element(&self, name: &str) -> BytesStart; -} - -// =============== CalDAV Extension Setup =================== -impl Context for CalExtension { - fn child(&self) -> Self { - Self { root: false } - } - fn create_dav_element(&self, name: &str) -> BytesStart { - self.create_ns_element("D", name) - } - - async fn hook_error(&self, err: &Violation, xml: &mut Writer) -> Result<(), QError> { - err.write(xml, self.child()).await - } - - async fn hook_property(&self, prop: &Self::Property, xml: &mut Writer) -> Result<(), QError> { - prop.write(xml, self.child()).await - } - - async fn hook_resourcetype(&self, restype: &Self::ResourceType, xml: &mut Writer) -> Result<(), QError> { - restype.write(xml, self.child()).await - } - - async fn hook_propertyrequest(&self, propreq: &Self::PropertyRequest, xml: &mut Writer) -> Result<(), QError> { - propreq.write(xml, self.child()).await - } -} - -impl CalContext for CalExtension { - fn create_cal_element(&self, name: &str) -> BytesStart { - self.create_ns_element("C", name) - } -} +use super::caltypes::*; +use super::xml::{QWrite, IWrite, Writer}; +use super::types::Extension; -impl CalExtension { - fn create_ns_element(&self, ns: &str, name: &str) -> BytesStart { - let mut start = BytesStart::new(format!("{}:{}", ns, name)); - if self.root { - start.push_attribute(("xmlns:D", "DAV:")); - start.push_attribute(("xmlns:C", "urn:ietf:params:xml:ns:caldav")); - } - start - } -} +const ICAL_DATETIME_FMT: &str = "%Y%m%dT%H%M%SZ"; // ==================== Calendar Types Serialization ========================= // -------------------- MKCALENDAR METHOD ------------------------------------ -impl QuickWritable for MkCalendar { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let start = ctx.create_cal_element("mkcalendar"); +impl QWrite for MkCalendar { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_cal_element("mkcalendar"); let end = start.to_end(); 
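With ns_to_apply, the namespace declarations are attached to whichever element happens to be created first and the list is then cleared, so exactly one element (the document root in practice) carries the xmlns attributes. Declaring both the DAV: and the CalDAV namespaces would look roughly like the test helper above with one more pair; this is a sketch, the CalDAV entry is an illustrative addition rather than something this commit configures:

    let q = quick_xml::writer::Writer::new_with_indent(&mut tokio_buffer, b' ', 4);
    let ns_to_apply = vec![
        ("xmlns:D".into(), "DAV:".into()),
        ("xmlns:C".into(), "urn:ietf:params:xml:ns:caldav".into()),
    ];
    let mut writer = Writer { q, ns_to_apply };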
- xml.write_event_async(Event::Start(start.clone())).await?; - self.0.write(xml, ctx.child()).await?; - xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::Start(start.clone())).await?; + self.0.qwrite(xml).await?; + xml.q.write_event_async(Event::End(end)).await } } -impl QuickWritable for MkCalendarResponse { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let start = ctx.create_cal_element("mkcalendar-response"); +impl QWrite for MkCalendarResponse { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_cal_element("mkcalendar-response"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Start(start.clone())).await?; for propstat in self.0.iter() { - propstat.write(xml, ctx.child()).await?; + propstat.qwrite(xml).await?; } - xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::End(end)).await } } // ----------------------- REPORT METHOD ------------------------------------- -impl QuickWritable for CalendarQuery { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let start = ctx.create_cal_element("calendar-query"); +impl QWrite for CalendarQuery { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_cal_element("calendar-query"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Start(start.clone())).await?; if let Some(selector) = &self.selector { - selector.write(xml, ctx.child()).await?; + selector.qwrite(xml).await?; } - self.filter.write(xml, ctx.child()).await?; + self.filter.qwrite(xml).await?; if let Some(tz) = &self.timezone { - tz.write(xml, ctx.child()).await?; + tz.qwrite(xml).await?; } - xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::End(end)).await } } -impl QuickWritable for CalendarMultiget { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let start = ctx.create_cal_element("calendar-multiget"); +impl QWrite for CalendarMultiget { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_cal_element("calendar-multiget"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Start(start.clone())).await?; if let Some(selector) = &self.selector { - selector.write(xml, ctx.child()).await?; + selector.qwrite(xml).await?; } for href in self.href.iter() { - href.write(xml, ctx.child()).await?; + href.qwrite(xml).await?; } - xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::End(end)).await } } -impl QuickWritable for FreeBusyQuery { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let start = ctx.create_cal_element("free-busy-query"); +impl QWrite for FreeBusyQuery { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_cal_element("free-busy-query"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; - self.0.write(xml, ctx.child()).await?; - xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::Start(start.clone())).await?; + self.0.qwrite(xml).await?; + xml.q.write_event_async(Event::End(end)).await } } // -------------------------- DAV::prop -------------------------------------- -impl QuickWritable for PropertyRequest { - async fn write(&self, xml: 
&mut Writer, ctx: C) -> Result<(), QError> { - let mut atom = async |c| xml.write_event_async(Event::Empty(ctx.create_cal_element(c))).await; +impl QWrite for PropertyRequest { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let mut atom = async |c| { + let empty_tag = xml.create_cal_element(c); + xml.q.write_event_async(Event::Empty(empty_tag)).await + }; match self { Self::CalendarDescription => atom("calendar-description").await, @@ -148,128 +101,137 @@ impl QuickWritable for PropertyRequest { Self::MaxInstances => atom("max-instances").await, Self::MaxAttendeesPerInstance => atom("max-attendees-per-instance").await, Self::SupportedCollationSet => atom("supported-collation-set").await, - Self::CalendarData(req) => req.write(xml, ctx).await, + Self::CalendarData(req) => req.qwrite(xml).await, } } } -impl QuickWritable for Property { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { +impl QWrite for Property { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { match self { Self::CalendarDescription { lang, text } => { - let mut start = ctx.create_cal_element("calendar-description"); + let mut start = xml.create_cal_element("calendar-description"); if let Some(the_lang) = lang { start.push_attribute(("xml:lang", the_lang.as_str())); } let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; - xml.write_event_async(Event::Text(BytesText::new(text))).await?; - xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Text(BytesText::new(text))).await?; + xml.q.write_event_async(Event::End(end)).await }, Self::CalendarTimezone(payload) => { - let start = ctx.create_cal_element("calendar-timezone"); + let start = xml.create_cal_element("calendar-timezone"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; - xml.write_event_async(Event::Text(BytesText::new(payload))).await?; - xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Text(BytesText::new(payload))).await?; + xml.q.write_event_async(Event::End(end)).await }, Self::SupportedCalendarComponentSet(many_comp) => { - let start = ctx.create_cal_element("supported-calendar-component-set"); + let start = xml.create_cal_element("supported-calendar-component-set"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Start(start.clone())).await?; for comp in many_comp.iter() { - comp.write(xml, ctx.child()).await?; + comp.qwrite(xml).await?; } - xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::End(end)).await }, Self::SupportedCalendarData(many_mime) => { - let start = ctx.create_cal_element("supported-calendar-data"); + let start = xml.create_cal_element("supported-calendar-data"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Start(start.clone())).await?; for mime in many_mime.iter() { - mime.write(xml, ctx.child()).await?; + mime.qwrite(xml).await?; } - xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::End(end)).await }, Self::MaxResourceSize(bytes) => { - let start = ctx.create_cal_element("max-resource-size"); + let start = xml.create_cal_element("max-resource-size"); let end = start.to_end(); - 
xml.write_event_async(Event::Start(start.clone())).await?; - xml.write_event_async(Event::Text(BytesText::new(bytes.to_string().as_str()))).await?; - xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Text(BytesText::new(bytes.to_string().as_str()))).await?; + xml.q.write_event_async(Event::End(end)).await }, Self::MinDateTime(dt) => { - let start = ctx.create_cal_element("min-date-time"); + let start = xml.create_cal_element("min-date-time"); let end = start.to_end(); let dtstr = format!("{}", dt.format(ICAL_DATETIME_FMT)); - xml.write_event_async(Event::Start(start.clone())).await?; - xml.write_event_async(Event::Text(BytesText::new(dtstr.as_str()))).await?; - xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Text(BytesText::new(dtstr.as_str()))).await?; + xml.q.write_event_async(Event::End(end)).await }, Self::MaxDateTime(dt) => { - let start = ctx.create_cal_element("max-date-time"); + let start = xml.create_cal_element("max-date-time"); let end = start.to_end(); let dtstr = format!("{}", dt.format(ICAL_DATETIME_FMT)); - xml.write_event_async(Event::Start(start.clone())).await?; - xml.write_event_async(Event::Text(BytesText::new(dtstr.as_str()))).await?; - xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Text(BytesText::new(dtstr.as_str()))).await?; + xml.q.write_event_async(Event::End(end)).await }, Self::MaxInstances(count) => { - let start = ctx.create_cal_element("max-instances"); + let start = xml.create_cal_element("max-instances"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; - xml.write_event_async(Event::Text(BytesText::new(count.to_string().as_str()))).await?; - xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Text(BytesText::new(count.to_string().as_str()))).await?; + xml.q.write_event_async(Event::End(end)).await }, Self::MaxAttendeesPerInstance(count) => { - let start = ctx.create_cal_element("max-attendees-per-instance"); + let start = xml.create_cal_element("max-attendees-per-instance"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; - xml.write_event_async(Event::Text(BytesText::new(count.to_string().as_str()))).await?; - xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Text(BytesText::new(count.to_string().as_str()))).await?; + xml.q.write_event_async(Event::End(end)).await }, Self::SupportedCollationSet(many_collations) => { - let start = ctx.create_cal_element("supported-collation-set"); + let start = xml.create_cal_element("supported-collation-set"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Start(start.clone())).await?; for collation in many_collations.iter() { - collation.write(xml, ctx.child()).await?; + collation.qwrite(xml).await?; } - xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::End(end)).await }, - Self::CalendarData(inner) => inner.write(xml, ctx).await, + Self::CalendarData(inner) => inner.qwrite(xml).await, } } } // ---------------------- DAV::resourcetype ---------------------------------- -impl QuickWritable for 
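min-date-time and max-date-time above, as well as the expand / limit-recurrence-set / limit-freebusy-set attributes further down, all go through ICAL_DATETIME_FMT, the compact UTC form CalDAV uses. A quick self-contained check of what that chrono format string produces (the date is just the RFC example value seen earlier in getlastmodified):

    use chrono::{TimeZone, Utc};

    fn ical_datetime_format_example() {
        let dt = Utc.with_ymd_and_hms(1998, 1, 12, 9, 25, 56).unwrap();
        assert_eq!(dt.format("%Y%m%dT%H%M%SZ").to_string(), "19980112T092556Z");
    }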
ResourceType { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { +impl QWrite for ResourceType { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { match self { - Self::Calendar => xml.write_event_async(Event::Empty(ctx.create_dav_element("calendar"))).await, + Self::Calendar => { + let empty_tag = xml.create_dav_element("calendar"); + xml.q.write_event_async(Event::Empty(empty_tag)).await + }, } } } // --------------------------- DAV::error ------------------------------------ -impl QuickWritable for Violation { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let mut atom = async |c| xml.write_event_async(Event::Empty(ctx.create_cal_element(c))).await; +impl QWrite for Violation { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let mut atom = async |c| { + let empty_tag = xml.create_cal_element(c); + xml.q.write_event_async(Event::Empty(empty_tag)).await + }; match self { //@FIXME // DAV elements, should not be here but in RFC3744 on ACLs // (we do not use atom as this error is in the DAV namespace, not the caldav one) - Self::NeedPrivileges => xml.write_event_async(Event::Empty(ctx.create_dav_element("need-privileges"))).await, + Self::NeedPrivileges => { + let empty_tag = xml.create_dav_element("need-privileges"); + xml.q.write_event_async(Event::Empty(empty_tag)).await + }, // Regular CalDAV errors Self::ResourceMustBeNull => atom("resource-must-be-null").await, @@ -280,12 +242,12 @@ impl QuickWritable for Violation { Self::ValidCalendarObjectResource => atom("valid-calendar-object-resource").await, Self::SupportedCalendarComponent => atom("supported-calendar-component").await, Self::NoUidConflict(href) => { - let start = ctx.create_cal_element("no-uid-conflict"); + let start = xml.create_cal_element("no-uid-conflict"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; - href.write(xml, ctx.child()).await?; - xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::Start(start.clone())).await?; + href.qwrite(xml).await?; + xml.q.write_event_async(Event::End(end)).await }, Self::MaxResourceSize => atom("max-resource-size").await, Self::MinDateTime => atom("min-date-time").await, @@ -294,20 +256,20 @@ impl QuickWritable for Violation { Self::MaxAttendeesPerInstance => atom("max-attendees-per-instance").await, Self::ValidFilter => atom("valid-filter").await, Self::SupportedFilter { comp, prop, param } => { - let start = ctx.create_cal_element("supported-filter"); + let start = xml.create_cal_element("supported-filter"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Start(start.clone())).await?; for comp_item in comp.iter() { - comp_item.write(xml, ctx.child()).await?; + comp_item.qwrite(xml).await?; } for prop_item in prop.iter() { - prop_item.write(xml, ctx.child()).await?; + prop_item.qwrite(xml).await?; } for param_item in param.iter() { - param_item.write(xml, ctx.child()).await?; + param_item.qwrite(xml).await?; } - xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::End(end)).await }, Self::NumberOfMatchesWithinLimits => atom("number-of-matches-within-limits").await, } @@ -316,112 +278,115 @@ impl QuickWritable for Violation { // ---------------------------- Inner XML ------------------------------------ -impl QuickWritable for SupportedCollation { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let start = 
ctx.create_cal_element("supported-collation"); +impl QWrite for SupportedCollation { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_cal_element("supported-collation"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; - self.0.write(xml, ctx.child()).await?; - xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::Start(start.clone())).await?; + self.0.qwrite(xml).await?; + xml.q.write_event_async(Event::End(end)).await } } -impl QuickWritable for Collation { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { +impl QWrite for Collation { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { let col = match self { Self::AsciiCaseMap => "i;ascii-casemap", Self::Octet => "i;octet", Self::Unknown(v) => v.as_str(), }; - xml.write_event_async(Event::Text(BytesText::new(col))).await + xml.q.write_event_async(Event::Text(BytesText::new(col))).await } } -impl QuickWritable for CalendarDataPayload { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let mut start = ctx.create_cal_element("calendar-data"); +impl QWrite for CalendarDataPayload { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let mut start = xml.create_cal_element("calendar-data"); if let Some(mime) = &self.mime { start.push_attribute(("content-type", mime.content_type.as_str())); start.push_attribute(("version", mime.version.as_str())); } let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; - xml.write_event_async(Event::Text(BytesText::new(self.payload.as_str()))).await?; - xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Text(BytesText::new(self.payload.as_str()))).await?; + xml.q.write_event_async(Event::End(end)).await } } -impl QuickWritable for CalendarDataRequest { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let mut start = ctx.create_cal_element("calendar-data"); +impl QWrite for CalendarDataRequest { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let mut start = xml.create_cal_element("calendar-data"); if let Some(mime) = &self.mime { start.push_attribute(("content-type", mime.content_type.as_str())); start.push_attribute(("version", mime.version.as_str())); } let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Start(start.clone())).await?; if let Some(comp) = &self.comp { - comp.write(xml, ctx.child()).await?; + comp.qwrite(xml).await?; } if let Some(recurrence) = &self.recurrence { - recurrence.write(xml, ctx.child()).await?; + recurrence.qwrite(xml).await?; } if let Some(freebusy) = &self.limit_freebusy_set { - freebusy.write(xml, ctx.child()).await?; + freebusy.qwrite(xml).await?; } - xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::End(end)).await } } -impl QuickWritable for CalendarDataEmpty { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let mut empty = ctx.create_cal_element("calendar-data"); +impl QWrite for CalendarDataEmpty { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let mut empty = xml.create_cal_element("calendar-data"); if let Some(mime) = &self.0 { empty.push_attribute(("content-type", mime.content_type.as_str())); empty.push_attribute(("version", mime.version.as_str())); } - 
xml.write_event_async(Event::Empty(empty)).await + xml.q.write_event_async(Event::Empty(empty)).await } } -impl QuickWritable for Comp { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let mut start = ctx.create_cal_element("comp"); +impl QWrite for Comp { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let mut start = xml.create_cal_element("comp"); start.push_attribute(("name", self.name.as_str())); match &self.additional_rules { - None => xml.write_event_async(Event::Empty(start)).await, + None => xml.q.write_event_async(Event::Empty(start)).await, Some(rules) => { let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; - rules.prop_kind.write(xml, ctx.child()).await?; - rules.comp_kind.write(xml, ctx.child()).await?; - xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::Start(start.clone())).await?; + rules.prop_kind.qwrite(xml).await?; + rules.comp_kind.qwrite(xml).await?; + xml.q.write_event_async(Event::End(end)).await }, } } } -impl QuickWritable for CompSupport { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let mut empty = ctx.create_cal_element("comp"); +impl QWrite for CompSupport { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let mut empty = xml.create_cal_element("comp"); empty.push_attribute(("name", self.0.as_str())); - xml.write_event_async(Event::Empty(empty)).await + xml.q.write_event_async(Event::Empty(empty)).await } } -impl QuickWritable for CompKind { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { +impl QWrite for CompKind { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { match self { - Self::AllComp => xml.write_event_async(Event::Empty(ctx.create_cal_element("allcomp"))).await, + Self::AllComp => { + let empty_tag = xml.create_cal_element("allcomp"); + xml.q.write_event_async(Event::Empty(empty_tag)).await + }, Self::Comp(many_comp) => { for comp in many_comp.iter() { // Required: recursion in an async fn requires boxing // rustc --explain E0733 - Box::pin(comp.write(xml, ctx.child())).await?; + Box::pin(comp.qwrite(xml)).await?; } Ok(()) } @@ -429,13 +394,16 @@ impl QuickWritable for CompKind { } } -impl QuickWritable for PropKind { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { +impl QWrite for PropKind { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { match self { - Self::AllProp => xml.write_event_async(Event::Empty(ctx.create_cal_element("allprop"))).await, + Self::AllProp => { + let empty_tag = xml.create_cal_element("allprop"); + xml.q.write_event_async(Event::Empty(empty_tag)).await + }, Self::Prop(many_prop) => { for prop in many_prop.iter() { - prop.write(xml, ctx.child()).await?; + prop.qwrite(xml).await?; } Ok(()) } @@ -443,163 +411,175 @@ impl QuickWritable for PropKind { } } -impl QuickWritable for CalProp { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let mut empty = ctx.create_cal_element("prop"); +impl QWrite for CalProp { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let mut empty = xml.create_cal_element("prop"); empty.push_attribute(("name", self.name.0.as_str())); match self.novalue { None => (), Some(true) => empty.push_attribute(("novalue", "yes")), Some(false) => empty.push_attribute(("novalue", "no")), } - xml.write_event_async(Event::Empty(empty)).await + xml.q.write_event_async(Event::Empty(empty)).await } } -impl QuickWritable for 
RecurrenceModifier { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { +impl QWrite for RecurrenceModifier { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { match self { - Self::Expand(exp) => exp.write(xml, ctx).await, - Self::LimitRecurrenceSet(lrs) => lrs.write(xml, ctx).await, + Self::Expand(exp) => exp.qwrite(xml).await, + Self::LimitRecurrenceSet(lrs) => lrs.qwrite(xml).await, } } } -impl QuickWritable for Expand { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let mut empty = ctx.create_cal_element("expand"); +impl QWrite for Expand { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let mut empty = xml.create_cal_element("expand"); empty.push_attribute(("start", format!("{}", self.0.format(ICAL_DATETIME_FMT)).as_str())); empty.push_attribute(("end", format!("{}", self.1.format(ICAL_DATETIME_FMT)).as_str())); - xml.write_event_async(Event::Empty(empty)).await + xml.q.write_event_async(Event::Empty(empty)).await } } -impl QuickWritable for LimitRecurrenceSet { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let mut empty = ctx.create_cal_element("limit-recurrence-set"); +impl QWrite for LimitRecurrenceSet { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let mut empty = xml.create_cal_element("limit-recurrence-set"); empty.push_attribute(("start", format!("{}", self.0.format(ICAL_DATETIME_FMT)).as_str())); empty.push_attribute(("end", format!("{}", self.1.format(ICAL_DATETIME_FMT)).as_str())); - xml.write_event_async(Event::Empty(empty)).await + xml.q.write_event_async(Event::Empty(empty)).await } } -impl QuickWritable for LimitFreebusySet { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let mut empty = ctx.create_cal_element("limit-freebusy-set"); +impl QWrite for LimitFreebusySet { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let mut empty = xml.create_cal_element("limit-freebusy-set"); empty.push_attribute(("start", format!("{}", self.0.format(ICAL_DATETIME_FMT)).as_str())); empty.push_attribute(("end", format!("{}", self.1.format(ICAL_DATETIME_FMT)).as_str())); - xml.write_event_async(Event::Empty(empty)).await + xml.q.write_event_async(Event::Empty(empty)).await } } -impl QuickWritable for CalendarSelector { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { +impl QWrite for CalendarSelector { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { match self { - Self::AllProp => xml.write_event_async(Event::Empty(ctx.create_dav_element("allprop"))).await, - Self::PropName => xml.write_event_async(Event::Empty(ctx.create_dav_element("propname"))).await, - Self::Prop(prop) => prop.write(xml, ctx).await, + Self::AllProp => { + let empty_tag = xml.create_dav_element("allprop"); + xml.q.write_event_async(Event::Empty(empty_tag)).await + }, + Self::PropName => { + let empty_tag = xml.create_dav_element("propname"); + xml.q.write_event_async(Event::Empty(empty_tag)).await + }, + Self::Prop(prop) => prop.qwrite(xml).await, } } } -impl QuickWritable for CompFilter { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let mut start = ctx.create_cal_element("comp-filter"); +impl QWrite for CompFilter { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let mut start = xml.create_cal_element("comp-filter"); start.push_attribute(("name", self.name.as_str())); match &self.additional_rules { - None => 
xml.write_event_async(Event::Empty(start)).await, + None => xml.q.write_event_async(Event::Empty(start)).await, Some(rules) => { let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; - rules.write(xml, ctx.child()).await?; - xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::Start(start.clone())).await?; + rules.qwrite(xml).await?; + xml.q.write_event_async(Event::End(end)).await } } } } -impl QuickWritable for CompFilterRules { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { +impl QWrite for CompFilterRules { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { match self { - Self::IsNotDefined => xml.write_event_async(Event::Empty(ctx.create_dav_element("is-not-defined"))).await, - Self::Matches(cfm) => cfm.write(xml, ctx).await, + Self::IsNotDefined => { + let empty_tag = xml.create_dav_element("is-not-defined"); + xml.q.write_event_async(Event::Empty(empty_tag)).await + }, + Self::Matches(cfm) => cfm.qwrite(xml).await, } } } -impl QuickWritable for CompFilterMatch { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { +impl QWrite for CompFilterMatch { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { if let Some(time_range) = &self.time_range { - time_range.write(xml, ctx.child()).await?; + time_range.qwrite(xml).await?; } for prop_item in self.prop_filter.iter() { - prop_item.write(xml, ctx.child()).await?; + prop_item.qwrite(xml).await?; } for comp_item in self.comp_filter.iter() { // Required: recursion in an async fn requires boxing // rustc --explain E0733 - Box::pin(comp_item.write(xml, ctx.child())).await?; + Box::pin(comp_item.qwrite(xml)).await?; } Ok(()) } } -impl QuickWritable for PropFilter { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let mut start = ctx.create_cal_element("prop-filter"); +impl QWrite for PropFilter { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let mut start = xml.create_cal_element("prop-filter"); start.push_attribute(("name", self.name.as_str())); match &self.additional_rules { - None => xml.write_event_async(Event::Empty(start)).await, + None => xml.q.write_event_async(Event::Empty(start.clone())).await, Some(rules) => { let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; - rules.write(xml, ctx.child()).await?; - xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::Start(start.clone())).await?; + rules.qwrite(xml).await?; + xml.q.write_event_async(Event::End(end)).await } } } } -impl QuickWritable for PropFilterRules { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { +impl QWrite for PropFilterRules { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { match self { - Self::IsNotDefined => xml.write_event_async(Event::Empty(ctx.create_dav_element("is-not-defined"))).await, - Self::Match(prop_match) => prop_match.write(xml, ctx).await, + Self::IsNotDefined => { + let empty_tag = xml.create_dav_element("is-not-defined"); + xml.q.write_event_async(Event::Empty(empty_tag)).await + }, + Self::Match(prop_match) => prop_match.qwrite(xml).await, } } } -impl QuickWritable for PropFilterMatch { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { +impl QWrite for PropFilterMatch { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { if let Some(time_range) = &self.time_range { - time_range.write(xml, ctx.child()).await?; + 
time_range.qwrite(xml).await?; } if let Some(time_or_text) = &self.time_or_text { - time_or_text.write(xml, ctx.child()).await?; + time_or_text.qwrite(xml).await?; } for param_item in self.param_filter.iter() { - param_item.write(xml, ctx.child()).await?; + param_item.qwrite(xml).await?; } Ok(()) } } -impl QuickWritable for TimeOrText { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { +impl QWrite for TimeOrText { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { match self { - Self::Time(time) => time.write(xml, ctx).await, - Self::Text(txt) => txt.write(xml, ctx).await, + Self::Time(time) => time.qwrite(xml).await, + Self::Text(txt) => txt.qwrite(xml).await, } } } -impl QuickWritable for TextMatch { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let mut start = ctx.create_cal_element("text-match"); +impl QWrite for TextMatch { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let mut start = xml.create_cal_element("text-match"); if let Some(collation) = &self.collation { start.push_attribute(("collation", collation.as_str())); } @@ -610,63 +590,66 @@ impl QuickWritable for TextMatch { } let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; - xml.write_event_async(Event::Text(BytesText::new(self.text.as_str()))).await?; - xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Text(BytesText::new(self.text.as_str()))).await?; + xml.q.write_event_async(Event::End(end)).await } } -impl QuickWritable for ParamFilter { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let mut start = ctx.create_cal_element("param-filter"); +impl QWrite for ParamFilter { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let mut start = xml.create_cal_element("param-filter"); start.push_attribute(("name", self.name.as_str())); match &self.additional_rules { - None => xml.write_event_async(Event::Empty(start)).await, + None => xml.q.write_event_async(Event::Empty(start)).await, Some(rules) => { let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; - rules.write(xml, ctx.child()).await?; - xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::Start(start.clone())).await?; + rules.qwrite(xml).await?; + xml.q.write_event_async(Event::End(end)).await } } } } -impl QuickWritable for ParamFilterMatch { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { +impl QWrite for ParamFilterMatch { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { match self { - Self::IsNotDefined => xml.write_event_async(Event::Empty(ctx.create_dav_element("is-not-defined"))).await, - Self::Match(tm) => tm.write(xml, ctx).await, + Self::IsNotDefined => { + let empty_tag = xml.create_dav_element("is-not-defined"); + xml.q.write_event_async(Event::Empty(empty_tag)).await + }, + Self::Match(tm) => tm.qwrite(xml).await, } } } -impl QuickWritable for TimeZone { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let mut start = ctx.create_cal_element("timezone"); +impl QWrite for TimeZone { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let mut start = xml.create_cal_element("timezone"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; - xml.write_event_async(Event::Text(BytesText::new(self.0.as_str()))).await?; - 
xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Text(BytesText::new(self.0.as_str()))).await?; + xml.q.write_event_async(Event::End(end)).await } } -impl QuickWritable for Filter { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let mut start = ctx.create_cal_element("filter"); +impl QWrite for Filter { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let mut start = xml.create_cal_element("filter"); let end = start.to_end(); - xml.write_event_async(Event::Start(start.clone())).await?; - self.0.write(xml, ctx.child()).await?; - xml.write_event_async(Event::End(end)).await + xml.q.write_event_async(Event::Start(start.clone())).await?; + self.0.qwrite(xml).await?; + xml.q.write_event_async(Event::End(end)).await } } -impl QuickWritable for TimeRange { - async fn write(&self, xml: &mut Writer, ctx: C) -> Result<(), QError> { - let mut empty = ctx.create_cal_element("time-range"); +impl QWrite for TimeRange { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let mut empty = xml.create_cal_element("time-range"); match self { Self::OnlyStart(start) => empty.push_attribute(("start", format!("{}", start.format(ICAL_DATETIME_FMT)).as_str())), Self::OnlyEnd(end) => empty.push_attribute(("end", format!("{}", end.format(ICAL_DATETIME_FMT)).as_str())), @@ -675,11 +658,11 @@ impl QuickWritable for TimeRange { empty.push_attribute(("end", format!("{}", end.format(ICAL_DATETIME_FMT)).as_str())); } } - xml.write_event_async(Event::Empty(empty)).await + xml.q.write_event_async(Event::Empty(empty)).await } } - +/* #[cfg(test)] mod tests { use super::*; diff --git a/src/dav/caltypes.rs b/src/dav/caltypes.rs index c8177e9..585afe2 100644 --- a/src/dav/caltypes.rs +++ b/src/dav/caltypes.rs @@ -1,6 +1,5 @@ #![allow(dead_code)] -/* use chrono::{DateTime,Utc}; use super::types as dav; @@ -647,7 +646,7 @@ pub enum Violation { /// making use of the same UID property value in the DAV:href element; /// /// - NoUidConflict(Dav::Href), + NoUidConflict(dav::Href), /// (CALDAV:max-resource-size): The resource submitted in the PUT /// request, or targeted by a COPY or MOVE request, MUST have an octet @@ -1394,4 +1393,4 @@ impl Collation { Self::Unknown(c) => c.as_str(), } } -}*/ +} -- cgit v1.2.3 From 1aafd752ca00ddda5340aacfb0ed291b803845da Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Tue, 5 Mar 2024 18:15:03 +0100 Subject: Re-enable cal encoder tests --- src/dav/caldecoder.rs | 33 +++++++++++++++++++++++++++++++++ src/dav/calencoder.rs | 24 +++++++++++++----------- src/dav/caltypes.rs | 45 ++++++++++++++++++++++++++++++++++++++++++++- src/dav/mod.rs | 17 ++++++++++++++--- src/dav/realization.rs | 7 +++---- 5 files changed, 107 insertions(+), 19 deletions(-) create mode 100644 src/dav/caldecoder.rs diff --git a/src/dav/caldecoder.rs b/src/dav/caldecoder.rs new file mode 100644 index 0000000..75af4b7 --- /dev/null +++ b/src/dav/caldecoder.rs @@ -0,0 +1,33 @@ +use super::types as dav; +use super::caltypes::*; +use super::xml; +use super::error; + +// ---- ROOT ELEMENTS --- + +// ---- EXTENSIONS --- +impl xml::QRead for Violation { + async fn qread(&self, xml: &mut xml::Reader) -> Result, error::ParsingError> { + unreachable!(); + } +} + +impl xml::QRead for Property { + async fn qread(&self, xml: &mut xml::Reader) -> Result, error::ParsingError> { + unreachable!(); + } +} + +impl xml::QRead for PropertyRequest { + async fn qread(&self, xml: &mut xml::Reader) 
-> Result, error::ParsingError> { + unreachable!(); + } +} + +impl xml::QRead for ResourceType { + async fn qread(&self, xml: &mut xml::Reader) -> Result, error::ParsingError> { + unreachable!(); + } +} + +// ---- INNER XML ---- diff --git a/src/dav/calencoder.rs b/src/dav/calencoder.rs index 01cecc3..d030aa1 100644 --- a/src/dav/calencoder.rs +++ b/src/dav/calencoder.rs @@ -662,19 +662,25 @@ impl QWrite for TimeRange { } } -/* #[cfg(test)] mod tests { use super::*; use crate::dav::types as dav; + use crate::dav::realization::Calendar; use tokio::io::AsyncWriteExt; use chrono::{Utc,TimeZone,DateTime}; - async fn serialize>(ctx: C, elem: &Q) -> String { + async fn serialize(elem: &impl QWrite) -> String { let mut buffer = Vec::new(); let mut tokio_buffer = tokio::io::BufWriter::new(&mut buffer); - let mut writer = Writer::new_with_indent(&mut tokio_buffer, b' ', 4); - elem.write(&mut writer, ctx).await.expect("xml serialization"); + let q = quick_xml::writer::Writer::new_with_indent(&mut tokio_buffer, b' ', 4); + let ns_to_apply = vec![ + ("xmlns:D".into(), "DAV:".into()), + ("xmlns:C".into(), "urn:ietf:params:xml:ns:caldav".into()), + ]; + let mut writer = Writer { q, ns_to_apply }; + + elem.qwrite(&mut writer).await.expect("xml serialization"); tokio_buffer.flush().await.expect("tokio buffer flush"); let got = std::str::from_utf8(buffer.as_slice()).unwrap(); @@ -684,8 +690,7 @@ mod tests { #[tokio::test] async fn basic_violation() { let got = serialize( - CalExtension { root: true }, - &dav::Error(vec![ + &dav::Error::(vec![ dav::Violation::Extension(Violation::ResourceMustBeNull), ]) ).await; @@ -700,8 +705,7 @@ mod tests { #[tokio::test] async fn rfc_calendar_query1_req() { let got = serialize( - CalExtension { root: true }, - &CalendarQuery { + &CalendarQuery:: { selector: Some(CalendarSelector::Prop(dav::PropName(vec![ dav::PropertyRequest::GetEtag, dav::PropertyRequest::Extension(PropertyRequest::CalendarData(CalendarDataRequest { @@ -806,8 +810,7 @@ mod tests { #[tokio::test] async fn rfc_calendar_query1_res() { let got = serialize( - CalExtension { root: true }, - &dav::Multistatus { + &dav::Multistatus:: { responses: vec![ dav::Response { href: dav::Href("http://cal.example.com/bernard/work/abcd2.ics".into()), @@ -877,4 +880,3 @@ mod tests { assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); } } -*/ diff --git a/src/dav/caltypes.rs b/src/dav/caltypes.rs index 585afe2..68e7baf 100644 --- a/src/dav/caltypes.rs +++ b/src/dav/caltypes.rs @@ -26,6 +26,7 @@ use super::types as dav; /// instruction in Section 12.13.2 of [RFC2518]. 
/// /// +#[derive(Debug, PartialEq)] pub struct MkCalendar(pub dav::Set); @@ -42,6 +43,7 @@ pub struct MkCalendar(pub dav::Set); /// Definition: /// /// +#[derive(Debug, PartialEq)] pub struct MkCalendarResponse(pub Vec>); // --- (REPORT PART) --- @@ -59,6 +61,7 @@ pub struct MkCalendarResponse(pub Vec>); /// +#[derive(Debug, PartialEq)] pub struct CalendarQuery { pub selector: Option>, pub filter: Filter, @@ -79,6 +82,7 @@ pub struct CalendarQuery { /// +#[derive(Debug, PartialEq)] pub struct CalendarMultiget { pub selector: Option>, pub href: Vec, @@ -95,14 +99,17 @@ pub struct CalendarMultiget { /// /// Definition: /// +#[derive(Debug, PartialEq)] pub struct FreeBusyQuery(pub TimeRange); // ----- Hooks ----- +#[derive(Debug, PartialEq)] pub enum ResourceType { Calendar, } /// Check the matching Property object for documentation +#[derive(Debug, PartialEq)] pub enum PropertyRequest { CalendarDescription, CalendarTimezone, @@ -116,6 +123,8 @@ pub enum PropertyRequest { SupportedCollationSet, CalendarData(CalendarDataRequest), } + +#[derive(Debug, PartialEq)] pub enum Property { /// Name: calendar-description /// @@ -591,6 +600,7 @@ pub enum Property { CalendarData(CalendarDataPayload), } +#[derive(Debug, PartialEq)] pub enum Violation { /// (DAV:resource-must-be-null): A resource MUST NOT exist at the /// Request-URI; @@ -761,6 +771,7 @@ pub enum Violation { /// If the client chooses a collation not supported by the server, the /// server MUST respond with a CALDAV:supported-collation precondition /// error response. +#[derive(Debug, PartialEq)] pub struct SupportedCollation(pub Collation); /// @@ -769,6 +780,7 @@ pub struct SupportedCollation(pub Collation); /// when nested in the DAV:prop XML element in a calendaring /// REPORT response to specify the content of a returned /// calendar object resource. +#[derive(Debug, PartialEq)] pub struct CalendarDataPayload { pub mime: Option, pub payload: String, @@ -781,6 +793,7 @@ pub struct CalendarDataPayload { /// when nested in the DAV:prop XML element in a calendaring /// REPORT request to specify which parts of calendar object /// resources should be returned in the response; +#[derive(Debug, PartialEq)] pub struct CalendarDataRequest { pub mime: Option, pub comp: Option, @@ -795,6 +808,7 @@ pub struct CalendarDataRequest { /// when nested in the CALDAV:supported-calendar-data property /// to specify a supported media type for calendar object /// resources; +#[derive(Debug, PartialEq)] pub struct CalendarDataEmpty(pub Option); /// ); /// version value: a version string /// attributes can be used on all three variants of the /// CALDAV:calendar-data XML element. +#[derive(Debug, PartialEq)] pub struct CalendarDataSupport { pub content_type: String, pub version: String, @@ -828,10 +843,13 @@ pub struct CalendarDataSupport { /// However, the CALDAV:prop and CALDAV:allprop elements are defined /// in the "urn:ietf:params:xml:ns:caldav" namespace instead of the /// "DAV:" namespace. 
+#[derive(Debug, PartialEq)] pub struct Comp { pub name: Component, pub additional_rules: Option, } + +#[derive(Debug, PartialEq)] pub struct CompInner { pub prop_kind: PropKind, pub comp_kind: CompKind, @@ -850,6 +868,7 @@ pub struct CompInner { /// /// /// +#[derive(Debug, PartialEq)] pub struct CompSupport(pub Component); /// Name: allcomp @@ -865,6 +884,7 @@ pub struct CompSupport(pub Component); /// Definition: /// /// +#[derive(Debug, PartialEq)] pub enum CompKind { AllComp, Comp(Vec), @@ -888,6 +908,7 @@ pub enum CompKind { /// allprop element defined in [RFC2518]. However, the CALDAV:allprop /// element is defined in the "urn:ietf:params:xml:ns:caldav" /// namespace instead of the "DAV:" namespace. +#[derive(Debug, PartialEq)] pub enum PropKind { AllProp, Prop(Vec), @@ -917,11 +938,13 @@ pub enum PropKind { /// element defined in [RFC2518]. However, the CALDAV:prop element is /// defined in the "urn:ietf:params:xml:ns:caldav" namespace instead /// of the "DAV:" namespace. +#[derive(Debug, PartialEq)] pub struct CalProp { pub name: ComponentProperty, pub novalue: Option, } +#[derive(Debug, PartialEq)] pub enum RecurrenceModifier { Expand(Expand), LimitRecurrenceSet(LimitRecurrenceSet), @@ -967,6 +990,7 @@ pub enum RecurrenceModifier { /// end CDATA #REQUIRED> /// start value: an iCalendar "date with UTC time" /// end value: an iCalendar "date with UTC time" +#[derive(Debug, PartialEq)] pub struct Expand(pub DateTime, pub DateTime); /// CALDAV:limit-recurrence-set XML Element @@ -1014,6 +1038,7 @@ pub struct Expand(pub DateTime, pub DateTime); /// end CDATA #REQUIRED> /// start value: an iCalendar "date with UTC time" /// end value: an iCalendar "date with UTC time" +#[derive(Debug, PartialEq)] pub struct LimitRecurrenceSet(pub DateTime, pub DateTime); /// Name: limit-freebusy-set @@ -1044,9 +1069,11 @@ pub struct LimitRecurrenceSet(pub DateTime, pub DateTime); /// end CDATA #REQUIRED> /// start value: an iCalendar "date with UTC time" /// end value: an iCalendar "date with UTC time" +#[derive(Debug, PartialEq)] pub struct LimitFreebusySet(pub DateTime, pub DateTime); /// Used by CalendarQuery & CalendarMultiget +#[derive(Debug, PartialEq)] pub enum CalendarSelector { AllProp, PropName, @@ -1101,17 +1128,20 @@ pub enum CalendarSelector { /// /// name value: a calendar object or calendar component /// type (e.g., VEVENT) +#[derive(Debug, PartialEq)] pub struct CompFilter { pub name: Component, // Option 1 = None, Option 2, 3, 4 = Some pub additional_rules: Option, } +#[derive(Debug, PartialEq)] pub enum CompFilterRules { // Option 2 IsNotDefined, // Options 3 & 4 Matches(CompFilterMatch), } +#[derive(Debug, PartialEq)] pub struct CompFilterMatch { pub time_range: Option, pub prop_filter: Vec, @@ -1162,22 +1192,26 @@ pub struct CompFilterMatch { /// /// /// name value: a calendar property name (e.g., ATTENDEE) +#[derive(Debug, PartialEq)] pub struct PropFilter { pub name: Component, // None = Option 1, Some() = Option 2, 3 & 4 pub additional_rules: Option, } +#[derive(Debug, PartialEq)] pub enum PropFilterRules { // Option 2 IsNotDefined, // Options 3 & 4 Match(PropFilterMatch), } +#[derive(Debug, PartialEq)] pub struct PropFilterMatch { pub time_range: Option, pub time_or_text: Option, pub param_filter: Vec, } +#[derive(Debug, PartialEq)] pub enum TimeOrText { Time(TimeRange), Text(TextMatch), @@ -1211,6 +1245,7 @@ pub enum TimeOrText { /// PCDATA value: string /// +#[derive(Debug, PartialEq)] pub struct TextMatch { pub collation: Option, pub negate_condition: Option, @@ -1246,10 
+1281,12 @@ pub struct TextMatch { /// /// /// name value: a property parameter name (e.g., PARTSTAT) +#[derive(Debug, PartialEq)] pub struct ParamFilter { pub name: PropertyParameter, pub additional_rules: Option, } +#[derive(Debug, PartialEq)] pub enum ParamFilterMatch { IsNotDefined, Match(TextMatch), @@ -1305,6 +1342,7 @@ pub enum ParamFilterMatch { /// /// /// PCDATA value: an iCalendar object with exactly one VTIMEZONE +#[derive(Debug, PartialEq)] pub struct TimeZone(pub String); /// Name: filter @@ -1320,6 +1358,7 @@ pub struct TimeZone(pub String); /// /// Definition: /// +#[derive(Debug, PartialEq)] pub struct Filter(pub CompFilter); /// Name: time-range @@ -1331,6 +1370,7 @@ pub struct Filter(pub CompFilter); /// end CDATA #IMPLIED> /// start value: an iCalendar "date with UTC time" /// end value: an iCalendar "date with UTC time" +#[derive(Debug, PartialEq)] pub enum TimeRange { OnlyStart(DateTime), OnlyEnd(DateTime), @@ -1340,6 +1380,7 @@ pub enum TimeRange { // ----------------------- ENUM ATTRIBUTES --------------------- /// Known components +#[derive(Debug, PartialEq)] pub enum Component { VCalendar, VJournal, @@ -1368,9 +1409,11 @@ impl Component { /// name="VERSION", name="SUMMARY", etc. /// Can be set on different objects: VCalendar, VEvent, etc. /// Might be replaced by an enum later +#[derive(Debug, PartialEq)] pub struct ComponentProperty(pub String); /// like PARSTAT +#[derive(Debug, PartialEq)] pub struct PropertyParameter(pub String); impl PropertyParameter { pub fn as_str<'a>(&'a self) -> &'a str { @@ -1378,7 +1421,7 @@ impl PropertyParameter { } } -#[derive(Default)] +#[derive(Default,Debug,PartialEq)] pub enum Collation { #[default] AsciiCaseMap, diff --git a/src/dav/mod.rs b/src/dav/mod.rs index abc46e7..bff95e7 100644 --- a/src/dav/mod.rs +++ b/src/dav/mod.rs @@ -1,14 +1,25 @@ +// utils mod error; mod xml; + +// webdav mod types; +mod encoder; +mod decoder; + +// calendar mod caltypes; +mod calencoder; +mod caldecoder; + +// wip mod acltypes; mod versioningtypes; -mod encoder; -mod calencoder; -mod decoder; + +// final type mod realization; + use std::net::SocketAddr; use anyhow::{anyhow, Result}; diff --git a/src/dav/realization.rs b/src/dav/realization.rs index 22c9cfc..a02de94 100644 --- a/src/dav/realization.rs +++ b/src/dav/realization.rs @@ -28,14 +28,13 @@ impl dav::Extension for Core { type ResourceType = Disabled; } -/* // WebDAV with the base Calendar implementation (RFC4791) -pub struct CalendarMin {} -impl dav::Extension for CalendarMin +pub struct Calendar {} +impl dav::Extension for Calendar { type Error = cal::Violation; type Property = cal::Property; type PropertyRequest = cal::PropertyRequest; type ResourceType = cal::ResourceType; } -*/ + -- cgit v1.2.3 From 2dd6deae545690cdcc00ca1123d1818598497fed Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Tue, 5 Mar 2024 19:06:04 +0100 Subject: Re-enable + enhance DAV decode tests --- src/dav/caldecoder.rs | 8 +- src/dav/decoder.rs | 200 +++++++++++-------------------------------------- src/dav/error.rs | 1 + src/dav/realization.rs | 6 +- src/dav/xml.rs | 23 +++--- 5 files changed, 68 insertions(+), 170 deletions(-) diff --git a/src/dav/caldecoder.rs b/src/dav/caldecoder.rs index 75af4b7..b45d649 100644 --- a/src/dav/caldecoder.rs +++ b/src/dav/caldecoder.rs @@ -7,25 +7,25 @@ use super::error; // ---- EXTENSIONS --- impl xml::QRead for Violation { - async fn qread(&self, xml: &mut xml::Reader) -> Result, error::ParsingError> { + async fn qread(xml: &mut xml::Reader) -> Result, error::ParsingError> { 
unreachable!(); } } impl xml::QRead for Property { - async fn qread(&self, xml: &mut xml::Reader) -> Result, error::ParsingError> { + async fn qread(xml: &mut xml::Reader) -> Result, error::ParsingError> { unreachable!(); } } impl xml::QRead for PropertyRequest { - async fn qread(&self, xml: &mut xml::Reader) -> Result, error::ParsingError> { + async fn qread(xml: &mut xml::Reader) -> Result, error::ParsingError> { unreachable!(); } } impl xml::QRead for ResourceType { - async fn qread(&self, xml: &mut xml::Reader) -> Result, error::ParsingError> { + async fn qread(xml: &mut xml::Reader) -> Result, error::ParsingError> { unreachable!(); } } diff --git a/src/dav/decoder.rs b/src/dav/decoder.rs index 1756464..a7fdca5 100644 --- a/src/dav/decoder.rs +++ b/src/dav/decoder.rs @@ -8,135 +8,25 @@ use quick_xml::reader::NsReader; use tokio::io::AsyncBufRead; use super::types::*; -use super::error::*; +use super::error::ParsingError; +use super::xml::{QRead, Reader, IRead, DAV_URN, CAL_URN}; -/* -// --- Traits ---- - -trait Reader = AsyncBufRead+Unpin+'static; - -trait Decodable: Extension { - async fn decode_propreq(xml: &mut PeekRead) -> Result, ParsingError>; -} -impl Decodable for NoExtension { - async fn decode_propreq(xml: &mut PeekRead) -> Result, ParsingError> { - Ok(None) - } -} - -pub trait QReadable: Sized { - async fn read(xml: &mut PeekRead) -> Result; -} - -// --- Peek read with namespaces - -const DAV_URN: &[u8] = b"DAV:"; -const CALDAV_URN: &[u8] = b"urn:ietf:params:xml:ns:caldav"; -const CARDDAV_URN: &[u8] = b"urn:ietf:params:xml:ns:carddav"; -//const XML_URN: &[u8] = b"xml"; -pub struct PeekRead { - evt: Event<'static>, - rdr: NsReader, - buf: Vec, -} -impl PeekRead { - async fn new(mut rdr: NsReader) -> Result { - let mut buf: Vec = vec![]; - let evt = rdr.read_event_into_async(&mut buf).await?.into_owned(); - buf.clear(); - Ok(Self { evt, rdr, buf }) - } - - fn peek(&self) -> &Event<'static> { - &self.evt - } - - /// skip tag. Can't skip end, can't skip eof. 
- async fn skip(&mut self) -> Result, ParsingError> { - match &self.evt { - Event::Start(b) => { - let _span = self.rdr.read_to_end_into_async(b.to_end().name(), &mut self.buf).await?; - self.next().await - }, - Event::End(_) => Err(ParsingError::WrongToken), - Event::Eof => Err(ParsingError::Eof), - _ => self.next().await, - } - } - - /// read one more tag - async fn next(&mut self) -> Result, ParsingError> { - let evt = self.rdr.read_event_into_async(&mut self.buf).await?.into_owned(); - self.buf.clear(); - let old_evt = std::mem::replace(&mut self.evt, evt); - Ok(old_evt) - } - - - /// check if this is the desired tag - fn is_tag(&self, ns: &[u8], key: &str) -> bool { - let qname = match self.peek() { - Event::Start(bs) | Event::Empty(bs) => bs.name(), - Event::End(be) => be.name(), - _ => return false, - }; - - let (extr_ns, local) = self.rdr.resolve_element(qname); - - if local.into_inner() != key.as_bytes() { - return false - } - - match extr_ns { - ResolveResult::Bound(v) => v.into_inner() == ns, - _ => false, - } - } - - /// find start tag - async fn tag_start(&mut self, ns: &[u8], key: &str) -> Result, ParsingError> { - loop { - match self.peek() { - Event::Start(b) if self.is_tag(ns, key) => break, - _ => { self.skip().await?; }, - } - } - self.next().await - } - - // find stop tag - async fn tag_stop(&mut self, ns: &[u8], key: &str) -> Result, ParsingError> { - loop { - match self.peek() { - Event::End(b) if self.is_tag(ns, key) => break, - _ => { self.skip().await?; }, - } - } - self.next().await - } -} - -// ----- Decode ---- - -impl QReadable for PropFind { - async fn read(xml: &mut PeekRead) -> Result, ParsingError> { +impl QRead> for PropFind { + async fn qread(xml: &mut Reader) -> Result, ParsingError> { // Find propfind xml.tag_start(DAV_URN, "propfind").await?; - // Find any tag let propfind: PropFind = loop { match xml.peek() { Event::Start(_) if xml.is_tag(DAV_URN, "allprop") => { xml.tag_start(DAV_URN, "allprop").await?; - let r = PropFind::AllProp(Some(Include::read(xml).await?)); + let r = PropFind::AllProp(Include::qread(xml).await?); xml.tag_stop(DAV_URN, "allprop").await?; break r }, Event::Start(_) if xml.is_tag(DAV_URN, "prop") => { - xml.tag_start(DAV_URN, "prop").await?; - let r = PropFind::Prop(PropName::read(xml).await?); - xml.tag_stop(DAV_URN, "prop").await?; - break r + let propname = PropName::qread(xml).await?.ok_or(ParsingError::MissingChild)?; + break PropFind::Prop(propname); }, Event::Empty(_) if xml.is_tag(DAV_URN, "allprop") => { xml.next().await?; @@ -153,49 +43,52 @@ impl QReadable for PropFind { // Close tag xml.tag_stop(DAV_URN, "propfind").await?; - Ok(propfind) + Ok(Some(propfind)) } } -impl QReadable for Include { - async fn read(xml: &mut PeekRead) -> Result, ParsingError> { +impl QRead> for Include { + async fn qread(xml: &mut Reader) -> Result, ParsingError> { xml.tag_start(DAV_URN, "include").await?; let mut acc: Vec> = Vec::new(); loop { match xml.peek() { - Event::Start(_) | Event::Empty(_) => acc.push(PropertyRequest::read(xml).await?), + Event::Start(_) | Event::Empty(_) => { + PropertyRequest::qread(xml).await?.map(|v| acc.push(v)); + }, Event::End(_) if xml.is_tag(DAV_URN, "include") => break, _ => { xml.skip().await?; }, } } xml.tag_stop(DAV_URN, "include").await?; - Ok(Include(acc)) + Ok(Some(Include(acc))) } } -impl QReadable for PropName { - async fn read(xml: &mut PeekRead) -> Result, ParsingError> { +impl QRead> for PropName { + async fn qread(xml: &mut Reader) -> Result, ParsingError> { xml.tag_start(DAV_URN, 
"prop").await?; let mut acc: Vec> = Vec::new(); loop { match xml.peek() { - Event::Start(_) | Event::Empty(_) => acc.push(PropertyRequest::read(xml).await?), + Event::Start(_) | Event::Empty(_) => { + PropertyRequest::qread(xml).await?.map(|v| acc.push(v)); + }, Event::End(_) if xml.is_tag(DAV_URN, "prop") => break, _ => { xml.skip().await?; }, } } xml.tag_stop(DAV_URN, "prop").await?; - Ok(PropName(acc)) + Ok(Some(PropName(acc))) } } -impl QReadable for PropertyRequest { - async fn read(xml: &mut PeekRead) -> Result, ParsingError> { +impl QRead> for PropertyRequest { + async fn qread(xml: &mut Reader) -> Result, ParsingError> { loop { - let (need_close, bs) = match xml.peek() { - Event::Start(b) => (true, b), - Event::Empty(b) => (false, b), + let bs = match xml.peek() { + Event::Start(b) | Event::Empty(b) => b, _ => { xml.skip().await?; continue @@ -212,6 +105,7 @@ impl QReadable for PropertyRequest { b"displayname" => Some(PropertyRequest::DisplayName), b"getcontentlanguage" => Some(PropertyRequest::GetContentLanguage), b"getcontentlength" => Some(PropertyRequest::GetContentLength), + b"getcontenttype" => Some(PropertyRequest::GetContentType), b"getetag" => Some(PropertyRequest::GetEtag), b"getlastmodified" => Some(PropertyRequest::GetLastModified), b"lockdiscovery" => Some(PropertyRequest::LockDiscovery), @@ -223,18 +117,13 @@ impl QReadable for PropertyRequest { // Option 2: an extension property if maybe_res.is_none() { - maybe_res = E::decode_propreq(xml).await?.map(PropertyRequest::Extension); + maybe_res = E::PropertyRequest::qread(xml).await?.map(PropertyRequest::Extension); } - // In any cases, we must close the opened tag - if need_close { - xml.skip().await?; - } + // Close the current tag + xml.skip().await?; - // Return if something is found - otherwise loop - if let Some(res) = maybe_res { - return Ok(res) - } + return Ok(maybe_res) } } } @@ -242,6 +131,7 @@ impl QReadable for PropertyRequest { #[cfg(test)] mod tests { use super::*; + use crate::dav::realization::Core; #[tokio::test] async fn basic_propfind_propname() { @@ -253,11 +143,12 @@ mod tests { "#; - let mut rdr = PeekRead::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); - let got = PropFind::::read(&mut rdr).await.unwrap(); - assert!(matches!(got, PropFind::PropName)); + let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); + let got = PropFind::::qread(&mut rdr).await.unwrap().unwrap(); + + assert_eq!(got, PropFind::::PropName); } -/* + #[tokio::test] async fn basic_propfind_prop() { let src = r#" @@ -265,19 +156,20 @@ mod tests { - - - - - - - + + + + + + + "#; - let mut rdr = PeekRead::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); - let got = PropFind::::read(&mut rdr).await.unwrap(); + let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); + let got = PropFind::::qread(&mut rdr).await.unwrap().unwrap(); + assert_eq!(got, PropFind::Prop(PropName(vec![ PropertyRequest::DisplayName, PropertyRequest::GetContentLength, @@ -288,6 +180,4 @@ mod tests { PropertyRequest::SupportedLock, ]))); } - */ } -*/ diff --git a/src/dav/error.rs b/src/dav/error.rs index 1db2895..5bd8ed3 100644 --- a/src/dav/error.rs +++ b/src/dav/error.rs @@ -2,6 +2,7 @@ use quick_xml::events::attributes::AttrError; #[derive(Debug)] pub enum ParsingError { + MissingChild, NamespacePrefixAlreadyUsed, WrongToken, TagNotFound, diff --git a/src/dav/realization.rs b/src/dav/realization.rs index a02de94..1898173 100644 --- a/src/dav/realization.rs +++ b/src/dav/realization.rs 
@@ -6,8 +6,8 @@ use super::error; #[derive(Debug, PartialEq)] pub struct Disabled(()); impl xml::QRead for Disabled { - async fn qread(&self, xml: &mut xml::Reader) -> Result, error::ParsingError> { - unreachable!(); + async fn qread(xml: &mut xml::Reader) -> Result, error::ParsingError> { + Ok(None) } } impl xml::QWrite for Disabled { @@ -20,6 +20,7 @@ impl xml::QWrite for Disabled { /// /// Any extension is kooh is disabled through an object we can't build /// due to a private inner element. +#[derive(Debug, PartialEq)] pub struct Core {} impl dav::Extension for Core { type Error = Disabled; @@ -29,6 +30,7 @@ impl dav::Extension for Core { } // WebDAV with the base Calendar implementation (RFC4791) +#[derive(Debug, PartialEq)] pub struct Calendar {} impl dav::Extension for Calendar { diff --git a/src/dav/xml.rs b/src/dav/xml.rs index 495c9a5..5ebda02 100644 --- a/src/dav/xml.rs +++ b/src/dav/xml.rs @@ -5,6 +5,11 @@ use quick_xml::reader::NsReader; use super::error::ParsingError; +// Constants +pub const DAV_URN: &[u8] = b"DAV:"; +pub const CAL_URN: &[u8] = b"urn:ietf:params:xml:ns:caldav"; +pub const CARD_URN: &[u8] = b"urn:ietf:params:xml:ns:carddav"; + // Async traits pub trait IWrite = AsyncWrite + Unpin; pub trait IRead = AsyncBufRead + Unpin + 'static; @@ -14,7 +19,7 @@ pub trait QWrite { async fn qwrite(&self, xml: &mut Writer) -> Result<(), quick_xml::Error>; } pub trait QRead { - async fn qread(&self, xml: &mut Reader) -> Result, ParsingError>; + async fn qread(xml: &mut Reader) -> Result, ParsingError>; } /// Transform a Rust object into an XML stream of characters @@ -42,24 +47,24 @@ impl Writer { /// Transform an XML stream of characters into a Rust object pub struct Reader { + pub rdr: NsReader, evt: Event<'static>, - rdr: NsReader, buf: Vec, } impl Reader { - async fn new(mut rdr: NsReader) -> Result { + pub async fn new(mut rdr: NsReader) -> Result { let mut buf: Vec = vec![]; let evt = rdr.read_event_into_async(&mut buf).await?.into_owned(); buf.clear(); Ok(Self { evt, rdr, buf }) } - fn peek(&self) -> &Event<'static> { + pub fn peek(&self) -> &Event<'static> { &self.evt } /// skip tag. Can't skip end, can't skip eof. 
- async fn skip(&mut self) -> Result, ParsingError> { + pub async fn skip(&mut self) -> Result, ParsingError> { match &self.evt { Event::Start(b) => { let _span = self.rdr.read_to_end_into_async(b.to_end().name(), &mut self.buf).await?; @@ -72,7 +77,7 @@ impl Reader { } /// read one more tag - async fn next(&mut self) -> Result, ParsingError> { + pub async fn next(&mut self) -> Result, ParsingError> { let evt = self.rdr.read_event_into_async(&mut self.buf).await?.into_owned(); self.buf.clear(); let old_evt = std::mem::replace(&mut self.evt, evt); @@ -81,7 +86,7 @@ impl Reader { /// check if this is the desired tag - fn is_tag(&self, ns: &[u8], key: &str) -> bool { + pub fn is_tag(&self, ns: &[u8], key: &str) -> bool { let qname = match self.peek() { Event::Start(bs) | Event::Empty(bs) => bs.name(), Event::End(be) => be.name(), @@ -101,7 +106,7 @@ impl Reader { } /// find start tag - async fn tag_start(&mut self, ns: &[u8], key: &str) -> Result, ParsingError> { + pub async fn tag_start(&mut self, ns: &[u8], key: &str) -> Result, ParsingError> { loop { match self.peek() { Event::Start(b) if self.is_tag(ns, key) => break, @@ -112,7 +117,7 @@ impl Reader { } // find stop tag - async fn tag_stop(&mut self, ns: &[u8], key: &str) -> Result, ParsingError> { + pub async fn tag_stop(&mut self, ns: &[u8], key: &str) -> Result, ParsingError> { loop { match self.peek() { Event::End(b) if self.is_tag(ns, key) => break, -- cgit v1.2.3 From ba32a0d4a6810e4bf9d18f14086597c20212bbcb Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Wed, 6 Mar 2024 10:12:02 +0100 Subject: decode errors --- src/dav/decoder.rs | 168 +++++++++++++++++++++++++++++++++++++++++++++++++++-- src/dav/error.rs | 6 ++ src/dav/xml.rs | 3 + 3 files changed, 173 insertions(+), 4 deletions(-) diff --git a/src/dav/decoder.rs b/src/dav/decoder.rs index a7fdca5..7de5d63 100644 --- a/src/dav/decoder.rs +++ b/src/dav/decoder.rs @@ -11,6 +11,7 @@ use super::types::*; use super::error::ParsingError; use super::xml::{QRead, Reader, IRead, DAV_URN, CAL_URN}; +// ---- ROOT ---- impl QRead> for PropFind { async fn qread(xml: &mut Reader) -> Result, ParsingError> { // Find propfind @@ -47,6 +48,117 @@ impl QRead> for PropFind { } } +impl QRead> for Error { + async fn qread(xml: &mut Reader) -> Result, ParsingError> { + xml.tag_start(DAV_URN, "error").await?; + let mut violations = Vec::new(); + loop { + match xml.peek() { + Event::Start(_) | Event::Empty(_) => { + Violation::qread(xml).await?.map(|v| violations.push(v)); + }, + Event::End(_) if xml.is_tag(DAV_URN, "error") => break, + _ => { xml.skip().await?; }, + } + } + xml.tag_stop(DAV_URN, "error").await?; + Ok(Some(Error(violations))) + } +} + +// ---- INNER XML +impl QRead> for Violation { + async fn qread(xml: &mut Reader) -> Result, ParsingError> { + loop { + let bs = match xml.peek() { + Event::Start(b) | Event::Empty(b) => b, + _ => { + xml.skip().await?; + continue + }, + }; + + let mut maybe_res = None; + + // Option 1: a pure DAV property + let (ns, loc) = xml.rdr.resolve_element(bs.name()); + if matches!(ns, Bound(Namespace(ns)) if ns == DAV_URN) { + maybe_res = match loc.into_inner() { + b"lock-token-matches-request-uri" => { + xml.next().await?; + Some(Violation::LockTokenMatchesRequestUri) + }, + b"lock-token-submitted" => { + // start tag + xml.next().await?; + + let mut links = Vec::new(); + loop { + // If we find a Href + if let Some(href) = Href::qread(xml).await? 
{ + links.push(href); + continue + } + + // Otherwise + match xml.peek() { + Event::End(_) => break, + _ => { xml.skip().await?; }, + } + } + xml.tag_stop(DAV_URN, "lock-token-submitted").await?; + Some(Violation::LockTokenSubmitted(links)) + }, + b"no-conflicting-lock" => { + // start tag + xml.next().await?; + + let mut links = Vec::new(); + loop { + // If we find a Href + if let Some(href) = Href::qread(xml).await? { + links.push(href); + continue + } + + // Otherwise + match xml.peek() { + Event::End(_) => break, + _ => { xml.skip().await?; }, + } + } + xml.tag_stop(DAV_URN, "no-conflicting-lock").await?; + Some(Violation::NoConflictingLock(links)) + }, + b"no-external-entities" => { + xml.next().await?; + Some(Violation::NoExternalEntities) + }, + b"preserved-live-properties" => { + xml.next().await?; + Some(Violation::PreservedLiveProperties) + }, + b"propfind-finite-depth" => { + xml.next().await?; + Some(Violation::PropfindFiniteDepth) + }, + b"cannot-modify-protected-property" => { + xml.next().await?; + Some(Violation::CannotModifyProtectedProperty) + }, + _ => None, + }; + } + + // Option 2: an extension property, delegating + if maybe_res.is_none() { + maybe_res = E::Error::qread(xml).await?.map(Violation::Extension); + } + + return Ok(maybe_res) + } + } +} impl QRead> for Include { async fn qread(xml: &mut Reader) -> Result, ParsingError> { @@ -113,21 +225,50 @@ impl QRead> for PropertyRequest { b"supportedlock" => Some(PropertyRequest::SupportedLock), _ => None, }; + // Close the current tag if we read something + if maybe_res.is_some() { + xml.skip().await?; + } } - // Option 2: an extension property + // Option 2: an extension property, delegating if maybe_res.is_none() { maybe_res = E::PropertyRequest::qread(xml).await?.map(PropertyRequest::Extension); } - // Close the current tag - xml.skip().await?; - return Ok(maybe_res) } } } +impl QRead for Href { + async fn qread(xml: &mut Reader) -> Result, ParsingError> { + match xml.peek() { + Event::Start(b) if xml.is_tag(DAV_URN, "href") => xml.next().await?, + _ => return Ok(None), + }; + + let mut url = String::new(); + loop { + match xml.peek() { + Event::End(_) => break, + Event::Start(_) | Event::Empty(_) => return Err(ParsingError::WrongToken), + Event::CData(unescaped) => { + url.push_str(std::str::from_utf8(unescaped.as_ref())?); + xml.next().await? + }, + Event::Text(escaped) => { + url.push_str(escaped.unescape()?.as_ref()); + xml.next().await? 
+ } + _ => xml.skip().await?, + }; + } + xml.tag_stop(DAV_URN, "href").await?; + Ok(Some(Href(url))) + } +} + #[cfg(test)] mod tests { use super::*; @@ -180,4 +321,23 @@ mod tests { PropertyRequest::SupportedLock, ]))); } + + #[tokio::test] + async fn rfc_lock_error() { + let src = r#" + + + /locked/ + + "#; + + let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); + let got = Error::::qread(&mut rdr).await.unwrap().unwrap(); + + assert_eq!(got, Error(vec![ + Violation::LockTokenSubmitted(vec![ + Href("/locked/".into()) + ]) + ])); + } } diff --git a/src/dav/error.rs b/src/dav/error.rs index 5bd8ed3..b04d2ac 100644 --- a/src/dav/error.rs +++ b/src/dav/error.rs @@ -6,6 +6,7 @@ pub enum ParsingError { NamespacePrefixAlreadyUsed, WrongToken, TagNotFound, + Utf8Error(std::str::Utf8Error), QuickXml(quick_xml::Error), Eof } @@ -19,3 +20,8 @@ impl From for ParsingError { Self::QuickXml(value) } } +impl From for ParsingError { + fn from(value: std::str::Utf8Error) -> Self { + Self::Utf8Error(value) + } +} diff --git a/src/dav/xml.rs b/src/dav/xml.rs index 5ebda02..1cce86a 100644 --- a/src/dav/xml.rs +++ b/src/dav/xml.rs @@ -65,6 +65,7 @@ impl Reader { /// skip tag. Can't skip end, can't skip eof. pub async fn skip(&mut self) -> Result, ParsingError> { + println!("skip on {:?}", &self.evt); match &self.evt { Event::Start(b) => { let _span = self.rdr.read_to_end_into_async(b.to_end().name(), &mut self.buf).await?; @@ -107,6 +108,7 @@ impl Reader { /// find start tag pub async fn tag_start(&mut self, ns: &[u8], key: &str) -> Result, ParsingError> { + println!("search start tag {}", key); loop { match self.peek() { Event::Start(b) if self.is_tag(ns, key) => break, @@ -118,6 +120,7 @@ impl Reader { // find stop tag pub async fn tag_stop(&mut self, ns: &[u8], key: &str) -> Result, ParsingError> { + println!("search stop tag {}", key); loop { match self.peek() { Event::End(b) if self.is_tag(ns, key) => break, -- cgit v1.2.3 From 05c952f0207fa40d5dc315933bd8fd34dd0cdd1c Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Wed, 6 Mar 2024 12:42:27 +0100 Subject: WIP lock/propertyupdate implementation --- src/dav/decoder.rs | 409 ++++++++++++++++++++++++++++++++++++++++++++++------- src/dav/encoder.rs | 27 ++-- src/dav/error.rs | 13 ++ src/dav/xml.rs | 18 +++ 4 files changed, 399 insertions(+), 68 deletions(-) diff --git a/src/dav/decoder.rs b/src/dav/decoder.rs index 7de5d63..43e5c49 100644 --- a/src/dav/decoder.rs +++ b/src/dav/decoder.rs @@ -12,6 +12,8 @@ use super::error::ParsingError; use super::xml::{QRead, Reader, IRead, DAV_URN, CAL_URN}; // ---- ROOT ---- + +/// Propfind request impl QRead> for PropFind { async fn qread(xml: &mut Reader) -> Result, ParsingError> { // Find propfind @@ -48,6 +50,37 @@ impl QRead> for PropFind { } } +/// PROPPATCH request +impl QRead> for PropertyUpdate { + async fn qread(xml: &mut Reader) -> Result, ParsingError> { + xml.tag_start(DAV_URN, "propertyupdate").await?; + let mut collected_items = Vec::new(); + loop { + // Try to collect a property item + if let Some(item) = PropertyUpdateItem::qread(xml).await? 
{ + collected_items.push(item); + continue + } + + // Skip or stop otherwise + match xml.peek() { + Event::End(_) => break, + _ => { xml.skip().await?; }, + } + } + + xml.tag_stop(DAV_URN, "propertyupdate").await?; + Ok(Some(PropertyUpdate(collected_items))) + } +} + +//@TODO Multistatus + +//@TODO LockInfo + +//@TODO PropValue + +/// Error response impl QRead> for Error { async fn qread(xml: &mut Reader) -> Result, ParsingError> { xml.tag_start(DAV_URN, "error").await?; @@ -66,7 +99,56 @@ impl QRead> for Error { } } + + // ---- INNER XML +impl QRead> for PropertyUpdateItem { + async fn qread(xml: &mut Reader) -> Result, ParsingError> { + if let Some(rm) = Remove::qread(xml).await? { + return Ok(Some(PropertyUpdateItem::Remove(rm))) + } + Ok(Set::qread(xml).await?.map(PropertyUpdateItem::Set)) + } +} + +impl QRead> for Remove { + async fn qread(xml: &mut Reader) -> Result, ParsingError> { + match xml.peek() { + Event::Start(b) if xml.is_tag(DAV_URN, "remove") => xml.next().await?, + _ => return Ok(None), + }; + + let propname = loop { + match xml.peek() { + Event::Start(b) | Event::Empty(b) if xml.is_tag(DAV_URN, "prop") => break PropName::qread(xml).await?, + _ => xml.skip().await?, + }; + }; + + xml.tag_stop(DAV_URN, "remove").await?; + Ok(propname.map(Remove)) + } +} + +impl QRead> for Set { + async fn qread(xml: &mut Reader) -> Result, ParsingError> { + match xml.peek() { + Event::Start(b) if xml.is_tag(DAV_URN, "set") => xml.next().await?, + _ => return Ok(None), + }; + let propvalue = loop { + match xml.peek() { + Event::Start(b) | Event::Empty(b) if xml.is_tag(DAV_URN, "prop") => break PropValue::qread(xml).await?, + _ => xml.skip().await?, + }; + }; + + + xml.tag_stop(DAV_URN, "set").await?; + Ok(propvalue.map(Set)) + } +} + impl QRead> for Violation { async fn qread(xml: &mut Reader) -> Result, ParsingError> { loop { @@ -163,13 +245,17 @@ impl QRead> for Violation { impl QRead> for Include { async fn qread(xml: &mut Reader) -> Result, ParsingError> { xml.tag_start(DAV_URN, "include").await?; - let mut acc: Vec> = Vec::new(); + let mut acc = Vec::new(); loop { + // Found a property + if let Some(prop) = PropertyRequest::qread(xml).await? { + acc.push(prop); + continue; + } + + // Otherwise skip or escape match xml.peek() { - Event::Start(_) | Event::Empty(_) => { - PropertyRequest::qread(xml).await?.map(|v| acc.push(v)); - }, - Event::End(_) if xml.is_tag(DAV_URN, "include") => break, + Event::End(_) => break, _ => { xml.skip().await?; }, } } @@ -181,13 +267,17 @@ impl QRead> for Include { impl QRead> for PropName { async fn qread(xml: &mut Reader) -> Result, ParsingError> { xml.tag_start(DAV_URN, "prop").await?; - let mut acc: Vec> = Vec::new(); + let mut acc = Vec::new(); loop { + // Found a property + if let Some(prop) = PropertyRequest::qread(xml).await? 
{ + acc.push(prop); + continue; + } + + // Otherwise skip or escape match xml.peek() { - Event::Start(_) | Event::Empty(_) => { - PropertyRequest::qread(xml).await?.map(|v| acc.push(v)); - }, - Event::End(_) if xml.is_tag(DAV_URN, "prop") => break, + Event::End(_) => break, _ => { xml.skip().await?; }, } } @@ -198,72 +288,281 @@ impl QRead> for PropName { impl QRead> for PropertyRequest { async fn qread(xml: &mut Reader) -> Result, ParsingError> { - loop { - let bs = match xml.peek() { - Event::Start(b) | Event::Empty(b) => b, - _ => { - xml.skip().await?; - continue - }, + let bs = match xml.peek() { + Event::Start(b) | Event::Empty(b) => b, + _ => return Ok(None), + }; + + let mut maybe_res = None; + + // Option 1: a pure core DAV property + let (ns, loc) = xml.rdr.resolve_element(bs.name()); + if matches!(ns, Bound(Namespace(ns)) if ns == DAV_URN) { + maybe_res = match loc.into_inner() { + b"creationdate" => Some(PropertyRequest::CreationDate), + b"displayname" => Some(PropertyRequest::DisplayName), + b"getcontentlanguage" => Some(PropertyRequest::GetContentLanguage), + b"getcontentlength" => Some(PropertyRequest::GetContentLength), + b"getcontenttype" => Some(PropertyRequest::GetContentType), + b"getetag" => Some(PropertyRequest::GetEtag), + b"getlastmodified" => Some(PropertyRequest::GetLastModified), + b"lockdiscovery" => Some(PropertyRequest::LockDiscovery), + b"resourcetype" => Some(PropertyRequest::ResourceType), + b"supportedlock" => Some(PropertyRequest::SupportedLock), + _ => None, }; + // Close the current tag if we read something + if maybe_res.is_some() { + xml.skip().await?; + } + } - let mut maybe_res = None; + // Option 2: an extension property, delegating + if maybe_res.is_none() { + maybe_res = E::PropertyRequest::qread(xml).await?.map(PropertyRequest::Extension); + } - // Option 1: a pure DAV property - let (ns, loc) = xml.rdr.resolve_element(bs.name()); - if matches!(ns, Bound(Namespace(ns)) if ns == DAV_URN) { - maybe_res = match loc.into_inner() { - b"creationdate" => Some(PropertyRequest::CreationDate), - b"displayname" => Some(PropertyRequest::DisplayName), - b"getcontentlanguage" => Some(PropertyRequest::GetContentLanguage), - b"getcontentlength" => Some(PropertyRequest::GetContentLength), - b"getcontenttype" => Some(PropertyRequest::GetContentType), - b"getetag" => Some(PropertyRequest::GetEtag), - b"getlastmodified" => Some(PropertyRequest::GetLastModified), - b"lockdiscovery" => Some(PropertyRequest::LockDiscovery), - b"resourcetype" => Some(PropertyRequest::ResourceType), - b"supportedlock" => Some(PropertyRequest::SupportedLock), - _ => None, - }; - // Close the current tag if we read something - if maybe_res.is_some() { - xml.skip().await?; - } - } + Ok(maybe_res) + } +} - // Option 2: an extension property, delegating - if maybe_res.is_none() { - maybe_res = E::PropertyRequest::qread(xml).await?.map(PropertyRequest::Extension); + +impl QRead> for PropValue { + async fn qread(xml: &mut Reader) -> Result, ParsingError> { + xml.tag_start(DAV_URN, "prop").await?; + let mut acc = Vec::new(); + loop { + // Found a property + if let Some(prop) = Property::qread(xml).await? 
{ + acc.push(prop); + continue; } - return Ok(maybe_res) + // Otherwise skip or escape + match xml.peek() { + Event::End(_) => break, + _ => { xml.skip().await?; }, + } } + xml.tag_stop(DAV_URN, "prop").await?; + Ok(Some(PropValue(acc))) } } -impl QRead for Href { +impl QRead> for Property { async fn qread(xml: &mut Reader) -> Result, ParsingError> { - match xml.peek() { - Event::Start(b) if xml.is_tag(DAV_URN, "href") => xml.next().await?, + use chrono::{DateTime, FixedOffset, TimeZone}; + + let bs = match xml.peek() { + Event::Start(b) | Event::Empty(b) => b, _ => return Ok(None), }; - let mut url = String::new(); + let mut maybe_res = None; + + // Option 1: a pure core DAV property + let (ns, loc) = xml.rdr.resolve_element(bs.name()); + if matches!(ns, Bound(Namespace(ns)) if ns == DAV_URN) { + maybe_res = match loc.into_inner() { + b"creationdate" => { + xml.next().await?; + let datestr = xml.tag_string().await?; + Some(Property::CreationDate(DateTime::parse_from_rfc3339(datestr.as_str())?)) + }, + b"displayname" => { + xml.next().await?; + Some(Property::DisplayName(xml.tag_string().await?)) + }, + b"getcontentlanguage" => { + xml.next().await?; + Some(Property::GetContentLanguage(xml.tag_string().await?)) + }, + b"getcontentlength" => { + xml.next().await?; + let cl = xml.tag_string().await?.parse::()?; + Some(Property::GetContentLength(cl)) + }, + b"getcontenttype" => { + xml.next().await?; + Some(Property::GetContentType(xml.tag_string().await?)) + }, + b"getetag" => { + xml.next().await?; + Some(Property::GetEtag(xml.tag_string().await?)) + }, + b"getlastmodified" => { + xml.next().await?; + xml.next().await?; + let datestr = xml.tag_string().await?; + Some(Property::CreationDate(DateTime::parse_from_rfc2822(datestr.as_str())?)) + }, + b"lockdiscovery" => { + // start tag + xml.next().await?; + + let mut acc = Vec::new(); + loop { + // If we find a lock + if let Some(lock) = ActiveLock::qread(xml).await? { + acc.push(lock); + continue + } + + // Otherwise + match xml.peek() { + Event::End(_) => break, + _ => { xml.skip().await?; }, + } + } + xml.tag_stop(DAV_URN, "lockdiscovery").await?; + Some(Property::LockDiscovery(acc)) + }, + b"resourcetype" => { + xml.next().await?; + + let mut acc = Vec::new(); + loop { + // If we find a resource type... + if let Some(restype) = ResourceType::qread(xml).await? { + acc.push(restype); + continue + } + + // Otherwise + match xml.peek() { + Event::End(_) => break, + _ => { xml.skip().await?; }, + } + } + xml.tag_stop(DAV_URN, "resourcetype").await?; + Some(Property::ResourceType(acc)) + }, + b"supportedlock" => { + xml.next().await?; + + let mut acc = Vec::new(); + loop { + // If we find a resource type... + if let Some(restype) = LockEntry::qread(xml).await? 
{ + acc.push(restype); + continue + } + + // Otherwise + match xml.peek() { + Event::End(_) => break, + _ => { xml.skip().await?; }, + } + } + xml.tag_stop(DAV_URN, "supportedlock").await?; + Some(Property::SupportedLock(acc)) + }, + _ => None, + }; + } + + // Option 2: an extension property, delegating + if maybe_res.is_none() { + maybe_res = E::Property::qread(xml).await?.map(Property::Extension); + } + + Ok(maybe_res) + } +} + +impl QRead for ActiveLock { + async fn qread(xml: &mut Reader) -> Result, ParsingError> { + unimplemented!(); + } +} + +impl QRead> for ResourceType { + async fn qread(xml: &mut Reader) -> Result, ParsingError> { + match xml.peek() { + Event::Empty(b) if xml.is_tag(DAV_URN, "collection") => { + xml.next().await?; + Ok(Some(ResourceType::Collection)) + }, + _ => Ok(E::ResourceType::qread(xml).await?.map(ResourceType::Extension)), + } + } +} + +impl QRead for LockEntry { + async fn qread(xml: &mut Reader) -> Result, ParsingError> { + xml.tag_start(DAV_URN, "lockentry").await?; + let (mut maybe_scope, mut maybe_type) = (None, None); + loop { match xml.peek() { + Event::Start(b) if xml.is_tag(DAV_URN, "lockscope") => { + maybe_scope = LockScope::qread(xml).await?; + }, + Event::Start(b) if xml.is_tag(DAV_URN, "lockentry") => { + maybe_type = LockType::qread(xml).await?; + } Event::End(_) => break, - Event::Start(_) | Event::Empty(_) => return Err(ParsingError::WrongToken), - Event::CData(unescaped) => { - url.push_str(std::str::from_utf8(unescaped.as_ref())?); - xml.next().await? + _ => { xml.skip().await?; }, + } + } + + let lockentry = match (maybe_scope, maybe_type) { + (Some(lockscope), Some(locktype)) => LockEntry { lockscope, locktype }, + _ => return Err(ParsingError::MissingChild), + }; + + xml.tag_stop(DAV_URN, "lockentry").await?; + Ok(Some(lockentry)) + } +} + +impl QRead for LockScope { + async fn qread(xml: &mut Reader) -> Result, ParsingError> { + xml.tag_start(DAV_URN, "lockscope").await?; + let lockscope = loop { + match xml.peek() { + Event::Empty(b) if xml.is_tag(DAV_URN, "exclusive") => { + xml.next().await?; + break LockScope::Exclusive }, - Event::Text(escaped) => { - url.push_str(escaped.unescape()?.as_ref()); - xml.next().await? 
+ Event::Empty(b) if xml.is_tag(DAV_URN, "shared") => { + xml.next().await?; + break LockScope::Shared } _ => xml.skip().await?, }; - } + }; + + xml.tag_stop(DAV_URN, "lockscope").await?; + Ok(Some(lockscope)) + } +} + +impl QRead for LockType { + async fn qread(xml: &mut Reader) -> Result, ParsingError> { + xml.tag_start(DAV_URN, "locktype").await?; + let locktype = loop { + match xml.peek() { + Event::Empty(b) if xml.is_tag(DAV_URN, "write") => { + xml.next().await?; + break LockType::Write + } + _ => xml.skip().await?, + }; + }; + xml.tag_stop(DAV_URN, "locktype").await?; + Ok(Some(locktype)) + } +} + +impl QRead for Href { + async fn qread(xml: &mut Reader) -> Result, ParsingError> { + match xml.peek() { + Event::Start(b) if xml.is_tag(DAV_URN, "href") => xml.next().await?, + _ => return Ok(None), + }; + + let mut url = xml.tag_string().await?; xml.tag_stop(DAV_URN, "href").await?; Ok(Some(Href(url))) } diff --git a/src/dav/encoder.rs b/src/dav/encoder.rs index c0a5332..f3a1860 100644 --- a/src/dav/encoder.rs +++ b/src/dav/encoder.rs @@ -101,6 +101,20 @@ impl QWrite for PropValue { } } +/// Error response +impl QWrite for Error { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("error"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + for violation in &self.0 { + violation.qwrite(xml).await?; + } + xml.q.write_event_async(Event::End(end)).await + } +} + // --- XML inner elements impl QWrite for PropertyUpdateItem { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { @@ -584,19 +598,6 @@ impl QWrite for LockEntry { } } -impl QWrite for Error { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let start = xml.create_dav_element("error"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - for violation in &self.0 { - violation.qwrite(xml).await?; - } - xml.q.write_event_async(Event::End(end)).await - } -} - impl QWrite for Violation { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { let mut atom = async |c| { diff --git a/src/dav/error.rs b/src/dav/error.rs index b04d2ac..8cb60ba 100644 --- a/src/dav/error.rs +++ b/src/dav/error.rs @@ -8,6 +8,8 @@ pub enum ParsingError { TagNotFound, Utf8Error(std::str::Utf8Error), QuickXml(quick_xml::Error), + Chrono(chrono::format::ParseError), + Int(std::num::ParseIntError), Eof } impl From for ParsingError { @@ -25,3 +27,14 @@ impl From for ParsingError { Self::Utf8Error(value) } } +impl From for ParsingError { + fn from(value: chrono::format::ParseError) -> Self { + Self::Chrono(value) + } +} + +impl From for ParsingError { + fn from(value: std::num::ParseIntError) -> Self { + Self::Int(value) + } +} diff --git a/src/dav/xml.rs b/src/dav/xml.rs index 1cce86a..bf02721 100644 --- a/src/dav/xml.rs +++ b/src/dav/xml.rs @@ -129,5 +129,23 @@ impl Reader { } self.next().await } + + pub async fn tag_string(&mut self) -> Result { + let mut acc = String::new(); + loop { + match self.peek() { + Event::CData(unescaped) => { + acc.push_str(std::str::from_utf8(unescaped.as_ref())?); + self.next().await? + }, + Event::Text(escaped) => { + acc.push_str(escaped.unescape()?.as_ref()); + self.next().await? 
+ } + Event::End(_) | Event::Start(_) | Event::Empty(_) => return Ok(acc), + _ => self.next().await?, + }; + } + } } -- cgit v1.2.3 From 96a27d7b223d97675ba9388a6dea9514939ff502 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Wed, 6 Mar 2024 16:09:20 +0100 Subject: Implement lockinfo --- src/dav/decoder.rs | 275 ++++++++++++++++++++++++++++++++++++++++++++++------- src/dav/encoder.rs | 1 + src/dav/error.rs | 1 + src/dav/types.rs | 1 + src/dav/xml.rs | 10 ++ 5 files changed, 256 insertions(+), 32 deletions(-) diff --git a/src/dav/decoder.rs b/src/dav/decoder.rs index 43e5c49..042a608 100644 --- a/src/dav/decoder.rs +++ b/src/dav/decoder.rs @@ -11,6 +11,15 @@ use super::types::*; use super::error::ParsingError; use super::xml::{QRead, Reader, IRead, DAV_URN, CAL_URN}; +//@TODO (1) Rewrite all objects as Href, +// where we return Ok(None) instead of trying to find the object at any cost. +// Add a xml.find() -> Result, ParsingError> or similar for the cases we +// really need the object +// (2) Rewrite QRead and replace Result, _> with Result<_, _>, not found being a possible +// error. +// (3) Rewrite vectors with xml.collect() -> Result, _> +// (4) Something for alternatives would be great but no idea yet + // ---- ROOT ---- /// Propfind request @@ -74,11 +83,59 @@ impl QRead> for PropertyUpdate { } } +/// Generic response //@TODO Multistatus -//@TODO LockInfo +// LOCK REQUEST +impl QRead for LockInfo { + async fn qread(xml: &mut Reader) -> Result, ParsingError> { + xml.tag_start(DAV_URN, "lockinfo").await?; + let (mut m_scope, mut m_type, mut owner) = (None, None, None); + loop { + if let Some(v) = LockScope::qread(xml).await? { + m_scope = Some(v); + } else if let Some(v) = LockType::qread(xml).await? { + m_type = Some(v); + } else if let Some(v) = Owner::qread(xml).await? { + owner = Some(v); + } else { + match xml.peek() { + Event::End(_) => break, + _ => xml.skip().await?, + }; + } + } + xml.tag_stop(DAV_URN, "lockinfo").await?; + match (m_scope, m_type) { + (Some(lockscope), Some(locktype)) => Ok(Some(LockInfo { lockscope, locktype, owner })), + _ => Err(ParsingError::MissingChild), + } + } +} + +// LOCK RESPONSE +impl QRead> for PropValue { + async fn qread(xml: &mut Reader) -> Result, ParsingError> { + xml.tag_start(DAV_URN, "prop").await?; + let mut acc = Vec::new(); + loop { + // Found a property + if let Some(prop) = Property::qread(xml).await? { + acc.push(prop); + continue; + } + + // Otherwise skip or escape + match xml.peek() { + Event::End(_) => break, + _ => { xml.skip().await?; }, + } + } + xml.tag_stop(DAV_URN, "prop").await?; + Ok(Some(PropValue(acc))) + } +} -//@TODO PropValue /// Error response impl QRead> for Error { @@ -326,29 +383,6 @@ impl QRead> for PropertyRequest { } } - -impl QRead> for PropValue { - async fn qread(xml: &mut Reader) -> Result, ParsingError> { - xml.tag_start(DAV_URN, "prop").await?; - let mut acc = Vec::new(); - loop { - // Found a property - if let Some(prop) = Property::qread(xml).await? 
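tag_string(), added just above, accumulates character data until the next structural event; a condensed sketch of that accumulation step as a free function (not part of the patch, mirroring the same quick_xml calls):

use quick_xml::events::Event;

// Text nodes are unescaped, CDATA is taken verbatim; any structural event
// (Start/End/Empty) is left for the caller, which ends the accumulation.
fn push_text(acc: &mut String, ev: &Event<'_>) -> Result<(), ParsingError> {
    match ev {
        Event::CData(raw) => acc.push_str(std::str::from_utf8(raw.as_ref())?),
        Event::Text(escaped) => acc.push_str(escaped.unescape()?.as_ref()),
        _ => (),
    }
    Ok(())
}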
{ - acc.push(prop); - continue; - } - - // Otherwise skip or escape - match xml.peek() { - Event::End(_) => break, - _ => { xml.skip().await?; }, - } - } - xml.tag_stop(DAV_URN, "prop").await?; - Ok(Some(PropValue(acc))) - } -} - impl QRead> for Property { async fn qread(xml: &mut Reader) -> Result, ParsingError> { use chrono::{DateTime, FixedOffset, TimeZone}; @@ -472,7 +506,126 @@ impl QRead> for Property { impl QRead for ActiveLock { async fn qread(xml: &mut Reader) -> Result, ParsingError> { - unimplemented!(); + xml.tag_start(DAV_URN, "activelock").await?; + let (mut m_scope, mut m_type, mut m_depth, mut owner, mut timeout, mut locktoken, mut m_root) = + (None, None, None, None, None, None, None); + + loop { + if let Some(v) = LockScope::qread(xml).await? { + m_scope = Some(v); + } else if let Some(v) = LockType::qread(xml).await? { + m_type = Some(v); + } else if let Some(v) = Depth::qread(xml).await? { + m_depth = Some(v); + } else if let Some(v) = Owner::qread(xml).await? { + owner = Some(v); + } else if let Some(v) = Timeout::qread(xml).await? { + timeout = Some(v); + } else if let Some(v) = LockToken::qread(xml).await? { + locktoken = Some(v); + } else if let Some(v) = LockRoot::qread(xml).await? { + m_root = Some(v); + } else { + match xml.peek() { + Event::End(_) => break, + _ => { xml.skip().await?; }, + } + } + } + + xml.tag_stop(DAV_URN, "activelock").await?; + match (m_scope, m_type, m_depth, m_root) { + (Some(lockscope), Some(locktype), Some(depth), Some(lockroot)) => + Ok(Some(ActiveLock { lockscope, locktype, depth, owner, timeout, locktoken, lockroot })), + _ => Err(ParsingError::MissingChild), + } + } +} + +impl QRead for Depth { + async fn qread(xml: &mut Reader) -> Result, ParsingError> { + xml.tag_start(DAV_URN, "depth").await?; + let depth_str = xml.tag_string().await?; + xml.tag_stop(DAV_URN, "depth").await?; + match depth_str.as_str() { + "0" => Ok(Some(Depth::Zero)), + "1" => Ok(Some(Depth::One)), + "infinity" => Ok(Some(Depth::Infinity)), + _ => Err(ParsingError::WrongToken), + } + } +} + +impl QRead for Owner { + async fn qread(xml: &mut Reader) -> Result, ParsingError> { + if xml.maybe_tag_start(DAV_URN, "owner").await?.is_none() { + return Ok(None) + } + + let mut owner = Owner::Unknown; + loop { + match xml.peek() { + Event::Text(_) | Event::CData(_) => { + let txt = xml.tag_string().await?; + if matches!(owner, Owner::Unknown) { + owner = Owner::Txt(txt); + } + } + Event::Start(_) | Event::Empty(_) => { + if let Some(href) = Href::qread(xml).await? 
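ActiveLock::qread above follows the activelock element of RFC 4918 (section 14.1): lockscope, locktype, depth and lockroot are required, while owner, timeout and locktoken are optional, which is exactly the MissingChild check performed at the end. An illustrative value (URLs and token are placeholders, not taken from the patch):

let activelock = r#"<D:activelock xmlns:D="DAV:">
  <D:lockscope><D:exclusive/></D:lockscope>
  <D:locktype><D:write/></D:locktype>
  <D:depth>infinity</D:depth>
  <D:owner><D:href>http://example.org/~ejw/contact.html</D:href></D:owner>
  <D:timeout>Second-604800</D:timeout>
  <D:locktoken><D:href>urn:uuid:e71d4fae-5dec-22d6-fea5-00a0c91e6be4</D:href></D:locktoken>
  <D:lockroot><D:href>http://example.com/workspace/webdav/proposal.doc</D:href></D:lockroot>
</D:activelock>"#;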
{ + owner = Owner::Href(href) + } + xml.skip().await?; + } + Event::End(_) => break, + _ => { xml.skip().await?; }, + } + }; + xml.tag_stop(DAV_URN, "owner").await?; + Ok(Some(owner)) + } +} + +impl QRead for Timeout { + async fn qread(xml: &mut Reader) -> Result, ParsingError> { + const SEC_PFX: &str = "SEC_PFX"; + + match xml.peek() { + Event::Start(b) if xml.is_tag(DAV_URN, "timeout") => xml.next().await?, + _ => return Ok(None), + }; + + let timeout = match xml.tag_string().await?.as_str() { + "Infinite" => Timeout::Infinite, + seconds => match seconds.strip_prefix(SEC_PFX) { + Some(secs) => Timeout::Seconds(secs.parse::()?), + None => return Err(ParsingError::InvalidValue), + }, + }; + + xml.tag_stop(DAV_URN, "timeout").await?; + Ok(Some(timeout)) + } +} + +impl QRead for LockToken { + async fn qread(xml: &mut Reader) -> Result, ParsingError> { + match xml.peek() { + Event::Start(b) if xml.is_tag(DAV_URN, "locktoken") => xml.next().await?, + _ => return Ok(None), + }; + let href = Href::qread(xml).await?.ok_or(ParsingError::MissingChild)?; + xml.tag_stop(DAV_URN, "locktoken").await?; + Ok(Some(LockToken(href))) + } +} + +impl QRead for LockRoot { + async fn qread(xml: &mut Reader) -> Result, ParsingError> { + xml.tag_start(DAV_URN, "lockroot").await?; + let href = Href::qread(xml).await?.ok_or(ParsingError::MissingChild)?; + xml.tag_stop(DAV_URN, "lockroot").await?; + Ok(Some(LockRoot(href))) } } @@ -495,10 +648,10 @@ impl QRead for LockEntry { loop { match xml.peek() { - Event::Start(b) if xml.is_tag(DAV_URN, "lockscope") => { + Event::Start(_) if xml.is_tag(DAV_URN, "lockscope") => { maybe_scope = LockScope::qread(xml).await?; }, - Event::Start(b) if xml.is_tag(DAV_URN, "lockentry") => { + Event::Start(_) if xml.is_tag(DAV_URN, "lockentry") => { maybe_type = LockType::qread(xml).await?; } Event::End(_) => break, @@ -518,14 +671,18 @@ impl QRead for LockEntry { impl QRead for LockScope { async fn qread(xml: &mut Reader) -> Result, ParsingError> { - xml.tag_start(DAV_URN, "lockscope").await?; + if xml.maybe_tag_start(DAV_URN, "lockscope").await?.is_none() { + return Ok(None) + } + let lockscope = loop { + println!("lockscope tag: {:?}", xml.peek()); match xml.peek() { - Event::Empty(b) if xml.is_tag(DAV_URN, "exclusive") => { + Event::Empty(_) if xml.is_tag(DAV_URN, "exclusive") => { xml.next().await?; break LockScope::Exclusive }, - Event::Empty(b) if xml.is_tag(DAV_URN, "shared") => { + Event::Empty(_) if xml.is_tag(DAV_URN, "shared") => { xml.next().await?; break LockScope::Shared } @@ -540,7 +697,10 @@ impl QRead for LockScope { impl QRead for LockType { async fn qread(xml: &mut Reader) -> Result, ParsingError> { - xml.tag_start(DAV_URN, "locktype").await?; + if xml.maybe_tag_start(DAV_URN, "locktype").await?.is_none() { + return Ok(None) + } + let locktype = loop { match xml.peek() { Event::Empty(b) if xml.is_tag(DAV_URN, "write") => { @@ -639,4 +799,55 @@ mod tests { ]) ])); } + + + #[tokio::test] + async fn rfc_propertyupdate() { + let src = r#" + + + + + Jim Whitehead + Roy Fielding + + + + + + + "#; + + let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); + let got = PropertyUpdate::::qread(&mut rdr).await.unwrap().unwrap(); + + assert_eq!(got, PropertyUpdate(vec![ + PropertyUpdateItem::Set(Set(PropValue(vec![]))), + PropertyUpdateItem::Remove(Remove(PropName(vec![]))), + ])); + } + + #[tokio::test] + async fn rfc_lockinfo1() { + let src = r#" + + + + + + http://example.org/~ejw/contact.html + + +"#; + + let mut rdr = 
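The assertions of rfc_lockinfo1 (exclusive scope, write type, owner given as an href) correspond to a LOCK request body of this shape, per RFC 4918; spelled out as a raw string in the style the test uses:

let lockinfo_body = r#"<?xml version="1.0" encoding="utf-8" ?>
<D:lockinfo xmlns:D="DAV:">
  <D:lockscope><D:exclusive/></D:lockscope>
  <D:locktype><D:write/></D:locktype>
  <D:owner>
    <D:href>http://example.org/~ejw/contact.html</D:href>
  </D:owner>
</D:lockinfo>"#;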
Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); + let got = LockInfo::qread(&mut rdr).await.unwrap().unwrap(); + assert_eq!(got, LockInfo { + lockscope: LockScope::Exclusive, + locktype: LockType::Write, + owner: Some(Owner::Href(Href("http://example.org/~ejw/contact.html".into()))), + }); + } + } diff --git a/src/dav/encoder.rs b/src/dav/encoder.rs index f3a1860..ec937c6 100644 --- a/src/dav/encoder.rs +++ b/src/dav/encoder.rs @@ -527,6 +527,7 @@ impl QWrite for Owner { match self { Self::Txt(txt) => xml.q.write_event_async(Event::Text(BytesText::new(&txt))).await?, Self::Href(href) => href.qwrite(xml).await?, + Self::Unknown => (), } xml.q.write_event_async(Event::End(end)).await } diff --git a/src/dav/error.rs b/src/dav/error.rs index 8cb60ba..88a5e60 100644 --- a/src/dav/error.rs +++ b/src/dav/error.rs @@ -6,6 +6,7 @@ pub enum ParsingError { NamespacePrefixAlreadyUsed, WrongToken, TagNotFound, + InvalidValue, Utf8Error(std::str::Utf8Error), QuickXml(quick_xml::Error), Chrono(chrono::format::ParseError), diff --git a/src/dav/types.rs b/src/dav/types.rs index f2eae3a..08c0bc6 100644 --- a/src/dav/types.rs +++ b/src/dav/types.rs @@ -366,6 +366,7 @@ pub struct Multistatus { pub enum Owner { Txt(String), Href(Href), + Unknown, } /// 14.18. prop XML Element diff --git a/src/dav/xml.rs b/src/dav/xml.rs index bf02721..ff121f4 100644 --- a/src/dav/xml.rs +++ b/src/dav/xml.rs @@ -106,6 +106,16 @@ impl Reader { } } + /// maybe find start tag + pub async fn maybe_tag_start(&mut self, ns: &[u8], key: &str) -> Result>, ParsingError> { + println!("maybe start tag {}", key); + let peek = self.peek(); + match peek { + Event::Start(_) | Event::Empty(_) if self.is_tag(ns, key) => Ok(Some(self.next().await?)), + _ => Ok(None), + } + } + /// find start tag pub async fn tag_start(&mut self, ns: &[u8], key: &str) -> Result, ParsingError> { println!("search start tag {}", key); -- cgit v1.2.3 From ce2fa5c3bc0c2c819525ef38f8c28b91a0889803 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Wed, 6 Mar 2024 18:35:54 +0100 Subject: Fix typing of Response --- src/dav/calencoder.rs | 16 +++++---- src/dav/decoder.rs | 45 ++++++++++++++++++++++++- src/dav/encoder.rs | 93 ++++++++++++++++++++++++++++++--------------------- src/dav/types.rs | 10 ++++-- 4 files changed, 115 insertions(+), 49 deletions(-) diff --git a/src/dav/calencoder.rs b/src/dav/calencoder.rs index d030aa1..114eee9 100644 --- a/src/dav/calencoder.rs +++ b/src/dav/calencoder.rs @@ -813,8 +813,9 @@ mod tests { &dav::Multistatus:: { responses: vec![ dav::Response { - href: dav::Href("http://cal.example.com/bernard/work/abcd2.ics".into()), - status_or_propstat: dav::StatusOrPropstat::PropStat(vec![dav::PropStat { + status_or_propstat: dav::StatusOrPropstat::PropStat( + dav::Href("http://cal.example.com/bernard/work/abcd2.ics".into()), + vec![dav::PropStat { prop: dav::AnyProp::Value(dav::PropValue(vec![ dav::Property::GetEtag("\"fffff-abcd2\"".into()), dav::Property::Extension(Property::CalendarData(CalendarDataPayload { @@ -825,14 +826,16 @@ mod tests { status: dav::Status(http::status::StatusCode::OK), error: None, responsedescription: None, - }]), + }] + ), location: None, error: None, responsedescription: None, }, dav::Response { - href: dav::Href("http://cal.example.com/bernard/work/abcd3.ics".into()), - status_or_propstat: dav::StatusOrPropstat::PropStat(vec![dav::PropStat { + status_or_propstat: dav::StatusOrPropstat::PropStat( + dav::Href("http://cal.example.com/bernard/work/abcd3.ics".into()), + vec![dav::PropStat { prop: 
dav::AnyProp::Value(dav::PropValue(vec![ dav::Property::GetEtag("\"fffff-abcd3\"".into()), dav::Property::Extension(Property::CalendarData(CalendarDataPayload{ @@ -843,7 +846,8 @@ mod tests { status: dav::Status(http::status::StatusCode::OK), error: None, responsedescription: None, - }]), + }] + ), location: None, error: None, responsedescription: None, diff --git a/src/dav/decoder.rs b/src/dav/decoder.rs index 042a608..66c0839 100644 --- a/src/dav/decoder.rs +++ b/src/dav/decoder.rs @@ -84,7 +84,29 @@ impl QRead> for PropertyUpdate { } /// Generic response -//@TODO Multistatus +impl QRead> for Multistatus { + async fn qread(xml: &mut Reader) -> Result, ParsingError> { + xml.tag_start(DAV_URN, "multistatus").await?; + let mut responses = Vec::new(); + let mut responsedescription = None; + + loop { + if let Some(v) = Response::qread(xml).await? { + responses.push(v); + } else if let Some(v) = ResponseDescription::qread(xml).await? { + responsedescription = Some(v); + } else { + match xml.peek() { + Event::End(_) => break, + _ => xml.skip().await?, + }; + } + } + + xml.tag_stop(DAV_URN, "multistatus").await?; + Ok(Some(Multistatus { responses, responsedescription })) + } +} // LOCK REQUEST impl QRead for LockInfo { @@ -159,6 +181,27 @@ impl QRead> for Error { // ---- INNER XML +impl QRead> for Response { + async fn qread(xml: &mut Reader) -> Result, ParsingError> { + if xml.maybe_tag_start(DAV_URN, "response").await?.is_none() { + return Ok(None) + } + + unimplemented!(); + } +} + +impl QRead for ResponseDescription { + async fn qread(xml: &mut Reader) -> Result, ParsingError> { + if xml.maybe_tag_start(DAV_URN, "responsedescription").await?.is_none() { + return Ok(None) + } + let cnt = xml.tag_string().await?; + xml.tag_stop(DAV_URN, "responsedescription").await?; + Ok(Some(ResponseDescription(cnt))) + } +} + impl QRead> for PropertyUpdateItem { async fn qread(xml: &mut Reader) -> Result, ParsingError> { if let Some(rm) = Remove::qread(xml).await? 
{ diff --git a/src/dav/encoder.rs b/src/dav/encoder.rs index ec937c6..5736217 100644 --- a/src/dav/encoder.rs +++ b/src/dav/encoder.rs @@ -186,7 +186,6 @@ impl QWrite for Response { let end = start.to_end(); xml.q.write_event_async(Event::Start(start.clone())).await?; - self.href.qwrite(xml).await?; self.status_or_propstat.qwrite(xml).await?; if let Some(error) = &self.error { error.qwrite(xml).await?; @@ -204,8 +203,14 @@ impl QWrite for Response { impl QWrite for StatusOrPropstat { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { match self { - Self::Status(status) => status.qwrite(xml).await, - Self::PropStat(propstat_list) => { + Self::Status(many_href, status) => { + for href in many_href.iter() { + href.qwrite(xml).await?; + } + status.qwrite(xml).await + }, + Self::PropStat(href, propstat_list) => { + href.qwrite(xml).await?; for propstat in propstat_list.iter() { propstat.qwrite(xml).await?; } @@ -728,39 +733,43 @@ mod tests { &Multistatus:: { responses: vec![ Response { - href: Href("http://www.example.com/container/".into()), - status_or_propstat: StatusOrPropstat::PropStat(vec![PropStat { - prop: AnyProp::Name(PropName(vec![ - PropertyRequest::CreationDate, - PropertyRequest::DisplayName, - PropertyRequest::ResourceType, - PropertyRequest::SupportedLock, - ])), - status: Status(http::status::StatusCode::OK), - error: None, - responsedescription: None, - }]), + status_or_propstat: StatusOrPropstat::PropStat( + Href("http://www.example.com/container/".into()), + vec![PropStat { + prop: AnyProp::Name(PropName(vec![ + PropertyRequest::CreationDate, + PropertyRequest::DisplayName, + PropertyRequest::ResourceType, + PropertyRequest::SupportedLock, + ])), + status: Status(http::status::StatusCode::OK), + error: None, + responsedescription: None, + }] + ), error: None, responsedescription: None, location: None, }, Response { - href: Href("http://www.example.com/container/front.html".into()), - status_or_propstat: StatusOrPropstat::PropStat(vec![PropStat { - prop: AnyProp::Name(PropName(vec![ - PropertyRequest::CreationDate, - PropertyRequest::DisplayName, - PropertyRequest::GetContentLength, - PropertyRequest::GetContentType, - PropertyRequest::GetEtag, - PropertyRequest::GetLastModified, - PropertyRequest::ResourceType, - PropertyRequest::SupportedLock, - ])), - status: Status(http::status::StatusCode::OK), - error: None, - responsedescription: None, - }]), + status_or_propstat: StatusOrPropstat::PropStat( + Href("http://www.example.com/container/front.html".into()), + vec![PropStat { + prop: AnyProp::Name(PropName(vec![ + PropertyRequest::CreationDate, + PropertyRequest::DisplayName, + PropertyRequest::GetContentLength, + PropertyRequest::GetContentType, + PropertyRequest::GetEtag, + PropertyRequest::GetLastModified, + PropertyRequest::ResourceType, + PropertyRequest::SupportedLock, + ])), + status: Status(http::status::StatusCode::OK), + error: None, + responsedescription: None, + } + ]), error: None, responsedescription: None, location: None, @@ -825,8 +834,9 @@ mod tests { &Multistatus:: { responses: vec![ Response { - href: Href("/container/".into()), - status_or_propstat: StatusOrPropstat::PropStat(vec![PropStat { + status_or_propstat: StatusOrPropstat::PropStat( + Href("/container/".into()), + vec![PropStat { prop: AnyProp::Value(PropValue(vec![ Property::CreationDate(FixedOffset::west_opt(8 * 3600) .unwrap() @@ -848,14 +858,16 @@ mod tests { status: Status(http::status::StatusCode::OK), error: None, responsedescription: None, - }]), + }] + ), error: None, 
responsedescription: None, location: None, }, Response { - href: Href("/container/front.html".into()), - status_or_propstat: StatusOrPropstat::PropStat(vec![PropStat { + status_or_propstat: StatusOrPropstat::PropStat( + Href("/container/front.html".into()), + vec![PropStat { prop: AnyProp::Value(PropValue(vec![ Property::CreationDate(FixedOffset::west_opt(8 * 3600) .unwrap() @@ -884,7 +896,8 @@ mod tests { status: Status(http::status::StatusCode::OK), error: None, responsedescription: None, - }]), + }] + ), error: None, responsedescription: None, location: None, @@ -1018,8 +1031,10 @@ mod tests { let got = serialize( &Multistatus:: { responses: vec![Response { - href: Href("http://www.example.com/container/resource3".into()), - status_or_propstat: StatusOrPropstat::Status(Status(http::status::StatusCode::from_u16(423).unwrap())), + status_or_propstat: StatusOrPropstat::Status( + vec![Href("http://www.example.com/container/resource3".into())], + Status(http::status::StatusCode::from_u16(423).unwrap()) + ), error: Some(Error(vec![Violation::LockTokenSubmitted(vec![])])), responsedescription: None, location: None, diff --git a/src/dav/types.rs b/src/dav/types.rs index 08c0bc6..b3842de 100644 --- a/src/dav/types.rs +++ b/src/dav/types.rs @@ -516,15 +516,19 @@ pub struct Remove(pub PropName); /// /// +/// +/// --- rewritten as --- +/// #[derive(Debug, PartialEq)] pub enum StatusOrPropstat { - Status(Status), - PropStat(Vec>), + // One status, multiple hrefs... + Status(Vec, Status), + // A single href, multiple properties... + PropStat(Href, Vec>), } #[derive(Debug, PartialEq)] pub struct Response { - pub href: Href, // It's wrong according to the spec, but I don't understand why there is an href* pub status_or_propstat: StatusOrPropstat, pub error: Option>, pub responsedescription: Option, -- cgit v1.2.3 From 67e5953c244674c84adf4bd03a0cab1530f6507b Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Wed, 6 Mar 2024 20:58:41 +0100 Subject: drop anyprop as it can't be decoded --- src/dav/calencoder.rs | 14 ++++----- src/dav/caltypes.rs | 2 +- src/dav/decoder.rs | 82 +++++++++++++++++++++++++++++++++++++++++++++++++-- src/dav/encoder.rs | 41 ++++++++++---------------- src/dav/types.rs | 32 ++++++++------------ 5 files changed, 117 insertions(+), 54 deletions(-) diff --git a/src/dav/calencoder.rs b/src/dav/calencoder.rs index 114eee9..cadfc78 100644 --- a/src/dav/calencoder.rs +++ b/src/dav/calencoder.rs @@ -5,7 +5,7 @@ use tokio::io::AsyncWrite; use super::caltypes::*; use super::xml::{QWrite, IWrite, Writer}; -use super::types::Extension; +use super::types::{Extension, Node}; const ICAL_DATETIME_FMT: &str = "%Y%m%dT%H%M%SZ"; @@ -23,7 +23,7 @@ impl QWrite for MkCalendar { } } -impl QWrite for MkCalendarResponse { +impl> QWrite for MkCalendarResponse { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { let start = xml.create_cal_element("mkcalendar-response"); let end = start.to_end(); @@ -810,19 +810,19 @@ mod tests { #[tokio::test] async fn rfc_calendar_query1_res() { let got = serialize( - &dav::Multistatus:: { + &dav::Multistatus::> { responses: vec![ dav::Response { status_or_propstat: dav::StatusOrPropstat::PropStat( dav::Href("http://cal.example.com/bernard/work/abcd2.ics".into()), vec![dav::PropStat { - prop: dav::AnyProp::Value(dav::PropValue(vec![ + prop: dav::PropValue(vec![ dav::Property::GetEtag("\"fffff-abcd2\"".into()), dav::Property::Extension(Property::CalendarData(CalendarDataPayload { mime: None, payload: "PLACEHOLDER".into() })), - ])), + ]), status: 
dav::Status(http::status::StatusCode::OK), error: None, responsedescription: None, @@ -836,13 +836,13 @@ mod tests { status_or_propstat: dav::StatusOrPropstat::PropStat( dav::Href("http://cal.example.com/bernard/work/abcd3.ics".into()), vec![dav::PropStat { - prop: dav::AnyProp::Value(dav::PropValue(vec![ + prop: dav::PropValue(vec![ dav::Property::GetEtag("\"fffff-abcd3\"".into()), dav::Property::Extension(Property::CalendarData(CalendarDataPayload{ mime: None, payload: "PLACEHOLDER".into(), })), - ])), + ]), status: dav::Status(http::status::StatusCode::OK), error: None, responsedescription: None, diff --git a/src/dav/caltypes.rs b/src/dav/caltypes.rs index 68e7baf..d9cbb12 100644 --- a/src/dav/caltypes.rs +++ b/src/dav/caltypes.rs @@ -44,7 +44,7 @@ pub struct MkCalendar(pub dav::Set); /// /// #[derive(Debug, PartialEq)] -pub struct MkCalendarResponse(pub Vec>); +pub struct MkCalendarResponse>(pub Vec>); // --- (REPORT PART) --- diff --git a/src/dav/decoder.rs b/src/dav/decoder.rs index 66c0839..41eca36 100644 --- a/src/dav/decoder.rs +++ b/src/dav/decoder.rs @@ -84,7 +84,7 @@ impl QRead> for PropertyUpdate { } /// Generic response -impl QRead> for Multistatus { +impl> QRead> for Multistatus { async fn qread(xml: &mut Reader) -> Result, ParsingError> { xml.tag_start(DAV_URN, "multistatus").await?; let mut responses = Vec::new(); @@ -181,16 +181,74 @@ impl QRead> for Error { // ---- INNER XML -impl QRead> for Response { +impl> QRead> for Response { async fn qread(xml: &mut Reader) -> Result, ParsingError> { if xml.maybe_tag_start(DAV_URN, "response").await?.is_none() { return Ok(None) } + let (mut status, mut error, mut responsedescription, mut location) = (None, None, None, None); + let mut href = Vec::new(); + let mut propstat = Vec::new(); + loop { + if let Some(v) = Status::qread(xml).await? { + status = Some(v); + } else if let Some(v) = Href::qread(xml).await? { + href.push(v); + } else if let Some(v) = PropStat::qread(xml).await? { + propstat.push(v); + } else if let Some(v) = Error::qread(xml).await? { + error = Some(v); + } else if let Some(v) = ResponseDescription::qread(xml).await? { + responsedescription = Some(v); + } else if let Some(v) = Location::qread(xml).await? { + location = Some(v); + } else { + match xml.peek() { + Event::End(_) => break, + _ => { xml.skip().await? 
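The reworked StatusOrPropstat mirrors the grammar of the response element in RFC 4918 (section 14.24): a response carries either one or more hrefs sharing a single status, or exactly one href with one propstat per status code. A consumer sketch (the function and its E parameter are hypothetical, E standing for whatever Extension implementation is in use):

// <!ELEMENT response (href, ((href*, status)|(propstat+)),
//                     error?, responsedescription?, location?) >
fn summarize<E: Extension>(resp: &Response<E>) {
    match &resp.status_or_propstat {
        // Variant 1: several resources, one status code (e.g. a failed MOVE).
        StatusOrPropstat::Status(hrefs, Status(code)) => {
            for Href(uri) in hrefs {
                println!("{uri}: {code}");
            }
        }
        // Variant 2: one resource, its properties grouped by status code.
        StatusOrPropstat::PropStat(Href(uri), propstats) => {
            for block in propstats {
                println!("{uri}: properties with status {}", block.status.0);
            }
        }
    }
}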
}, + }; + } + } + + xml.tag_stop(DAV_URN, "response").await?; + match (status, &propstat[..], &href[..]) { + (Some(status), &[], &[_, ..]) => Ok(Some(Response { + status_or_propstat: StatusOrPropstat::Status(href, status), + error, responsedescription, location, + })), + (None, &[_, ..], &[_, ..]) => Ok(Some(Response { + status_or_propstat: StatusOrPropstat::PropStat(href.into_iter().next().unwrap(), propstat), + error, responsedescription, location, + })), + (Some(_), &[_, ..], _) => Err(ParsingError::InvalidValue), + _ => Err(ParsingError::MissingChild), + } + } +} + +impl> QRead> for PropStat { + async fn qread(xml: &mut Reader) -> Result, ParsingError> { + if xml.maybe_tag_start(DAV_URN, "propstat").await?.is_none() { + return Ok(None) + } unimplemented!(); } } +impl QRead for Status { + async fn qread(xml: &mut Reader) -> Result, ParsingError> { + if xml.maybe_tag_start(DAV_URN, "status").await?.is_none() { + return Ok(None) + } + let fullcode = xml.tag_string().await?; + let txtcode = fullcode.splitn(3, ' ').nth(1).ok_or(ParsingError::InvalidValue)?; + let code = http::status::StatusCode::from_bytes(txtcode.as_bytes()).or(Err(ParsingError::InvalidValue))?; + xml.tag_stop(DAV_URN, "status").await?; + Ok(Some(Status(code))) + } +} + impl QRead for ResponseDescription { async fn qread(xml: &mut Reader) -> Result, ParsingError> { if xml.maybe_tag_start(DAV_URN, "responsedescription").await?.is_none() { @@ -202,6 +260,26 @@ impl QRead for ResponseDescription { } } +impl QRead for Location { + async fn qread(xml: &mut Reader) -> Result, ParsingError> { + if xml.maybe_tag_start(DAV_URN, "location").await?.is_none() { + return Ok(None) + } + let href = loop { + if let Some(v) = Href::qread(xml).await? { + break v + } + + match xml.peek() { + Event::End(_) => return Err(ParsingError::MissingChild), + _ => xml.skip().await?, + }; + }; + xml.tag_stop(DAV_URN, "location").await?; + Ok(Some(Location(href))) + } +} + impl QRead> for PropertyUpdateItem { async fn qread(xml: &mut Reader) -> Result, ParsingError> { if let Some(rm) = Remove::qread(xml).await? 
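The status element transports a full HTTP status line, which is why only the second whitespace-separated token is handed to http::StatusCode above; the same extraction in isolation (helper name is illustrative):

// "HTTP/1.1 423 Locked" -> StatusCode::LOCKED (423)
fn parse_status_line(line: &str) -> Result<http::StatusCode, ParsingError> {
    let code = line.splitn(3, ' ').nth(1).ok_or(ParsingError::InvalidValue)?;
    http::StatusCode::from_bytes(code.as_bytes()).or(Err(ParsingError::InvalidValue))
}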
{ diff --git a/src/dav/encoder.rs b/src/dav/encoder.rs index 5736217..9e60f29 100644 --- a/src/dav/encoder.rs +++ b/src/dav/encoder.rs @@ -53,7 +53,7 @@ impl QWrite for PropertyUpdate { /// PROPFIND RESPONSE, PROPPATCH RESPONSE, COPY RESPONSE, MOVE RESPONSE /// DELETE RESPONSE, -impl QWrite for Multistatus { +impl> QWrite for Multistatus { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { let start = xml.create_dav_element("multistatus"); let end = start.to_end(); @@ -146,15 +146,6 @@ impl QWrite for Remove { } -impl QWrite for AnyProp { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - match self { - Self::Name(propname) => propname.qwrite(xml).await, - Self::Value(propval) => propval.qwrite(xml).await, - } - } -} - impl QWrite for PropName { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { let start = xml.create_dav_element("prop"); @@ -180,7 +171,7 @@ impl QWrite for Href { } } -impl QWrite for Response { +impl> QWrite for Response { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { let start = xml.create_dav_element("response"); let end = start.to_end(); @@ -200,7 +191,7 @@ impl QWrite for Response { } } -impl QWrite for StatusOrPropstat { +impl> QWrite for StatusOrPropstat { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { match self { Self::Status(many_href, status) => { @@ -258,7 +249,7 @@ impl QWrite for Location { } } -impl QWrite for PropStat { +impl> QWrite for PropStat { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { let start = xml.create_dav_element("propstat"); let end = start.to_end(); @@ -681,7 +672,7 @@ mod tests { #[tokio::test] async fn basic_multistatus() { let got = serialize( - &Multistatus:: { + &Multistatus::> { responses: vec![], responsedescription: Some(ResponseDescription("Hello world".into())) }, @@ -730,18 +721,18 @@ mod tests { #[tokio::test] async fn rfc_propname_res() { let got = serialize( - &Multistatus:: { + &Multistatus::> { responses: vec![ Response { status_or_propstat: StatusOrPropstat::PropStat( Href("http://www.example.com/container/".into()), vec![PropStat { - prop: AnyProp::Name(PropName(vec![ + prop: PropName(vec![ PropertyRequest::CreationDate, PropertyRequest::DisplayName, PropertyRequest::ResourceType, PropertyRequest::SupportedLock, - ])), + ]), status: Status(http::status::StatusCode::OK), error: None, responsedescription: None, @@ -755,7 +746,7 @@ mod tests { status_or_propstat: StatusOrPropstat::PropStat( Href("http://www.example.com/container/front.html".into()), vec![PropStat { - prop: AnyProp::Name(PropName(vec![ + prop: PropName(vec![ PropertyRequest::CreationDate, PropertyRequest::DisplayName, PropertyRequest::GetContentLength, @@ -764,7 +755,7 @@ mod tests { PropertyRequest::GetLastModified, PropertyRequest::ResourceType, PropertyRequest::SupportedLock, - ])), + ]), status: Status(http::status::StatusCode::OK), error: None, responsedescription: None, @@ -831,13 +822,13 @@ mod tests { async fn rfc_allprop_res() { use chrono::{DateTime,FixedOffset,TimeZone}; let got = serialize( - &Multistatus:: { + &Multistatus::> { responses: vec![ Response { status_or_propstat: StatusOrPropstat::PropStat( Href("/container/".into()), vec![PropStat { - prop: AnyProp::Value(PropValue(vec![ + prop: PropValue(vec![ Property::CreationDate(FixedOffset::west_opt(8 * 3600) .unwrap() .with_ymd_and_hms(1997, 12, 1, 17, 42, 21) @@ -854,7 +845,7 @@ mod tests { locktype: LockType::Write, }, ]), - ])), + ]), status: Status(http::status::StatusCode::OK), error: 
None, responsedescription: None, @@ -868,7 +859,7 @@ mod tests { status_or_propstat: StatusOrPropstat::PropStat( Href("/container/front.html".into()), vec![PropStat { - prop: AnyProp::Value(PropValue(vec![ + prop: PropValue(vec![ Property::CreationDate(FixedOffset::west_opt(8 * 3600) .unwrap() .with_ymd_and_hms(1997, 12, 1, 18, 27, 21) @@ -892,7 +883,7 @@ mod tests { locktype: LockType::Write, }, ]), - ])), + ]), status: Status(http::status::StatusCode::OK), error: None, responsedescription: None, @@ -1029,7 +1020,7 @@ mod tests { #[tokio::test] async fn rfc_delete_locked2() { let got = serialize( - &Multistatus:: { + &Multistatus::> { responses: vec![Response { status_or_propstat: StatusOrPropstat::Status( vec![Href("http://www.example.com/container/resource3".into())], diff --git a/src/dav/types.rs b/src/dav/types.rs index b3842de..246a4bd 100644 --- a/src/dav/types.rs +++ b/src/dav/types.rs @@ -7,12 +7,12 @@ use super::error; /// It's how we implement a DAV extension /// (That's the dark magic part...) -pub trait ExtensionItem = xml::QRead + xml::QWrite + Debug + PartialEq; +pub trait Node = xml::QRead + xml::QWrite + Debug + PartialEq; pub trait Extension { - type Error: ExtensionItem; - type Property: ExtensionItem; - type PropertyRequest: ExtensionItem; - type ResourceType: ExtensionItem; + type Error: Node; + type Property: Node; + type PropertyRequest: Node; + type ResourceType: Node; } /// 14.1. activelock XML Element @@ -333,8 +333,8 @@ pub enum LockType { /// /// #[derive(Debug, PartialEq)] -pub struct Multistatus { - pub responses: Vec>, +pub struct Multistatus> { + pub responses: Vec>, pub responsedescription: Option, } @@ -383,12 +383,6 @@ pub enum Owner { /// text or mixed content. /// /// -#[derive(Debug, PartialEq)] -pub enum AnyProp { - Name(PropName), - Value(PropValue), -} - #[derive(Debug, PartialEq)] pub struct PropName(pub Vec>); @@ -471,8 +465,8 @@ pub enum PropFind { /// /// #[derive(Debug, PartialEq)] -pub struct PropStat { - pub prop: AnyProp, +pub struct PropStat> { + pub prop: N, pub status: Status, pub error: Option>, pub responsedescription: Option, @@ -520,16 +514,16 @@ pub struct Remove(pub PropName); /// --- rewritten as --- /// #[derive(Debug, PartialEq)] -pub enum StatusOrPropstat { +pub enum StatusOrPropstat> { // One status, multiple hrefs... Status(Vec, Status), // A single href, multiple properties... 
- PropStat(Href, Vec>), + PropStat(Href, Vec>), } #[derive(Debug, PartialEq)] -pub struct Response { - pub status_or_propstat: StatusOrPropstat, +pub struct Response> { + pub status_or_propstat: StatusOrPropstat, pub error: Option>, pub responsedescription: Option, pub location: Option, -- cgit v1.2.3 From 5e71a7d84804a86d049d3e0a614c35bedf7cb636 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Wed, 6 Mar 2024 23:24:54 +0100 Subject: Rewrote the whole decoder --- src/dav/caldecoder.rs | 8 +- src/dav/calencoder.rs | 4 +- src/dav/caltypes.rs | 3 +- src/dav/decoder.rs | 748 ++++++++++++++++++++----------------------------- src/dav/encoder.rs | 2 +- src/dav/error.rs | 1 + src/dav/realization.rs | 4 +- src/dav/types.rs | 19 +- src/dav/xml.rs | 87 +++++- 9 files changed, 406 insertions(+), 470 deletions(-) diff --git a/src/dav/caldecoder.rs b/src/dav/caldecoder.rs index b45d649..5f40c4b 100644 --- a/src/dav/caldecoder.rs +++ b/src/dav/caldecoder.rs @@ -7,25 +7,25 @@ use super::error; // ---- EXTENSIONS --- impl xml::QRead for Violation { - async fn qread(xml: &mut xml::Reader) -> Result, error::ParsingError> { + async fn qread(xml: &mut xml::Reader) -> Result { unreachable!(); } } impl xml::QRead for Property { - async fn qread(xml: &mut xml::Reader) -> Result, error::ParsingError> { + async fn qread(xml: &mut xml::Reader) -> Result { unreachable!(); } } impl xml::QRead for PropertyRequest { - async fn qread(xml: &mut xml::Reader) -> Result, error::ParsingError> { + async fn qread(xml: &mut xml::Reader) -> Result { unreachable!(); } } impl xml::QRead for ResourceType { - async fn qread(xml: &mut xml::Reader) -> Result, error::ParsingError> { + async fn qread(xml: &mut xml::Reader) -> Result { unreachable!(); } } diff --git a/src/dav/calencoder.rs b/src/dav/calencoder.rs index cadfc78..58b88c7 100644 --- a/src/dav/calencoder.rs +++ b/src/dav/calencoder.rs @@ -4,8 +4,8 @@ use quick_xml::name::PrefixDeclaration; use tokio::io::AsyncWrite; use super::caltypes::*; -use super::xml::{QWrite, IWrite, Writer}; -use super::types::{Extension, Node}; +use super::xml::{Node, QWrite, IWrite, Writer}; +use super::types::Extension; const ICAL_DATETIME_FMT: &str = "%Y%m%dT%H%M%SZ"; diff --git a/src/dav/caltypes.rs b/src/dav/caltypes.rs index d9cbb12..befecef 100644 --- a/src/dav/caltypes.rs +++ b/src/dav/caltypes.rs @@ -2,6 +2,7 @@ use chrono::{DateTime,Utc}; use super::types as dav; +use super::xml; //@FIXME ACL (rfc3744) is missing, required //@FIXME Versioning (rfc3253) is missing, required @@ -44,7 +45,7 @@ pub struct MkCalendar(pub dav::Set); /// /// #[derive(Debug, PartialEq)] -pub struct MkCalendarResponse>(pub Vec>); +pub struct MkCalendarResponse>(pub Vec>); // --- (REPORT PART) --- diff --git a/src/dav/decoder.rs b/src/dav/decoder.rs index 41eca36..144cc4e 100644 --- a/src/dav/decoder.rs +++ b/src/dav/decoder.rs @@ -9,7 +9,7 @@ use tokio::io::AsyncBufRead; use super::types::*; use super::error::ParsingError; -use super::xml::{QRead, Reader, IRead, DAV_URN, CAL_URN}; +use super::xml::{Node, QRead, Reader, IRead, DAV_URN, CAL_URN}; //@TODO (1) Rewrite all objects as Href, // where we return Ok(None) instead of trying to find the object at any cost. 
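With the prop payload now a type parameter, the runtime AnyProp choice disappears: whether a multistatus carries property names or property values is decided at the type level, which is what makes decoding unambiguous. Two aliases that could spell out the common instantiations (hypothetical, Ext standing for whichever Extension implementation is in use, and assuming the Extension-then-Node parameter order used in the tests):

type PropNameMultistatus<Ext>  = Multistatus<Ext, PropName<Ext>>;   // e.g. answering PROPFIND propname
type PropValueMultistatus<Ext> = Multistatus<Ext, PropValue<Ext>>;  // e.g. answering PROPFIND with values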
@@ -24,21 +24,22 @@ use super::xml::{QRead, Reader, IRead, DAV_URN, CAL_URN}; /// Propfind request impl QRead> for PropFind { - async fn qread(xml: &mut Reader) -> Result, ParsingError> { + async fn qread(xml: &mut Reader) -> Result { // Find propfind - xml.tag_start(DAV_URN, "propfind").await?; + xml.open(DAV_URN, "propfind").await?; + // Find any tag let propfind: PropFind = loop { match xml.peek() { Event::Start(_) if xml.is_tag(DAV_URN, "allprop") => { - xml.tag_start(DAV_URN, "allprop").await?; - let r = PropFind::AllProp(Include::qread(xml).await?); + xml.open(DAV_URN, "allprop").await?; + let includ = xml.maybe_find::>().await?; + let r = PropFind::AllProp(includ); xml.tag_stop(DAV_URN, "allprop").await?; break r }, Event::Start(_) if xml.is_tag(DAV_URN, "prop") => { - let propname = PropName::qread(xml).await?.ok_or(ParsingError::MissingChild)?; - break PropFind::Prop(propname); + break PropFind::Prop(xml.find::>().await?); }, Event::Empty(_) if xml.is_tag(DAV_URN, "allprop") => { xml.next().await?; @@ -55,47 +56,32 @@ impl QRead> for PropFind { // Close tag xml.tag_stop(DAV_URN, "propfind").await?; - Ok(Some(propfind)) + Ok(propfind) } } /// PROPPATCH request impl QRead> for PropertyUpdate { - async fn qread(xml: &mut Reader) -> Result, ParsingError> { - xml.tag_start(DAV_URN, "propertyupdate").await?; - let mut collected_items = Vec::new(); - loop { - // Try to collect a property item - if let Some(item) = PropertyUpdateItem::qread(xml).await? { - collected_items.push(item); - continue - } - - // Skip or stop otherwise - match xml.peek() { - Event::End(_) => break, - _ => { xml.skip().await?; }, - } - } - + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "propertyupdate").await?; + let collected_items = xml.collect::>().await?; xml.tag_stop(DAV_URN, "propertyupdate").await?; - Ok(Some(PropertyUpdate(collected_items))) + Ok(PropertyUpdate(collected_items)) } } /// Generic response impl> QRead> for Multistatus { - async fn qread(xml: &mut Reader) -> Result, ParsingError> { - xml.tag_start(DAV_URN, "multistatus").await?; + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "multistatus").await?; let mut responses = Vec::new(); let mut responsedescription = None; loop { - if let Some(v) = Response::qread(xml).await? { - responses.push(v); - } else if let Some(v) = ResponseDescription::qread(xml).await? { - responsedescription = Some(v); - } else { + let mut dirty = false; + xml.maybe_push(&mut responses, &mut dirty).await?; + xml.maybe_read(&mut responsedescription, &mut dirty).await?; + if !dirty { match xml.peek() { Event::End(_) => break, _ => xml.skip().await?, @@ -104,23 +90,22 @@ impl> QRead> for Multistatus { } xml.tag_stop(DAV_URN, "multistatus").await?; - Ok(Some(Multistatus { responses, responsedescription })) + Ok(Multistatus { responses, responsedescription }) } } // LOCK REQUEST impl QRead for LockInfo { - async fn qread(xml: &mut Reader) -> Result, ParsingError> { - xml.tag_start(DAV_URN, "lockinfo").await?; + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "lockinfo").await?; let (mut m_scope, mut m_type, mut owner) = (None, None, None); loop { - if let Some(v) = LockScope::qread(xml).await? { - m_scope = Some(v); - } else if let Some(v) = LockType::qread(xml).await? { - m_type = Some(v); - } else if let Some(v) = Owner::qread(xml).await? 
{ - owner = Some(v); - } else { + let mut dirty = false; + xml.maybe_read::(&mut m_scope, &mut dirty).await?; + xml.maybe_read::(&mut m_type, &mut dirty).await?; + xml.maybe_read::(&mut owner, &mut dirty).await?; + + if !dirty { match xml.peek() { Event::End(_) => break, _ => xml.skip().await?, @@ -129,7 +114,7 @@ impl QRead for LockInfo { } xml.tag_stop(DAV_URN, "lockinfo").await?; match (m_scope, m_type) { - (Some(lockscope), Some(locktype)) => Ok(Some(LockInfo { lockscope, locktype, owner })), + (Some(lockscope), Some(locktype)) => Ok(LockInfo { lockscope, locktype, owner }), _ => Err(ParsingError::MissingChild), } } @@ -137,44 +122,22 @@ impl QRead for LockInfo { // LOCK RESPONSE impl QRead> for PropValue { - async fn qread(xml: &mut Reader) -> Result, ParsingError> { - xml.tag_start(DAV_URN, "prop").await?; - let mut acc = Vec::new(); - loop { - // Found a property - if let Some(prop) = Property::qread(xml).await? { - acc.push(prop); - continue; - } - - // Otherwise skip or escape - match xml.peek() { - Event::End(_) => break, - _ => { xml.skip().await?; }, - } - } + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "prop").await?; + let mut acc = xml.collect::>().await?; xml.tag_stop(DAV_URN, "prop").await?; - Ok(Some(PropValue(acc))) + Ok(PropValue(acc)) } } /// Error response impl QRead> for Error { - async fn qread(xml: &mut Reader) -> Result, ParsingError> { - xml.tag_start(DAV_URN, "error").await?; - let mut violations = Vec::new(); - loop { - match xml.peek() { - Event::Start(_) | Event::Empty(_) => { - Violation::qread(xml).await?.map(|v| violations.push(v)); - }, - Event::End(_) if xml.is_tag(DAV_URN, "error") => break, - _ => { xml.skip().await?; }, - } - } + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "error").await?; + let violations = xml.collect::>().await?; xml.tag_stop(DAV_URN, "error").await?; - Ok(Some(Error(violations))) + Ok(Error(violations)) } } @@ -182,28 +145,22 @@ impl QRead> for Error { // ---- INNER XML impl> QRead> for Response { - async fn qread(xml: &mut Reader) -> Result, ParsingError> { - if xml.maybe_tag_start(DAV_URN, "response").await?.is_none() { - return Ok(None) - } + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "response").await?; let (mut status, mut error, mut responsedescription, mut location) = (None, None, None, None); let mut href = Vec::new(); let mut propstat = Vec::new(); loop { - if let Some(v) = Status::qread(xml).await? { - status = Some(v); - } else if let Some(v) = Href::qread(xml).await? { - href.push(v); - } else if let Some(v) = PropStat::qread(xml).await? { - propstat.push(v); - } else if let Some(v) = Error::qread(xml).await? { - error = Some(v); - } else if let Some(v) = ResponseDescription::qread(xml).await? { - responsedescription = Some(v); - } else if let Some(v) = Location::qread(xml).await? { - location = Some(v); - } else { + let mut dirty = false; + xml.maybe_read::(&mut status, &mut dirty).await?; + xml.maybe_push::(&mut href, &mut dirty).await?; + xml.maybe_push::>(&mut propstat, &mut dirty).await?; + xml.maybe_read::>(&mut error, &mut dirty).await?; + xml.maybe_read::(&mut responsedescription, &mut dirty).await?; + xml.maybe_read::(&mut location, &mut dirty).await?; + + if !dirty { match xml.peek() { Event::End(_) => break, _ => { xml.skip().await? 
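The rewritten loops above delegate to small Reader helpers (open, find, collect, maybe_read, maybe_push) whose implementations live in src/dav/xml.rs, listed in this patch's diffstat. A rough sketch of what maybe_read amounts to, assuming the QRead signature used here and that an absent element is reported as ParsingError::Recoverable:

// Hypothetical free-function version; the real helper is a Reader method.
async fn maybe_read<T: QRead<T>>(
    xml: &mut Reader<impl IRead>,
    target: &mut Option<T>,
    dirty: &mut bool,
) -> Result<(), ParsingError> {
    match T::qread(xml).await {
        Ok(v) => { *target = Some(v); *dirty = true; Ok(()) }  // consumed one child element
        Err(ParsingError::Recoverable) => Ok(()),              // not a T: leave cursor and target untouched
        Err(e) => Err(e),                                      // real parse error: abort
    }
}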
}, @@ -213,14 +170,14 @@ impl> QRead> for Response { xml.tag_stop(DAV_URN, "response").await?; match (status, &propstat[..], &href[..]) { - (Some(status), &[], &[_, ..]) => Ok(Some(Response { + (Some(status), &[], &[_, ..]) => Ok(Response { status_or_propstat: StatusOrPropstat::Status(href, status), error, responsedescription, location, - })), - (None, &[_, ..], &[_, ..]) => Ok(Some(Response { + }), + (None, &[_, ..], &[_, ..]) => Ok(Response { status_or_propstat: StatusOrPropstat::PropStat(href.into_iter().next().unwrap(), propstat), error, responsedescription, location, - })), + }), (Some(_), &[_, ..], _) => Err(ParsingError::InvalidValue), _ => Err(ParsingError::MissingChild), } @@ -228,255 +185,173 @@ impl> QRead> for Response { } impl> QRead> for PropStat { - async fn qread(xml: &mut Reader) -> Result, ParsingError> { - if xml.maybe_tag_start(DAV_URN, "propstat").await?.is_none() { - return Ok(None) + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "propstat").await?; + + let (mut m_prop, mut m_status, mut error, mut responsedescription) = (None, None, None, None); + + loop { + let mut dirty = false; + xml.maybe_read::(&mut m_prop, &mut dirty).await?; + xml.maybe_read::(&mut m_status, &mut dirty).await?; + xml.maybe_read::>(&mut error, &mut dirty).await?; + xml.maybe_read::(&mut responsedescription, &mut dirty).await?; + + if !dirty { + match xml.peek() { + Event::End(_) => break, + _ => xml.skip().await?, + }; + } + } + + xml.tag_stop(DAV_URN, "propstat").await?; + match (m_prop, m_status) { + (Some(prop), Some(status)) => Ok(PropStat { prop, status, error, responsedescription }), + _ => Err(ParsingError::MissingChild), } - unimplemented!(); } } impl QRead for Status { - async fn qread(xml: &mut Reader) -> Result, ParsingError> { - if xml.maybe_tag_start(DAV_URN, "status").await?.is_none() { - return Ok(None) - } + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "status").await?; let fullcode = xml.tag_string().await?; let txtcode = fullcode.splitn(3, ' ').nth(1).ok_or(ParsingError::InvalidValue)?; let code = http::status::StatusCode::from_bytes(txtcode.as_bytes()).or(Err(ParsingError::InvalidValue))?; xml.tag_stop(DAV_URN, "status").await?; - Ok(Some(Status(code))) + Ok(Status(code)) } } impl QRead for ResponseDescription { - async fn qread(xml: &mut Reader) -> Result, ParsingError> { - if xml.maybe_tag_start(DAV_URN, "responsedescription").await?.is_none() { - return Ok(None) - } + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "responsedescription").await?; let cnt = xml.tag_string().await?; xml.tag_stop(DAV_URN, "responsedescription").await?; - Ok(Some(ResponseDescription(cnt))) + Ok(ResponseDescription(cnt)) } } impl QRead for Location { - async fn qread(xml: &mut Reader) -> Result, ParsingError> { - if xml.maybe_tag_start(DAV_URN, "location").await?.is_none() { - return Ok(None) - } - let href = loop { - if let Some(v) = Href::qread(xml).await? { - break v - } - - match xml.peek() { - Event::End(_) => return Err(ParsingError::MissingChild), - _ => xml.skip().await?, - }; - }; + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "location").await?; + let href = xml.find::().await?; xml.tag_stop(DAV_URN, "location").await?; - Ok(Some(Location(href))) + Ok(Location(href)) } } impl QRead> for PropertyUpdateItem { - async fn qread(xml: &mut Reader) -> Result, ParsingError> { - if let Some(rm) = Remove::qread(xml).await? 
{ - return Ok(Some(PropertyUpdateItem::Remove(rm))) + async fn qread(xml: &mut Reader) -> Result { + match Remove::qread(xml).await { + Err(ParsingError::Recoverable) => (), + otherwise => return otherwise.map(PropertyUpdateItem::Remove), } - Ok(Set::qread(xml).await?.map(PropertyUpdateItem::Set)) + Set::qread(xml).await.map(PropertyUpdateItem::Set) } } impl QRead> for Remove { - async fn qread(xml: &mut Reader) -> Result, ParsingError> { - match xml.peek() { - Event::Start(b) if xml.is_tag(DAV_URN, "remove") => xml.next().await?, - _ => return Ok(None), - }; - - let propname = loop { - match xml.peek() { - Event::Start(b) | Event::Empty(b) if xml.is_tag(DAV_URN, "prop") => break PropName::qread(xml).await?, - _ => xml.skip().await?, - }; - }; - + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "remove").await?; + let propname = xml.find::>().await?; xml.tag_stop(DAV_URN, "remove").await?; - Ok(propname.map(Remove)) + Ok(Remove(propname)) } } impl QRead> for Set { - async fn qread(xml: &mut Reader) -> Result, ParsingError> { - match xml.peek() { - Event::Start(b) if xml.is_tag(DAV_URN, "set") => xml.next().await?, - _ => return Ok(None), - }; - let propvalue = loop { - match xml.peek() { - Event::Start(b) | Event::Empty(b) if xml.is_tag(DAV_URN, "prop") => break PropValue::qread(xml).await?, - _ => xml.skip().await?, - }; - }; - - + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "set").await?; + let propvalue = xml.find::>().await?; xml.tag_stop(DAV_URN, "set").await?; - Ok(propvalue.map(Set)) + Ok(Set(propvalue)) } } impl QRead> for Violation { - async fn qread(xml: &mut Reader) -> Result, ParsingError> { - loop { - let bs = match xml.peek() { - Event::Start(b) | Event::Empty(b) => b, - _ => { - xml.skip().await?; - continue + async fn qread(xml: &mut Reader) -> Result { + let bs = match xml.peek() { + Event::Start(b) | Event::Empty(b) => b, + _ => return Err(ParsingError::Recoverable), + }; + + // Option 1: a pure DAV property + let (ns, loc) = xml.rdr.resolve_element(bs.name()); + if matches!(ns, Bound(Namespace(ns)) if ns == DAV_URN) { + match loc.into_inner() { + b"lock-token-matches-request-uri" => { + xml.next().await?; + return Ok(Violation::LockTokenMatchesRequestUri) + }, + b"lock-token-submitted" => { + xml.next().await?; + let links = xml.collect::().await?; + xml.tag_stop(DAV_URN, "lock-token-submitted").await?; + return Ok(Violation::LockTokenSubmitted(links)) + }, + b"no-conflicting-lock" => { + // start tag + xml.next().await?; + let links = xml.collect::().await?; + xml.tag_stop(DAV_URN, "no-conflicting-lock").await?; + return Ok(Violation::NoConflictingLock(links)) + }, + b"no-external-entities" => { + xml.next().await?; + return Ok(Violation::NoExternalEntities) + }, + b"preserved-live-properties" => { + xml.next().await?; + return Ok(Violation::PreservedLiveProperties) }, + b"propfind-finite-depth" => { + xml.next().await?; + return Ok(Violation::PropfindFiniteDepth) + }, + b"cannot-modify-protected-property" => { + xml.next().await?; + return Ok(Violation::CannotModifyProtectedProperty) + }, + _ => (), }; - - let mut maybe_res = None; - - // Option 1: a pure DAV property - let (ns, loc) = xml.rdr.resolve_element(bs.name()); - if matches!(ns, Bound(Namespace(ns)) if ns == DAV_URN) { - maybe_res = match loc.into_inner() { - b"lock-token-matches-request-uri" => { - xml.next().await?; - Some(Violation::LockTokenMatchesRequestUri) - }, - b"lock-token-submitted" => { - // start tag - xml.next().await?; - - let mut links = Vec::new(); - loop 
{ - // If we find a Href - if let Some(href) = Href::qread(xml).await? { - links.push(href); - continue - } - - // Otherwise - match xml.peek() { - Event::End(_) => break, - _ => { xml.skip().await?; }, - } - } - xml.tag_stop(DAV_URN, "lock-token-submitted").await?; - Some(Violation::LockTokenSubmitted(links)) - }, - b"no-conflicting-lock" => { - // start tag - xml.next().await?; - - let mut links = Vec::new(); - loop { - // If we find a Href - if let Some(href) = Href::qread(xml).await? { - links.push(href); - continue - } - - // Otherwise - match xml.peek() { - Event::End(_) => break, - _ => { xml.skip().await?; }, - } - } - xml.tag_stop(DAV_URN, "no-conflicting-lock").await?; - Some(Violation::NoConflictingLock(links)) - }, - b"no-external-entities" => { - xml.next().await?; - Some(Violation::NoExternalEntities) - }, - b"preserved-live-properties" => { - xml.next().await?; - Some(Violation::PreservedLiveProperties) - }, - b"propfind-finite-depth" => { - xml.next().await?; - Some(Violation::PropfindFiniteDepth) - }, - b"cannot-modify-protected-property" => { - xml.next().await?; - Some(Violation::CannotModifyProtectedProperty) - }, - _ => None, - }; - } - - // Option 2: an extension property, delegating - if maybe_res.is_none() { - maybe_res = E::Error::qread(xml).await?.map(Violation::Extension); - } - - return Ok(maybe_res) } + + // Option 2: an extension property, delegating + E::Error::qread(xml).await.map(Violation::Extension) } } impl QRead> for Include { - async fn qread(xml: &mut Reader) -> Result, ParsingError> { - xml.tag_start(DAV_URN, "include").await?; - let mut acc = Vec::new(); - loop { - // Found a property - if let Some(prop) = PropertyRequest::qread(xml).await? { - acc.push(prop); - continue; - } - - // Otherwise skip or escape - match xml.peek() { - Event::End(_) => break, - _ => { xml.skip().await?; }, - } - } + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "include").await?; + let acc = xml.collect::>().await?; xml.tag_stop(DAV_URN, "include").await?; - Ok(Some(Include(acc))) + Ok(Include(acc)) } } impl QRead> for PropName { - async fn qread(xml: &mut Reader) -> Result, ParsingError> { - xml.tag_start(DAV_URN, "prop").await?; - let mut acc = Vec::new(); - loop { - // Found a property - if let Some(prop) = PropertyRequest::qread(xml).await? 
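Signalling "not found" as ParsingError::Recoverable is also what lets the hand-rolled Vec loops above collapse into xml.collect(); a sketch of the idea under the same assumptions as the maybe_read sketch earlier:

// Hypothetical sketch: parse T repeatedly until the enclosing element closes,
// skipping children that no parser recognises.
async fn collect<T: QRead<T>>(xml: &mut Reader<impl IRead>) -> Result<Vec<T>, ParsingError> {
    let mut acc = Vec::new();
    loop {
        match T::qread(xml).await {
            Ok(v) => acc.push(v),
            Err(ParsingError::Recoverable) => match xml.peek() {
                Event::End(_) => return Ok(acc),   // parent is closing: done
                _ => { xml.skip().await?; },       // unknown sibling: ignore it
            },
            Err(e) => return Err(e),
        }
    }
}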
{ - acc.push(prop); - continue; - } - - // Otherwise skip or escape - match xml.peek() { - Event::End(_) => break, - _ => { xml.skip().await?; }, - } - } + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "prop").await?; + let acc = xml.collect::>().await?; xml.tag_stop(DAV_URN, "prop").await?; - Ok(Some(PropName(acc))) + Ok(PropName(acc)) } } impl QRead> for PropertyRequest { - async fn qread(xml: &mut Reader) -> Result, ParsingError> { + async fn qread(xml: &mut Reader) -> Result { let bs = match xml.peek() { Event::Start(b) | Event::Empty(b) => b, - _ => return Ok(None), + _ => return Err(ParsingError::Recoverable), }; - let mut maybe_res = None; - // Option 1: a pure core DAV property let (ns, loc) = xml.rdr.resolve_element(bs.name()); if matches!(ns, Bound(Namespace(ns)) if ns == DAV_URN) { - maybe_res = match loc.into_inner() { + let maybe_res = match loc.into_inner() { b"creationdate" => Some(PropertyRequest::CreationDate), b"displayname" => Some(PropertyRequest::DisplayName), b"getcontentlanguage" => Some(PropertyRequest::GetContentLanguage), @@ -490,163 +365,105 @@ impl QRead> for PropertyRequest { _ => None, }; // Close the current tag if we read something - if maybe_res.is_some() { + if let Some(res) = maybe_res { xml.skip().await?; + return Ok(res) } } // Option 2: an extension property, delegating - if maybe_res.is_none() { - maybe_res = E::PropertyRequest::qread(xml).await?.map(PropertyRequest::Extension); - } - - Ok(maybe_res) + E::PropertyRequest::qread(xml).await.map(PropertyRequest::Extension) } } impl QRead> for Property { - async fn qread(xml: &mut Reader) -> Result, ParsingError> { + async fn qread(xml: &mut Reader) -> Result { use chrono::{DateTime, FixedOffset, TimeZone}; let bs = match xml.peek() { Event::Start(b) | Event::Empty(b) => b, - _ => return Ok(None), + _ => return Err(ParsingError::Recoverable), }; - let mut maybe_res = None; - // Option 1: a pure core DAV property let (ns, loc) = xml.rdr.resolve_element(bs.name()); if matches!(ns, Bound(Namespace(ns)) if ns == DAV_URN) { - maybe_res = match loc.into_inner() { + match loc.into_inner() { b"creationdate" => { xml.next().await?; let datestr = xml.tag_string().await?; - Some(Property::CreationDate(DateTime::parse_from_rfc3339(datestr.as_str())?)) + return Ok(Property::CreationDate(DateTime::parse_from_rfc3339(datestr.as_str())?)) }, b"displayname" => { xml.next().await?; - Some(Property::DisplayName(xml.tag_string().await?)) + return Ok(Property::DisplayName(xml.tag_string().await?)) }, b"getcontentlanguage" => { xml.next().await?; - Some(Property::GetContentLanguage(xml.tag_string().await?)) + return Ok(Property::GetContentLanguage(xml.tag_string().await?)) }, b"getcontentlength" => { xml.next().await?; let cl = xml.tag_string().await?.parse::()?; - Some(Property::GetContentLength(cl)) + return Ok(Property::GetContentLength(cl)) }, b"getcontenttype" => { xml.next().await?; - Some(Property::GetContentType(xml.tag_string().await?)) + return Ok(Property::GetContentType(xml.tag_string().await?)) }, b"getetag" => { xml.next().await?; - Some(Property::GetEtag(xml.tag_string().await?)) + return Ok(Property::GetEtag(xml.tag_string().await?)) }, b"getlastmodified" => { - xml.next().await?; xml.next().await?; let datestr = xml.tag_string().await?; - Some(Property::CreationDate(DateTime::parse_from_rfc2822(datestr.as_str())?)) + return Ok(Property::CreationDate(DateTime::parse_from_rfc2822(datestr.as_str())?)) }, b"lockdiscovery" => { - // start tag xml.next().await?; - - let mut acc = Vec::new(); - 
loop { - // If we find a lock - if let Some(lock) = ActiveLock::qread(xml).await? { - acc.push(lock); - continue - } - - // Otherwise - match xml.peek() { - Event::End(_) => break, - _ => { xml.skip().await?; }, - } - } + let acc = xml.collect::().await?; xml.tag_stop(DAV_URN, "lockdiscovery").await?; - Some(Property::LockDiscovery(acc)) + return Ok(Property::LockDiscovery(acc)) }, b"resourcetype" => { xml.next().await?; - - let mut acc = Vec::new(); - loop { - // If we find a resource type... - if let Some(restype) = ResourceType::qread(xml).await? { - acc.push(restype); - continue - } - - // Otherwise - match xml.peek() { - Event::End(_) => break, - _ => { xml.skip().await?; }, - } - } + let acc = xml.collect::>().await?; xml.tag_stop(DAV_URN, "resourcetype").await?; - Some(Property::ResourceType(acc)) + return Ok(Property::ResourceType(acc)) }, b"supportedlock" => { xml.next().await?; - - let mut acc = Vec::new(); - loop { - // If we find a resource type... - if let Some(restype) = LockEntry::qread(xml).await? { - acc.push(restype); - continue - } - - // Otherwise - match xml.peek() { - Event::End(_) => break, - _ => { xml.skip().await?; }, - } - } + let acc = xml.collect::().await?; xml.tag_stop(DAV_URN, "supportedlock").await?; - Some(Property::SupportedLock(acc)) + return Ok(Property::SupportedLock(acc)) }, - _ => None, + _ => (), }; } // Option 2: an extension property, delegating - if maybe_res.is_none() { - maybe_res = E::Property::qread(xml).await?.map(Property::Extension); - } - - Ok(maybe_res) + E::Property::qread(xml).await.map(Property::Extension) } } impl QRead for ActiveLock { - async fn qread(xml: &mut Reader) -> Result, ParsingError> { - xml.tag_start(DAV_URN, "activelock").await?; + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "activelock").await?; let (mut m_scope, mut m_type, mut m_depth, mut owner, mut timeout, mut locktoken, mut m_root) = (None, None, None, None, None, None, None); loop { - if let Some(v) = LockScope::qread(xml).await? { - m_scope = Some(v); - } else if let Some(v) = LockType::qread(xml).await? { - m_type = Some(v); - } else if let Some(v) = Depth::qread(xml).await? { - m_depth = Some(v); - } else if let Some(v) = Owner::qread(xml).await? { - owner = Some(v); - } else if let Some(v) = Timeout::qread(xml).await? { - timeout = Some(v); - } else if let Some(v) = LockToken::qread(xml).await? { - locktoken = Some(v); - } else if let Some(v) = LockRoot::qread(xml).await? 
{ - m_root = Some(v); - } else { + let mut dirty = false; + xml.maybe_read::(&mut m_scope, &mut dirty).await?; + xml.maybe_read::(&mut m_type, &mut dirty).await?; + xml.maybe_read::(&mut m_depth, &mut dirty).await?; + xml.maybe_read::(&mut owner, &mut dirty).await?; + xml.maybe_read::(&mut timeout, &mut dirty).await?; + xml.maybe_read::(&mut locktoken, &mut dirty).await?; + xml.maybe_read::(&mut m_root, &mut dirty).await?; + + if !dirty { match xml.peek() { Event::End(_) => break, _ => { xml.skip().await?; }, @@ -657,31 +474,29 @@ impl QRead for ActiveLock { xml.tag_stop(DAV_URN, "activelock").await?; match (m_scope, m_type, m_depth, m_root) { (Some(lockscope), Some(locktype), Some(depth), Some(lockroot)) => - Ok(Some(ActiveLock { lockscope, locktype, depth, owner, timeout, locktoken, lockroot })), + Ok(ActiveLock { lockscope, locktype, depth, owner, timeout, locktoken, lockroot }), _ => Err(ParsingError::MissingChild), } } } impl QRead for Depth { - async fn qread(xml: &mut Reader) -> Result, ParsingError> { - xml.tag_start(DAV_URN, "depth").await?; + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "depth").await?; let depth_str = xml.tag_string().await?; xml.tag_stop(DAV_URN, "depth").await?; match depth_str.as_str() { - "0" => Ok(Some(Depth::Zero)), - "1" => Ok(Some(Depth::One)), - "infinity" => Ok(Some(Depth::Infinity)), + "0" => Ok(Depth::Zero), + "1" => Ok(Depth::One), + "infinity" => Ok(Depth::Infinity), _ => Err(ParsingError::WrongToken), } } } impl QRead for Owner { - async fn qread(xml: &mut Reader) -> Result, ParsingError> { - if xml.maybe_tag_start(DAV_URN, "owner").await?.is_none() { - return Ok(None) - } + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "owner").await?; let mut owner = Owner::Unknown; loop { @@ -693,28 +508,25 @@ impl QRead for Owner { } } Event::Start(_) | Event::Empty(_) => { - if let Some(href) = Href::qread(xml).await? 
{ - owner = Owner::Href(href) + match Href::qread(xml).await { + Ok(href) => { owner = Owner::Href(href); }, + Err(ParsingError::Recoverable) => { xml.skip().await?; }, + Err(e) => return Err(e), } - xml.skip().await?; } Event::End(_) => break, _ => { xml.skip().await?; }, } }; xml.tag_stop(DAV_URN, "owner").await?; - Ok(Some(owner)) + Ok(owner) } } impl QRead for Timeout { - async fn qread(xml: &mut Reader) -> Result, ParsingError> { + async fn qread(xml: &mut Reader) -> Result { const SEC_PFX: &str = "SEC_PFX"; - - match xml.peek() { - Event::Start(b) if xml.is_tag(DAV_URN, "timeout") => xml.next().await?, - _ => return Ok(None), - }; + xml.open(DAV_URN, "timeout").await?; let timeout = match xml.tag_string().await?.as_str() { "Infinite" => Timeout::Infinite, @@ -725,79 +537,70 @@ impl QRead for Timeout { }; xml.tag_stop(DAV_URN, "timeout").await?; - Ok(Some(timeout)) + Ok(timeout) } } impl QRead for LockToken { - async fn qread(xml: &mut Reader) -> Result, ParsingError> { - match xml.peek() { - Event::Start(b) if xml.is_tag(DAV_URN, "locktoken") => xml.next().await?, - _ => return Ok(None), - }; - let href = Href::qread(xml).await?.ok_or(ParsingError::MissingChild)?; + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "locktoken").await?; + let href = Href::qread(xml).await?; xml.tag_stop(DAV_URN, "locktoken").await?; - Ok(Some(LockToken(href))) + Ok(LockToken(href)) } } impl QRead for LockRoot { - async fn qread(xml: &mut Reader) -> Result, ParsingError> { - xml.tag_start(DAV_URN, "lockroot").await?; - let href = Href::qread(xml).await?.ok_or(ParsingError::MissingChild)?; + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "lockroot").await?; + let href = Href::qread(xml).await?; xml.tag_stop(DAV_URN, "lockroot").await?; - Ok(Some(LockRoot(href))) + Ok(LockRoot(href)) } } impl QRead> for ResourceType { - async fn qread(xml: &mut Reader) -> Result, ParsingError> { + async fn qread(xml: &mut Reader) -> Result { match xml.peek() { Event::Empty(b) if xml.is_tag(DAV_URN, "collection") => { xml.next().await?; - Ok(Some(ResourceType::Collection)) + Ok(ResourceType::Collection) }, - _ => Ok(E::ResourceType::qread(xml).await?.map(ResourceType::Extension)), + _ => E::ResourceType::qread(xml).await.map(ResourceType::Extension), } } } impl QRead for LockEntry { - async fn qread(xml: &mut Reader) -> Result, ParsingError> { - xml.tag_start(DAV_URN, "lockentry").await?; + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "lockentry").await?; let (mut maybe_scope, mut maybe_type) = (None, None); loop { - match xml.peek() { - Event::Start(_) if xml.is_tag(DAV_URN, "lockscope") => { - maybe_scope = LockScope::qread(xml).await?; - }, - Event::Start(_) if xml.is_tag(DAV_URN, "lockentry") => { - maybe_type = LockType::qread(xml).await?; - } - Event::End(_) => break, - _ => { xml.skip().await?; }, + let mut dirty = false; + xml.maybe_read::(&mut maybe_scope, &mut dirty).await?; + xml.maybe_read::(&mut maybe_type, &mut dirty).await?; + if !dirty { + match xml.peek() { + Event::End(_) => break, + _ => xml.skip().await?, + }; } } - let lockentry = match (maybe_scope, maybe_type) { - (Some(lockscope), Some(locktype)) => LockEntry { lockscope, locktype }, - _ => return Err(ParsingError::MissingChild), - }; - xml.tag_stop(DAV_URN, "lockentry").await?; - Ok(Some(lockentry)) + match (maybe_scope, maybe_type) { + (Some(lockscope), Some(locktype)) => Ok(LockEntry { lockscope, locktype }), + _ => Err(ParsingError::MissingChild), + } } } impl QRead for LockScope { - async fn 
qread(xml: &mut Reader) -> Result, ParsingError> { - if xml.maybe_tag_start(DAV_URN, "lockscope").await?.is_none() { - return Ok(None) - } + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "lockscope").await?; let lockscope = loop { - println!("lockscope tag: {:?}", xml.peek()); match xml.peek() { Event::Empty(_) if xml.is_tag(DAV_URN, "exclusive") => { xml.next().await?; @@ -812,15 +615,13 @@ impl QRead for LockScope { }; xml.tag_stop(DAV_URN, "lockscope").await?; - Ok(Some(lockscope)) + Ok(lockscope) } } impl QRead for LockType { - async fn qread(xml: &mut Reader) -> Result, ParsingError> { - if xml.maybe_tag_start(DAV_URN, "locktype").await?.is_none() { - return Ok(None) - } + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "locktype").await?; let locktype = loop { match xml.peek() { @@ -832,20 +633,16 @@ impl QRead for LockType { }; }; xml.tag_stop(DAV_URN, "locktype").await?; - Ok(Some(locktype)) + Ok(locktype) } } impl QRead for Href { - async fn qread(xml: &mut Reader) -> Result, ParsingError> { - match xml.peek() { - Event::Start(b) if xml.is_tag(DAV_URN, "href") => xml.next().await?, - _ => return Ok(None), - }; - + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "href").await?; let mut url = xml.tag_string().await?; xml.tag_stop(DAV_URN, "href").await?; - Ok(Some(Href(url))) + Ok(Href(url)) } } @@ -865,7 +662,7 @@ mod tests { "#; let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); - let got = PropFind::::qread(&mut rdr).await.unwrap().unwrap(); + let got = rdr.find::>().await.unwrap(); assert_eq!(got, PropFind::::PropName); } @@ -889,7 +686,7 @@ mod tests { "#; let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); - let got = PropFind::::qread(&mut rdr).await.unwrap().unwrap(); + let got = rdr.find::>().await.unwrap(); assert_eq!(got, PropFind::Prop(PropName(vec![ PropertyRequest::DisplayName, @@ -912,7 +709,7 @@ mod tests { "#; let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); - let got = Error::::qread(&mut rdr).await.unwrap().unwrap(); + let got = rdr.find::>().await.unwrap(); assert_eq!(got, Error(vec![ Violation::LockTokenSubmitted(vec![ @@ -941,7 +738,7 @@ mod tests { "#; let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); - let got = PropertyUpdate::::qread(&mut rdr).await.unwrap().unwrap(); + let got = rdr.find::>().await.unwrap(); assert_eq!(got, PropertyUpdate(vec![ PropertyUpdateItem::Set(Set(PropValue(vec![]))), @@ -950,7 +747,7 @@ mod tests { } #[tokio::test] - async fn rfc_lockinfo1() { + async fn rfc_lockinfo() { let src = r#" @@ -963,7 +760,8 @@ mod tests { "#; let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); - let got = LockInfo::qread(&mut rdr).await.unwrap().unwrap(); + let got = rdr.find::().await.unwrap(); + assert_eq!(got, LockInfo { lockscope: LockScope::Exclusive, locktype: LockType::Write, @@ -971,4 +769,58 @@ mod tests { }); } + #[tokio::test] + async fn rfc_multistatus_name() { + let src = r#" + + + + http://www.example.com/container/ + + + + + + + + + + HTTP/1.1 200 OK + + + + http://www.example.com/container/front.html + + + + + + + + + + + + + HTTP/1.1 200 OK + + + +"#; + + let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); + let got = rdr.find::>>().await.unwrap(); + + /*assert_eq!(got, Multistatus { + responses: vec![ + Response { + status_or_propstat: + }, + Response {}, + ], + responsedescription: None, + });*/ + + } + } 
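Note on the decoding refactor above: the qread implementations no longer return an Option-wrapped result; instead a parser that does not recognise the current tag returns ParsingError::Recoverable, and the generic reader helpers (maybe_read, find, collect) use that signal to try another parser, skip the node, or give up. The standalone sketch below models that control flow on a plain token list; every name in it (Token, Prop, read_*, collect) is illustrative only and not part of the crate.

// Minimal, self-contained model of the "recoverable error" decoding pattern.
// All names here are illustrative; they do not exist in src/dav.
#[derive(Debug)]
enum Error { Recoverable }

#[derive(Debug, PartialEq)]
enum Token { Display(String), Etag(String), Unknown }

#[derive(Debug, PartialEq)]
enum Prop { DisplayName(String), GetEtag(String) }

// A qread-like parser: Recoverable means "this token is not my tag".
fn read_display(t: &Token) -> Result<Prop, Error> {
    match t {
        Token::Display(s) => Ok(Prop::DisplayName(s.clone())),
        _ => Err(Error::Recoverable),
    }
}

fn read_etag(t: &Token) -> Result<Prop, Error> {
    match t {
        Token::Etag(s) => Ok(Prop::GetEtag(s.clone())),
        _ => Err(Error::Recoverable),
    }
}

// collect(): keep what the parser recognises, skip what it flags as Recoverable.
fn collect<F>(tokens: &[Token], parse: F) -> Vec<Prop>
where F: Fn(&Token) -> Result<Prop, Error> {
    let mut acc = Vec::new();
    for t in tokens {
        match parse(t) {
            Ok(v) => acc.push(v),
            Err(Error::Recoverable) => continue, // unknown child: skip it
        }
    }
    acc
}

fn main() {
    let doc = vec![
        Token::Display("Example collection".into()),
        Token::Unknown,
        Token::Etag("\"zzyzx\"".into()),
    ];
    // Try one parser, fall back to the next on Recoverable,
    // the same way core DAV properties delegate to an extension.
    let props = collect(&doc, |t| read_display(t).or_else(|_| read_etag(t)));
    assert_eq!(props, vec![
        Prop::DisplayName("Example collection".into()),
        Prop::GetEtag("\"zzyzx\"".into()),
    ]);
    println!("{:?}", props);
}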
diff --git a/src/dav/encoder.rs b/src/dav/encoder.rs index 9e60f29..4de5440 100644 --- a/src/dav/encoder.rs +++ b/src/dav/encoder.rs @@ -6,7 +6,7 @@ use quick_xml::writer::ElementWriter; use quick_xml::name::PrefixDeclaration; use tokio::io::AsyncWrite; use super::types::*; -use super::xml::{Writer,QWrite,IWrite}; +use super::xml::{Node, Writer,QWrite,IWrite}; // --- XML ROOTS diff --git a/src/dav/error.rs b/src/dav/error.rs index 88a5e60..78c6d6b 100644 --- a/src/dav/error.rs +++ b/src/dav/error.rs @@ -2,6 +2,7 @@ use quick_xml::events::attributes::AttrError; #[derive(Debug)] pub enum ParsingError { + Recoverable, MissingChild, NamespacePrefixAlreadyUsed, WrongToken, diff --git a/src/dav/realization.rs b/src/dav/realization.rs index 1898173..33a556e 100644 --- a/src/dav/realization.rs +++ b/src/dav/realization.rs @@ -6,8 +6,8 @@ use super::error; #[derive(Debug, PartialEq)] pub struct Disabled(()); impl xml::QRead for Disabled { - async fn qread(xml: &mut xml::Reader) -> Result, error::ParsingError> { - Ok(None) + async fn qread(xml: &mut xml::Reader) -> Result { + Err(error::ParsingError::Recoverable) } } impl xml::QWrite for Disabled { diff --git a/src/dav/types.rs b/src/dav/types.rs index 246a4bd..5ea38d1 100644 --- a/src/dav/types.rs +++ b/src/dav/types.rs @@ -7,12 +7,11 @@ use super::error; /// It's how we implement a DAV extension /// (That's the dark magic part...) -pub trait Node = xml::QRead + xml::QWrite + Debug + PartialEq; -pub trait Extension { - type Error: Node; - type Property: Node; - type PropertyRequest: Node; - type ResourceType: Node; +pub trait Extension: std::fmt::Debug + PartialEq { + type Error: xml::Node; + type Property: xml::Node; + type PropertyRequest: xml::Node; + type ResourceType: xml::Node; } /// 14.1. activelock XML Element @@ -333,7 +332,7 @@ pub enum LockType { /// /// #[derive(Debug, PartialEq)] -pub struct Multistatus> { +pub struct Multistatus> { pub responses: Vec>, pub responsedescription: Option, } @@ -465,7 +464,7 @@ pub enum PropFind { /// /// #[derive(Debug, PartialEq)] -pub struct PropStat> { +pub struct PropStat> { pub prop: N, pub status: Status, pub error: Option>, @@ -514,7 +513,7 @@ pub struct Remove(pub PropName); /// --- rewritten as --- /// #[derive(Debug, PartialEq)] -pub enum StatusOrPropstat> { +pub enum StatusOrPropstat> { // One status, multiple hrefs... Status(Vec, Status), // A single href, multiple properties... 
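Note on the types.rs hunks above: the Extension trait now requires each associated type to be a full Node (decodable, encodable, comparable, debuggable), and Multistatus/PropStat gain an extra node parameter. A rough, stable-Rust approximation of that shape is sketched below; it replaces the crate's unstable trait alias with an ordinary supertrait and uses made-up names (NoExtension, Multi), so it is a model of the design, not the actual API.

use std::fmt::Debug;

// Stand-in for the crate's `trait Node = QRead + QWrite + Debug + PartialEq`
// alias (trait aliases are unstable, so a plain supertrait is used here).
trait Node: Debug + PartialEq + Sized {
    fn decode(input: &str) -> Option<Self>; // None plays the role of Recoverable
    fn encode(&self) -> String;
}

// The extension point: a family of node types plugged into the generic DAV types.
trait Extension: Debug + PartialEq {
    type Error: Node;
    type Property: Node;
}

// A no-op extension whose nodes never decode, in the spirit of realization::Disabled.
#[derive(Debug, PartialEq)]
struct Disabled;
impl Node for Disabled {
    fn decode(_input: &str) -> Option<Self> { None }
    fn encode(&self) -> String { String::new() }
}

#[derive(Debug, PartialEq)]
struct NoExtension;
impl Extension for NoExtension {
    type Error = Disabled;
    type Property = Disabled;
}

// A generic container shaped like Multistatus: parametrized by both the
// extension and the concrete node carried in its responses.
#[derive(Debug, PartialEq)]
struct Multi<E: Extension, N: Node> {
    responses: Vec<N>,
    extension_error: Option<E::Error>,
}

fn main() {
    let m: Multi<NoExtension, Disabled> = Multi { responses: vec![], extension_error: None };
    println!("{:?}", m);
}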
@@ -522,7 +521,7 @@ pub enum StatusOrPropstat> { } #[derive(Debug, PartialEq)] -pub struct Response> { +pub struct Response> { pub status_or_propstat: StatusOrPropstat, pub error: Option>, pub responsedescription: Option, diff --git a/src/dav/xml.rs b/src/dav/xml.rs index ff121f4..d465d60 100644 --- a/src/dav/xml.rs +++ b/src/dav/xml.rs @@ -19,9 +19,14 @@ pub trait QWrite { async fn qwrite(&self, xml: &mut Writer) -> Result<(), quick_xml::Error>; } pub trait QRead { - async fn qread(xml: &mut Reader) -> Result, ParsingError>; + async fn qread(xml: &mut Reader) -> Result; } +// The representation of an XML node in Rust +pub trait Node = QRead + QWrite + std::fmt::Debug + PartialEq; + +// --------------- + /// Transform a Rust object into an XML stream of characters pub struct Writer { pub q: quick_xml::writer::Writer, @@ -106,6 +111,8 @@ impl Reader { } } + /* + * Disabled /// maybe find start tag pub async fn maybe_tag_start(&mut self, ns: &[u8], key: &str) -> Result>, ParsingError> { println!("maybe start tag {}", key); @@ -118,7 +125,6 @@ impl Reader { /// find start tag pub async fn tag_start(&mut self, ns: &[u8], key: &str) -> Result, ParsingError> { - println!("search start tag {}", key); loop { match self.peek() { Event::Start(b) if self.is_tag(ns, key) => break, @@ -127,6 +133,7 @@ impl Reader { } self.next().await } + */ // find stop tag pub async fn tag_stop(&mut self, ns: &[u8], key: &str) -> Result, ParsingError> { @@ -157,5 +164,81 @@ impl Reader { }; } } + + // NEW API + pub async fn maybe_read>(&mut self, t: &mut Option, dirty: &mut bool) -> Result<(), ParsingError> { + match N::qread(self).await { + Ok(v) => { + *t = Some(v); + *dirty = true; + Ok(()) + }, + Err(ParsingError::Recoverable) => Ok(()), + Err(e) => Err(e), + } + } + + pub async fn maybe_push>(&mut self, t: &mut Vec, dirty: &mut bool) -> Result<(), ParsingError> { + match N::qread(self).await { + Ok(v) => { + t.push(v); + *dirty = true; + Ok(()) + }, + Err(ParsingError::Recoverable) => Ok(()), + Err(e) => Err(e), + } + } + + pub async fn find>(&mut self) -> Result { + loop { + // Try parse + match N::qread(self).await { + Err(ParsingError::Recoverable) => (), + otherwise => return otherwise, + } + + // If recovered, skip the element + self.skip().await?; + } + } + + pub async fn maybe_find>(&mut self) -> Result, ParsingError> { + loop { + // Try parse + match N::qread(self).await { + Err(ParsingError::Recoverable) => (), + otherwise => return otherwise.map(Some), + } + + match self.peek() { + Event::End(_) => return Ok(None), + _ => self.skip().await?, + }; + } + } + + pub async fn collect>(&mut self) -> Result, ParsingError> { + let mut acc = Vec::new(); + loop { + match N::qread(self).await { + Err(ParsingError::Recoverable) => match self.peek() { + Event::End(_) => return Ok(acc), + _ => { + self.skip().await?; + }, + }, + Ok(v) => acc.push(v), + Err(e) => return Err(e), + } + } + } + + pub async fn open(&mut self, ns: &[u8], key: &str) -> Result, ParsingError> { + if self.is_tag(ns, key) { + return self.next().await + } + return Err(ParsingError::Recoverable); + } } -- cgit v1.2.3 From db115ca2478af75e9b9bd712cb8592cdd2a62476 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Thu, 7 Mar 2024 09:49:09 +0100 Subject: successful multistatus decoding test --- src/dav/decoder.rs | 48 ++++++++++++++++++++++++++++++++++++++++++------ 1 file changed, 42 insertions(+), 6 deletions(-) diff --git a/src/dav/decoder.rs b/src/dav/decoder.rs index 144cc4e..4acd3e8 100644 --- a/src/dav/decoder.rs +++ b/src/dav/decoder.rs @@ 
-811,16 +811,52 @@ mod tests { let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); let got = rdr.find::>>().await.unwrap(); - /*assert_eq!(got, Multistatus { + assert_eq!(got, Multistatus { responses: vec![ Response { - status_or_propstat: + status_or_propstat: StatusOrPropstat::PropStat( + Href("http://www.example.com/container/".into()), + vec![PropStat { + prop: PropName(vec![ + PropertyRequest::CreationDate, + PropertyRequest::DisplayName, + PropertyRequest::ResourceType, + PropertyRequest::SupportedLock, + ]), + status: Status(http::status::StatusCode::OK), + error: None, + responsedescription: None, + }], + ), + error: None, + responsedescription: None, + location: None, + }, + Response { + status_or_propstat: StatusOrPropstat::PropStat( + Href("http://www.example.com/container/front.html".into()), + vec![PropStat { + prop: PropName(vec![ + PropertyRequest::CreationDate, + PropertyRequest::DisplayName, + PropertyRequest::GetContentLength, + PropertyRequest::GetContentType, + PropertyRequest::GetEtag, + PropertyRequest::GetLastModified, + PropertyRequest::ResourceType, + PropertyRequest::SupportedLock, + ]), + status: Status(http::status::StatusCode::OK), + error: None, + responsedescription: None, + }], + ), + error: None, + responsedescription: None, + location: None, }, - Response {}, ], responsedescription: None, - });*/ - + }); } - } -- cgit v1.2.3 From 2d14587d83f5c90158bc5b5193b07b49ed6946ee Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Thu, 7 Mar 2024 12:25:22 +0100 Subject: Refactor decoder --- src/dav/decoder.rs | 433 ++++++++++++++++++++++++++++------------------------- src/dav/xml.rs | 131 +++++++++------- 2 files changed, 305 insertions(+), 259 deletions(-) diff --git a/src/dav/decoder.rs b/src/dav/decoder.rs index 4acd3e8..fd11cf0 100644 --- a/src/dav/decoder.rs +++ b/src/dav/decoder.rs @@ -25,36 +25,32 @@ use super::xml::{Node, QRead, Reader, IRead, DAV_URN, CAL_URN}; /// Propfind request impl QRead> for PropFind { async fn qread(xml: &mut Reader) -> Result { - // Find propfind xml.open(DAV_URN, "propfind").await?; - - // Find any tag let propfind: PropFind = loop { - match xml.peek() { - Event::Start(_) if xml.is_tag(DAV_URN, "allprop") => { - xml.open(DAV_URN, "allprop").await?; - let includ = xml.maybe_find::>().await?; - let r = PropFind::AllProp(includ); - xml.tag_stop(DAV_URN, "allprop").await?; - break r - }, - Event::Start(_) if xml.is_tag(DAV_URN, "prop") => { - break PropFind::Prop(xml.find::>().await?); - }, - Event::Empty(_) if xml.is_tag(DAV_URN, "allprop") => { - xml.next().await?; - break PropFind::AllProp(None) - }, - Event::Empty(_) if xml.is_tag(DAV_URN, "propname") => { - xml.next().await?; - break PropFind::PropName - }, - _ => { xml.skip().await?; }, + // allprop + if let Some(_) = xml.maybe_open(DAV_URN, "allprop").await? { + let includ = xml.maybe_find::>().await?; + xml.close().await?; + break PropFind::AllProp(includ) } - }; - // Close tag - xml.tag_stop(DAV_URN, "propfind").await?; + // propname + if let Some(_) = xml.maybe_open(DAV_URN, "propname").await? 
{ + xml.close().await?; + break PropFind::PropName + } + + // prop + let (mut maybe_prop, mut dirty) = (None, false); + xml.maybe_read::>(&mut maybe_prop, &mut dirty).await?; + if let Some(prop) = maybe_prop { + break PropFind::Prop(prop) + } + + // not found, skipping + xml.skip().await?; + }; + xml.close().await?; Ok(propfind) } @@ -65,7 +61,7 @@ impl QRead> for PropertyUpdate { async fn qread(xml: &mut Reader) -> Result { xml.open(DAV_URN, "propertyupdate").await?; let collected_items = xml.collect::>().await?; - xml.tag_stop(DAV_URN, "propertyupdate").await?; + xml.close().await?; Ok(PropertyUpdate(collected_items)) } } @@ -89,7 +85,7 @@ impl> QRead> for Multistatus { } } - xml.tag_stop(DAV_URN, "multistatus").await?; + xml.close().await?; Ok(Multistatus { responses, responsedescription }) } } @@ -112,7 +108,7 @@ impl QRead for LockInfo { }; } } - xml.tag_stop(DAV_URN, "lockinfo").await?; + xml.close().await?; match (m_scope, m_type) { (Some(lockscope), Some(locktype)) => Ok(LockInfo { lockscope, locktype, owner }), _ => Err(ParsingError::MissingChild), @@ -125,7 +121,7 @@ impl QRead> for PropValue { async fn qread(xml: &mut Reader) -> Result { xml.open(DAV_URN, "prop").await?; let mut acc = xml.collect::>().await?; - xml.tag_stop(DAV_URN, "prop").await?; + xml.close().await?; Ok(PropValue(acc)) } } @@ -136,7 +132,7 @@ impl QRead> for Error { async fn qread(xml: &mut Reader) -> Result { xml.open(DAV_URN, "error").await?; let violations = xml.collect::>().await?; - xml.tag_stop(DAV_URN, "error").await?; + xml.close().await?; Ok(Error(violations)) } } @@ -168,7 +164,7 @@ impl> QRead> for Response { } } - xml.tag_stop(DAV_URN, "response").await?; + xml.close().await?; match (status, &propstat[..], &href[..]) { (Some(status), &[], &[_, ..]) => Ok(Response { status_or_propstat: StatusOrPropstat::Status(href, status), @@ -205,7 +201,7 @@ impl> QRead> for PropStat { } } - xml.tag_stop(DAV_URN, "propstat").await?; + xml.close().await?; match (m_prop, m_status) { (Some(prop), Some(status)) => Ok(PropStat { prop, status, error, responsedescription }), _ => Err(ParsingError::MissingChild), @@ -219,7 +215,7 @@ impl QRead for Status { let fullcode = xml.tag_string().await?; let txtcode = fullcode.splitn(3, ' ').nth(1).ok_or(ParsingError::InvalidValue)?; let code = http::status::StatusCode::from_bytes(txtcode.as_bytes()).or(Err(ParsingError::InvalidValue))?; - xml.tag_stop(DAV_URN, "status").await?; + xml.close().await?; Ok(Status(code)) } } @@ -228,7 +224,7 @@ impl QRead for ResponseDescription { async fn qread(xml: &mut Reader) -> Result { xml.open(DAV_URN, "responsedescription").await?; let cnt = xml.tag_string().await?; - xml.tag_stop(DAV_URN, "responsedescription").await?; + xml.close().await?; Ok(ResponseDescription(cnt)) } } @@ -237,7 +233,7 @@ impl QRead for Location { async fn qread(xml: &mut Reader) -> Result { xml.open(DAV_URN, "location").await?; let href = xml.find::().await?; - xml.tag_stop(DAV_URN, "location").await?; + xml.close().await?; Ok(Location(href)) } } @@ -256,7 +252,7 @@ impl QRead> for Remove { async fn qread(xml: &mut Reader) -> Result { xml.open(DAV_URN, "remove").await?; let propname = xml.find::>().await?; - xml.tag_stop(DAV_URN, "remove").await?; + xml.close().await?; Ok(Remove(propname)) } } @@ -265,61 +261,39 @@ impl QRead> for Set { async fn qread(xml: &mut Reader) -> Result { xml.open(DAV_URN, "set").await?; let propvalue = xml.find::>().await?; - xml.tag_stop(DAV_URN, "set").await?; + xml.close().await?; Ok(Set(propvalue)) } } impl QRead> for Violation { async fn 
qread(xml: &mut Reader) -> Result { - let bs = match xml.peek() { - Event::Start(b) | Event::Empty(b) => b, - _ => return Err(ParsingError::Recoverable), - }; - - // Option 1: a pure DAV property - let (ns, loc) = xml.rdr.resolve_element(bs.name()); - if matches!(ns, Bound(Namespace(ns)) if ns == DAV_URN) { - match loc.into_inner() { - b"lock-token-matches-request-uri" => { - xml.next().await?; - return Ok(Violation::LockTokenMatchesRequestUri) - }, - b"lock-token-submitted" => { - xml.next().await?; - let links = xml.collect::().await?; - xml.tag_stop(DAV_URN, "lock-token-submitted").await?; - return Ok(Violation::LockTokenSubmitted(links)) - }, - b"no-conflicting-lock" => { - // start tag - xml.next().await?; - let links = xml.collect::().await?; - xml.tag_stop(DAV_URN, "no-conflicting-lock").await?; - return Ok(Violation::NoConflictingLock(links)) - }, - b"no-external-entities" => { - xml.next().await?; - return Ok(Violation::NoExternalEntities) - }, - b"preserved-live-properties" => { - xml.next().await?; - return Ok(Violation::PreservedLiveProperties) - }, - b"propfind-finite-depth" => { - xml.next().await?; - return Ok(Violation::PropfindFiniteDepth) - }, - b"cannot-modify-protected-property" => { - xml.next().await?; - return Ok(Violation::CannotModifyProtectedProperty) - }, - _ => (), - }; + if xml.maybe_open(DAV_URN, "lock-token-matches-request-uri").await?.is_some() { + xml.close().await?; + Ok(Violation::LockTokenMatchesRequestUri) + } else if xml.maybe_open(DAV_URN, "lock-token-submitted").await?.is_some() { + let links = xml.collect::().await?; + xml.close().await?; + Ok(Violation::LockTokenSubmitted(links)) + } else if xml.maybe_open(DAV_URN, "no-conflicting-lock").await?.is_some() { + let links = xml.collect::().await?; + xml.close().await?; + Ok(Violation::NoConflictingLock(links)) + } else if xml.maybe_open(DAV_URN, "no-external-entities").await?.is_some() { + xml.close().await?; + Ok(Violation::NoExternalEntities) + } else if xml.maybe_open(DAV_URN, "preserved-live-properties").await?.is_some() { + xml.close().await?; + Ok(Violation::PreservedLiveProperties) + } else if xml.maybe_open(DAV_URN, "propfind-finite-depth").await?.is_some() { + xml.close().await?; + Ok(Violation::PropfindFiniteDepth) + } else if xml.maybe_open(DAV_URN, "cannot-modify-protected-property").await?.is_some() { + xml.close().await?; + Ok(Violation::CannotModifyProtectedProperty) + } else { + E::Error::qread(xml).await.map(Violation::Extension) } - - // Option 2: an extension property, delegating - E::Error::qread(xml).await.map(Violation::Extension) } } @@ -327,7 +301,7 @@ impl QRead> for Include { async fn qread(xml: &mut Reader) -> Result { xml.open(DAV_URN, "include").await?; let acc = xml.collect::>().await?; - xml.tag_stop(DAV_URN, "include").await?; + xml.close().await?; Ok(Include(acc)) } } @@ -336,43 +310,44 @@ impl QRead> for PropName { async fn qread(xml: &mut Reader) -> Result { xml.open(DAV_URN, "prop").await?; let acc = xml.collect::>().await?; - xml.tag_stop(DAV_URN, "prop").await?; + xml.close().await?; Ok(PropName(acc)) } } impl QRead> for PropertyRequest { async fn qread(xml: &mut Reader) -> Result { - let bs = match xml.peek() { - Event::Start(b) | Event::Empty(b) => b, - _ => return Err(ParsingError::Recoverable), + let maybe = if xml.maybe_open(DAV_URN, "creationdate").await?.is_some() { + Some(PropertyRequest::CreationDate) + } else if xml.maybe_open(DAV_URN, "displayname").await?.is_some() { + Some(PropertyRequest::DisplayName) + } else if xml.maybe_open(DAV_URN, 
"getcontentlanguage").await?.is_some() { + Some(PropertyRequest::GetContentLanguage) + } else if xml.maybe_open(DAV_URN, "getcontentlength").await?.is_some() { + Some(PropertyRequest::GetContentLength) + } else if xml.maybe_open(DAV_URN, "getcontenttype").await?.is_some() { + Some(PropertyRequest::GetContentType) + } else if xml.maybe_open(DAV_URN, "getetag").await?.is_some() { + Some(PropertyRequest::GetEtag) + } else if xml.maybe_open(DAV_URN, "getlastmodified").await?.is_some() { + Some(PropertyRequest::GetLastModified) + } else if xml.maybe_open(DAV_URN, "lockdiscovery").await?.is_some() { + Some(PropertyRequest::LockDiscovery) + } else if xml.maybe_open(DAV_URN, "resourcetype").await?.is_some() { + Some(PropertyRequest::ResourceType) + } else if xml.maybe_open(DAV_URN, "supportedlock").await?.is_some() { + Some(PropertyRequest::SupportedLock) + } else { + None }; - // Option 1: a pure core DAV property - let (ns, loc) = xml.rdr.resolve_element(bs.name()); - if matches!(ns, Bound(Namespace(ns)) if ns == DAV_URN) { - let maybe_res = match loc.into_inner() { - b"creationdate" => Some(PropertyRequest::CreationDate), - b"displayname" => Some(PropertyRequest::DisplayName), - b"getcontentlanguage" => Some(PropertyRequest::GetContentLanguage), - b"getcontentlength" => Some(PropertyRequest::GetContentLength), - b"getcontenttype" => Some(PropertyRequest::GetContentType), - b"getetag" => Some(PropertyRequest::GetEtag), - b"getlastmodified" => Some(PropertyRequest::GetLastModified), - b"lockdiscovery" => Some(PropertyRequest::LockDiscovery), - b"resourcetype" => Some(PropertyRequest::ResourceType), - b"supportedlock" => Some(PropertyRequest::SupportedLock), - _ => None, - }; - // Close the current tag if we read something - if let Some(res) = maybe_res { - xml.skip().await?; - return Ok(res) - } + match maybe { + Some(pr) => { + xml.close().await?; + Ok(pr) + }, + None => E::PropertyRequest::qread(xml).await.map(PropertyRequest::Extension), } - - // Option 2: an extension property, delegating - E::PropertyRequest::qread(xml).await.map(PropertyRequest::Extension) } } @@ -380,66 +355,47 @@ impl QRead> for Property { async fn qread(xml: &mut Reader) -> Result { use chrono::{DateTime, FixedOffset, TimeZone}; - let bs = match xml.peek() { - Event::Start(b) | Event::Empty(b) => b, - _ => return Err(ParsingError::Recoverable), - }; - - // Option 1: a pure core DAV property - let (ns, loc) = xml.rdr.resolve_element(bs.name()); - if matches!(ns, Bound(Namespace(ns)) if ns == DAV_URN) { - match loc.into_inner() { - b"creationdate" => { - xml.next().await?; - let datestr = xml.tag_string().await?; - return Ok(Property::CreationDate(DateTime::parse_from_rfc3339(datestr.as_str())?)) - }, - b"displayname" => { - xml.next().await?; - return Ok(Property::DisplayName(xml.tag_string().await?)) - }, - b"getcontentlanguage" => { - xml.next().await?; - return Ok(Property::GetContentLanguage(xml.tag_string().await?)) - }, - b"getcontentlength" => { - xml.next().await?; - let cl = xml.tag_string().await?.parse::()?; - return Ok(Property::GetContentLength(cl)) - }, - b"getcontenttype" => { - xml.next().await?; - return Ok(Property::GetContentType(xml.tag_string().await?)) - }, - b"getetag" => { - xml.next().await?; - return Ok(Property::GetEtag(xml.tag_string().await?)) - }, - b"getlastmodified" => { - xml.next().await?; - let datestr = xml.tag_string().await?; - return Ok(Property::CreationDate(DateTime::parse_from_rfc2822(datestr.as_str())?)) - }, - b"lockdiscovery" => { - xml.next().await?; - let acc = 
xml.collect::().await?; - xml.tag_stop(DAV_URN, "lockdiscovery").await?; - return Ok(Property::LockDiscovery(acc)) - }, - b"resourcetype" => { - xml.next().await?; - let acc = xml.collect::>().await?; - xml.tag_stop(DAV_URN, "resourcetype").await?; - return Ok(Property::ResourceType(acc)) - }, - b"supportedlock" => { - xml.next().await?; - let acc = xml.collect::().await?; - xml.tag_stop(DAV_URN, "supportedlock").await?; - return Ok(Property::SupportedLock(acc)) - }, - _ => (), - }; + // Core WebDAV properties + if xml.maybe_open(DAV_URN, "creationdate").await?.is_some() { + let datestr = xml.tag_string().await?; + xml.close().await?; + return Ok(Property::CreationDate(DateTime::parse_from_rfc3339(datestr.as_str())?)) + } else if xml.maybe_open(DAV_URN, "displayname").await?.is_some() { + let name = xml.tag_string().await?; + xml.close().await?; + return Ok(Property::DisplayName(name)) + } else if xml.maybe_open(DAV_URN, "getcontentlanguage").await?.is_some() { + let lang = xml.tag_string().await?; + xml.close().await?; + return Ok(Property::GetContentLanguage(lang)) + } else if xml.maybe_open(DAV_URN, "getcontentlength").await?.is_some() { + let cl = xml.tag_string().await?.parse::()?; + xml.close().await?; + return Ok(Property::GetContentLength(cl)) + } else if xml.maybe_open(DAV_URN, "getcontenttype").await?.is_some() { + let ct = xml.tag_string().await?; + xml.close().await?; + return Ok(Property::GetContentType(ct)) + } else if xml.maybe_open(DAV_URN, "getetag").await?.is_some() { + let etag = xml.tag_string().await?; + xml.close().await?; + return Ok(Property::GetEtag(etag)) + } else if xml.maybe_open(DAV_URN, "getlastmodified").await?.is_some() { + let datestr = xml.tag_string().await?; + xml.close().await?; + return Ok(Property::CreationDate(DateTime::parse_from_rfc2822(datestr.as_str())?)) + } else if xml.maybe_open(DAV_URN, "lockdiscovery").await?.is_some() { + let acc = xml.collect::().await?; + xml.close().await?; + return Ok(Property::LockDiscovery(acc)) + } else if xml.maybe_open(DAV_URN, "resourcetype").await?.is_some() { + let acc = xml.collect::>().await?; + xml.close().await?; + return Ok(Property::ResourceType(acc)) + } else if xml.maybe_open(DAV_URN, "supportedlock").await?.is_some() { + let acc = xml.collect::().await?; + xml.close().await?; + return Ok(Property::SupportedLock(acc)) } // Option 2: an extension property, delegating @@ -471,7 +427,7 @@ impl QRead for ActiveLock { } } - xml.tag_stop(DAV_URN, "activelock").await?; + xml.close().await?; match (m_scope, m_type, m_depth, m_root) { (Some(lockscope), Some(locktype), Some(depth), Some(lockroot)) => Ok(ActiveLock { lockscope, locktype, depth, owner, timeout, locktoken, lockroot }), @@ -484,7 +440,7 @@ impl QRead for Depth { async fn qread(xml: &mut Reader) -> Result { xml.open(DAV_URN, "depth").await?; let depth_str = xml.tag_string().await?; - xml.tag_stop(DAV_URN, "depth").await?; + xml.close().await?; match depth_str.as_str() { "0" => Ok(Depth::Zero), "1" => Ok(Depth::One), @@ -518,7 +474,7 @@ impl QRead for Owner { _ => { xml.skip().await?; }, } }; - xml.tag_stop(DAV_URN, "owner").await?; + xml.close().await?; Ok(owner) } } @@ -536,7 +492,7 @@ impl QRead for Timeout { }, }; - xml.tag_stop(DAV_URN, "timeout").await?; + xml.close().await?; Ok(timeout) } } @@ -545,7 +501,7 @@ impl QRead for LockToken { async fn qread(xml: &mut Reader) -> Result { xml.open(DAV_URN, "locktoken").await?; let href = Href::qread(xml).await?; - xml.tag_stop(DAV_URN, "locktoken").await?; + xml.close().await?; Ok(LockToken(href)) } } @@ 
-554,20 +510,19 @@ impl QRead for LockRoot { async fn qread(xml: &mut Reader) -> Result { xml.open(DAV_URN, "lockroot").await?; let href = Href::qread(xml).await?; - xml.tag_stop(DAV_URN, "lockroot").await?; + xml.close().await?; Ok(LockRoot(href)) } } impl QRead> for ResourceType { async fn qread(xml: &mut Reader) -> Result { - match xml.peek() { - Event::Empty(b) if xml.is_tag(DAV_URN, "collection") => { - xml.next().await?; - Ok(ResourceType::Collection) - }, - _ => E::ResourceType::qread(xml).await.map(ResourceType::Extension), + if xml.maybe_open(DAV_URN, "collection").await?.is_some() { + xml.close().await?; + return Ok(ResourceType::Collection) } + + E::ResourceType::qread(xml).await.map(ResourceType::Extension) } } @@ -588,7 +543,7 @@ impl QRead for LockEntry { } } - xml.tag_stop(DAV_URN, "lockentry").await?; + xml.close().await?; match (maybe_scope, maybe_type) { (Some(lockscope), Some(locktype)) => Ok(LockEntry { lockscope, locktype }), _ => Err(ParsingError::MissingChild), @@ -601,20 +556,18 @@ impl QRead for LockScope { xml.open(DAV_URN, "lockscope").await?; let lockscope = loop { - match xml.peek() { - Event::Empty(_) if xml.is_tag(DAV_URN, "exclusive") => { - xml.next().await?; - break LockScope::Exclusive - }, - Event::Empty(_) if xml.is_tag(DAV_URN, "shared") => { - xml.next().await?; - break LockScope::Shared - } - _ => xml.skip().await?, - }; + if xml.maybe_open(DAV_URN, "exclusive").await?.is_some() { + xml.close().await?; + break LockScope::Exclusive + } else if xml.maybe_open(DAV_URN, "shared").await?.is_some() { + xml.close().await?; + break LockScope::Shared + } + + xml.skip().await?; }; - xml.tag_stop(DAV_URN, "lockscope").await?; + xml.close().await?; Ok(lockscope) } } @@ -624,15 +577,15 @@ impl QRead for LockType { xml.open(DAV_URN, "locktype").await?; let locktype = loop { - match xml.peek() { - Event::Empty(b) if xml.is_tag(DAV_URN, "write") => { - xml.next().await?; - break LockType::Write - } - _ => xml.skip().await?, - }; + if xml.maybe_open(DAV_URN, "write").await?.is_some() { + xml.close().await?; + break LockType::Write + } + + xml.skip().await?; }; - xml.tag_stop(DAV_URN, "locktype").await?; + + xml.close().await?; Ok(locktype) } } @@ -641,7 +594,7 @@ impl QRead for Href { async fn qread(xml: &mut Reader) -> Result { xml.open(DAV_URN, "href").await?; let mut url = xml.tag_string().await?; - xml.tag_stop(DAV_URN, "href").await?; + xml.close().await?; Ok(Href(url)) } } @@ -859,4 +812,68 @@ mod tests { responsedescription: None, }); } + + + #[tokio::test] + async fn rfc_multistatus_value() { + let src = r#" + + + + /container/ + + + Box type A + Hadrian + 1997-12-01T17:42:21-08:00 + Example collection + + + + + + + + + + + + + HTTP/1.1 200 OK + + + + /container/front.html + + + Box type B + + 1997-12-01T18:27:21-08:00 + Example HTML resource + 4525 + text/html + "zzyzx" + Mon, 12 Jan 1998 09:25:56 GMT + + + + + + + + + + + + + HTTP/1.1 200 OK + + + "#; + + let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); + let got = rdr.find::>>().await.unwrap(); + } + } diff --git a/src/dav/xml.rs b/src/dav/xml.rs index d465d60..d34322a 100644 --- a/src/dav/xml.rs +++ b/src/dav/xml.rs @@ -53,25 +53,33 @@ impl Writer { /// Transform an XML stream of characters into a Rust object pub struct Reader { pub rdr: NsReader, - evt: Event<'static>, + cur: Event<'static>, + parents: Vec>, buf: Vec, } impl Reader { pub async fn new(mut rdr: NsReader) -> Result { let mut buf: Vec = vec![]; - let evt = rdr.read_event_into_async(&mut 
buf).await?.into_owned(); + let cur = rdr.read_event_into_async(&mut buf).await?.into_owned(); + let parents = vec![]; buf.clear(); - Ok(Self { evt, rdr, buf }) + Ok(Self { cur, parents, rdr, buf }) } - pub fn peek(&self) -> &Event<'static> { - &self.evt + /// read one more tag + /// do not expose it publicly + async fn next(&mut self) -> Result, ParsingError> { + let evt = self.rdr.read_event_into_async(&mut self.buf).await?.into_owned(); + self.buf.clear(); + let old_evt = std::mem::replace(&mut self.cur, evt); + Ok(old_evt) } - /// skip tag. Can't skip end, can't skip eof. + /// skip a node at current level + /// I would like to make this one private but not ready pub async fn skip(&mut self) -> Result, ParsingError> { - println!("skip on {:?}", &self.evt); - match &self.evt { + println!("skipping inside node {:?}", self.parents.last()); + match &self.cur { Event::Start(b) => { let _span = self.rdr.read_to_end_into_async(b.to_end().name(), &mut self.buf).await?; self.next().await @@ -82,17 +90,8 @@ impl Reader { } } - /// read one more tag - pub async fn next(&mut self) -> Result, ParsingError> { - let evt = self.rdr.read_event_into_async(&mut self.buf).await?.into_owned(); - self.buf.clear(); - let old_evt = std::mem::replace(&mut self.evt, evt); - Ok(old_evt) - } - - /// check if this is the desired tag - pub fn is_tag(&self, ns: &[u8], key: &str) -> bool { + fn is_tag(&self, ns: &[u8], key: &str) -> bool { let qname = match self.peek() { Event::Start(bs) | Event::Empty(bs) => bs.name(), Event::End(be) => be.name(), @@ -111,43 +110,25 @@ impl Reader { } } - /* - * Disabled - /// maybe find start tag - pub async fn maybe_tag_start(&mut self, ns: &[u8], key: &str) -> Result>, ParsingError> { - println!("maybe start tag {}", key); - let peek = self.peek(); - match peek { - Event::Start(_) | Event::Empty(_) if self.is_tag(ns, key) => Ok(Some(self.next().await?)), - _ => Ok(None), - } + fn parent_has_child(&self) -> bool { + matches!(self.parents.last(), Some(Event::Start(_)) | None) } - /// find start tag - pub async fn tag_start(&mut self, ns: &[u8], key: &str) -> Result, ParsingError> { - loop { - match self.peek() { - Event::Start(b) if self.is_tag(ns, key) => break, - _ => { self.skip().await?; }, - } + fn ensure_parent_has_child(&self) -> Result<(), ParsingError> { + match self.parent_has_child() { + true => Ok(()), + false => Err(ParsingError::Recoverable), } - self.next().await } - */ - // find stop tag - pub async fn tag_stop(&mut self, ns: &[u8], key: &str) -> Result, ParsingError> { - println!("search stop tag {}", key); - loop { - match self.peek() { - Event::End(b) if self.is_tag(ns, key) => break, - _ => { self.skip().await?; }, - } - } - self.next().await + pub fn peek(&self) -> &Event<'static> { + &self.cur } + // NEW API pub async fn tag_string(&mut self) -> Result { + self.ensure_parent_has_child()?; + let mut acc = String::new(); loop { match self.peek() { @@ -165,8 +146,11 @@ impl Reader { } } - // NEW API pub async fn maybe_read>(&mut self, t: &mut Option, dirty: &mut bool) -> Result<(), ParsingError> { + if !self.parent_has_child() { + return Ok(()) + } + match N::qread(self).await { Ok(v) => { *t = Some(v); @@ -179,6 +163,10 @@ impl Reader { } pub async fn maybe_push>(&mut self, t: &mut Vec, dirty: &mut bool) -> Result<(), ParsingError> { + if !self.parent_has_child() { + return Ok(()) + } + match N::qread(self).await { Ok(v) => { t.push(v); @@ -191,6 +179,8 @@ impl Reader { } pub async fn find>(&mut self) -> Result { + self.ensure_parent_has_child()?; + loop { // 
Try parse match N::qread(self).await { @@ -204,6 +194,8 @@ impl Reader { } pub async fn maybe_find>(&mut self) -> Result, ParsingError> { + self.ensure_parent_has_child()?; + loop { // Try parse match N::qread(self).await { @@ -219,7 +211,9 @@ impl Reader { } pub async fn collect>(&mut self) -> Result, ParsingError> { + self.ensure_parent_has_child()?; let mut acc = Vec::new(); + loop { match N::qread(self).await { Err(ParsingError::Recoverable) => match self.peek() { @@ -235,10 +229,45 @@ impl Reader { } pub async fn open(&mut self, ns: &[u8], key: &str) -> Result, ParsingError> { - if self.is_tag(ns, key) { - return self.next().await + let evt = match self.peek() { + Event::Empty(_) if self.is_tag(ns, key) => self.cur.clone(), + Event::Start(_) if self.is_tag(ns, key) => self.next().await?, + _ => return Err(ParsingError::Recoverable), + }; + + println!("open tag {:?}", evt); + self.parents.push(evt.clone()); + Ok(evt) + } + + pub async fn maybe_open(&mut self, ns: &[u8], key: &str) -> Result>, ParsingError> { + match self.open(ns, key).await { + Ok(v) => Ok(Some(v)), + Err(ParsingError::Recoverable) => Ok(None), + Err(e) => Err(e), + } + } + + // find stop tag + pub async fn close(&mut self) -> Result, ParsingError> { + println!("close tag {:?}", self.parents.last()); + + // Handle the empty case + if !self.parent_has_child() { + self.parents.pop(); + return self.next().await + } + + // Handle the start/end case + loop { + match self.peek() { + Event::End(_) => { + self.parents.pop(); + return self.next().await + }, + _ => self.skip().await?, + }; } - return Err(ParsingError::Recoverable); } } -- cgit v1.2.3 From e52ce4a61dc8bffabb8e4c5152c973d739e2b499 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Thu, 7 Mar 2024 14:25:08 +0100 Subject: Testing decoder against RFC --- src/dav/decoder.rs | 71 +++++++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 70 insertions(+), 1 deletion(-) diff --git a/src/dav/decoder.rs b/src/dav/decoder.rs index fd11cf0..aa3c7e5 100644 --- a/src/dav/decoder.rs +++ b/src/dav/decoder.rs @@ -383,7 +383,7 @@ impl QRead> for Property { } else if xml.maybe_open(DAV_URN, "getlastmodified").await?.is_some() { let datestr = xml.tag_string().await?; xml.close().await?; - return Ok(Property::CreationDate(DateTime::parse_from_rfc2822(datestr.as_str())?)) + return Ok(Property::GetLastModified(DateTime::parse_from_rfc2822(datestr.as_str())?)) } else if xml.maybe_open(DAV_URN, "lockdiscovery").await?.is_some() { let acc = xml.collect::().await?; xml.close().await?; @@ -602,6 +602,7 @@ impl QRead for Href { #[cfg(test)] mod tests { use super::*; + use chrono::{FixedOffset, DateTime, TimeZone, Utc}; use crate::dav::realization::Core; #[tokio::test] @@ -874,6 +875,74 @@ mod tests { let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); let got = rdr.find::>>().await.unwrap(); + + assert_eq!(got, Multistatus { + responses: vec![ + Response { + status_or_propstat: StatusOrPropstat::PropStat( + Href("/container/".into()), + vec![PropStat { + prop: PropValue(vec![ + Property::CreationDate(FixedOffset::west_opt(8 * 3600).unwrap().with_ymd_and_hms(1997, 12, 01, 17, 42, 21).unwrap()), + Property::DisplayName("Example collection".into()), + Property::ResourceType(vec![ResourceType::Collection]), + Property::SupportedLock(vec![ + LockEntry { + lockscope: LockScope::Exclusive, + locktype: LockType::Write, + }, + LockEntry { + lockscope: LockScope::Shared, + locktype: LockType::Write, + }, + ]), + ]), + status: 
Status(http::status::StatusCode::OK), + error: None, + responsedescription: None, + }], + ), + error: None, + responsedescription: None, + location: None, + + }, + Response { + status_or_propstat: StatusOrPropstat::PropStat( + Href("/container/front.html".into()), + vec![PropStat { + prop: PropValue(vec![ + Property::CreationDate(FixedOffset::west_opt(8 * 3600).unwrap().with_ymd_and_hms(1997, 12, 01, 18, 27, 21).unwrap()), + Property::DisplayName("Example HTML resource".into()), + Property::GetContentLength(4525), + Property::GetContentType("text/html".into()), + Property::GetEtag(r#""zzyzx""#.into()), + Property::GetLastModified(FixedOffset::west_opt(0).unwrap().with_ymd_and_hms(1998, 01, 12, 09, 25, 56).unwrap()), + //Property::ResourceType(vec![]), + Property::SupportedLock(vec![ + LockEntry { + lockscope: LockScope::Exclusive, + locktype: LockType::Write, + }, + LockEntry { + lockscope: LockScope::Shared, + locktype: LockType::Write, + }, + ]), + ]), + status: Status(http::status::StatusCode::OK), + error: None, + responsedescription: None, + }], + ), + error: None, + responsedescription: None, + location: None, + + }, + ], + responsedescription: None, + }); } } -- cgit v1.2.3 From bb9cb386b65834c44cae86bd100f800883022062 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Thu, 7 Mar 2024 15:45:05 +0100 Subject: add a fuzzer --- Cargo.toml | 4 + fuzz/.gitignore | 4 + fuzz/Cargo.lock | 4249 ++++++++++++++++++++++++++++++++++++++++++++++ fuzz/Cargo.toml | 27 + fuzz/fuzz_targets/dav.rs | 48 + src/dav/mod.rs | 12 +- src/dav/xml.rs | 8 +- src/lib.rs | 19 + src/main.rs | 26 +- 9 files changed, 4364 insertions(+), 33 deletions(-) create mode 100644 fuzz/.gitignore create mode 100644 fuzz/Cargo.lock create mode 100644 fuzz/Cargo.toml create mode 100644 fuzz/fuzz_targets/dav.rs create mode 100644 src/lib.rs diff --git a/Cargo.toml b/Cargo.toml index e362c07..543b463 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -6,6 +6,10 @@ edition = "2021" license = "EUPL-1.2" description = "A robust email server" +[lib] +name = "aerogramme" +path = "src/lib.rs" + [dependencies] # async runtime tokio = { version = "1.18", default-features = false, features = ["rt", "rt-multi-thread", "io-util", "net", "time", "macros", "sync", "signal", "fs"] } diff --git a/fuzz/.gitignore b/fuzz/.gitignore new file mode 100644 index 0000000..1a45eee --- /dev/null +++ b/fuzz/.gitignore @@ -0,0 +1,4 @@ +target +corpus +artifacts +coverage diff --git a/fuzz/Cargo.lock b/fuzz/Cargo.lock new file mode 100644 index 0000000..08fa951 --- /dev/null +++ b/fuzz/Cargo.lock @@ -0,0 +1,4249 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 3 + +[[package]] +name = "abnf-core" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec182d1f071b906a9f59269c89af101515a5cbe58f723eb6717e7fe7445c0dea" +dependencies = [ + "nom 7.1.3", +] + +[[package]] +name = "addr2line" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" + +[[package]] +name = "aerogramme" +version = "0.3.0" +dependencies = [ + "anyhow", + "argon2", + "async-trait", + "aws-config", + "aws-sdk-s3", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "backtrace", + "base64 0.21.7", + "chrono", + "clap", + "console-subscriber", + "duplexify", + "eml-codec", + "futures", + "hex", + "http 1.1.0", + "http-body-util", + "hyper 1.2.0", + "hyper-rustls 0.26.0", + "hyper-util", + "im", + "imap-codec", + "imap-flow", + "itertools 0.10.5", + "k2v-client", + "lazy_static", + "ldap3", + "log", + "nix", + "nom 7.1.3", + "quick-xml", + "rand", + "rmp-serde", + "rpassword", + "rustls 0.22.2", + "rustls-pemfile 2.1.1", + "serde", + "smtp-message", + "smtp-server", + "sodiumoxide", + "thiserror", + "tokio", + "tokio-rustls 0.25.0", + "tokio-util", + "toml", + "tracing", + "tracing-subscriber", + "zstd", +] + +[[package]] +name = "aerogramme-fuzz" +version = "0.0.0" +dependencies = [ + "aerogramme", + "libfuzzer-sys", + "quick-xml", + "tokio", +] + +[[package]] +name = "aho-corasick" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0" +dependencies = [ + "memchr", +] + +[[package]] +name = "android-tzdata" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "anyhow" +version = "1.0.80" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ad32ce52e4161730f7098c077cd2ed6229b5804ccf99e5366be1ab72a98b4e1" + +[[package]] +name = "arbitrary" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d5a26814d8dcb93b0e5a0ff3c6d80a8843bafb21b39e8e18a6f05471870e110" + +[[package]] +name = "argon2" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c3610892ee6e0cbce8ae2700349fcf8f98adb0dbfbee85aec3c9179d29cc072" +dependencies = [ + "base64ct", + "blake2", + "cpufeatures", + "password-hash", +] + +[[package]] +name = "arrayvec" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b" + +[[package]] +name = "asn1-rs" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30ff05a702273012438132f449575dbc804e27b2f3cbe3069aa237d26c98fa33" +dependencies = [ + "asn1-rs-derive", + "asn1-rs-impl", + "displaydoc", + "nom 7.1.3", + "num-traits", + "rusticata-macros", + 
"thiserror", + "time", +] + +[[package]] +name = "asn1-rs-derive" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db8b7511298d5b7784b40b092d9e9dcd3a627a5707e4b5e507931ab0d44eeebf" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", + "synstructure", +] + +[[package]] +name = "asn1-rs-impl" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2777730b2039ac0f95f093556e61b6d26cebed5393ca6f152717777cec3a42ed" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "async-channel" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35" +dependencies = [ + "concurrent-queue", + "event-listener 2.5.3", + "futures-core", +] + +[[package]] +name = "async-channel" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f28243a43d821d11341ab73c80bed182dc015c514b951616cf79bd4af39af0c3" +dependencies = [ + "concurrent-queue", + "event-listener 5.2.0", + "event-listener-strategy 0.5.0", + "futures-core", + "pin-project-lite 0.2.13", +] + +[[package]] +name = "async-executor" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17ae5ebefcc48e7452b4987947920dac9450be1110cadf34d1b8c116bdbaf97c" +dependencies = [ + "async-lock 3.3.0", + "async-task", + "concurrent-queue", + "fastrand 2.0.1", + "futures-lite 2.2.0", + "slab", +] + +[[package]] +name = "async-fs" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "279cf904654eeebfa37ac9bb1598880884924aab82e290aa65c9e77a0e142e06" +dependencies = [ + "async-lock 2.8.0", + "autocfg", + "blocking", + "futures-lite 1.13.0", +] + +[[package]] +name = "async-global-executor" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05b1b633a2115cd122d73b955eadd9916c18c8f510ec9cd1686404c60ad1c29c" +dependencies = [ + "async-channel 2.2.0", + "async-executor", + "async-io 2.3.1", + "async-lock 3.3.0", + "blocking", + "futures-lite 2.2.0", + "once_cell", +] + +[[package]] +name = "async-io" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fc5b45d93ef0529756f812ca52e44c221b35341892d3dcc34132ac02f3dd2af" +dependencies = [ + "async-lock 2.8.0", + "autocfg", + "cfg-if", + "concurrent-queue", + "futures-lite 1.13.0", + "log", + "parking", + "polling 2.8.0", + "rustix 0.37.27", + "slab", + "socket2 0.4.10", + "waker-fn", +] + +[[package]] +name = "async-io" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f97ab0c5b00a7cdbe5a371b9a782ee7be1316095885c8a4ea1daf490eb0ef65" +dependencies = [ + "async-lock 3.3.0", + "cfg-if", + "concurrent-queue", + "futures-io", + "futures-lite 2.2.0", + "parking", + "polling 3.5.0", + "rustix 0.38.31", + "slab", + "tracing", + "windows-sys 0.52.0", +] + +[[package]] +name = "async-lock" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "287272293e9d8c41773cec55e365490fe034813a2f172f502d6ddcf75b2f582b" +dependencies = [ + "event-listener 2.5.3", +] + +[[package]] +name = "async-lock" +version = "3.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d034b430882f8381900d3fe6f0aaa3ad94f2cb4ac519b429692a1bc2dda4ae7b" +dependencies = [ + "event-listener 4.0.3", 
+ "event-listener-strategy 0.4.0", + "pin-project-lite 0.2.13", +] + +[[package]] +name = "async-net" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0434b1ed18ce1cf5769b8ac540e33f01fa9471058b5e89da9e06f3c882a8c12f" +dependencies = [ + "async-io 1.13.0", + "blocking", + "futures-lite 1.13.0", +] + +[[package]] +name = "async-process" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea6438ba0a08d81529c69b36700fa2f95837bfe3e776ab39cde9c14d9149da88" +dependencies = [ + "async-io 1.13.0", + "async-lock 2.8.0", + "async-signal", + "blocking", + "cfg-if", + "event-listener 3.1.0", + "futures-lite 1.13.0", + "rustix 0.38.31", + "windows-sys 0.48.0", +] + +[[package]] +name = "async-signal" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e47d90f65a225c4527103a8d747001fc56e375203592b25ad103e1ca13124c5" +dependencies = [ + "async-io 2.3.1", + "async-lock 2.8.0", + "atomic-waker", + "cfg-if", + "futures-core", + "futures-io", + "rustix 0.38.31", + "signal-hook-registry", + "slab", + "windows-sys 0.48.0", +] + +[[package]] +name = "async-std" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62565bb4402e926b29953c785397c6dc0391b7b446e45008b0049eb43cec6f5d" +dependencies = [ + "async-channel 1.9.0", + "async-global-executor", + "async-io 1.13.0", + "async-lock 2.8.0", + "crossbeam-utils", + "futures-channel", + "futures-core", + "futures-io", + "futures-lite 1.13.0", + "gloo-timers", + "kv-log-macro", + "log", + "memchr", + "once_cell", + "pin-project-lite 0.2.13", + "pin-utils", + "slab", + "wasm-bindgen-futures", +] + +[[package]] +name = "async-stream" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd56dd203fef61ac097dd65721a419ddccb106b2d2b70ba60a6b529f03961a51" +dependencies = [ + "async-stream-impl", + "futures-core", + "pin-project-lite 0.2.13", +] + +[[package]] +name = "async-stream-impl" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "async-task" +version = "4.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbb36e985947064623dbd357f727af08ffd077f93d696782f3c56365fa2e2799" + +[[package]] +name = "async-trait" +version = "0.1.77" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c980ee35e870bd1a4d2c8294d4c04d0499e67bca1e4b5cefcc693c2fa00caea9" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "atomic-waker" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" + +[[package]] +name = "atty" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" +dependencies = [ + "hermit-abi 0.1.19", + "libc", + "winapi", +] + +[[package]] +name = "auto_enums" +version = "0.7.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe0dfe45d75158751e195799f47ea02e81f570aa24bc5ef999cdd9e888c4b5c3" +dependencies = [ + "auto_enums_core", + "auto_enums_derive", +] + +[[package]] +name = "auto_enums_core" +version = "0.7.12" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da47c46001293a2c4b744d731958be22cff408a2ab76e2279328f9713b1267b4" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "auto_enums_derive" +version = "0.7.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41aed1da83ecdc799503b7cb94da1b45a34d72b49caf40a61d9cf5b88ec07cfd" +dependencies = [ + "autocfg", + "derive_utils", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "autocfg" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" + +[[package]] +name = "aws-config" +version = "1.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b96342ea8948ab9bef3e6234ea97fc32e2d8a88d8fb6a084e52267317f94b6b" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-sdk-sso", + "aws-sdk-ssooidc", + "aws-sdk-sts", + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-types", + "bytes", + "fastrand 2.0.1", + "hex", + "http 0.2.12", + "hyper 0.14.28", + "ring 0.17.8", + "time", + "tokio", + "tracing", + "zeroize", +] + +[[package]] +name = "aws-credential-types" +version = "1.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "273fa47dafc9ef14c2c074ddddbea4561ff01b7f68d5091c0e9737ced605c01d" +dependencies = [ + "aws-smithy-async", + "aws-smithy-runtime-api", + "aws-smithy-types", + "zeroize", +] + +[[package]] +name = "aws-runtime" +version = "1.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e38bab716c8bf07da24be07ecc02e0f5656ce8f30a891322ecdcb202f943b85" +dependencies = [ + "aws-credential-types", + "aws-sigv4", + "aws-smithy-async", + "aws-smithy-eventstream", + "aws-smithy-http", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-types", + "bytes", + "fastrand 2.0.1", + "http 0.2.12", + "http-body 0.4.6", + "percent-encoding", + "pin-project-lite 0.2.13", + "tracing", + "uuid", +] + +[[package]] +name = "aws-sdk-config" +version = "1.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07979fd68679736ba306d6ea2a4dc2fd835ac4d454942c5d8920ef83ed2f979f" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-types", + "bytes", + "http 0.2.12", + "once_cell", + "regex-lite", + "tracing", +] + +[[package]] +name = "aws-sdk-s3" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93d35d39379445970fc3e4ddf7559fff2c32935ce0b279f9cb27080d6b7c6d94" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-sigv4", + "aws-smithy-async", + "aws-smithy-checksums", + "aws-smithy-eventstream", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-smithy-xml", + "aws-types", + "bytes", + "http 0.2.12", + "http-body 0.4.6", + "once_cell", + "percent-encoding", + "regex-lite", + "tracing", + "url", +] + +[[package]] +name = "aws-sdk-sso" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d84bd3925a17c9adbf6ec65d52104a44a09629d8f70290542beeee69a95aee7f" +dependencies = [ + "aws-credential-types", 
+ "aws-runtime", + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-types", + "bytes", + "http 0.2.12", + "once_cell", + "regex-lite", + "tracing", +] + +[[package]] +name = "aws-sdk-ssooidc" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c2dae39e997f58bc4d6292e6244b26ba630c01ab671b6f9f44309de3eb80ab8" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-types", + "bytes", + "http 0.2.12", + "once_cell", + "regex-lite", + "tracing", +] + +[[package]] +name = "aws-sdk-sts" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17fd9a53869fee17cea77e352084e1aa71e2c5e323d974c13a9c2bcfd9544c7f" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-query", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-smithy-xml", + "aws-types", + "http 0.2.12", + "once_cell", + "regex-lite", + "tracing", +] + +[[package]] +name = "aws-sigv4" +version = "1.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ada00a4645d7d89f296fe0ddbc3fe3554f03035937c849a05d37ddffc1f29a1" +dependencies = [ + "aws-credential-types", + "aws-smithy-eventstream", + "aws-smithy-http", + "aws-smithy-runtime-api", + "aws-smithy-types", + "bytes", + "crypto-bigint 0.5.5", + "form_urlencoded", + "hex", + "hmac", + "http 0.2.12", + "http 1.1.0", + "once_cell", + "p256", + "percent-encoding", + "ring 0.17.8", + "sha2", + "subtle", + "time", + "tracing", + "zeroize", +] + +[[package]] +name = "aws-smithy-async" +version = "1.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcf7f09a27286d84315dfb9346208abb3b0973a692454ae6d0bc8d803fcce3b4" +dependencies = [ + "futures-util", + "pin-project-lite 0.2.13", + "tokio", +] + +[[package]] +name = "aws-smithy-checksums" +version = "0.60.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fd4b66f2a8e7c84d7e97bda2666273d41d2a2e25302605bcf906b7b2661ae5e" +dependencies = [ + "aws-smithy-http", + "aws-smithy-types", + "bytes", + "crc32c", + "crc32fast", + "hex", + "http 0.2.12", + "http-body 0.4.6", + "md-5", + "pin-project-lite 0.2.13", + "sha1", + "sha2", + "tracing", +] + +[[package]] +name = "aws-smithy-eventstream" +version = "0.60.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6363078f927f612b970edf9d1903ef5cef9a64d1e8423525ebb1f0a1633c858" +dependencies = [ + "aws-smithy-types", + "bytes", + "crc32fast", +] + +[[package]] +name = "aws-smithy-http" +version = "0.60.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6ca214a6a26f1b7ebd63aa8d4f5e2194095643023f9608edf99a58247b9d80d" +dependencies = [ + "aws-smithy-eventstream", + "aws-smithy-runtime-api", + "aws-smithy-types", + "bytes", + "bytes-utils", + "futures-core", + "http 0.2.12", + "http-body 0.4.6", + "once_cell", + "percent-encoding", + "pin-project-lite 0.2.13", + "pin-utils", + "tracing", +] + +[[package]] +name = "aws-smithy-json" +version = "0.60.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1af80ecf3057fb25fe38d1687e94c4601a7817c6a1e87c1b0635f7ecb644ace5" +dependencies = [ + 
"aws-smithy-types", +] + +[[package]] +name = "aws-smithy-query" +version = "0.60.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb27084f72ea5fc20033efe180618677ff4a2f474b53d84695cfe310a6526cbc" +dependencies = [ + "aws-smithy-types", + "urlencoding", +] + +[[package]] +name = "aws-smithy-runtime" +version = "1.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbb5fca54a532a36ff927fbd7407a7c8eb9c3b4faf72792ba2965ea2cad8ed55" +dependencies = [ + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-runtime-api", + "aws-smithy-types", + "bytes", + "fastrand 2.0.1", + "h2 0.3.24", + "http 0.2.12", + "http-body 0.4.6", + "hyper 0.14.28", + "hyper-rustls 0.24.2", + "once_cell", + "pin-project-lite 0.2.13", + "pin-utils", + "rustls 0.21.10", + "tokio", + "tracing", +] + +[[package]] +name = "aws-smithy-runtime-api" +version = "1.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22389cb6f7cac64f266fb9f137745a9349ced7b47e0d2ba503e9e40ede4f7060" +dependencies = [ + "aws-smithy-async", + "aws-smithy-types", + "bytes", + "http 0.2.12", + "http 1.1.0", + "pin-project-lite 0.2.13", + "tokio", + "tracing", + "zeroize", +] + +[[package]] +name = "aws-smithy-types" +version = "1.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f081da5481210523d44ffd83d9f0740320050054006c719eae0232d411f024d3" +dependencies = [ + "base64-simd", + "bytes", + "bytes-utils", + "futures-core", + "http 0.2.12", + "http-body 0.4.6", + "itoa", + "num-integer", + "pin-project-lite 0.2.13", + "pin-utils", + "ryu", + "serde", + "time", + "tokio", + "tokio-util", +] + +[[package]] +name = "aws-smithy-xml" +version = "0.60.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fccd8f595d0ca839f9f2548e66b99514a85f92feb4c01cf2868d93eb4888a42" +dependencies = [ + "xmlparser", +] + +[[package]] +name = "aws-types" +version = "1.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d07c63521aa1ea9a9f92a701f1a08ce3fd20b46c6efc0d5c8947c1fd879e3df1" +dependencies = [ + "aws-credential-types", + "aws-smithy-async", + "aws-smithy-runtime-api", + "aws-smithy-types", + "http 0.2.12", + "rustc_version", + "tracing", +] + +[[package]] +name = "axum" +version = "0.6.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b829e4e32b91e643de6eafe82b1d90675f5874230191a4ffbc1b336dec4d6bf" +dependencies = [ + "async-trait", + "axum-core", + "bitflags 1.3.2", + "bytes", + "futures-util", + "http 0.2.12", + "http-body 0.4.6", + "hyper 0.14.28", + "itoa", + "matchit", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite 0.2.13", + "rustversion", + "serde", + "sync_wrapper", + "tower", + "tower-layer", + "tower-service", +] + +[[package]] +name = "axum-core" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c" +dependencies = [ + "async-trait", + "bytes", + "futures-util", + "http 0.2.12", + "http-body 0.4.6", + "mime", + "rustversion", + "tower-layer", + "tower-service", +] + +[[package]] +name = "backtrace" +version = "0.3.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2089b7e3f35b9dd2d0ed921ead4f6d318c27680d4a5bd167b3ee120edb105837" +dependencies = [ + "addr2line", + "cc", + "cfg-if", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", +] + +[[package]] +name = "base16ct" 
+version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "349a06037c7bf932dd7e7d1f653678b2038b9ad46a74102f1fc7bd7872678cce" + +[[package]] +name = "base64" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" + +[[package]] +name = "base64" +version = "0.21.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" + +[[package]] +name = "base64-simd" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "339abbe78e73178762e23bea9dfd08e697eb3f3301cd4be981c0f78ba5859195" +dependencies = [ + "outref", + "vsimd", +] + +[[package]] +name = "base64ct" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed570934406eb16438a4e976b1b4500774099c13b8cb96eec99f620f05090ddf" + +[[package]] +name = "bitmaps" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "031043d04099746d8db04daf1fa424b2bc8bd69d92b25962dcde24da39ab64a2" +dependencies = [ + "typenum", +] + +[[package]] +name = "bitvec" +version = "0.19.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55f93d0ef3363c364d5976646a38f04cf67cfe1d4c8d160cdea02cab2c116b33" +dependencies = [ + "funty", + "radium", + "tap", + "wyz", +] + +[[package]] +name = "blake2" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe" +dependencies = [ + "digest", +] + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "blocking" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a37913e8dc4ddcc604f0c6d3bf2887c995153af3611de9e23c352b44c1b9118" +dependencies = [ + "async-channel 2.2.0", + "async-lock 3.3.0", + "async-task", + "fastrand 2.0.1", + "futures-io", + "futures-lite 2.2.0", + "piper", + "tracing", +] + +[[package]] +name = "bounded-static" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2325bd33fa7e3018e7e37f5b0591ba009124963b5a3f8b7cae6d0a8c1028ed4" +dependencies = [ + "bounded-static-derive", +] + +[[package]] +name = "bounded-static-derive" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f10dd247355bf631d98d2753d87ae62c84c8dcb996ad9b24a4168e0aec29bd6b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "bumpalo" +version = "3.15.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ea184aa71bb362a1157c896979544cc23974e08fd265f29ea96b59f0b4a555b" + +[[package]] +name = "byteorder" +version = "1.5.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" + +[[package]] +name = "bytes" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223" + +[[package]] +name = "bytes-utils" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7dafe3a8757b027e2be6e4e5601ed563c55989fcf1546e933c66c8eb3a058d35" +dependencies = [ + "bytes", + "either", +] + +[[package]] +name = "cc" +version = "1.0.90" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8cd6604a82acf3039f1144f54b8eb34e91ffba622051189e71b781822d5ee1f5" +dependencies = [ + "jobserver", + "libc", +] + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "chrono" +version = "0.4.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8eaf5903dcbc0a39312feb77df2ff4c76387d591b9fc7b04a238dcf8bb62639a" +dependencies = [ + "android-tzdata", + "iana-time-zone", + "js-sys", + "num-traits", + "wasm-bindgen", + "windows-targets 0.52.4", +] + +[[package]] +name = "clap" +version = "3.2.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ea181bf566f71cb9a5d17a59e1871af638180a18fb0035c92ae62b705207123" +dependencies = [ + "atty", + "bitflags 1.3.2", + "clap_derive", + "clap_lex", + "indexmap 1.9.3", + "once_cell", + "strsim", + "termcolor", + "textwrap", +] + +[[package]] +name = "clap_derive" +version = "3.2.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae6371b8bdc8b7d3959e9cf7b22d4435ef3e79e138688421ec654acf8c81b008" +dependencies = [ + "heck", + "proc-macro-error", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "clap_lex" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2850f2f5a82cbf437dd5af4d49848fbdfc27c157c3d010345776f952765261c5" +dependencies = [ + "os_str_bytes", +] + +[[package]] +name = "concurrent-queue" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d16048cd947b08fa32c24458a22f5dc5e835264f689f4f5653210c69fd107363" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "console-api" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd326812b3fd01da5bb1af7d340d0d555fd3d4b641e7f1dfcf5962a902952787" +dependencies = [ + "futures-core", + "prost", + "prost-types", + "tonic", + "tracing-core", +] + +[[package]] +name = "console-subscriber" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7481d4c57092cd1c19dd541b92bdce883de840df30aa5d03fd48a3935c01842e" +dependencies = [ + "console-api", + "crossbeam-channel", + "crossbeam-utils", + "futures-task", + "hdrhistogram", + "humantime", + "prost-types", + "serde", + "serde_json", + "thread_local", + "tokio", + "tokio-stream", + "tonic", + "tracing", + "tracing-core", + "tracing-subscriber", +] + +[[package]] +name = "const-oid" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" + +[[package]] +name = "core-foundation" +version = "0.9.4" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" + +[[package]] +name = "cpufeatures" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504" +dependencies = [ + "libc", +] + +[[package]] +name = "crc32c" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89254598aa9b9fa608de44b3ae54c810f0f06d755e24c50177f1f8f31ff50ce2" +dependencies = [ + "rustc_version", +] + +[[package]] +name = "crc32fast" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3855a8a784b474f333699ef2bbca9db2c4a1f6d9088a90a2d25b1eb53111eaa" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "crossbeam-channel" +version = "0.5.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab3db02a9c5b5121e1e42fbdb1aeb65f5e02624cc58c43f2884c6ccac0b82f95" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345" + +[[package]] +name = "crypto-bigint" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef2b4b23cddf68b89b8f8069890e8c270d54e2d5fe1b143820234805e4cb17ef" +dependencies = [ + "generic-array", + "rand_core", + "subtle", + "zeroize", +] + +[[package]] +name = "crypto-bigint" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" +dependencies = [ + "rand_core", + "subtle", +] + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "data-encoding" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e962a19be5cfc3f3bf6dd8f61eb50107f356ad6270fbb3ed41476571db78be5" + +[[package]] +name = "der" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1a467a65c5e759bce6e65eaf91cc29f466cdc57cb65777bd646872a8a1fd4de" +dependencies = [ + "const-oid", + "zeroize", +] + +[[package]] +name = "der-parser" +version = "7.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe398ac75057914d7d07307bf67dc7f3f574a26783b4fc7805a20ffa9f506e82" +dependencies = [ + "asn1-rs", + "displaydoc", + "nom 7.1.3", + "num-bigint", + "num-traits", + "rusticata-macros", +] + +[[package]] +name = "deranged" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" +dependencies = [ + "powerfmt", +] + +[[package]] +name = "derive_utils" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "532b4c15dccee12c7044f1fcad956e98410860b22231e44a3b827464797ca7bf" 
+dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer", + "crypto-common", + "subtle", +] + +[[package]] +name = "displaydoc" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "487585f4d0c6655fe74905e2504d8ad6908e4db67f744eb140876906c2f3175d" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "duplexify" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1cc346cd6db38ceab2d33f59b26024c3ddb8e75f047c6cafbcbc016ea8065d5" +dependencies = [ + "async-std", + "pin-project-lite 0.1.12", +] + +[[package]] +name = "ecdsa" +version = "0.14.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "413301934810f597c1d19ca71c8710e99a3f1ba28a0d2ebc01551a2daeea3c5c" +dependencies = [ + "der", + "elliptic-curve", + "rfc6979", + "signature", +] + +[[package]] +name = "ed25519" +version = "1.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91cff35c70bba8a626e3185d8cd48cc11b5437e1a5bcd15b9b5fa3c64b6dfee7" +dependencies = [ + "signature", +] + +[[package]] +name = "either" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11157ac094ffbdde99aa67b23417ebdd801842852b500e395a45a9c0aac03e4a" + +[[package]] +name = "elliptic-curve" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7bb888ab5300a19b8e5bceef25ac745ad065f3c9f7efc6de1b91958110891d3" +dependencies = [ + "base16ct", + "crypto-bigint 0.4.9", + "der", + "digest", + "ff", + "generic-array", + "group", + "pkcs8", + "rand_core", + "sec1", + "subtle", + "zeroize", +] + +[[package]] +name = "eml-codec" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4499124d87abce26a57ef96ece800fa8babc38fbedd81c607c340ae83d46d2e" +dependencies = [ + "base64 0.21.7", + "chrono", + "encoding_rs", + "nom 7.1.3", +] + +[[package]] +name = "encoding_rs" +version = "0.8.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7268b386296a025e474d5140678f75d6de9493ae55a5d709eeb9dd08149945e1" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "equivalent" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" + +[[package]] +name = "errno" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "event-listener" +version = "2.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" + +[[package]] +name = "event-listener" +version = "3.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d93877bcde0eb80ca09131a08d23f0a5c18a620b01db137dba666d18cd9b30c2" +dependencies = [ + "concurrent-queue", + "parking", + "pin-project-lite 0.2.13", +] + +[[package]] +name = "event-listener" +version = "4.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"67b215c49b2b248c855fb73579eb1f4f26c38ffdc12973e20e07b91d78d5646e" +dependencies = [ + "concurrent-queue", + "parking", + "pin-project-lite 0.2.13", +] + +[[package]] +name = "event-listener" +version = "5.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b5fb89194fa3cad959b833185b3063ba881dbfc7030680b314250779fb4cc91" +dependencies = [ + "concurrent-queue", + "parking", + "pin-project-lite 0.2.13", +] + +[[package]] +name = "event-listener-strategy" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "958e4d70b6d5e81971bebec42271ec641e7ff4e170a6fa605f2b8a8b65cb97d3" +dependencies = [ + "event-listener 4.0.3", + "pin-project-lite 0.2.13", +] + +[[package]] +name = "event-listener-strategy" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "feedafcaa9b749175d5ac357452a9d41ea2911da598fde46ce1fe02c37751291" +dependencies = [ + "event-listener 5.2.0", + "pin-project-lite 0.2.13", +] + +[[package]] +name = "fastrand" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be" +dependencies = [ + "instant", +] + +[[package]] +name = "fastrand" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" + +[[package]] +name = "ff" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d013fc25338cc558c5c2cfbad646908fb23591e2404481826742b651c9af7160" +dependencies = [ + "rand_core", + "subtle", +] + +[[package]] +name = "flate2" +version = "1.0.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46303f565772937ffe1d394a4fac6f411c6013172fadde9dcdb1e147a086940e" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "form_urlencoded" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "funty" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fed34cd105917e91daa4da6b3728c47b068749d6a62c59811f06ed2ac71d9da7" + +[[package]] +name = "futures" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" + +[[package]] +name = "futures-executor" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" + +[[package]] +name = "futures-lite" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49a9d51ce47660b1e808d3c990b4709f2f415d928835a17dfd16991515c46bce" +dependencies = [ + "fastrand 1.9.0", + "futures-core", + "futures-io", + "memchr", + "parking", + "pin-project-lite 0.2.13", + "waker-fn", +] + +[[package]] +name = "futures-lite" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "445ba825b27408685aaecefd65178908c36c6e96aaf6d8599419d46e624192ba" +dependencies = [ + "fastrand 2.0.1", + "futures-core", + "futures-io", + "parking", + "pin-project-lite 0.2.13", +] + +[[package]] +name = "futures-macro" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "futures-sink" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" + +[[package]] +name = "futures-task" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" + +[[package]] +name = "futures-util" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite 0.2.13", + "pin-utils", + "slab", +] + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "getrandom" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "190092ea657667030ac6a35e305e62fc4dd69fd98ac98631e5d3a2b1575a12b5" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "gimli" +version = "0.28.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" + +[[package]] +name = "gloo-timers" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b995a66bb87bebce9a0f4a95aed01daca4872c050bfcb21653361c03bc35e5c" +dependencies = [ + "futures-channel", + "futures-core", + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "group" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5dfbfb3a6cfbd390d5c9564ab283a0349b9b9fcd46a706c1eb10e0db70bfbac7" +dependencies = [ + "ff", + "rand_core", + "subtle", +] + +[[package]] +name = "h2" +version = "0.3.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb2c4422095b67ee78da96fbb51a4cc413b3b25883c7717ff7ca1ab31022c9c9" +dependencies = [ + "bytes", 
+ "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http 0.2.12", + "indexmap 2.2.5", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "h2" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31d030e59af851932b72ceebadf4a2b5986dba4c3b99dd2493f8273a0f151943" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http 1.1.0", + "indexmap 2.2.5", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + +[[package]] +name = "hashbrown" +version = "0.14.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" + +[[package]] +name = "hdrhistogram" +version = "7.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "765c9198f173dd59ce26ff9f95ef0aafd0a0fe01fb9d72841bc5066a4c06511d" +dependencies = [ + "base64 0.21.7", + "byteorder", + "flate2", + "nom 7.1.3", + "num-traits", +] + +[[package]] +name = "heck" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" + +[[package]] +name = "hermit-abi" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" +dependencies = [ + "libc", +] + +[[package]] +name = "hermit-abi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "hmac" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" +dependencies = [ + "digest", +] + +[[package]] +name = "http" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" +dependencies = [ + "bytes", + "http 0.2.12", + "pin-project-lite 0.2.13", +] + +[[package]] +name = "http-body" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643" +dependencies = [ + "bytes", + "http 1.1.0", +] + +[[package]] +name = "http-body-util" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41cb79eb393015dadd30fc252023adb0b2400a0caee0fa2a077e6e21a551e840" +dependencies = [ + "bytes", + "futures-util", + "http 1.1.0", 
+ "http-body 1.0.0", + "pin-project-lite 0.2.13", +] + +[[package]] +name = "httparse" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" + +[[package]] +name = "httpdate" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + +[[package]] +name = "humantime" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" + +[[package]] +name = "hyper" +version = "0.14.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf96e135eb83a2a8ddf766e426a841d8ddd7449d5f00d34ea02b41d2f19eef80" +dependencies = [ + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "h2 0.3.24", + "http 0.2.12", + "http-body 0.4.6", + "httparse", + "httpdate", + "itoa", + "pin-project-lite 0.2.13", + "socket2 0.5.6", + "tokio", + "tower-service", + "tracing", + "want", +] + +[[package]] +name = "hyper" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "186548d73ac615b32a73aafe38fb4f56c0d340e110e5a200bcadbaf2e199263a" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "h2 0.4.2", + "http 1.1.0", + "http-body 1.0.0", + "httparse", + "httpdate", + "itoa", + "pin-project-lite 0.2.13", + "smallvec", + "tokio", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.24.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" +dependencies = [ + "futures-util", + "http 0.2.12", + "hyper 0.14.28", + "log", + "rustls 0.21.10", + "rustls-native-certs 0.6.3", + "tokio", + "tokio-rustls 0.24.1", +] + +[[package]] +name = "hyper-rustls" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0bea761b46ae2b24eb4aef630d8d1c398157b6fc29e6350ecf090a0b70c952c" +dependencies = [ + "futures-util", + "http 1.1.0", + "hyper 1.2.0", + "hyper-util", + "log", + "rustls 0.22.2", + "rustls-native-certs 0.7.0", + "rustls-pki-types", + "tokio", + "tokio-rustls 0.25.0", + "tower-service", +] + +[[package]] +name = "hyper-timeout" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" +dependencies = [ + "hyper 0.14.28", + "pin-project-lite 0.2.13", + "tokio", + "tokio-io-timeout", +] + +[[package]] +name = "hyper-util" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca38ef113da30126bbff9cd1705f9273e15d45498615d138b0c20279ac7a76aa" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "http 1.1.0", + "http-body 1.0.0", + "hyper 1.2.0", + "pin-project-lite 0.2.13", + "socket2 0.5.6", + "tokio", + "tower", + "tower-service", + "tracing", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.60" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7ffbb5a1b541ea2561f8c41c087286cc091e21e556a4f09a8f6cbf17b69b141" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "wasm-bindgen", + "windows-core", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + +[[package]] +name = "idna" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "418a0a6fab821475f634efe3ccc45c013f742efe03d853e8d3355d5cb850ecf8" +dependencies = [ + "matches", + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "idna" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "im" +version = "15.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0acd33ff0285af998aaf9b57342af478078f53492322fafc47450e09397e0e9" +dependencies = [ + "bitmaps", + "rand_core", + "rand_xoshiro", + "sized-chunks", + "typenum", + "version_check", +] + +[[package]] +name = "imap-codec" +version = "2.0.0" +source = "git+https://github.com/superboum/imap-codec?branch=custom/aerogramme#d8a5afc03fb771232e94c73af6a05e79dc80bbed" +dependencies = [ + "abnf-core", + "base64 0.21.7", + "bounded-static", + "chrono", + "imap-types", + "log", + "nom 7.1.3", + "thiserror", +] + +[[package]] +name = "imap-flow" +version = "0.1.0" +source = "git+https://github.com/duesee/imap-flow.git?branch=main#dce759a8531f317e8d7311fb032b366db6698e38" +dependencies = [ + "bounded-static", + "bytes", + "imap-codec", + "imap-types", + "thiserror", + "tokio", + "tracing", +] + +[[package]] +name = "imap-types" +version = "2.0.0" +source = "git+https://github.com/superboum/imap-codec?branch=custom/aerogramme#d8a5afc03fb771232e94c73af6a05e79dc80bbed" +dependencies = [ + "base64 0.21.7", + "bounded-static", + "chrono", + "thiserror", +] + +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown 0.12.3", +] + +[[package]] +name = "indexmap" +version = "2.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b0b929d511467233429c45a44ac1dcaa21ba0f5ba11e4879e6ed28ddb4f9df4" +dependencies = [ + "equivalent", + "hashbrown 0.14.3", +] + +[[package]] +name = "instant" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "io-lifetimes" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" +dependencies = [ + "hermit-abi 0.3.9", + "libc", + "windows-sys 0.48.0", +] + +[[package]] +name = "itertools" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +dependencies = [ + "either", +] + +[[package]] +name = "itertools" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c" + +[[package]] +name = "jobserver" +version = "0.1.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab46a6e9526ddef3ae7f787c06f0f2600639ba80ea3eade3d8e670a2230f51d6" +dependencies = [ + "libc", +] + +[[package]] +name = "js-sys" +version = "0.3.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "k2v-client" +version = "0.0.4" +source = "git+https://git.deuxfleurs.fr/Deuxfleurs/garage.git?branch=k2v/shared_http_client#8b35a946d9f6b31b26b9783acbfab984316051f4" +dependencies = [ + "aws-sdk-config", + "aws-sigv4", + "base64 0.21.7", + "hex", + "http 1.1.0", + "http-body-util", + "hyper 1.2.0", + "hyper-rustls 0.26.0", + "hyper-util", + "log", + "percent-encoding", + "serde", + "serde_json", + "sha2", + "thiserror", + "tokio", +] + +[[package]] +name = "kv-log-macro" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f" +dependencies = [ + "log", +] + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "lber" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a99b520993b21a6faab32643cf4726573dc18ca4cf2d48cbeb24d248c86c930" +dependencies = [ + "byteorder", + "bytes", + "nom 2.2.1", +] + +[[package]] +name = "ldap3" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce38dafca0608c64cc0146fb782b06abb8d946dae7a3af23c89a95da24f6b84d" +dependencies = [ + "async-trait", + "bytes", + "futures", + "futures-util", + "lazy_static", + "lber", + "log", + "nom 2.2.1", + "percent-encoding", + "ring 0.16.20", + "rustls 0.20.9", + "rustls-native-certs 0.6.3", + "thiserror", + "tokio", + "tokio-rustls 0.23.4", + "tokio-stream", + "tokio-util", + "url", + "x509-parser", +] + +[[package]] +name = "lexical-core" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6607c62aa161d23d17a9072cc5da0be67cdfc89d3afb1e8d9c842bebc2525ffe" +dependencies = [ + "arrayvec", + "bitflags 1.3.2", + "cfg-if", + "ryu", + "static_assertions", +] + +[[package]] +name = "libc" +version = "0.2.153" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd" + +[[package]] +name = "libfuzzer-sys" +version = "0.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a96cfd5557eb82f2b83fed4955246c988d331975a002961b07c81584d107e7f7" +dependencies = [ + "arbitrary", + "cc", + "once_cell", +] + +[[package]] +name = "libsodium-sys" +version = "0.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b779387cd56adfbc02ea4a668e704f729be8d6a6abd2c27ca5ee537849a92fd" +dependencies = [ + "cc", + "libc", + "pkg-config", + "walkdir", +] + +[[package]] +name = "linux-raw-sys" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" + +[[package]] +name = "linux-raw-sys" +version = "0.4.13" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c" + +[[package]] +name = "log" +version = "0.4.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c" +dependencies = [ + "value-bag", +] + +[[package]] +name = "matchers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata 0.1.10", +] + +[[package]] +name = "matches" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" + +[[package]] +name = "matchit" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" + +[[package]] +name = "md-5" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" +dependencies = [ + "cfg-if", + "digest", +] + +[[package]] +name = "memchr" +version = "2.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149" + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "minimal-lexical" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" + +[[package]] +name = "miniz_oxide" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d811f3e15f28568be3407c8e7fdb6514c1cda3cb30683f15b6a1a1dc4ea14a7" +dependencies = [ + "adler", +] + +[[package]] +name = "mio" +version = "0.8.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" +dependencies = [ + "libc", + "wasi", + "windows-sys 0.48.0", +] + +[[package]] +name = "nix" +version = "0.27.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2eb04e9c688eff1c89d72b407f168cf79bb9e867a9d3323ed6c01519eb9cc053" +dependencies = [ + "bitflags 2.4.2", + "cfg-if", + "libc", +] + +[[package]] +name = "nom" +version = "2.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf51a729ecf40266a2368ad335a5fdde43471f545a967109cd62146ecf8b66ff" + +[[package]] +name = "nom" +version = "6.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7413f999671bd4745a7b624bd370a569fb6bc574b23c83a3c5ed2e453f3d5e2" +dependencies = [ + "bitvec", + "funty", + "lexical-core", + "memchr", + "version_check", +] + +[[package]] +name = "nom" +version = "7.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" +dependencies = [ + "memchr", + "minimal-lexical", +] + +[[package]] +name = "nu-ansi-term" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + 
"winapi", +] + +[[package]] +name = "num-bigint" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "608e7659b5c3d7cba262d894801b9ec9d00de989e8a82bd4bef91d08da45cdc0" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-conv" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" + +[[package]] +name = "num-integer" +version = "0.1.46" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" +dependencies = [ + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da0df0e5185db44f69b44f26786fe401b6c293d1907744beaa7fa62b2e5a517a" +dependencies = [ + "autocfg", +] + +[[package]] +name = "num_cpus" +version = "1.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" +dependencies = [ + "hermit-abi 0.3.9", + "libc", +] + +[[package]] +name = "object" +version = "0.32.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441" +dependencies = [ + "memchr", +] + +[[package]] +name = "oid-registry" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38e20717fa0541f39bd146692035c37bedfa532b3e5071b35761082407546b2a" +dependencies = [ + "asn1-rs", +] + +[[package]] +name = "once_cell" +version = "1.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" + +[[package]] +name = "openssl-probe" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" + +[[package]] +name = "os_str_bytes" +version = "6.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2355d85b9a3786f481747ced0e0ff2ba35213a1f9bd406ed906554d7af805a1" + +[[package]] +name = "outref" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4030760ffd992bef45b0ae3f10ce1aba99e33464c90d14dd7c039884963ddc7a" + +[[package]] +name = "overload" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + +[[package]] +name = "p256" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51f44edd08f51e2ade572f141051021c5af22677e42b7dd28a88155151c33594" +dependencies = [ + "ecdsa", + "elliptic-curve", + "sha2", +] + +[[package]] +name = "parking" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb813b8af86854136c6922af0598d719255ecb2179515e6e7730d468f05c9cae" + +[[package]] +name = "password-hash" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "346f04948ba92c43e8469c1ee6736c7563d71012b17d40745260fe106aac2166" +dependencies = [ + "base64ct", + "rand_core", + "subtle", +] + +[[package]] +name = "paste" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" + +[[package]] +name = "percent-encoding" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" + +[[package]] +name = "pin-project" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "pin-project-lite" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "257b64915a082f7811703966789728173279bdebb956b143dbcd23f6f970a777" + +[[package]] +name = "pin-project-lite" +version = "0.2.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "piper" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "668d31b1c4eba19242f2088b2bf3316b82ca31082a8335764db4e083db7485d4" +dependencies = [ + "atomic-waker", + "fastrand 2.0.1", + "futures-io", +] + +[[package]] +name = "pkcs8" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9eca2c590a5f85da82668fa685c09ce2888b9430e83299debf1f34b65fd4a4ba" +dependencies = [ + "der", + "spki", +] + +[[package]] +name = "pkg-config" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" + +[[package]] +name = "polling" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b2d323e8ca7996b3e23126511a523f7e62924d93ecd5ae73b333815b0eb3dce" +dependencies = [ + "autocfg", + "bitflags 1.3.2", + "cfg-if", + "concurrent-queue", + "libc", + "log", + "pin-project-lite 0.2.13", + "windows-sys 0.48.0", +] + +[[package]] +name = "polling" +version = "3.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24f040dee2588b4963afb4e420540439d126f73fdacf4a9c486a96d840bac3c9" +dependencies = [ + "cfg-if", + "concurrent-queue", + "pin-project-lite 0.2.13", + "rustix 0.38.31", + "tracing", + "windows-sys 0.52.0", +] + +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + +[[package]] +name = "ppv-lite86" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" + +[[package]] +name = "proc-macro-error" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" +dependencies = [ + "proc-macro-error-attr", + "proc-macro2", + "quote", + "syn 1.0.109", + "version_check", +] + +[[package]] +name = 
"proc-macro-error-attr" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" +dependencies = [ + "proc-macro2", + "quote", + "version_check", +] + +[[package]] +name = "proc-macro2" +version = "1.0.78" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2422ad645d89c99f8f3e6b88a9fdeca7fabeac836b1002371c4367c8f984aae" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "prost" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "146c289cda302b98a28d40c8b3b90498d6e526dd24ac2ecea73e4e491685b94a" +dependencies = [ + "bytes", + "prost-derive", +] + +[[package]] +name = "prost-derive" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "efb6c9a1dd1def8e2124d17e83a20af56f1570d6c2d2bd9e266ccb768df3840e" +dependencies = [ + "anyhow", + "itertools 0.11.0", + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "prost-types" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "193898f59edcf43c26227dcd4c8427f00d99d61e95dcde58dabd49fa291d470e" +dependencies = [ + "prost", +] + +[[package]] +name = "quick-xml" +version = "0.31.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1004a344b30a54e2ee58d66a71b32d2db2feb0a31f9a2d302bf0536f15de2a33" +dependencies = [ + "memchr", + "tokio", +] + +[[package]] +name = "quote" +version = "1.0.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "radium" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "941ba9d78d8e2f7ce474c015eea4d9c6d25b6a3327f9832ee29a4de27f91bbb8" + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom", +] + +[[package]] +name = "rand_xoshiro" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f97cdb2a36ed4183de61b2f824cc45c9f1037f28afe0a322e9fff4c108b5aaa" +dependencies = [ + "rand_core", +] + +[[package]] +name = "regex" +version = "1.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b62dbe01f0b06f9d8dc7d49e05a0785f153b00b2c227856282f671e0318c9b15" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata 0.4.6", + "regex-syntax 0.8.2", +] + +[[package]] +name = "regex-automata" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" +dependencies = [ + "regex-syntax 0.6.29", +] + +[[package]] +name = "regex-automata" +version = "0.4.6" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "86b83b8b9847f9bf95ef68afb0b8e6cdb80f498442f5179a29fad448fcc1eaea" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax 0.8.2", +] + +[[package]] +name = "regex-lite" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30b661b2f27137bdbc16f00eda72866a92bb28af1753ffbd56744fb6e2e9cd8e" + +[[package]] +name = "regex-syntax" +version = "0.6.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" + +[[package]] +name = "regex-syntax" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" + +[[package]] +name = "rfc6979" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7743f17af12fa0b03b803ba12cd6a8d9483a587e89c69445e3909655c0b9fabb" +dependencies = [ + "crypto-bigint 0.4.9", + "hmac", + "zeroize", +] + +[[package]] +name = "ring" +version = "0.16.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +dependencies = [ + "cc", + "libc", + "once_cell", + "spin 0.5.2", + "untrusted 0.7.1", + "web-sys", + "winapi", +] + +[[package]] +name = "ring" +version = "0.17.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" +dependencies = [ + "cc", + "cfg-if", + "getrandom", + "libc", + "spin 0.9.8", + "untrusted 0.9.0", + "windows-sys 0.52.0", +] + +[[package]] +name = "rmp" +version = "0.8.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f9860a6cc38ed1da53456442089b4dfa35e7cedaa326df63017af88385e6b20" +dependencies = [ + "byteorder", + "num-traits", + "paste", +] + +[[package]] +name = "rmp-serde" +version = "0.15.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "723ecff9ad04f4ad92fe1c8ca6c20d2196d9286e9c60727c4cb5511629260e9d" +dependencies = [ + "byteorder", + "rmp", + "serde", +] + +[[package]] +name = "rpassword" +version = "7.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80472be3c897911d0137b2d2b9055faf6eeac5b14e324073d83bc17b191d7e3f" +dependencies = [ + "libc", + "rtoolbox", + "windows-sys 0.48.0", +] + +[[package]] +name = "rtoolbox" +version = "0.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c247d24e63230cdb56463ae328478bd5eac8b8faa8c69461a77e8e323afac90e" +dependencies = [ + "libc", + "windows-sys 0.48.0", +] + +[[package]] +name = "rustc-demangle" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" + +[[package]] +name = "rustc_version" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +dependencies = [ + "semver", +] + +[[package]] +name = "rusticata-macros" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "faf0c4a6ece9950b9abdb62b1cfcf2a68b3b67a10ba445b3bb85be2a293d0632" +dependencies = [ + "nom 7.1.3", +] + +[[package]] +name = "rustix" +version = "0.37.27" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "fea8ca367a3a01fe35e6943c400addf443c0f57670e6ec51196f71a4b8762dd2" +dependencies = [ + "bitflags 1.3.2", + "errno", + "io-lifetimes", + "libc", + "linux-raw-sys 0.3.8", + "windows-sys 0.48.0", +] + +[[package]] +name = "rustix" +version = "0.38.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ea3e1a662af26cd7a3ba09c0297a31af215563ecf42817c98df621387f4e949" +dependencies = [ + "bitflags 2.4.2", + "errno", + "libc", + "linux-raw-sys 0.4.13", + "windows-sys 0.52.0", +] + +[[package]] +name = "rustls" +version = "0.20.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b80e3dec595989ea8510028f30c408a4630db12c9cbb8de34203b89d6577e99" +dependencies = [ + "log", + "ring 0.16.20", + "sct", + "webpki", +] + +[[package]] +name = "rustls" +version = "0.21.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9d5a6813c0759e4609cd494e8e725babae6a2ca7b62a5536a13daaec6fcb7ba" +dependencies = [ + "log", + "ring 0.17.8", + "rustls-webpki 0.101.7", + "sct", +] + +[[package]] +name = "rustls" +version = "0.22.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e87c9956bd9807afa1f77e0f7594af32566e830e088a5576d27c5b6f30f49d41" +dependencies = [ + "log", + "ring 0.17.8", + "rustls-pki-types", + "rustls-webpki 0.102.2", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls-native-certs" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9aace74cb666635c918e9c12bc0d348266037aa8eb599b5cba565709a8dff00" +dependencies = [ + "openssl-probe", + "rustls-pemfile 1.0.4", + "schannel", + "security-framework", +] + +[[package]] +name = "rustls-native-certs" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f1fb85efa936c42c6d5fc28d2629bb51e4b2f4b8a5211e297d599cc5a093792" +dependencies = [ + "openssl-probe", + "rustls-pemfile 2.1.1", + "rustls-pki-types", + "schannel", + "security-framework", +] + +[[package]] +name = "rustls-pemfile" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" +dependencies = [ + "base64 0.21.7", +] + +[[package]] +name = "rustls-pemfile" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f48172685e6ff52a556baa527774f61fcaa884f59daf3375c62a3f1cd2549dab" +dependencies = [ + "base64 0.21.7", + "rustls-pki-types", +] + +[[package]] +name = "rustls-pki-types" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ede67b28608b4c60685c7d54122d4400d90f62b40caee7700e700380a390fa8" + +[[package]] +name = "rustls-webpki" +version = "0.101.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" +dependencies = [ + "ring 0.17.8", + "untrusted 0.9.0", +] + +[[package]] +name = "rustls-webpki" +version = "0.102.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "faaa0a62740bedb9b2ef5afa303da42764c012f743917351dc9a237ea1663610" +dependencies = [ + "ring 0.17.8", + "rustls-pki-types", + "untrusted 0.9.0", +] + +[[package]] +name = "rustversion" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4" + +[[package]] +name = "ryu" +version = "1.0.17" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1" + +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "schannel" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbc91545643bcf3a0bbb6569265615222618bdf33ce4ffbbd13c4bbd4c093534" +dependencies = [ + "windows-sys 0.52.0", +] + +[[package]] +name = "sct" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" +dependencies = [ + "ring 0.17.8", + "untrusted 0.9.0", +] + +[[package]] +name = "sec1" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3be24c1842290c45df0a7bf069e0c268a747ad05a192f2fd7dcfdbc1cba40928" +dependencies = [ + "base16ct", + "der", + "generic-array", + "pkcs8", + "subtle", + "zeroize", +] + +[[package]] +name = "security-framework" +version = "2.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05b64fb303737d99b81884b2c63433e9ae28abebe5eb5045dcdd175dc2ecf4de" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e932934257d3b408ed8f30db49d85ea163bfe74961f017f405b025af298f0c7a" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "semver" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92d43fe69e652f3df9bdc2b85b2854a0825b86e4fb76bc44d945137d053639ca" + +[[package]] +name = "serde" +version = "1.0.197" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.197" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "serde_json" +version = "1.0.114" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c5f09b1bd632ef549eaa9f60a1f8de742bdbc698e6cee2095fc84dde5f549ae0" +dependencies = [ + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "sha1" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "sha2" +version = "0.10.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "sharded-slab" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "signal-hook-registry" +version = "1.4.1" +source 
= "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8229b473baa5980ac72ef434c4415e70c4b5e71b423043adb4ba059f89c99a1" +dependencies = [ + "libc", +] + +[[package]] +name = "signature" +version = "1.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74233d3b3b2f6d4b006dc19dee745e73e2a6bfb6f93607cd3b02bd5b00797d7c" +dependencies = [ + "digest", + "rand_core", +] + +[[package]] +name = "sized-chunks" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16d69225bde7a69b235da73377861095455d298f2b970996eec25ddbb42b3d1e" +dependencies = [ + "bitmaps", + "typenum", +] + +[[package]] +name = "slab" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" +dependencies = [ + "autocfg", +] + +[[package]] +name = "smallvec" +version = "1.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6ecd384b10a64542d77071bd64bd7b231f4ed5940fba55e98c3de13824cf3d7" + +[[package]] +name = "smol" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13f2b548cd8447f8de0fdf1c592929f70f4fc7039a05e47404b0d096ec6987a1" +dependencies = [ + "async-channel 1.9.0", + "async-executor", + "async-fs", + "async-io 1.13.0", + "async-lock 2.8.0", + "async-net", + "async-process", + "blocking", + "futures-lite 1.13.0", +] + +[[package]] +name = "smtp-message" +version = "0.1.0" +source = "git+http://github.com/Alexis211/kannader?branch=feature/lmtp#0560e7c46af752344a3095add5f84b02400b1111" +dependencies = [ + "auto_enums", + "futures", + "idna 0.2.3", + "lazy_static", + "nom 6.1.2", + "pin-project", + "regex-automata 0.1.10", + "serde", +] + +[[package]] +name = "smtp-server" +version = "0.1.0" +source = "git+http://github.com/Alexis211/kannader?branch=feature/lmtp#0560e7c46af752344a3095add5f84b02400b1111" +dependencies = [ + "async-trait", + "chrono", + "duplexify", + "futures", + "smol", + "smtp-message", + "smtp-server-types", +] + +[[package]] +name = "smtp-server-types" +version = "0.1.0" +source = "git+http://github.com/Alexis211/kannader?branch=feature/lmtp#0560e7c46af752344a3095add5f84b02400b1111" +dependencies = [ + "serde", + "smtp-message", +] + +[[package]] +name = "socket2" +version = "0.4.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d" +dependencies = [ + "libc", + "winapi", +] + +[[package]] +name = "socket2" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05ffd9c0a93b7543e062e759284fcf5f5e3b098501104bfbdde4d404db792871" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "sodiumoxide" +version = "0.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e26be3acb6c2d9a7aac28482586a7856436af4cfe7100031d219de2d2ecb0028" +dependencies = [ + "ed25519", + "libc", + "libsodium-sys", + "serde", +] + +[[package]] +name = "spin" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" + +[[package]] +name = "spin" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" + +[[package]] +name = "spki" +version = "0.6.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "67cf02bbac7a337dc36e4f5a693db6c21e7863f45070f7064577eb4367a3212b" +dependencies = [ + "base64ct", + "der", +] + +[[package]] +name = "static_assertions" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" + +[[package]] +name = "strsim" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" + +[[package]] +name = "subtle" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" + +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.52" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b699d15b36d1f02c3e7c69f8ffef53de37aefae075d8488d4ba1a7788d574a07" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "sync_wrapper" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" + +[[package]] +name = "synstructure" +version = "0.12.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", + "unicode-xid", +] + +[[package]] +name = "tap" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" + +[[package]] +name = "termcolor" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "textwrap" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23d434d3f8967a09480fb04132ebe0a3e088c173e6d0ee7897abbdf4eab0f8b9" + +[[package]] +name = "thiserror" +version = "1.0.57" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e45bcbe8ed29775f228095caf2cd67af7a4ccf756ebff23a306bf3e8b47b24b" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.57" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a953cb265bef375dae3de6663da4d3804eee9682ea80d8e2542529b73c531c81" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "thread_local" +version = "1.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" +dependencies = [ + "cfg-if", + "once_cell", +] + +[[package]] +name = "time" +version = "0.3.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8248b6521bb14bc45b4067159b9b6ad792e2d6d754d6c41fb50e29fefe38749" +dependencies = [ + "deranged", + "itoa", + "num-conv", + "powerfmt", + "serde", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.2" +source 
= "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" + +[[package]] +name = "time-macros" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ba3a3ef41e6672a2f0f001392bb5dcd3ff0a9992d618ca761a11c3121547774" +dependencies = [ + "num-conv", + "time-core", +] + +[[package]] +name = "tinyvec" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tokio" +version = "1.36.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61285f6515fa018fb2d1e46eb21223fff441ee8db5d0f1435e8ab4f5cdb80931" +dependencies = [ + "backtrace", + "bytes", + "libc", + "mio", + "num_cpus", + "pin-project-lite 0.2.13", + "signal-hook-registry", + "socket2 0.5.6", + "tokio-macros", + "tracing", + "windows-sys 0.48.0", +] + +[[package]] +name = "tokio-io-timeout" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30b74022ada614a1b4834de765f9bb43877f910cc8ce4be40e89042c9223a8bf" +dependencies = [ + "pin-project-lite 0.2.13", + "tokio", +] + +[[package]] +name = "tokio-macros" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "tokio-rustls" +version = "0.23.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59" +dependencies = [ + "rustls 0.20.9", + "tokio", + "webpki", +] + +[[package]] +name = "tokio-rustls" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" +dependencies = [ + "rustls 0.21.10", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.25.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "775e0c0f0adb3a2f22a00c4745d728b479985fc15ee7ca6a2608388c5569860f" +dependencies = [ + "rustls 0.22.2", + "rustls-pki-types", + "tokio", +] + +[[package]] +name = "tokio-stream" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "397c988d37662c7dda6d2208364a706264bf3d6138b11d436cbac0ad38832842" +dependencies = [ + "futures-core", + "pin-project-lite 0.2.13", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.7.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15" +dependencies = [ + "bytes", + "futures-core", + "futures-io", + "futures-sink", + "pin-project-lite 0.2.13", + "tokio", + "tracing", +] + +[[package]] +name = "toml" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234" +dependencies = [ + "serde", +] + +[[package]] +name = "tonic" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d560933a0de61cf715926b9cac824d4c883c2c43142f787595e48280c40a1d0e" +dependencies = [ + "async-stream", + "async-trait", + "axum", + "base64 0.21.7", + "bytes", + "h2 0.3.24", + "http 0.2.12", + "http-body 0.4.6", + "hyper 0.14.28", + "hyper-timeout", + "percent-encoding", + "pin-project", + "prost", + "tokio", + "tokio-stream", + "tower", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" +dependencies = [ + "futures-core", + "futures-util", + "indexmap 1.9.3", + "pin-project", + "pin-project-lite 0.2.13", + "rand", + "slab", + "tokio", + "tokio-util", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-layer" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" + +[[package]] +name = "tower-service" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" + +[[package]] +name = "tracing" +version = "0.1.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +dependencies = [ + "log", + "pin-project-lite 0.2.13", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "tracing-core" +version = "0.1.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" +dependencies = [ + "matchers", + "nu-ansi-term", + "once_cell", + "regex", + "sharded-slab", + "smallvec", + "thread_local", + "tracing", + "tracing-core", + "tracing-log", +] + +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "typenum" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" + +[[package]] +name = "unicode-bidi" +version = "0.3.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" + +[[package]] +name = "unicode-ident" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" + +[[package]] +name = "unicode-normalization" +version = 
"0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-xid" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" + +[[package]] +name = "untrusted" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" + +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + +[[package]] +name = "url" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633" +dependencies = [ + "form_urlencoded", + "idna 0.5.0", + "percent-encoding", +] + +[[package]] +name = "urlencoding" +version = "2.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" + +[[package]] +name = "uuid" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f00cc9702ca12d3c81455259621e676d0f7251cec66a21e98fe2e9a37db93b2a" + +[[package]] +name = "valuable" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" + +[[package]] +name = "value-bag" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "126e423afe2dd9ac52142e7e9d5ce4135d7e13776c529d27fd6bc49f19e3280b" + +[[package]] +name = "version_check" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" + +[[package]] +name = "vsimd" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c3082ca00d5a5ef149bb8b555a72ae84c9c59f7250f013ac822ac2e49b19c64" + +[[package]] +name = "waker-fn" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3c4517f54858c779bbcbf228f4fca63d121bf85fbecb2dc578cdf4a39395690" + +[[package]] +name = "walkdir" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" +dependencies = [ + "same-file", + "winapi-util", +] + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" +dependencies = [ + "cfg-if", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2", + "quote", + "syn 2.0.52", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76bc14366121efc8dbb487ab05bcc9d346b3b5ec0eaa76e46594cabbe51762c0" +dependencies = [ + "cfg-if", + "js-sys", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96" + +[[package]] +name = "web-sys" +version = "0.3.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77afa9a11836342370f4817622a2f0f418b134426d91a82dfb48f532d2ec13ef" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "webpki" +version = "0.22.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed63aea5ce73d0ff405984102c42de94fc55a6b75765d621c65262469b3c9b53" +dependencies = [ + "ring 0.17.8", + "untrusted 0.9.0", +] + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596" +dependencies = [ + "winapi", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows-core" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" +dependencies = [ + "windows-targets 0.52.4", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.5", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.4", +] + +[[package]] +name = "windows-targets" 
+version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", +] + +[[package]] +name = "windows-targets" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7dd37b7e5ab9018759f893a1952c9420d060016fc19a472b4bb20d1bdd694d1b" +dependencies = [ + "windows_aarch64_gnullvm 0.52.4", + "windows_aarch64_msvc 0.52.4", + "windows_i686_gnu 0.52.4", + "windows_i686_msvc 0.52.4", + "windows_x86_64_gnu 0.52.4", + "windows_x86_64_gnullvm 0.52.4", + "windows_x86_64_msvc 0.52.4", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bcf46cf4c365c6f2d1cc93ce535f2c8b244591df96ceee75d8e83deb70a9cac9" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da9f259dd3bcf6990b55bffd094c4f7235817ba4ceebde8e6d11cd0c5633b675" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b474d8268f99e0995f25b9f095bc7434632601028cf86590aea5c8a5cb7801d3" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1515e9a29e5bed743cb4415a9ecf5dfca648ce85ee42e15873c3cd8610ff8e02" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5eee091590e89cc02ad514ffe3ead9eb6b660aedca2183455434b93546371a03" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77ca79f2451b49fa9e2af39f0747fe999fcda4f5e241b2898624dca97a1f2177" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8" + +[[package]] +name = "wyz" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85e60b0d1b5f99db2556934e21937020776a5d31520bf169e851ac44e6420214" + +[[package]] +name = "x509-parser" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fb9bace5b5589ffead1afb76e43e34cff39cd0f3ce7e170ae0c29e53b88eb1c" +dependencies = [ + "asn1-rs", + "base64 0.13.1", + "data-encoding", + "der-parser", + "lazy_static", + "nom 7.1.3", + "oid-registry", + "rusticata-macros", + "thiserror", + "time", +] + +[[package]] +name = "xmlparser" +version = "0.13.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "66fee0b777b0f5ac1c69bb06d361268faafa61cd4682ae064a171c16c433e9e4" + +[[package]] +name = "zeroize" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d" + +[[package]] +name = "zstd" +version = "0.9.2+zstd.1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2390ea1bf6c038c39674f22d95f0564725fc06034a47129179810b2fc58caa54" +dependencies = [ + "zstd-safe", +] + +[[package]] +name = "zstd-safe" +version = "4.1.3+zstd.1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e99d81b99fb3c2c2c794e3fe56c305c63d5173a16a46b5850b07c935ffc7db79" +dependencies = [ + "libc", + "zstd-sys", +] + +[[package]] +name = "zstd-sys" +version = "1.6.2+zstd.1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2daf2f248d9ea44454bfcb2516534e8b8ad2fc91bf818a1885495fc42bc8ac9f" +dependencies = [ + "cc", + "libc", +] diff --git a/fuzz/Cargo.toml b/fuzz/Cargo.toml new file mode 100644 index 0000000..25c1f15 --- /dev/null +++ b/fuzz/Cargo.toml @@ -0,0 +1,27 @@ +[package] +name = "aerogramme-fuzz" +version = "0.0.0" +publish = false +edition = "2021" + +[package.metadata] +cargo-fuzz = true + +[dependencies] +libfuzzer-sys = "0.4" +tokio = { version = "1.18", default-features = false, features = ["rt", "rt-multi-thread", "io-util", "net", "time", "macros", "sync", "signal", "fs"] } +quick-xml = { version = "0.31", features = ["async-tokio"] } + +[dependencies.aerogramme] +path = ".." 
+ +[patch.crates-io] +imap-types = { git = "https://github.com/superboum/imap-codec", branch = "custom/aerogramme" } +imap-codec = { git = "https://github.com/superboum/imap-codec", branch = "custom/aerogramme" } + +[[bin]] +name = "dav" +path = "fuzz_targets/dav.rs" +test = false +doc = false +bench = false diff --git a/fuzz/fuzz_targets/dav.rs b/fuzz/fuzz_targets/dav.rs new file mode 100644 index 0000000..7549a03 --- /dev/null +++ b/fuzz/fuzz_targets/dav.rs @@ -0,0 +1,48 @@ +#![no_main] + +use libfuzzer_sys::fuzz_target; +use aerogramme::dav::{types, realization, xml}; +use quick_xml::reader::NsReader; +use tokio::runtime::Runtime; +use tokio::io::AsyncWriteExt; + +async fn serialize(elem: &impl xml::QWrite) -> Vec<u8> { + let mut buffer = Vec::new(); + let mut tokio_buffer = tokio::io::BufWriter::new(&mut buffer); + let q = quick_xml::writer::Writer::new_with_indent(&mut tokio_buffer, b' ', 4); + let ns_to_apply = vec![ ("xmlns:D".into(), "DAV:".into()) ]; + let mut writer = xml::Writer { q, ns_to_apply }; + + elem.qwrite(&mut writer).await.expect("xml serialization"); + tokio_buffer.flush().await.expect("tokio buffer flush"); + + return buffer +} + +type Object = types::Multistatus<realization::Core, types::PropValue<realization::Core>>; + +fuzz_target!(|data: &[u8]| { + let rt = Runtime::new().expect("tokio runtime initialization"); + + rt.block_on(async { + // 1. Set up the fuzz case: try to decode the input, and silently skip inputs that do not parse (no crash expected at this stage). + let mut rdr = match xml::Reader::new(NsReader::from_reader(data)).await { + Err(_) => return, + Ok(r) => r, + }; + let reference = match rdr.find::<Object>().await { + Err(_) => return, + Ok(m) => m, + }; + + // 2. Re-serialize the input + let my_serialization = serialize(&reference).await; + + // 3. De-serialize my serialization + let mut rdr2 = xml::Reader::new(NsReader::from_reader(my_serialization.as_slice())).await.expect("XML Reader init"); + let comparison = rdr2.find::<Object>().await.expect("Deserialize again"); + + // 4.
Both the first decoding and last decoding must be identical + assert_eq!(reference, comparison); + }) +}); diff --git a/src/dav/mod.rs b/src/dav/mod.rs index bff95e7..906cfdd 100644 --- a/src/dav/mod.rs +++ b/src/dav/mod.rs @@ -1,11 +1,11 @@ // utils -mod error; -mod xml; +pub mod error; +pub mod xml; // webdav -mod types; -mod encoder; -mod decoder; +pub mod types; +pub mod encoder; +pub mod decoder; // calendar mod caltypes; @@ -17,7 +17,7 @@ mod acltypes; mod versioningtypes; // final type -mod realization; +pub mod realization; use std::net::SocketAddr; diff --git a/src/dav/xml.rs b/src/dav/xml.rs index d34322a..02263fd 100644 --- a/src/dav/xml.rs +++ b/src/dav/xml.rs @@ -12,7 +12,7 @@ pub const CARD_URN: &[u8] = b"urn:ietf:params:xml:ns:carddav"; // Async traits pub trait IWrite = AsyncWrite + Unpin; -pub trait IRead = AsyncBufRead + Unpin + 'static; +pub trait IRead = AsyncBufRead + Unpin; // Serialization/Deserialization traits pub trait QWrite { @@ -78,7 +78,7 @@ impl Reader { /// skip a node at current level /// I would like to make this one private but not ready pub async fn skip(&mut self) -> Result, ParsingError> { - println!("skipping inside node {:?}", self.parents.last()); + //println!("skipping inside node {:?}", self.parents.last()); match &self.cur { Event::Start(b) => { let _span = self.rdr.read_to_end_into_async(b.to_end().name(), &mut self.buf).await?; @@ -235,7 +235,7 @@ impl Reader { _ => return Err(ParsingError::Recoverable), }; - println!("open tag {:?}", evt); + //println!("open tag {:?}", evt); self.parents.push(evt.clone()); Ok(evt) } @@ -250,7 +250,7 @@ impl Reader { // find stop tag pub async fn close(&mut self) -> Result, ParsingError> { - println!("close tag {:?}", self.parents.last()); + //println!("close tag {:?}", self.parents.last()); // Handle the empty case if !self.parent_has_child() { diff --git a/src/lib.rs b/src/lib.rs new file mode 100644 index 0000000..f065478 --- /dev/null +++ b/src/lib.rs @@ -0,0 +1,19 @@ +#![feature(type_alias_impl_trait)] +#![feature(async_fn_in_trait)] +#![feature(async_closure)] +#![feature(trait_alias)] + +pub mod auth; +pub mod bayou; +pub mod config; +pub mod cryptoblob; +pub mod dav; +pub mod imap; +pub mod k2v_util; +pub mod lmtp; +pub mod login; +pub mod mail; +pub mod server; +pub mod storage; +pub mod timestamp; +pub mod user; diff --git a/src/main.rs b/src/main.rs index e098d44..43b4dca 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,23 +1,3 @@ -#![feature(type_alias_impl_trait)] -#![feature(async_fn_in_trait)] -#![feature(async_closure)] -#![feature(trait_alias)] - -mod auth; -mod bayou; -mod config; -mod cryptoblob; -mod dav; -mod imap; -mod k2v_util; -mod lmtp; -mod login; -mod mail; -mod server; -mod storage; -mod timestamp; -mod user; - use std::io::Read; use std::path::PathBuf; @@ -25,9 +5,9 @@ use anyhow::{bail, Context, Result}; use clap::{Parser, Subcommand}; use nix::{sys::signal, unistd::Pid}; -use config::*; -use login::{static_provider::*, *}; -use server::Server; +use aerogramme::config::*; +use aerogramme::login::{static_provider::*, *}; +use aerogramme::server::Server; #[derive(Parser, Debug)] #[clap(author, version, about, long_about = None)] -- cgit v1.2.3 From 1a43ce5ac7033c148f64a033f2b1d335e95e11d5 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Fri, 8 Mar 2024 08:17:03 +0100 Subject: WIP refactor --- Cargo.lock | 1785 ++----------- Cargo.toml | 41 +- aero-bayou/Cargo.toml | 17 + aero-bayou/src/lib.rs | 517 ++++ aero-bayou/src/timestamp.rs | 66 + aero-collections/mail/incoming.rs | 445 
++++ aero-collections/mail/mailbox.rs | 524 ++++ aero-collections/mail/mod.rs | 27 + aero-collections/mail/namespace.rs | 209 ++ aero-collections/mail/query.rs | 137 + aero-collections/mail/snapshot.rs | 60 + aero-collections/mail/uidindex.rs | 474 ++++ aero-collections/mail/unique_ident.rs | 101 + aero-collections/user.rs | 313 +++ aero-dav/.gitignore | 1 + aero-dav/Cargo.toml | 14 + aero-dav/fuzz/.gitignore | 4 + aero-dav/fuzz/Cargo.lock | 4249 ++++++++++++++++++++++++++++++ aero-dav/fuzz/Cargo.toml | 24 + aero-dav/fuzz/dav.dict | 126 + aero-dav/fuzz/fuzz_targets/dav.rs | 196 ++ aero-dav/src/acltypes.rs | 4 + aero-dav/src/caldecoder.rs | 33 + aero-dav/src/calencoder.rs | 886 +++++++ aero-dav/src/caltypes.rs | 1453 ++++++++++ aero-dav/src/decoder.rs | 947 +++++++ aero-dav/src/encoder.rs | 1112 ++++++++ aero-dav/src/error.rs | 42 + aero-dav/src/lib.rs | 25 + aero-dav/src/realization.rs | 42 + aero-dav/src/types.rs | 949 +++++++ aero-dav/src/versioningtypes.rs | 3 + aero-dav/src/xml.rs | 274 ++ aero-proto/dav.rs | 145 + aero-proto/imap/attributes.rs | 77 + aero-proto/imap/capability.rs | 159 ++ aero-proto/imap/command/anonymous.rs | 83 + aero-proto/imap/command/anystate.rs | 54 + aero-proto/imap/command/authenticated.rs | 683 +++++ aero-proto/imap/command/mod.rs | 20 + aero-proto/imap/command/selected.rs | 424 +++ aero-proto/imap/flags.rs | 30 + aero-proto/imap/flow.rs | 114 + aero-proto/imap/imf_view.rs | 109 + aero-proto/imap/index.rs | 211 ++ aero-proto/imap/mail_view.rs | 306 +++ aero-proto/imap/mailbox_view.rs | 772 ++++++ aero-proto/imap/mime_view.rs | 580 ++++ aero-proto/imap/mod.rs | 421 +++ aero-proto/imap/request.rs | 9 + aero-proto/imap/response.rs | 124 + aero-proto/imap/search.rs | 477 ++++ aero-proto/imap/session.rs | 173 ++ aero-proto/lmtp.rs | 221 ++ aero-proto/sasl.rs | 140 + aero-sasl/Cargo.toml | 22 + aero-sasl/src/decode.rs | 243 ++ aero-sasl/src/encode.rs | 157 ++ aero-sasl/src/flow.rs | 201 ++ aero-sasl/src/lib.rs | 43 + aero-sasl/src/types.rs | 163 ++ aero-user/Cargo.toml | 30 + aero-user/src/config.rs | 191 ++ aero-user/src/cryptoblob.rs | 67 + aero-user/src/lib.rs | 9 + aero-user/src/login/demo_provider.rs | 51 + aero-user/src/login/ldap_provider.rs | 264 ++ aero-user/src/login/mod.rs | 245 ++ aero-user/src/login/static_provider.rs | 188 ++ aero-user/src/storage/garage.rs | 538 ++++ aero-user/src/storage/in_memory.rs | 334 +++ aero-user/src/storage/mod.rs | 180 ++ aerogramme/Cargo.toml | 12 + aerogramme/src/k2v_util.rs | 26 + aerogramme/src/lib.rs | 19 + aerogramme/src/main.rs | 407 +++ aerogramme/src/server.rs | 147 ++ flake.nix | 8 +- fuzz/.gitignore | 4 - fuzz/Cargo.lock | 4249 ------------------------------ fuzz/Cargo.toml | 27 - fuzz/fuzz_targets/dav.rs | 48 - src/auth.rs | 941 ------- src/bayou.rs | 514 ---- src/config.rs | 191 -- src/cryptoblob.rs | 67 - src/dav/acltypes.rs | 4 - src/dav/caldecoder.rs | 33 - src/dav/calencoder.rs | 886 ------- src/dav/caltypes.rs | 1440 ---------- src/dav/decoder.rs | 948 ------- src/dav/encoder.rs | 1117 -------- src/dav/error.rs | 42 - src/dav/mod.rs | 167 -- src/dav/realization.rs | 42 - src/dav/types.rs | 950 ------- src/dav/versioningtypes.rs | 3 - src/dav/xml.rs | 273 -- src/imap/attributes.rs | 77 - src/imap/capability.rs | 159 -- src/imap/command/anonymous.rs | 83 - src/imap/command/anystate.rs | 54 - src/imap/command/authenticated.rs | 683 ----- src/imap/command/mod.rs | 20 - src/imap/command/selected.rs | 424 --- src/imap/flags.rs | 30 - src/imap/flow.rs | 114 - src/imap/imf_view.rs | 109 - src/imap/index.rs | 211 
-- src/imap/mail_view.rs | 306 --- src/imap/mailbox_view.rs | 772 ------ src/imap/mime_view.rs | 580 ---- src/imap/mod.rs | 421 --- src/imap/request.rs | 9 - src/imap/response.rs | 124 - src/imap/search.rs | 477 ---- src/imap/session.rs | 173 -- src/k2v_util.rs | 26 - src/lib.rs | 19 - src/lmtp.rs | 221 -- src/login/demo_provider.rs | 51 - src/login/ldap_provider.rs | 265 -- src/login/mod.rs | 245 -- src/login/static_provider.rs | 189 -- src/mail/incoming.rs | 445 ---- src/mail/mailbox.rs | 524 ---- src/mail/mod.rs | 27 - src/mail/namespace.rs | 209 -- src/mail/query.rs | 137 - src/mail/snapshot.rs | 60 - src/mail/uidindex.rs | 474 ---- src/mail/unique_ident.rs | 101 - src/main.rs | 407 --- src/server.rs | 147 -- src/storage/garage.rs | 538 ---- src/storage/in_memory.rs | 334 --- src/storage/mod.rs | 179 -- src/timestamp.rs | 65 - src/user.rs | 313 --- 139 files changed, 22372 insertions(+), 23353 deletions(-) create mode 100644 aero-bayou/Cargo.toml create mode 100644 aero-bayou/src/lib.rs create mode 100644 aero-bayou/src/timestamp.rs create mode 100644 aero-collections/mail/incoming.rs create mode 100644 aero-collections/mail/mailbox.rs create mode 100644 aero-collections/mail/mod.rs create mode 100644 aero-collections/mail/namespace.rs create mode 100644 aero-collections/mail/query.rs create mode 100644 aero-collections/mail/snapshot.rs create mode 100644 aero-collections/mail/uidindex.rs create mode 100644 aero-collections/mail/unique_ident.rs create mode 100644 aero-collections/user.rs create mode 100644 aero-dav/.gitignore create mode 100644 aero-dav/Cargo.toml create mode 100644 aero-dav/fuzz/.gitignore create mode 100644 aero-dav/fuzz/Cargo.lock create mode 100644 aero-dav/fuzz/Cargo.toml create mode 100644 aero-dav/fuzz/dav.dict create mode 100644 aero-dav/fuzz/fuzz_targets/dav.rs create mode 100644 aero-dav/src/acltypes.rs create mode 100644 aero-dav/src/caldecoder.rs create mode 100644 aero-dav/src/calencoder.rs create mode 100644 aero-dav/src/caltypes.rs create mode 100644 aero-dav/src/decoder.rs create mode 100644 aero-dav/src/encoder.rs create mode 100644 aero-dav/src/error.rs create mode 100644 aero-dav/src/lib.rs create mode 100644 aero-dav/src/realization.rs create mode 100644 aero-dav/src/types.rs create mode 100644 aero-dav/src/versioningtypes.rs create mode 100644 aero-dav/src/xml.rs create mode 100644 aero-proto/dav.rs create mode 100644 aero-proto/imap/attributes.rs create mode 100644 aero-proto/imap/capability.rs create mode 100644 aero-proto/imap/command/anonymous.rs create mode 100644 aero-proto/imap/command/anystate.rs create mode 100644 aero-proto/imap/command/authenticated.rs create mode 100644 aero-proto/imap/command/mod.rs create mode 100644 aero-proto/imap/command/selected.rs create mode 100644 aero-proto/imap/flags.rs create mode 100644 aero-proto/imap/flow.rs create mode 100644 aero-proto/imap/imf_view.rs create mode 100644 aero-proto/imap/index.rs create mode 100644 aero-proto/imap/mail_view.rs create mode 100644 aero-proto/imap/mailbox_view.rs create mode 100644 aero-proto/imap/mime_view.rs create mode 100644 aero-proto/imap/mod.rs create mode 100644 aero-proto/imap/request.rs create mode 100644 aero-proto/imap/response.rs create mode 100644 aero-proto/imap/search.rs create mode 100644 aero-proto/imap/session.rs create mode 100644 aero-proto/lmtp.rs create mode 100644 aero-proto/sasl.rs create mode 100644 aero-sasl/Cargo.toml create mode 100644 aero-sasl/src/decode.rs create mode 100644 aero-sasl/src/encode.rs create mode 100644 aero-sasl/src/flow.rs 
create mode 100644 aero-sasl/src/lib.rs create mode 100644 aero-sasl/src/types.rs create mode 100644 aero-user/Cargo.toml create mode 100644 aero-user/src/config.rs create mode 100644 aero-user/src/cryptoblob.rs create mode 100644 aero-user/src/lib.rs create mode 100644 aero-user/src/login/demo_provider.rs create mode 100644 aero-user/src/login/ldap_provider.rs create mode 100644 aero-user/src/login/mod.rs create mode 100644 aero-user/src/login/static_provider.rs create mode 100644 aero-user/src/storage/garage.rs create mode 100644 aero-user/src/storage/in_memory.rs create mode 100644 aero-user/src/storage/mod.rs create mode 100644 aerogramme/Cargo.toml create mode 100644 aerogramme/src/k2v_util.rs create mode 100644 aerogramme/src/lib.rs create mode 100644 aerogramme/src/main.rs create mode 100644 aerogramme/src/server.rs delete mode 100644 fuzz/.gitignore delete mode 100644 fuzz/Cargo.lock delete mode 100644 fuzz/Cargo.toml delete mode 100644 fuzz/fuzz_targets/dav.rs delete mode 100644 src/auth.rs delete mode 100644 src/bayou.rs delete mode 100644 src/config.rs delete mode 100644 src/cryptoblob.rs delete mode 100644 src/dav/acltypes.rs delete mode 100644 src/dav/caldecoder.rs delete mode 100644 src/dav/calencoder.rs delete mode 100644 src/dav/caltypes.rs delete mode 100644 src/dav/decoder.rs delete mode 100644 src/dav/encoder.rs delete mode 100644 src/dav/error.rs delete mode 100644 src/dav/mod.rs delete mode 100644 src/dav/realization.rs delete mode 100644 src/dav/types.rs delete mode 100644 src/dav/versioningtypes.rs delete mode 100644 src/dav/xml.rs delete mode 100644 src/imap/attributes.rs delete mode 100644 src/imap/capability.rs delete mode 100644 src/imap/command/anonymous.rs delete mode 100644 src/imap/command/anystate.rs delete mode 100644 src/imap/command/authenticated.rs delete mode 100644 src/imap/command/mod.rs delete mode 100644 src/imap/command/selected.rs delete mode 100644 src/imap/flags.rs delete mode 100644 src/imap/flow.rs delete mode 100644 src/imap/imf_view.rs delete mode 100644 src/imap/index.rs delete mode 100644 src/imap/mail_view.rs delete mode 100644 src/imap/mailbox_view.rs delete mode 100644 src/imap/mime_view.rs delete mode 100644 src/imap/mod.rs delete mode 100644 src/imap/request.rs delete mode 100644 src/imap/response.rs delete mode 100644 src/imap/search.rs delete mode 100644 src/imap/session.rs delete mode 100644 src/k2v_util.rs delete mode 100644 src/lib.rs delete mode 100644 src/lmtp.rs delete mode 100644 src/login/demo_provider.rs delete mode 100644 src/login/ldap_provider.rs delete mode 100644 src/login/mod.rs delete mode 100644 src/login/static_provider.rs delete mode 100644 src/mail/incoming.rs delete mode 100644 src/mail/mailbox.rs delete mode 100644 src/mail/mod.rs delete mode 100644 src/mail/namespace.rs delete mode 100644 src/mail/query.rs delete mode 100644 src/mail/snapshot.rs delete mode 100644 src/mail/uidindex.rs delete mode 100644 src/mail/unique_ident.rs delete mode 100644 src/main.rs delete mode 100644 src/server.rs delete mode 100644 src/storage/garage.rs delete mode 100644 src/storage/in_memory.rs delete mode 100644 src/storage/mod.rs delete mode 100644 src/timestamp.rs delete mode 100644 src/user.rs diff --git a/Cargo.lock b/Cargo.lock index a4af312..20b9d95 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2,15 +2,6 @@ # It is not intended for manual editing. 
version = 3 -[[package]] -name = "abnf-core" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec182d1f071b906a9f59269c89af101515a5cbe58f723eb6717e7fe7445c0dea" -dependencies = [ - "nom 7.1.3", -] - [[package]] name = "addr2line" version = "0.21.0" @@ -27,7 +18,45 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" [[package]] -name = "aerogramme" +name = "aero-bayou" +version = "0.3.0" +dependencies = [ + "aero-user", + "anyhow", + "log", + "rand", + "serde", + "tokio", +] + +[[package]] +name = "aero-dav" +version = "0.3.0" +dependencies = [ + "chrono", + "futures", + "http 1.1.0", + "quick-xml", + "tokio", +] + +[[package]] +name = "aero-sasl" +version = "0.3.0" +dependencies = [ + "anyhow", + "base64 0.21.7", + "futures", + "hex", + "nom 7.1.3", + "rand", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "aero-user" version = "0.3.0" dependencies = [ "anyhow", @@ -37,85 +66,53 @@ dependencies = [ "aws-sdk-s3", "aws-smithy-runtime", "aws-smithy-runtime-api", - "backtrace", "base64 0.21.7", - "chrono", - "clap", - "console-subscriber", - "duplexify", - "eml-codec", - "futures", - "hex", - "http 1.0.0", - "http-body-util", - "hyper 1.2.0", "hyper-rustls 0.26.0", "hyper-util", - "im", - "imap-codec", - "imap-flow", - "itertools", "k2v-client", - "lazy_static", "ldap3", "log", - "nix", - "nom 7.1.3", - "quick-xml", "rand", "rmp-serde", - "rpassword", - "rustls 0.22.2", - "rustls-pemfile 2.0.0", "serde", - "smtp-message", - "smtp-server", "sodiumoxide", - "thiserror", "tokio", - "tokio-rustls 0.25.0", - "tokio-util", "toml", "tracing", - "tracing-subscriber", "zstd", ] [[package]] -name = "aho-corasick" -version = "1.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0" +name = "aerogramme-fuzz" +version = "0.0.0" dependencies = [ - "memchr", + "aero-dav", + "arbitrary", + "libfuzzer-sys", + "quick-xml", + "tokio", ] [[package]] -name = "android-tzdata" -version = "0.1.1" +name = "anyhow" +version = "1.0.79" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" +checksum = "080e9890a082662b09c1ad45f567faeeb47f22b5fb23895fbe1e651e718e25ca" [[package]] -name = "android_system_properties" -version = "0.1.5" +name = "arbitrary" +version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +checksum = "7d5a26814d8dcb93b0e5a0ff3c6d80a8843bafb21b39e8e18a6f05471870e110" dependencies = [ - "libc", + "derive_arbitrary", ] -[[package]] -name = "anyhow" -version = "1.0.79" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "080e9890a082662b09c1ad45f567faeeb47f22b5fb23895fbe1e651e718e25ca" - [[package]] name = "argon2" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17ba4cac0a46bc1d2912652a751c47f2a9f3a7fe89bcae2275d418f5270402f9" +checksum = "3c3610892ee6e0cbce8ae2700349fcf8f98adb0dbfbee85aec3c9179d29cc072" dependencies = [ "base64ct", "blake2", @@ -123,12 +120,6 @@ dependencies = [ "password-hash", ] -[[package]] -name = "arrayvec" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b" - [[package]] name = "asn1-rs" version = "0.3.1" @@ -168,230 +159,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "async-channel" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35" -dependencies = [ - "concurrent-queue", - "event-listener 2.5.3", - "futures-core", -] - -[[package]] -name = "async-channel" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ca33f4bc4ed1babef42cad36cc1f51fa88be00420404e5b1e80ab1b18f7678c" -dependencies = [ - "concurrent-queue", - "event-listener 4.0.3", - "event-listener-strategy", - "futures-core", - "pin-project-lite 0.2.13", -] - -[[package]] -name = "async-executor" -version = "1.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17ae5ebefcc48e7452b4987947920dac9450be1110cadf34d1b8c116bdbaf97c" -dependencies = [ - "async-lock 3.3.0", - "async-task", - "concurrent-queue", - "fastrand 2.0.1", - "futures-lite 2.2.0", - "slab", -] - -[[package]] -name = "async-fs" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "279cf904654eeebfa37ac9bb1598880884924aab82e290aa65c9e77a0e142e06" -dependencies = [ - "async-lock 2.8.0", - "autocfg", - "blocking", - "futures-lite 1.13.0", -] - -[[package]] -name = "async-global-executor" -version = "2.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05b1b633a2115cd122d73b955eadd9916c18c8f510ec9cd1686404c60ad1c29c" -dependencies = [ - "async-channel 2.1.1", - "async-executor", - "async-io 2.3.0", - "async-lock 3.3.0", - "blocking", - "futures-lite 2.2.0", - "once_cell", -] - -[[package]] -name = "async-io" -version = "1.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fc5b45d93ef0529756f812ca52e44c221b35341892d3dcc34132ac02f3dd2af" -dependencies = [ - "async-lock 2.8.0", - "autocfg", - "cfg-if", - "concurrent-queue", - "futures-lite 1.13.0", - "log", - "parking", - "polling 2.8.0", - "rustix 0.37.27", - "slab", - "socket2 0.4.10", - "waker-fn", -] - -[[package]] -name = "async-io" -version = "2.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb41eb19024a91746eba0773aa5e16036045bbf45733766661099e182ea6a744" -dependencies = [ - "async-lock 3.3.0", - "cfg-if", - "concurrent-queue", - "futures-io", - "futures-lite 2.2.0", - "parking", - "polling 3.3.2", - "rustix 0.38.30", - "slab", - "tracing", - "windows-sys 0.52.0", -] - -[[package]] -name = "async-lock" -version = "2.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "287272293e9d8c41773cec55e365490fe034813a2f172f502d6ddcf75b2f582b" -dependencies = [ - "event-listener 2.5.3", -] - -[[package]] -name = "async-lock" -version = "3.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d034b430882f8381900d3fe6f0aaa3ad94f2cb4ac519b429692a1bc2dda4ae7b" -dependencies = [ - "event-listener 4.0.3", - "event-listener-strategy", - "pin-project-lite 0.2.13", -] - -[[package]] -name = "async-net" -version = "1.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0434b1ed18ce1cf5769b8ac540e33f01fa9471058b5e89da9e06f3c882a8c12f" -dependencies = [ - "async-io 1.13.0", - "blocking", - "futures-lite 1.13.0", -] - -[[package]] -name = "async-process" -version = "1.8.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea6438ba0a08d81529c69b36700fa2f95837bfe3e776ab39cde9c14d9149da88" -dependencies = [ - "async-io 1.13.0", - "async-lock 2.8.0", - "async-signal", - "blocking", - "cfg-if", - "event-listener 3.1.0", - "futures-lite 1.13.0", - "rustix 0.38.30", - "windows-sys 0.48.0", -] - -[[package]] -name = "async-signal" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e47d90f65a225c4527103a8d747001fc56e375203592b25ad103e1ca13124c5" -dependencies = [ - "async-io 2.3.0", - "async-lock 2.8.0", - "atomic-waker", - "cfg-if", - "futures-core", - "futures-io", - "rustix 0.38.30", - "signal-hook-registry", - "slab", - "windows-sys 0.48.0", -] - -[[package]] -name = "async-std" -version = "1.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62565bb4402e926b29953c785397c6dc0391b7b446e45008b0049eb43cec6f5d" -dependencies = [ - "async-channel 1.9.0", - "async-global-executor", - "async-io 1.13.0", - "async-lock 2.8.0", - "crossbeam-utils", - "futures-channel", - "futures-core", - "futures-io", - "futures-lite 1.13.0", - "gloo-timers", - "kv-log-macro", - "log", - "memchr", - "once_cell", - "pin-project-lite 0.2.13", - "pin-utils", - "slab", - "wasm-bindgen-futures", -] - -[[package]] -name = "async-stream" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd56dd203fef61ac097dd65721a419ddccb106b2d2b70ba60a6b529f03961a51" -dependencies = [ - "async-stream-impl", - "futures-core", - "pin-project-lite 0.2.13", -] - -[[package]] -name = "async-stream-impl" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.48", -] - -[[package]] -name = "async-task" -version = "4.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbb36e985947064623dbd357f727af08ffd077f93d696782f3c56365fa2e2799" - [[package]] name = "async-trait" version = "0.1.77" @@ -403,57 +170,6 @@ dependencies = [ "syn 2.0.48", ] -[[package]] -name = "atomic-waker" -version = "1.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" - -[[package]] -name = "atty" -version = "0.2.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" -dependencies = [ - "hermit-abi 0.1.19", - "libc", - "winapi", -] - -[[package]] -name = "auto_enums" -version = "0.7.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe0dfe45d75158751e195799f47ea02e81f570aa24bc5ef999cdd9e888c4b5c3" -dependencies = [ - "auto_enums_core", - "auto_enums_derive", -] - -[[package]] -name = "auto_enums_core" -version = "0.7.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da47c46001293a2c4b744d731958be22cff408a2ab76e2279328f9713b1267b4" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "auto_enums_derive" -version = "0.7.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41aed1da83ecdc799503b7cb94da1b45a34d72b49caf40a61d9cf5b88ec07cfd" -dependencies = [ - "autocfg", - "derive_utils", - "proc-macro2", - "quote", - "syn 1.0.109", -] - [[package]] name = "autocfg" version = "1.1.0" @@ -462,9 
+178,9 @@ checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" [[package]] name = "aws-config" -version = "1.1.6" +version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3182c19847238b50b62ae0383a6dbfc14514e552eb5e307e1ea83ccf5840b8a6" +checksum = "0b96342ea8948ab9bef3e6234ea97fc32e2d8a88d8fb6a084e52267317f94b6b" dependencies = [ "aws-credential-types", "aws-runtime", @@ -479,9 +195,9 @@ dependencies = [ "aws-smithy-types", "aws-types", "bytes", - "fastrand 2.0.1", + "fastrand", "hex", - "http 0.2.11", + "http 0.2.12", "hyper 0.14.28", "ring 0.17.7", "time", @@ -492,9 +208,9 @@ dependencies = [ [[package]] name = "aws-credential-types" -version = "1.1.6" +version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5635d8707f265c773282a22abe1ecd4fbe96a8eb2f0f14c0796f8016f11a41a" +checksum = "273fa47dafc9ef14c2c074ddddbea4561ff01b7f68d5091c0e9737ced605c01d" dependencies = [ "aws-smithy-async", "aws-smithy-runtime-api", @@ -504,9 +220,9 @@ dependencies = [ [[package]] name = "aws-runtime" -version = "1.1.6" +version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f82b9ae2adfd9d6582440d0eeb394c07f74d21b4c0cc72bdb73735c9e1a9c0e" +checksum = "6e38bab716c8bf07da24be07ecc02e0f5656ce8f30a891322ecdcb202f943b85" dependencies = [ "aws-credential-types", "aws-sigv4", @@ -517,20 +233,20 @@ dependencies = [ "aws-smithy-types", "aws-types", "bytes", - "fastrand 2.0.1", - "http 0.2.11", + "fastrand", + "http 0.2.12", "http-body 0.4.6", "percent-encoding", - "pin-project-lite 0.2.13", + "pin-project-lite", "tracing", "uuid", ] [[package]] name = "aws-sdk-config" -version = "1.15.0" +version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0cb71960e3e197c3f512f3bf0f47f444acd708db59733416107ec2ff161ff5c4" +checksum = "07979fd68679736ba306d6ea2a4dc2fd835ac4d454942c5d8920ef83ed2f979f" dependencies = [ "aws-credential-types", "aws-runtime", @@ -542,7 +258,7 @@ dependencies = [ "aws-smithy-types", "aws-types", "bytes", - "http 0.2.11", + "http 0.2.12", "once_cell", "regex-lite", "tracing", @@ -550,9 +266,9 @@ dependencies = [ [[package]] name = "aws-sdk-s3" -version = "1.16.0" +version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5076637347e7d0218e61facae853110682ae58efabd2f4e2a9e530c203d5fa7b" +checksum = "93d35d39379445970fc3e4ddf7559fff2c32935ce0b279f9cb27080d6b7c6d94" dependencies = [ "aws-credential-types", "aws-runtime", @@ -568,7 +284,7 @@ dependencies = [ "aws-smithy-xml", "aws-types", "bytes", - "http 0.2.11", + "http 0.2.12", "http-body 0.4.6", "once_cell", "percent-encoding", @@ -579,9 +295,9 @@ dependencies = [ [[package]] name = "aws-sdk-sso" -version = "1.14.0" +version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca7e8097448832fcd22faf6bb227e97d76b40e354509d1307653a885811c7151" +checksum = "d84bd3925a17c9adbf6ec65d52104a44a09629d8f70290542beeee69a95aee7f" dependencies = [ "aws-credential-types", "aws-runtime", @@ -593,7 +309,7 @@ dependencies = [ "aws-smithy-types", "aws-types", "bytes", - "http 0.2.11", + "http 0.2.12", "once_cell", "regex-lite", "tracing", @@ -601,9 +317,9 @@ dependencies = [ [[package]] name = "aws-sdk-ssooidc" -version = "1.14.0" +version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a75073590e23d63044606771afae309fada8eb10ded54a1ce4598347221d3fef" +checksum = 
"2c2dae39e997f58bc4d6292e6244b26ba630c01ab671b6f9f44309de3eb80ab8" dependencies = [ "aws-credential-types", "aws-runtime", @@ -615,7 +331,7 @@ dependencies = [ "aws-smithy-types", "aws-types", "bytes", - "http 0.2.11", + "http 0.2.12", "once_cell", "regex-lite", "tracing", @@ -623,9 +339,9 @@ dependencies = [ [[package]] name = "aws-sdk-sts" -version = "1.14.0" +version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "650e4aaae41547151dea4d8142f7ffcc8ab8ba76d5dccc8933936ef2102c3356" +checksum = "17fd9a53869fee17cea77e352084e1aa71e2c5e323d974c13a9c2bcfd9544c7f" dependencies = [ "aws-credential-types", "aws-runtime", @@ -638,7 +354,7 @@ dependencies = [ "aws-smithy-types", "aws-smithy-xml", "aws-types", - "http 0.2.11", + "http 0.2.12", "once_cell", "regex-lite", "tracing", @@ -646,9 +362,9 @@ dependencies = [ [[package]] name = "aws-sigv4" -version = "1.1.6" +version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "404c64a104188ac70dd1684718765cb5559795458e446480e41984e68e57d888" +checksum = "8ada00a4645d7d89f296fe0ddbc3fe3554f03035937c849a05d37ddffc1f29a1" dependencies = [ "aws-credential-types", "aws-smithy-eventstream", @@ -660,8 +376,8 @@ dependencies = [ "form_urlencoded", "hex", "hmac", - "http 0.2.11", - "http 1.0.0", + "http 0.2.12", + "http 1.1.0", "once_cell", "p256", "percent-encoding", @@ -680,7 +396,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fcf7f09a27286d84315dfb9346208abb3b0973a692454ae6d0bc8d803fcce3b4" dependencies = [ "futures-util", - "pin-project-lite 0.2.13", + "pin-project-lite", "tokio", ] @@ -696,10 +412,10 @@ dependencies = [ "crc32c", "crc32fast", "hex", - "http 0.2.11", + "http 0.2.12", "http-body 0.4.6", "md-5", - "pin-project-lite 0.2.13", + "pin-project-lite", "sha1", "sha2", "tracing", @@ -728,11 +444,11 @@ dependencies = [ "bytes", "bytes-utils", "futures-core", - "http 0.2.11", + "http 0.2.12", "http-body 0.4.6", "once_cell", "percent-encoding", - "pin-project-lite 0.2.13", + "pin-project-lite", "pin-utils", "tracing", ] @@ -767,14 +483,14 @@ dependencies = [ "aws-smithy-runtime-api", "aws-smithy-types", "bytes", - "fastrand 2.0.1", + "fastrand", "h2 0.3.24", - "http 0.2.11", + "http 0.2.12", "http-body 0.4.6", "hyper 0.14.28", "hyper-rustls 0.24.2", "once_cell", - "pin-project-lite 0.2.13", + "pin-project-lite", "pin-utils", "rustls 0.21.10", "tokio", @@ -790,9 +506,9 @@ dependencies = [ "aws-smithy-async", "aws-smithy-types", "bytes", - "http 0.2.11", - "http 1.0.0", - "pin-project-lite 0.2.13", + "http 0.2.12", + "http 1.1.0", + "pin-project-lite", "tokio", "tracing", "zeroize", @@ -808,11 +524,11 @@ dependencies = [ "bytes", "bytes-utils", "futures-core", - "http 0.2.11", + "http 0.2.12", "http-body 0.4.6", "itoa", "num-integer", - "pin-project-lite 0.2.13", + "pin-project-lite", "pin-utils", "ryu", "serde", @@ -832,64 +548,19 @@ dependencies = [ [[package]] name = "aws-types" -version = "1.1.6" +version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fbb5d48aae496f628e7aa2e41991dd4074f606d9e3ade1ce1059f293d40f9a2" +checksum = "d07c63521aa1ea9a9f92a701f1a08ce3fd20b46c6efc0d5c8947c1fd879e3df1" dependencies = [ "aws-credential-types", "aws-smithy-async", "aws-smithy-runtime-api", "aws-smithy-types", - "http 0.2.11", + "http 0.2.12", "rustc_version", "tracing", ] -[[package]] -name = "axum" -version = "0.6.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"3b829e4e32b91e643de6eafe82b1d90675f5874230191a4ffbc1b336dec4d6bf" -dependencies = [ - "async-trait", - "axum-core", - "bitflags 1.3.2", - "bytes", - "futures-util", - "http 0.2.11", - "http-body 0.4.6", - "hyper 0.14.28", - "itoa", - "matchit", - "memchr", - "mime", - "percent-encoding", - "pin-project-lite 0.2.13", - "rustversion", - "serde", - "sync_wrapper", - "tower", - "tower-layer", - "tower-service", -] - -[[package]] -name = "axum-core" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c" -dependencies = [ - "async-trait", - "bytes", - "futures-util", - "http 0.2.11", - "http-body 0.4.6", - "mime", - "rustversion", - "tower-layer", - "tower-service", -] - [[package]] name = "backtrace" version = "0.3.69" @@ -945,33 +616,6 @@ version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" -[[package]] -name = "bitflags" -version = "2.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed570934406eb16438a4e976b1b4500774099c13b8cb96eec99f620f05090ddf" - -[[package]] -name = "bitmaps" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "031043d04099746d8db04daf1fa424b2bc8bd69d92b25962dcde24da39ab64a2" -dependencies = [ - "typenum", -] - -[[package]] -name = "bitvec" -version = "0.19.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55f93d0ef3363c364d5976646a38f04cf67cfe1d4c8d160cdea02cab2c116b33" -dependencies = [ - "funty", - "radium", - "tap", - "wyz", -] - [[package]] name = "blake2" version = "0.10.6" @@ -990,42 +634,6 @@ dependencies = [ "generic-array", ] -[[package]] -name = "blocking" -version = "1.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a37913e8dc4ddcc604f0c6d3bf2887c995153af3611de9e23c352b44c1b9118" -dependencies = [ - "async-channel 2.1.1", - "async-lock 3.3.0", - "async-task", - "fastrand 2.0.1", - "futures-io", - "futures-lite 2.2.0", - "piper", - "tracing", -] - -[[package]] -name = "bounded-static" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2325bd33fa7e3018e7e37f5b0591ba009124963b5a3f8b7cae6d0a8c1028ed4" -dependencies = [ - "bounded-static-derive", -] - -[[package]] -name = "bounded-static-derive" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f10dd247355bf631d98d2753d87ae62c84c8dcb996ad9b24a4168e0aec29bd6b" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.48", -] - [[package]] name = "bumpalo" version = "3.14.0" @@ -1072,101 +680,11 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chrono" -version = "0.4.31" +version = "0.4.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f2c685bad3eb3d45a01354cedb7d5faa66194d1d58ba6e267a8de788f79db38" +checksum = "8eaf5903dcbc0a39312feb77df2ff4c76387d591b9fc7b04a238dcf8bb62639a" dependencies = [ - "android-tzdata", - "iana-time-zone", - "js-sys", "num-traits", - "wasm-bindgen", - "windows-targets 0.48.5", -] - -[[package]] -name = "clap" -version = "3.2.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ea181bf566f71cb9a5d17a59e1871af638180a18fb0035c92ae62b705207123" -dependencies = [ - "atty", - "bitflags 1.3.2", - "clap_derive", - "clap_lex", - 
"indexmap 1.9.3", - "once_cell", - "strsim", - "termcolor", - "textwrap", -] - -[[package]] -name = "clap_derive" -version = "3.2.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae6371b8bdc8b7d3959e9cf7b22d4435ef3e79e138688421ec654acf8c81b008" -dependencies = [ - "heck", - "proc-macro-error", - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "clap_lex" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2850f2f5a82cbf437dd5af4d49848fbdfc27c157c3d010345776f952765261c5" -dependencies = [ - "os_str_bytes", -] - -[[package]] -name = "concurrent-queue" -version = "2.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d16048cd947b08fa32c24458a22f5dc5e835264f689f4f5653210c69fd107363" -dependencies = [ - "crossbeam-utils", -] - -[[package]] -name = "console-api" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd326812b3fd01da5bb1af7d340d0d555fd3d4b641e7f1dfcf5962a902952787" -dependencies = [ - "futures-core", - "prost", - "prost-types", - "tonic", - "tracing-core", -] - -[[package]] -name = "console-subscriber" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7481d4c57092cd1c19dd541b92bdce883de840df30aa5d03fd48a3935c01842e" -dependencies = [ - "console-api", - "crossbeam-channel", - "crossbeam-utils", - "futures-task", - "hdrhistogram", - "humantime", - "prost-types", - "serde", - "serde_json", - "thread_local", - "tokio", - "tokio-stream", - "tonic", - "tracing", - "tracing-core", - "tracing-subscriber", ] [[package]] @@ -1211,28 +729,13 @@ dependencies = [ [[package]] name = "crc32fast" -version = "1.3.2" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" +checksum = "b3855a8a784b474f333699ef2bbca9db2c4a1f6d9088a90a2d25b1eb53111eaa" dependencies = [ "cfg-if", ] -[[package]] -name = "crossbeam-channel" -version = "0.5.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "176dc175b78f56c0f321911d9c8eb2b77a78a4860b9c19db83835fea1a46649b" -dependencies = [ - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-utils" -version = "0.8.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345" - [[package]] name = "crypto-bigint" version = "0.4.9" @@ -1305,14 +808,14 @@ dependencies = [ ] [[package]] -name = "derive_utils" -version = "0.11.2" +name = "derive_arbitrary" +version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "532b4c15dccee12c7044f1fcad956e98410860b22231e44a3b827464797ca7bf" +checksum = "67e77553c4162a157adbf834ebae5b415acbecbeafc7a74b0e886657506a7611" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.48", ] [[package]] @@ -1337,16 +840,6 @@ dependencies = [ "syn 2.0.48", ] -[[package]] -name = "duplexify" -version = "1.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1cc346cd6db38ceab2d33f59b26024c3ddb8e75f047c6cafbcbc016ea8065d5" -dependencies = [ - "async-std", - "pin-project-lite 0.1.12", -] - [[package]] name = "ecdsa" version = "0.14.8" @@ -1370,9 +863,9 @@ dependencies = [ [[package]] name = "either" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" +checksum = "11157ac094ffbdde99aa67b23417ebdd801842852b500e395a45a9c0aac03e4a" [[package]] name = "elliptic-curve" @@ -1394,90 +887,12 @@ dependencies = [ "zeroize", ] -[[package]] -name = "eml-codec" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4499124d87abce26a57ef96ece800fa8babc38fbedd81c607c340ae83d46d2e" -dependencies = [ - "base64 0.21.7", - "chrono", - "encoding_rs", - "nom 7.1.3", -] - -[[package]] -name = "encoding_rs" -version = "0.8.33" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7268b386296a025e474d5140678f75d6de9493ae55a5d709eeb9dd08149945e1" -dependencies = [ - "cfg-if", -] - [[package]] name = "equivalent" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" -[[package]] -name = "errno" -version = "0.3.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" -dependencies = [ - "libc", - "windows-sys 0.52.0", -] - -[[package]] -name = "event-listener" -version = "2.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" - -[[package]] -name = "event-listener" -version = "3.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d93877bcde0eb80ca09131a08d23f0a5c18a620b01db137dba666d18cd9b30c2" -dependencies = [ - "concurrent-queue", - "parking", - "pin-project-lite 0.2.13", -] - -[[package]] -name = "event-listener" -version = "4.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67b215c49b2b248c855fb73579eb1f4f26c38ffdc12973e20e07b91d78d5646e" -dependencies = [ - "concurrent-queue", - "parking", - "pin-project-lite 0.2.13", -] - -[[package]] -name = "event-listener-strategy" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "958e4d70b6d5e81971bebec42271ec641e7ff4e170a6fa605f2b8a8b65cb97d3" -dependencies = [ - "event-listener 4.0.3", - "pin-project-lite 0.2.13", -] - -[[package]] -name = "fastrand" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be" -dependencies = [ - "instant", -] - [[package]] name = "fastrand" version = "2.0.1" @@ -1494,16 +909,6 @@ dependencies = [ "subtle", ] -[[package]] -name = "flate2" -version = "1.0.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46303f565772937ffe1d394a4fac6f411c6013172fadde9dcdb1e147a086940e" -dependencies = [ - "crc32fast", - "miniz_oxide", -] - [[package]] name = "fnv" version = "1.0.7" @@ -1519,12 +924,6 @@ dependencies = [ "percent-encoding", ] -[[package]] -name = "funty" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fed34cd105917e91daa4da6b3728c47b068749d6a62c59811f06ed2ac71d9da7" - [[package]] name = "futures" version = "0.3.30" @@ -1573,34 +972,6 @@ version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" -[[package]] -name = "futures-lite" -version = "1.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"49a9d51ce47660b1e808d3c990b4709f2f415d928835a17dfd16991515c46bce" -dependencies = [ - "fastrand 1.9.0", - "futures-core", - "futures-io", - "memchr", - "parking", - "pin-project-lite 0.2.13", - "waker-fn", -] - -[[package]] -name = "futures-lite" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "445ba825b27408685aaecefd65178908c36c6e96aaf6d8599419d46e624192ba" -dependencies = [ - "fastrand 2.0.1", - "futures-core", - "futures-io", - "parking", - "pin-project-lite 0.2.13", -] - [[package]] name = "futures-macro" version = "0.3.30" @@ -1637,7 +1008,7 @@ dependencies = [ "futures-sink", "futures-task", "memchr", - "pin-project-lite 0.2.13", + "pin-project-lite", "pin-utils", "slab", ] @@ -1669,18 +1040,6 @@ version = "0.28.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" -[[package]] -name = "gloo-timers" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b995a66bb87bebce9a0f4a95aed01daca4872c050bfcb21653361c03bc35e5c" -dependencies = [ - "futures-channel", - "futures-core", - "js-sys", - "wasm-bindgen", -] - [[package]] name = "group" version = "0.12.1" @@ -1703,8 +1062,8 @@ dependencies = [ "futures-core", "futures-sink", "futures-util", - "http 0.2.11", - "indexmap 2.1.0", + "http 0.2.12", + "indexmap", "slab", "tokio", "tokio-util", @@ -1722,8 +1081,8 @@ dependencies = [ "futures-core", "futures-sink", "futures-util", - "http 1.0.0", - "indexmap 2.1.0", + "http 1.1.0", + "indexmap", "slab", "tokio", "tokio-util", @@ -1731,46 +1090,12 @@ dependencies = [ ] [[package]] -name = "hashbrown" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" - -[[package]] -name = "hashbrown" -version = "0.14.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" - -[[package]] -name = "hdrhistogram" -version = "7.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "765c9198f173dd59ce26ff9f95ef0aafd0a0fe01fb9d72841bc5066a4c06511d" -dependencies = [ - "base64 0.21.7", - "byteorder", - "flate2", - "nom 7.1.3", - "num-traits", -] - -[[package]] -name = "heck" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" - -[[package]] -name = "hermit-abi" -version = "0.1.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" -dependencies = [ - "libc", -] - -[[package]] +name = "hashbrown" +version = "0.14.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" + +[[package]] name = "hermit-abi" version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -1793,9 +1118,9 @@ dependencies = [ [[package]] name = "http" -version = "0.2.11" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8947b1a6fad4393052c7ba1f4cd97bed3e953a95c79c92ad9b051a04611d9fbb" +checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" dependencies = [ "bytes", "fnv", @@ -1804,9 +1129,9 @@ dependencies = [ [[package]] name = "http" -version = 
"1.0.0" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b32afd38673a8016f7c9ae69e5af41a58f81b1d31689040f2f1959594ce194ea" +checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" dependencies = [ "bytes", "fnv", @@ -1820,8 +1145,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" dependencies = [ "bytes", - "http 0.2.11", - "pin-project-lite 0.2.13", + "http 0.2.12", + "pin-project-lite", ] [[package]] @@ -1831,7 +1156,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643" dependencies = [ "bytes", - "http 1.0.0", + "http 1.1.0", ] [[package]] @@ -1842,9 +1167,9 @@ checksum = "41cb79eb393015dadd30fc252023adb0b2400a0caee0fa2a077e6e21a551e840" dependencies = [ "bytes", "futures-util", - "http 1.0.0", + "http 1.1.0", "http-body 1.0.0", - "pin-project-lite 0.2.13", + "pin-project-lite", ] [[package]] @@ -1859,12 +1184,6 @@ version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" -[[package]] -name = "humantime" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" - [[package]] name = "hyper" version = "0.14.28" @@ -1876,13 +1195,13 @@ dependencies = [ "futures-core", "futures-util", "h2 0.3.24", - "http 0.2.11", + "http 0.2.12", "http-body 0.4.6", "httparse", "httpdate", "itoa", - "pin-project-lite 0.2.13", - "socket2 0.5.5", + "pin-project-lite", + "socket2", "tokio", "tower-service", "tracing", @@ -1899,12 +1218,12 @@ dependencies = [ "futures-channel", "futures-util", "h2 0.4.2", - "http 1.0.0", + "http 1.1.0", "http-body 1.0.0", "httparse", "httpdate", "itoa", - "pin-project-lite 0.2.13", + "pin-project-lite", "smallvec", "tokio", "want", @@ -1917,7 +1236,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" dependencies = [ "futures-util", - "http 0.2.11", + "http 0.2.12", "hyper 0.14.28", "log", "rustls 0.21.10", @@ -1933,7 +1252,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a0bea761b46ae2b24eb4aef630d8d1c398157b6fc29e6350ecf090a0b70c952c" dependencies = [ "futures-util", - "http 1.0.0", + "http 1.1.0", "hyper 1.2.0", "hyper-util", "log", @@ -1945,18 +1264,6 @@ dependencies = [ "tower-service", ] -[[package]] -name = "hyper-timeout" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" -dependencies = [ - "hyper 0.14.28", - "pin-project-lite 0.2.13", - "tokio", - "tokio-io-timeout", -] - [[package]] name = "hyper-util" version = "0.1.3" @@ -1966,51 +1273,17 @@ dependencies = [ "bytes", "futures-channel", "futures-util", - "http 1.0.0", + "http 1.1.0", "http-body 1.0.0", "hyper 1.2.0", - "pin-project-lite 0.2.13", - "socket2 0.5.5", + "pin-project-lite", + "socket2", "tokio", "tower", "tower-service", "tracing", ] -[[package]] -name = "iana-time-zone" -version = "0.1.59" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6a67363e2aa4443928ce15e57ebae94fd8949958fd1223c4cfc0cd473ad7539" -dependencies = [ - "android_system_properties", - "core-foundation-sys", - 
"iana-time-zone-haiku", - "js-sys", - "wasm-bindgen", - "windows-core", -] - -[[package]] -name = "iana-time-zone-haiku" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" -dependencies = [ - "cc", -] - -[[package]] -name = "idna" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "418a0a6fab821475f634efe3ccc45c013f742efe03d853e8d3355d5cb850ecf8" -dependencies = [ - "matches", - "unicode-bidi", - "unicode-normalization", -] - [[package]] name = "idna" version = "0.5.0" @@ -2021,107 +1294,14 @@ dependencies = [ "unicode-normalization", ] -[[package]] -name = "im" -version = "15.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0acd33ff0285af998aaf9b57342af478078f53492322fafc47450e09397e0e9" -dependencies = [ - "bitmaps", - "rand_core", - "rand_xoshiro", - "sized-chunks", - "typenum", - "version_check", -] - -[[package]] -name = "imap-codec" -version = "2.0.0" -source = "git+https://github.com/superboum/imap-codec?branch=custom/aerogramme#d8a5afc03fb771232e94c73af6a05e79dc80bbed" -dependencies = [ - "abnf-core", - "base64 0.21.7", - "bounded-static", - "chrono", - "imap-types", - "log", - "nom 7.1.3", - "thiserror", -] - -[[package]] -name = "imap-flow" -version = "0.1.0" -source = "git+https://github.com/duesee/imap-flow.git?branch=main#68c1da5d1c56dbe543d9736de9683259d1d28191" -dependencies = [ - "bounded-static", - "bytes", - "imap-codec", - "imap-types", - "thiserror", - "tokio", - "tracing", -] - -[[package]] -name = "imap-types" -version = "2.0.0" -source = "git+https://github.com/superboum/imap-codec?branch=custom/aerogramme#d8a5afc03fb771232e94c73af6a05e79dc80bbed" -dependencies = [ - "base64 0.21.7", - "bounded-static", - "chrono", - "thiserror", -] - -[[package]] -name = "indexmap" -version = "1.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" -dependencies = [ - "autocfg", - "hashbrown 0.12.3", -] - [[package]] name = "indexmap" -version = "2.1.0" +version = "2.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f" +checksum = "7b0b929d511467233429c45a44ac1dcaa21ba0f5ba11e4879e6ed28ddb4f9df4" dependencies = [ "equivalent", - "hashbrown 0.14.3", -] - -[[package]] -name = "instant" -version = "0.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "io-lifetimes" -version = "1.0.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" -dependencies = [ - "hermit-abi 0.3.4", - "libc", - "windows-sys 0.48.0", -] - -[[package]] -name = "itertools" -version = "0.10.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" -dependencies = [ - "either", + "hashbrown", ] [[package]] @@ -2157,7 +1337,7 @@ dependencies = [ "aws-sigv4", "base64 0.21.7", "hex", - "http 1.0.0", + "http 1.1.0", "http-body-util", "hyper 1.2.0", "hyper-rustls 0.26.0", @@ -2171,15 +1351,6 @@ dependencies = [ "tokio", ] -[[package]] -name = "kv-log-macro" -version = "1.0.7" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f" -dependencies = [ - "log", -] - [[package]] name = "lazy_static" version = "1.4.0" @@ -2224,25 +1395,23 @@ dependencies = [ "x509-parser", ] -[[package]] -name = "lexical-core" -version = "0.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6607c62aa161d23d17a9072cc5da0be67cdfc89d3afb1e8d9c842bebc2525ffe" -dependencies = [ - "arrayvec", - "bitflags 1.3.2", - "cfg-if", - "ryu", - "static_assertions", -] - [[package]] name = "libc" version = "0.2.152" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13e3bf6590cbc649f4d1a3eefc9d5d6eb746f5200ffb04e5e142700b8faa56e7" +[[package]] +name = "libfuzzer-sys" +version = "0.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a96cfd5557eb82f2b83fed4955246c988d331975a002961b07c81584d107e7f7" +dependencies = [ + "arbitrary", + "cc", + "once_cell", +] + [[package]] name = "libsodium-sys" version = "0.2.7" @@ -2255,47 +1424,11 @@ dependencies = [ "walkdir", ] -[[package]] -name = "linux-raw-sys" -version = "0.3.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" - -[[package]] -name = "linux-raw-sys" -version = "0.4.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c" - [[package]] name = "log" version = "0.4.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" -dependencies = [ - "value-bag", -] - -[[package]] -name = "matchers" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" -dependencies = [ - "regex-automata 0.1.10", -] - -[[package]] -name = "matches" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" - -[[package]] -name = "matchit" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" [[package]] name = "md-5" @@ -2313,12 +1446,6 @@ version = "2.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149" -[[package]] -name = "mime" -version = "0.3.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" - [[package]] name = "minimal-lexical" version = "0.2.1" @@ -2345,36 +1472,12 @@ dependencies = [ "windows-sys 0.48.0", ] -[[package]] -name = "nix" -version = "0.27.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2eb04e9c688eff1c89d72b407f168cf79bb9e867a9d3323ed6c01519eb9cc053" -dependencies = [ - "bitflags 2.4.2", - "cfg-if", - "libc", -] - [[package]] name = "nom" version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf51a729ecf40266a2368ad335a5fdde43471f545a967109cd62146ecf8b66ff" -[[package]] -name = "nom" -version = "6.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"e7413f999671bd4745a7b624bd370a569fb6bc574b23c83a3c5ed2e453f3d5e2" -dependencies = [ - "bitvec", - "funty", - "lexical-core", - "memchr", - "version_check", -] - [[package]] name = "nom" version = "7.1.3" @@ -2385,16 +1488,6 @@ dependencies = [ "minimal-lexical", ] -[[package]] -name = "nu-ansi-term" -version = "0.46.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" -dependencies = [ - "overload", - "winapi", -] - [[package]] name = "num-bigint" version = "0.4.4" @@ -2431,7 +1524,7 @@ version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" dependencies = [ - "hermit-abi 0.3.4", + "hermit-abi", "libc", ] @@ -2465,24 +1558,12 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" -[[package]] -name = "os_str_bytes" -version = "6.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2355d85b9a3786f481747ced0e0ff2ba35213a1f9bd406ed906554d7af805a1" - [[package]] name = "outref" version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4030760ffd992bef45b0ae3f10ce1aba99e33464c90d14dd7c039884963ddc7a" -[[package]] -name = "overload" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" - [[package]] name = "p256" version = "0.11.1" @@ -2494,12 +1575,6 @@ dependencies = [ "sha2", ] -[[package]] -name = "parking" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb813b8af86854136c6922af0598d719255ecb2179515e6e7730d468f05c9cae" - [[package]] name = "password-hash" version = "0.5.0" @@ -2525,30 +1600,24 @@ checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "pin-project" -version = "1.1.3" +version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fda4ed1c6c173e3fc7a83629421152e01d7b1f9b7f65fb301e490e8cfc656422" +checksum = "b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.3" +version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405" +checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" dependencies = [ "proc-macro2", "quote", "syn 2.0.48", ] -[[package]] -name = "pin-project-lite" -version = "0.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "257b64915a082f7811703966789728173279bdebb956b143dbcd23f6f970a777" - [[package]] name = "pin-project-lite" version = "0.2.13" @@ -2561,17 +1630,6 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" -[[package]] -name = "piper" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "668d31b1c4eba19242f2088b2bf3316b82ca31082a8335764db4e083db7485d4" -dependencies = [ - "atomic-waker", - "fastrand 2.0.1", - "futures-io", -] - [[package]] name = "pkcs8" version = "0.9.0" @@ -2588,111 +1646,25 @@ version = "0.3.29" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "2900ede94e305130c13ddd391e0ab7cbaeb783945ae07a279c268cb05109c6cb" -[[package]] -name = "polling" -version = "2.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b2d323e8ca7996b3e23126511a523f7e62924d93ecd5ae73b333815b0eb3dce" -dependencies = [ - "autocfg", - "bitflags 1.3.2", - "cfg-if", - "concurrent-queue", - "libc", - "log", - "pin-project-lite 0.2.13", - "windows-sys 0.48.0", -] - -[[package]] -name = "polling" -version = "3.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "545c980a3880efd47b2e262f6a4bb6daad6555cf3367aa9c4e52895f69537a41" -dependencies = [ - "cfg-if", - "concurrent-queue", - "pin-project-lite 0.2.13", - "rustix 0.38.30", - "tracing", - "windows-sys 0.52.0", -] - [[package]] name = "powerfmt" version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" - -[[package]] -name = "ppv-lite86" -version = "0.2.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" - -[[package]] -name = "proc-macro-error" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" -dependencies = [ - "proc-macro-error-attr", - "proc-macro2", - "quote", - "syn 1.0.109", - "version_check", -] - -[[package]] -name = "proc-macro-error-attr" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" -dependencies = [ - "proc-macro2", - "quote", - "version_check", -] - -[[package]] -name = "proc-macro2" -version = "1.0.76" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95fc56cda0b5c3325f5fbbd7ff9fda9e02bb00bb3dac51252d2f1bfa1cb8cc8c" -dependencies = [ - "unicode-ident", -] - -[[package]] -name = "prost" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "146c289cda302b98a28d40c8b3b90498d6e526dd24ac2ecea73e4e491685b94a" -dependencies = [ - "bytes", - "prost-derive", -] +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" [[package]] -name = "prost-derive" -version = "0.12.3" +name = "ppv-lite86" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "efb6c9a1dd1def8e2124d17e83a20af56f1570d6c2d2bd9e266ccb768df3840e" -dependencies = [ - "anyhow", - "itertools", - "proc-macro2", - "quote", - "syn 2.0.48", -] +checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" [[package]] -name = "prost-types" -version = "0.12.3" +name = "proc-macro2" +version = "1.0.76" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "193898f59edcf43c26227dcd4c8427f00d99d61e95dcde58dabd49fa291d470e" +checksum = "95fc56cda0b5c3325f5fbbd7ff9fda9e02bb00bb3dac51252d2f1bfa1cb8cc8c" dependencies = [ - "prost", + "unicode-ident", ] [[package]] @@ -2714,12 +1686,6 @@ dependencies = [ "proc-macro2", ] -[[package]] -name = "radium" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "941ba9d78d8e2f7ce474c015eea4d9c6d25b6a3327f9832ee29a4de27f91bbb8" - [[package]] name = "rand" version = "0.8.5" @@ -2750,65 
+1716,12 @@ dependencies = [ "getrandom", ] -[[package]] -name = "rand_xoshiro" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f97cdb2a36ed4183de61b2f824cc45c9f1037f28afe0a322e9fff4c108b5aaa" -dependencies = [ - "rand_core", -] - -[[package]] -name = "regex" -version = "1.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "380b951a9c5e80ddfd6136919eef32310721aa4aacd4889a8d39124b026ab343" -dependencies = [ - "aho-corasick", - "memchr", - "regex-automata 0.4.3", - "regex-syntax 0.8.2", -] - -[[package]] -name = "regex-automata" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" -dependencies = [ - "regex-syntax 0.6.29", -] - -[[package]] -name = "regex-automata" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f804c7828047e88b2d32e2d7fe5a105da8ee3264f01902f796c8e067dc2483f" -dependencies = [ - "aho-corasick", - "memchr", - "regex-syntax 0.8.2", -] - [[package]] name = "regex-lite" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "30b661b2f27137bdbc16f00eda72866a92bb28af1753ffbd56744fb6e2e9cd8e" -[[package]] -name = "regex-syntax" -version = "0.6.29" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" - -[[package]] -name = "regex-syntax" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" - [[package]] name = "rfc6979" version = "0.3.1" @@ -2871,27 +1784,6 @@ dependencies = [ "serde", ] -[[package]] -name = "rpassword" -version = "7.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80472be3c897911d0137b2d2b9055faf6eeac5b14e324073d83bc17b191d7e3f" -dependencies = [ - "libc", - "rtoolbox", - "windows-sys 0.48.0", -] - -[[package]] -name = "rtoolbox" -version = "0.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c247d24e63230cdb56463ae328478bd5eac8b8faa8c69461a77e8e323afac90e" -dependencies = [ - "libc", - "windows-sys 0.48.0", -] - [[package]] name = "rustc-demangle" version = "0.1.23" @@ -2916,33 +1808,6 @@ dependencies = [ "nom 7.1.3", ] -[[package]] -name = "rustix" -version = "0.37.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fea8ca367a3a01fe35e6943c400addf443c0f57670e6ec51196f71a4b8762dd2" -dependencies = [ - "bitflags 1.3.2", - "errno", - "io-lifetimes", - "libc", - "linux-raw-sys 0.3.8", - "windows-sys 0.48.0", -] - -[[package]] -name = "rustix" -version = "0.38.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "322394588aaf33c24007e8bb3238ee3e4c5c09c084ab32bc73890b99ff326bca" -dependencies = [ - "bitflags 2.4.2", - "errno", - "libc", - "linux-raw-sys 0.4.13", - "windows-sys 0.52.0", -] - [[package]] name = "rustls" version = "0.20.9" @@ -2976,7 +1841,7 @@ dependencies = [ "log", "ring 0.17.7", "rustls-pki-types", - "rustls-webpki 0.102.1", + "rustls-webpki 0.102.2", "subtle", "zeroize", ] @@ -3000,7 +1865,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f1fb85efa936c42c6d5fc28d2629bb51e4b2f4b8a5211e297d599cc5a093792" dependencies = [ "openssl-probe", - "rustls-pemfile 2.0.0", + "rustls-pemfile 2.1.1", "rustls-pki-types", 
"schannel", "security-framework", @@ -3017,9 +1882,9 @@ dependencies = [ [[package]] name = "rustls-pemfile" -version = "2.0.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35e4980fa29e4c4b212ffb3db068a564cbf560e51d3944b7c88bd8bf5bec64f4" +checksum = "f48172685e6ff52a556baa527774f61fcaa884f59daf3375c62a3f1cd2549dab" dependencies = [ "base64 0.21.7", "rustls-pki-types", @@ -3027,9 +1892,9 @@ dependencies = [ [[package]] name = "rustls-pki-types" -version = "1.1.0" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e9d979b3ce68192e42760c7810125eb6cf2ea10efae545a156063e61f314e2a" +checksum = "5ede67b28608b4c60685c7d54122d4400d90f62b40caee7700e700380a390fa8" [[package]] name = "rustls-webpki" @@ -3043,26 +1908,20 @@ dependencies = [ [[package]] name = "rustls-webpki" -version = "0.102.1" +version = "0.102.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef4ca26037c909dedb327b48c3327d0ba91d3dd3c4e05dad328f210ffb68e95b" +checksum = "faaa0a62740bedb9b2ef5afa303da42764c012f743917351dc9a237ea1663610" dependencies = [ "ring 0.17.7", "rustls-pki-types", "untrusted 0.9.0", ] -[[package]] -name = "rustversion" -version = "1.0.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4" - [[package]] name = "ryu" -version = "1.0.16" +version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f98d2aa92eebf49b69786be48e4477826b256916e84a57ff2a4f21923b48eb4c" +checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1" [[package]] name = "same-file" @@ -3112,7 +1971,7 @@ version = "2.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "05b64fb303737d99b81884b2c63433e9ae28abebe5eb5045dcdd175dc2ecf4de" dependencies = [ - "bitflags 1.3.2", + "bitflags", "core-foundation", "core-foundation-sys", "libc", @@ -3131,9 +1990,9 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.21" +version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b97ed7a9823b74f99c7742f5336af7be5ecd3eeafcb1507d1fa93347b1d589b0" +checksum = "92d43fe69e652f3df9bdc2b85b2854a0825b86e4fb76bc44d945137d053639ca" [[package]] name = "serde" @@ -3157,9 +2016,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.111" +version = "1.0.114" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "176e46fa42316f18edd598015a5166857fc835ec732f5215eac6b7bdbf0a84f4" +checksum = "c5f09b1bd632ef549eaa9f60a1f8de742bdbc698e6cee2095fc84dde5f549ae0" dependencies = [ "itoa", "ryu", @@ -3188,15 +2047,6 @@ dependencies = [ "digest", ] -[[package]] -name = "sharded-slab" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" -dependencies = [ - "lazy_static", -] - [[package]] name = "signal-hook-registry" version = "1.4.1" @@ -3216,16 +2066,6 @@ dependencies = [ "rand_core", ] -[[package]] -name = "sized-chunks" -version = "0.6.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16d69225bde7a69b235da73377861095455d298f2b970996eec25ddbb42b3d1e" -dependencies = [ - "bitmaps", - "typenum", -] - [[package]] name = "slab" version = "0.4.9" @@ -3241,71 +2081,6 @@ version = "1.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "e6ecd384b10a64542d77071bd64bd7b231f4ed5940fba55e98c3de13824cf3d7" -[[package]] -name = "smol" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13f2b548cd8447f8de0fdf1c592929f70f4fc7039a05e47404b0d096ec6987a1" -dependencies = [ - "async-channel 1.9.0", - "async-executor", - "async-fs", - "async-io 1.13.0", - "async-lock 2.8.0", - "async-net", - "async-process", - "blocking", - "futures-lite 1.13.0", -] - -[[package]] -name = "smtp-message" -version = "0.1.0" -source = "git+http://github.com/Alexis211/kannader?branch=feature/lmtp#0560e7c46af752344a3095add5f84b02400b1111" -dependencies = [ - "auto_enums", - "futures", - "idna 0.2.3", - "lazy_static", - "nom 6.1.2", - "pin-project", - "regex-automata 0.1.10", - "serde", -] - -[[package]] -name = "smtp-server" -version = "0.1.0" -source = "git+http://github.com/Alexis211/kannader?branch=feature/lmtp#0560e7c46af752344a3095add5f84b02400b1111" -dependencies = [ - "async-trait", - "chrono", - "duplexify", - "futures", - "smol", - "smtp-message", - "smtp-server-types", -] - -[[package]] -name = "smtp-server-types" -version = "0.1.0" -source = "git+http://github.com/Alexis211/kannader?branch=feature/lmtp#0560e7c46af752344a3095add5f84b02400b1111" -dependencies = [ - "serde", - "smtp-message", -] - -[[package]] -name = "socket2" -version = "0.4.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d" -dependencies = [ - "libc", - "winapi", -] - [[package]] name = "socket2" version = "0.5.5" @@ -3350,18 +2125,6 @@ dependencies = [ "der", ] -[[package]] -name = "static_assertions" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" - -[[package]] -name = "strsim" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" - [[package]] name = "subtle" version = "2.5.0" @@ -3390,12 +2153,6 @@ dependencies = [ "unicode-ident", ] -[[package]] -name = "sync_wrapper" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" - [[package]] name = "synstructure" version = "0.12.6" @@ -3408,27 +2165,6 @@ dependencies = [ "unicode-xid", ] -[[package]] -name = "tap" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" - -[[package]] -name = "termcolor" -version = "1.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" -dependencies = [ - "winapi-util", -] - -[[package]] -name = "textwrap" -version = "0.16.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d" - [[package]] name = "thiserror" version = "1.0.56" @@ -3449,16 +2185,6 @@ dependencies = [ "syn 2.0.48", ] -[[package]] -name = "thread_local" -version = "1.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152" -dependencies = [ - "cfg-if", - "once_cell", -] - [[package]] name = "time" version = "0.3.31" @@ -3514,24 +2240,13 @@ dependencies = [ "libc", 
"mio", "num_cpus", - "pin-project-lite 0.2.13", + "pin-project-lite", "signal-hook-registry", - "socket2 0.5.5", + "socket2", "tokio-macros", - "tracing", "windows-sys 0.48.0", ] -[[package]] -name = "tokio-io-timeout" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30b74022ada614a1b4834de765f9bb43877f910cc8ce4be40e89042c9223a8bf" -dependencies = [ - "pin-project-lite 0.2.13", - "tokio", -] - [[package]] name = "tokio-macros" version = "2.2.0" @@ -3582,7 +2297,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "397c988d37662c7dda6d2208364a706264bf3d6138b11d436cbac0ad38832842" dependencies = [ "futures-core", - "pin-project-lite 0.2.13", + "pin-project-lite", "tokio", ] @@ -3596,7 +2311,7 @@ dependencies = [ "futures-core", "futures-io", "futures-sink", - "pin-project-lite 0.2.13", + "pin-project-lite", "tokio", "tracing", ] @@ -3610,33 +2325,6 @@ dependencies = [ "serde", ] -[[package]] -name = "tonic" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d560933a0de61cf715926b9cac824d4c883c2c43142f787595e48280c40a1d0e" -dependencies = [ - "async-stream", - "async-trait", - "axum", - "base64 0.21.7", - "bytes", - "h2 0.3.24", - "http 0.2.11", - "http-body 0.4.6", - "hyper 0.14.28", - "hyper-timeout", - "percent-encoding", - "pin-project", - "prost", - "tokio", - "tokio-stream", - "tower", - "tower-layer", - "tower-service", - "tracing", -] - [[package]] name = "tower" version = "0.4.13" @@ -3645,13 +2333,9 @@ checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" dependencies = [ "futures-core", "futures-util", - "indexmap 1.9.3", "pin-project", - "pin-project-lite 0.2.13", - "rand", - "slab", + "pin-project-lite", "tokio", - "tokio-util", "tower-layer", "tower-service", "tracing", @@ -3676,7 +2360,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" dependencies = [ "log", - "pin-project-lite 0.2.13", + "pin-project-lite", "tracing-attributes", "tracing-core", ] @@ -3699,36 +2383,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" dependencies = [ "once_cell", - "valuable", -] - -[[package]] -name = "tracing-log" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" -dependencies = [ - "log", - "once_cell", - "tracing-core", -] - -[[package]] -name = "tracing-subscriber" -version = "0.3.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" -dependencies = [ - "matchers", - "nu-ansi-term", - "once_cell", - "regex", - "sharded-slab", - "smallvec", - "thread_local", - "tracing", - "tracing-core", - "tracing-log", ] [[package]] @@ -3789,7 +2443,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633" dependencies = [ "form_urlencoded", - "idna 0.5.0", + "idna", "percent-encoding", ] @@ -3805,18 +2459,6 @@ version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f00cc9702ca12d3c81455259621e676d0f7251cec66a21e98fe2e9a37db93b2a" -[[package]] -name = "valuable" -version = "0.1.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" - -[[package]] -name = "value-bag" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7cdbaf5e132e593e9fc1de6a15bbec912395b11fb9719e061cf64f804524c503" - [[package]] name = "version_check" version = "0.9.4" @@ -3829,12 +2471,6 @@ version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c3082ca00d5a5ef149bb8b555a72ae84c9c59f7250f013ac822ac2e49b19c64" -[[package]] -name = "waker-fn" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3c4517f54858c779bbcbf228f4fca63d121bf85fbecb2dc578cdf4a39395690" - [[package]] name = "walkdir" version = "2.4.0" @@ -3885,18 +2521,6 @@ dependencies = [ "wasm-bindgen-shared", ] -[[package]] -name = "wasm-bindgen-futures" -version = "0.4.40" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bde2032aeb86bdfaecc8b261eef3cba735cc426c1f3a3416d1e0791be95fc461" -dependencies = [ - "cfg-if", - "js-sys", - "wasm-bindgen", - "web-sys", -] - [[package]] name = "wasm-bindgen-macro" version = "0.2.90" @@ -3977,15 +2601,6 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" -[[package]] -name = "windows-core" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" -dependencies = [ - "windows-targets 0.52.0", -] - [[package]] name = "windows-sys" version = "0.48.0" @@ -4118,12 +2733,6 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04" -[[package]] -name = "wyz" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85e60b0d1b5f99db2556934e21937020776a5d31520bf169e851ac44e6420214" - [[package]] name = "x509-parser" version = "0.13.2" @@ -4182,3 +2791,13 @@ dependencies = [ "cc", "libc", ] + +[[patch.unused]] +name = "imap-codec" +version = "2.0.0" +source = "git+https://github.com/superboum/imap-codec?branch=custom/aerogramme#d8a5afc03fb771232e94c73af6a05e79dc80bbed" + +[[patch.unused]] +name = "imap-types" +version = "2.0.0" +source = "git+https://github.com/superboum/imap-codec?branch=custom/aerogramme#d8a5afc03fb771232e94c73af6a05e79dc80bbed" diff --git a/Cargo.toml b/Cargo.toml index 543b463..56d5cf3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,16 +1,28 @@ -[package] -name = "aerogramme" -version = "0.3.0" -authors = ["Alex Auvolat ", "Quentin Dufour "] -edition = "2021" -license = "EUPL-1.2" -description = "A robust email server" +[workspace] +resolver = "2" +members = [ + "aero-user", + "aero-bayou", + "aero-sasl", + "aero-dav", + "aero-dav/fuzz", +# "aero-collections", +# "aero-proto", +# "aerogramme", +] -[lib] -name = "aerogramme" -path = "src/lib.rs" +default-members = ["aerogramme"] + +[workspace.dependencies] +# internal crates +aero-user = { version = "0.3.0", path = "aero-user" } +aero-bayou = { version = "0.3.0", path = "aero-bayou" } +aero-sasl = { version = "0.3.0", path = "aero-sasl" } +aero-dav = { version = "0.3.0", path = "aero-dav" } +#aero-collections = { version = "0.3.0", path = "aero-collections" } +#aero-proto = { version = "0.3.0", path = "aero-proto" } +#aerogramme = { version = 
"0.3.0", path = "aerogramme" } -[dependencies] # async runtime tokio = { version = "1.18", default-features = false, features = ["rt", "rt-multi-thread", "io-util", "net", "time", "macros", "sync", "signal", "fs"] } tokio-util = { version = "0.7", features = [ "compat" ] } @@ -80,13 +92,6 @@ aws-sdk-s3 = "1" aws-smithy-runtime = "1" aws-smithy-runtime-api = "1" -[dev-dependencies] - [patch.crates-io] imap-types = { git = "https://github.com/superboum/imap-codec", branch = "custom/aerogramme" } imap-codec = { git = "https://github.com/superboum/imap-codec", branch = "custom/aerogramme" } - -[[test]] -name = "behavior" -path = "tests/behavior.rs" -harness = false diff --git a/aero-bayou/Cargo.toml b/aero-bayou/Cargo.toml new file mode 100644 index 0000000..d271f4a --- /dev/null +++ b/aero-bayou/Cargo.toml @@ -0,0 +1,17 @@ +[package] +name = "aero-bayou" +version = "0.3.0" +authors = ["Alex Auvolat ", "Quentin Dufour "] +edition = "2021" +license = "EUPL-1.2" +description = "A simplified version of Bayou by Terry et al. (ACM SIGOPS 1995)" + +[dependencies] +aero-user.workspace = true + +anyhow.workspace = true +log.workspace = true +rand.workspace = true +serde.workspace = true +tokio.workspace = true + diff --git a/aero-bayou/src/lib.rs b/aero-bayou/src/lib.rs new file mode 100644 index 0000000..7756964 --- /dev/null +++ b/aero-bayou/src/lib.rs @@ -0,0 +1,517 @@ +mod timestamp + +use std::sync::{Arc, Weak}; +use std::time::{Duration, Instant}; + +use anyhow::{anyhow, bail, Result}; +use log::error; +use rand::prelude::*; +use serde::{Deserialize, Serialize}; +use tokio::sync::{watch, Notify}; + +use aero_foundations::cryptoblob::*; +use aero_foundations::login::Credentials; +use aero_foundations::storage; + +use crate::timestamp::*; + +const KEEP_STATE_EVERY: usize = 64; + +// Checkpointing interval constants: a checkpoint is not made earlier +// than CHECKPOINT_INTERVAL time after the last one, and is not made +// if there are less than CHECKPOINT_MIN_OPS new operations since last one. +const CHECKPOINT_INTERVAL: Duration = Duration::from_secs(6 * 3600); +const CHECKPOINT_MIN_OPS: usize = 16; +// HYPOTHESIS: processes are able to communicate in a synchronous +// fashion in times that are small compared to CHECKPOINT_INTERVAL. +// More precisely, if a process tried to save an operation within the last +// CHECKPOINT_INTERVAL, we are sure to read it from storage if it was +// successfully saved (and if we don't read it, it means it has been +// definitely discarded due to an error). 
+ +// Keep at least two checkpoints, here three, to avoid race conditions +// between processes doing .checkpoint() and those doing .sync() +const CHECKPOINTS_TO_KEEP: usize = 3; + +const WATCH_SK: &str = "watch"; + +pub trait BayouState: + Default + Clone + Serialize + for<'de> Deserialize<'de> + Send + Sync + 'static +{ + type Op: Clone + Serialize + for<'de> Deserialize<'de> + std::fmt::Debug + Send + Sync + 'static; + + fn apply(&self, op: &Self::Op) -> Self; +} + +pub struct Bayou { + path: String, + key: Key, + + storage: storage::Store, + + checkpoint: (Timestamp, S), + history: Vec<(Timestamp, S::Op, Option)>, + + last_sync: Option, + last_try_checkpoint: Option, + + watch: Arc, + last_sync_watch_ct: storage::RowRef, +} + +impl Bayou { + pub async fn new(creds: &Credentials, path: String) -> Result { + let storage = creds.storage.build().await?; + + //let target = k2v_client.row(&path, WATCH_SK); + let target = storage::RowRef::new(&path, WATCH_SK); + let watch = K2vWatch::new(creds, target.clone()).await?; + + Ok(Self { + path, + storage, + key: creds.keys.master.clone(), + checkpoint: (Timestamp::zero(), S::default()), + history: vec![], + last_sync: None, + last_try_checkpoint: None, + watch, + last_sync_watch_ct: target, + }) + } + + /// Re-reads the state from persistent storage backend + pub async fn sync(&mut self) -> Result<()> { + let new_last_sync = Some(Instant::now()); + let new_last_sync_watch_ct = self.watch.rx.borrow().clone(); + + // 1. List checkpoints + let checkpoints = self.list_checkpoints().await?; + tracing::debug!("(sync) listed checkpoints: {:?}", checkpoints); + + // 2. Load last checkpoint if different from currently used one + let checkpoint = if let Some((ts, key)) = checkpoints.last() { + if *ts == self.checkpoint.0 { + (*ts, None) + } else { + tracing::debug!("(sync) loading checkpoint: {}", key); + + let buf = self + .storage + .blob_fetch(&storage::BlobRef(key.to_string())) + .await? + .value; + tracing::debug!("(sync) checkpoint body length: {}", buf.len()); + + let ck = open_deserialize::(&buf, &self.key)?; + (*ts, Some(ck)) + } + } else { + (Timestamp::zero(), None) + }; + + if self.checkpoint.0 > checkpoint.0 { + bail!("Loaded checkpoint is more recent than stored one"); + } + + if let Some(ck) = checkpoint.1 { + tracing::debug!( + "(sync) updating checkpoint to loaded state at {:?}", + checkpoint.0 + ); + self.checkpoint = (checkpoint.0, ck); + }; + + // remove from history events before checkpoint + self.history = std::mem::take(&mut self.history) + .into_iter() + .skip_while(|(ts, _, _)| *ts < self.checkpoint.0) + .collect(); + + // 3. 
List all operations starting from checkpoint + let ts_ser = self.checkpoint.0.to_string(); + tracing::debug!("(sync) looking up operations starting at {}", ts_ser); + let ops_map = self + .storage + .row_fetch(&storage::Selector::Range { + shard: &self.path, + sort_begin: &ts_ser, + sort_end: WATCH_SK, + }) + .await?; + + let mut ops = vec![]; + for row_value in ops_map { + let row = row_value.row_ref; + let sort_key = row.uid.sort; + let ts = sort_key + .parse::() + .map_err(|_| anyhow!("Invalid operation timestamp: {}", sort_key))?; + + let val = row_value.value; + if val.len() != 1 { + bail!("Invalid operation, has {} values", val.len()); + } + match &val[0] { + storage::Alternative::Value(v) => { + let op = open_deserialize::(v, &self.key)?; + tracing::trace!("(sync) operation {}: {:?}", sort_key, op); + ops.push((ts, op)); + } + storage::Alternative::Tombstone => { + continue; + } + } + } + ops.sort_by_key(|(ts, _)| *ts); + tracing::debug!("(sync) {} operations", ops.len()); + + if ops.len() < self.history.len() { + bail!("Some operations have disappeared from storage!"); + } + + // 4. Check that first operation has same timestamp as checkpoint (if not zero) + if self.checkpoint.0 != Timestamp::zero() && ops[0].0 != self.checkpoint.0 { + bail!( + "First operation in listing doesn't have timestamp that corresponds to checkpoint" + ); + } + + // 5. Apply all operations in order + // Hypothesis: before the loaded checkpoint, operations haven't changed + // between what's on storage and what we used to calculate the state in RAM here. + let i0 = self + .history + .iter() + .zip(ops.iter()) + .take_while(|((ts1, _, _), (ts2, _))| ts1 == ts2) + .count(); + + if ops.len() > i0 { + // Remove operations from first position where histories differ + self.history.truncate(i0); + + // Look up last calculated state which we have saved and start from there. + let mut last_state = (0, &self.checkpoint.1); + for (i, (_, _, state_opt)) in self.history.iter().enumerate().rev() { + if let Some(state) = state_opt { + last_state = (i + 1, state); + break; + } + } + + // Calculate state at the end of this common part of the history + let mut state = last_state.1.clone(); + for (_, op, _) in self.history[last_state.0..].iter() { + state = state.apply(op); + } + + // Now, apply all operations retrieved from storage after the common part + for (ts, op) in ops.drain(i0..) { + state = state.apply(&op); + if (self.history.len() + 1) % KEEP_STATE_EVERY == 0 { + self.history.push((ts, op, Some(state.clone()))); + } else { + self.history.push((ts, op, None)); + } + } + + // Always save final state as result of last operation + self.history.last_mut().unwrap().2 = Some(state); + } + + // Save info that sync has been done + self.last_sync = new_last_sync; + self.last_sync_watch_ct = new_last_sync_watch_ct; + Ok(()) + } + + /// Does a sync() if either of the two conditions is met: + /// - last sync was more than CHECKPOINT_INTERVAL/5 ago + /// - a change was detected + pub async fn opportunistic_sync(&mut self) -> Result<()> { + let too_old = match self.last_sync { + Some(t) => Instant::now() > t + (CHECKPOINT_INTERVAL / 5), + _ => true, + }; + let changed = self.last_sync_watch_ct != *self.watch.rx.borrow(); + if too_old || changed { + self.sync().await?; + } + Ok(()) + } + + pub fn notifier(&self) -> std::sync::Weak { + Arc::downgrade(&self.watch.learnt_remote_update) + } + + /// Applies a new operation on the state. Once this function returns, + /// the operation has been safely persisted to storage backend. 
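+    ///
+    /// A typical write sequence is sketched below (illustrative only;
+    /// `make_op` stands in for whatever derives an `S::Op` from the
+    /// current state):
+    ///
+    ///   bayou.opportunistic_sync().await?;   // refresh local state first
+    ///   let op = make_op(bayou.state());     // build the op against fresh state
+    ///   bayou.push(op).await?;               // persist to storage, then apply
+    ///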
+ /// Make sure to call `.opportunistic_sync()` before doing this, + /// and even before calculating the `op` argument given here. + pub async fn push(&mut self, op: S::Op) -> Result<()> { + tracing::debug!("(push) add operation: {:?}", op); + + let ts = Timestamp::after( + self.history + .last() + .map(|(ts, _, _)| ts) + .unwrap_or(&self.checkpoint.0), + ); + + let row_val = storage::RowVal::new( + storage::RowRef::new(&self.path, &ts.to_string()), + seal_serialize(&op, &self.key)?, + ); + self.storage.row_insert(vec![row_val]).await?; + self.watch.propagate_local_update.notify_one(); + + let new_state = self.state().apply(&op); + self.history.push((ts, op, Some(new_state))); + + // Clear previously saved state in history if not required + let hlen = self.history.len(); + if hlen >= 2 && (hlen - 1) % KEEP_STATE_EVERY != 0 { + self.history[hlen - 2].2 = None; + } + + self.checkpoint().await?; + + Ok(()) + } + + /// Save a new checkpoint if previous checkpoint is too old + pub async fn checkpoint(&mut self) -> Result<()> { + match self.last_try_checkpoint { + Some(ts) if Instant::now() - ts < CHECKPOINT_INTERVAL / 5 => Ok(()), + _ => { + let res = self.checkpoint_internal().await; + if res.is_ok() { + self.last_try_checkpoint = Some(Instant::now()); + } + res + } + } + } + + async fn checkpoint_internal(&mut self) -> Result<()> { + self.sync().await?; + + // Check what would be the possible time for a checkpoint in the history we have + let now = now_msec() as i128; + let i_cp = match self + .history + .iter() + .enumerate() + .rev() + .skip_while(|(_, (ts, _, _))| { + (now - ts.msec as i128) < CHECKPOINT_INTERVAL.as_millis() as i128 + }) + .map(|(i, _)| i) + .next() + { + Some(i) => i, + None => { + tracing::debug!("(cp) Oldest operation is too recent to trigger checkpoint"); + return Ok(()); + } + }; + + if i_cp < CHECKPOINT_MIN_OPS { + tracing::debug!("(cp) Not enough old operations to trigger checkpoint"); + return Ok(()); + } + + let ts_cp = self.history[i_cp].0; + tracing::debug!( + "(cp) we could checkpoint at time {} (index {} in history)", + ts_cp.to_string(), + i_cp + ); + + // Check existing checkpoints: if last one is too recent, don't checkpoint again. 
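+        // Several processes may checkpoint the same log, so the comparison is
+        // against the most recent checkpoint found in storage, not just our
+        // own: together with the checks above, a new checkpoint is only written
+        // when at least CHECKPOINT_MIN_OPS operations are older than
+        // CHECKPOINT_INTERVAL and the previous checkpoint is itself more than
+        // CHECKPOINT_INTERVAL older than the candidate timestamp.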
+ let existing_checkpoints = self.list_checkpoints().await?; + tracing::debug!("(cp) listed checkpoints: {:?}", existing_checkpoints); + + if let Some(last_cp) = existing_checkpoints.last() { + if (ts_cp.msec as i128 - last_cp.0.msec as i128) + < CHECKPOINT_INTERVAL.as_millis() as i128 + { + tracing::debug!( + "(cp) last checkpoint is too recent: {}, not checkpointing", + last_cp.0.to_string() + ); + return Ok(()); + } + } + + tracing::debug!("(cp) saving checkpoint at {}", ts_cp.to_string()); + + // Calculate state at time of checkpoint + let mut last_known_state = (0, &self.checkpoint.1); + for (i, (_, _, st)) in self.history[..i_cp].iter().enumerate() { + if let Some(s) = st { + last_known_state = (i + 1, s); + } + } + let mut state_cp = last_known_state.1.clone(); + for (_, op, _) in self.history[last_known_state.0..i_cp].iter() { + state_cp = state_cp.apply(op); + } + + // Serialize and save checkpoint + let cryptoblob = seal_serialize(&state_cp, &self.key)?; + tracing::debug!("(cp) checkpoint body length: {}", cryptoblob.len()); + + let blob_val = storage::BlobVal::new( + storage::BlobRef(format!("{}/checkpoint/{}", self.path, ts_cp.to_string())), + cryptoblob.into(), + ); + self.storage.blob_insert(blob_val).await?; + + // Drop old checkpoints (but keep at least CHECKPOINTS_TO_KEEP of them) + let ecp_len = existing_checkpoints.len(); + if ecp_len + 1 > CHECKPOINTS_TO_KEEP { + let last_to_keep = ecp_len + 1 - CHECKPOINTS_TO_KEEP; + + // Delete blobs + for (_ts, key) in existing_checkpoints[..last_to_keep].iter() { + tracing::debug!("(cp) drop old checkpoint {}", key); + self.storage + .blob_rm(&storage::BlobRef(key.to_string())) + .await?; + } + + // Delete corresponding range of operations + let ts_ser = existing_checkpoints[last_to_keep].0.to_string(); + self.storage + .row_rm(&storage::Selector::Range { + shard: &self.path, + sort_begin: "", + sort_end: &ts_ser, + }) + .await? + } + + Ok(()) + } + + pub fn state(&self) -> &S { + if let Some(last) = self.history.last() { + last.2.as_ref().unwrap() + } else { + &self.checkpoint.1 + } + } + + // ---- INTERNAL ---- + + async fn list_checkpoints(&self) -> Result> { + let prefix = format!("{}/checkpoint/", self.path); + + let checkpoints_res = self.storage.blob_list(&prefix).await?; + + let mut checkpoints = vec![]; + for object in checkpoints_res { + let key = object.0; + if let Some(ckid) = key.strip_prefix(&prefix) { + if let Ok(ts) = ckid.parse::() { + checkpoints.push((ts, key.into())); + } + } + } + checkpoints.sort_by_key(|(ts, _)| *ts); + Ok(checkpoints) + } +} + +// ---- Bayou watch in K2V ---- + +struct K2vWatch { + target: storage::RowRef, + rx: watch::Receiver, + propagate_local_update: Notify, + learnt_remote_update: Arc, +} + +impl K2vWatch { + /// Creates a new watch and launches subordinate threads. + /// These threads hold Weak pointers to the struct; + /// they exit when the Arc is dropped. 
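+    ///
+    /// The background task multiplexes two directions of signalling:
+    /// - `propagate_local_update`: after a local `push()`, a random value is
+    ///   written to the watched row so that other processes polling it wake up;
+    /// - `learnt_remote_update`: when `row_poll` reports that the row changed,
+    ///   waiters obtained through `Bayou::notifier()` are notified and `rx` is
+    ///   updated, which `opportunistic_sync()` uses to decide whether a
+    ///   re-sync is needed.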
+ async fn new(creds: &Credentials, target: storage::RowRef) -> Result> { + let storage = creds.storage.build().await?; + + let (tx, rx) = watch::channel::(target.clone()); + let propagate_local_update = Notify::new(); + let learnt_remote_update = Arc::new(Notify::new()); + + let watch = Arc::new(K2vWatch { + target, + rx, + propagate_local_update, + learnt_remote_update, + }); + + tokio::spawn(Self::background_task(Arc::downgrade(&watch), storage, tx)); + + Ok(watch) + } + + async fn background_task( + self_weak: Weak, + storage: storage::Store, + tx: watch::Sender, + ) { + let (mut row, remote_update) = match Weak::upgrade(&self_weak) { + Some(this) => (this.target.clone(), this.learnt_remote_update.clone()), + None => return, + }; + + while let Some(this) = Weak::upgrade(&self_weak) { + tracing::debug!( + "bayou k2v watch bg loop iter ({}, {})", + this.target.uid.shard, + this.target.uid.sort + ); + tokio::select!( + // Needed to exit: will force a loop iteration every minutes, + // that will stop the loop if other Arc references have been dropped + // and free resources. Otherwise we would be blocked waiting forever... + _ = tokio::time::sleep(Duration::from_secs(60)) => continue, + + // Watch if another instance has modified the log + update = storage.row_poll(&row) => { + match update { + Err(e) => { + error!("Error in bayou k2v wait value changed: {}", e); + tokio::time::sleep(Duration::from_secs(30)).await; + } + Ok(new_value) => { + row = new_value.row_ref; + if let Err(e) = tx.send(row.clone()) { + tracing::warn!(err=?e, "(watch) can't record the new log ref"); + break; + } + tracing::debug!(row=?row, "(watch) learnt remote update"); + this.learnt_remote_update.notify_waiters(); + } + } + } + + // It appears we have modified the log, informing other people + _ = this.propagate_local_update.notified() => { + let rand = u128::to_be_bytes(thread_rng().gen()).to_vec(); + let row_val = storage::RowVal::new(row.clone(), rand); + if let Err(e) = storage.row_insert(vec![row_val]).await + { + tracing::error!("Error in bayou k2v watch updater loop: {}", e); + tokio::time::sleep(Duration::from_secs(30)).await; + } + } + ); + } + // unblock listeners + remote_update.notify_waiters(); + tracing::info!("bayou k2v watch bg loop exiting"); + } +} diff --git a/aero-bayou/src/timestamp.rs b/aero-bayou/src/timestamp.rs new file mode 100644 index 0000000..4aa5399 --- /dev/null +++ b/aero-bayou/src/timestamp.rs @@ -0,0 +1,66 @@ +use std::str::FromStr; +use std::time::{SystemTime, UNIX_EPOCH}; + +use rand::prelude::*; + +/// Returns milliseconds since UNIX Epoch +pub fn now_msec() -> u64 { + SystemTime::now() + .duration_since(UNIX_EPOCH) + .expect("Fix your clock :o") + .as_millis() as u64 +} + +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)] +pub struct Timestamp { + pub msec: u64, + pub rand: u64, +} + +impl Timestamp { + #[allow(dead_code)] + // 2023-05-15 try to make clippy happy and not sure if this fn will be used in the future. 
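+    // The wall-clock millisecond count is paired with a random component; the
+    // random part breaks ties between writers that generate a timestamp within
+    // the same millisecond.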
+ pub fn now() -> Self { + let mut rng = thread_rng(); + Self { + msec: now_msec(), + rand: rng.gen::(), + } + } + + pub fn after(other: &Self) -> Self { + let mut rng = thread_rng(); + Self { + msec: std::cmp::max(now_msec(), other.msec + 1), + rand: rng.gen::(), + } + } + + pub fn zero() -> Self { + Self { msec: 0, rand: 0 } + } +} + +impl ToString for Timestamp { + fn to_string(&self) -> String { + let mut bytes = [0u8; 16]; + bytes[0..8].copy_from_slice(&u64::to_be_bytes(self.msec)); + bytes[8..16].copy_from_slice(&u64::to_be_bytes(self.rand)); + hex::encode(bytes) + } +} + +impl FromStr for Timestamp { + type Err = &'static str; + + fn from_str(s: &str) -> Result { + let bytes = hex::decode(s).map_err(|_| "invalid hex")?; + if bytes.len() != 16 { + return Err("bad length"); + } + Ok(Self { + msec: u64::from_be_bytes(bytes[0..8].try_into().unwrap()), + rand: u64::from_be_bytes(bytes[8..16].try_into().unwrap()), + }) + } +} diff --git a/aero-collections/mail/incoming.rs b/aero-collections/mail/incoming.rs new file mode 100644 index 0000000..e2ad97d --- /dev/null +++ b/aero-collections/mail/incoming.rs @@ -0,0 +1,445 @@ +//use std::collections::HashMap; +use std::convert::TryFrom; + +use std::sync::{Arc, Weak}; +use std::time::Duration; + +use anyhow::{anyhow, bail, Result}; +use base64::Engine; +use futures::{future::BoxFuture, FutureExt}; +//use tokio::io::AsyncReadExt; +use tokio::sync::watch; +use tracing::{debug, error, info, warn}; + +use crate::cryptoblob; +use crate::login::{Credentials, PublicCredentials}; +use crate::mail::mailbox::Mailbox; +use crate::mail::uidindex::ImapUidvalidity; +use crate::mail::unique_ident::*; +use crate::user::User; +use crate::mail::IMF; +use crate::storage; +use crate::timestamp::now_msec; + +const INCOMING_PK: &str = "incoming"; +const INCOMING_LOCK_SK: &str = "lock"; +const INCOMING_WATCH_SK: &str = "watch"; + +const MESSAGE_KEY: &str = "message-key"; + +// When a lock is held, it is held for LOCK_DURATION (here 5 minutes) +// It is renewed every LOCK_DURATION/3 +// If we are at 2*LOCK_DURATION/3 and haven't renewed, we assume we +// lost the lock. +const LOCK_DURATION: Duration = Duration::from_secs(300); + +// In addition to checking when notified, also check for new mail every 10 minutes +const MAIL_CHECK_INTERVAL: Duration = Duration::from_secs(600); + +pub async fn incoming_mail_watch_process( + user: Weak, + creds: Credentials, + rx_inbox_id: watch::Receiver>, +) { + if let Err(e) = incoming_mail_watch_process_internal(user, creds, rx_inbox_id).await { + error!("Error in incoming mail watch process: {}", e); + } +} + +async fn incoming_mail_watch_process_internal( + user: Weak, + creds: Credentials, + mut rx_inbox_id: watch::Receiver>, +) -> Result<()> { + let mut lock_held = k2v_lock_loop( + creds.storage.build().await?, + storage::RowRef::new(INCOMING_PK, INCOMING_LOCK_SK), + ); + let storage = creds.storage.build().await?; + + let mut inbox: Option> = None; + let mut incoming_key = storage::RowRef::new(INCOMING_PK, INCOMING_WATCH_SK); + + loop { + let maybe_updated_incoming_key = if *lock_held.borrow() { + debug!("incoming lock held"); + + let wait_new_mail = async { + loop { + match storage.row_poll(&incoming_key).await { + Ok(row_val) => break row_val.row_ref, + Err(e) => { + error!("Error in wait_new_mail: {}", e); + tokio::time::sleep(Duration::from_secs(30)).await; + } + } + } + }; + + tokio::select! 
{ + inc_k = wait_new_mail => Some(inc_k), + _ = tokio::time::sleep(MAIL_CHECK_INTERVAL) => Some(incoming_key.clone()), + _ = lock_held.changed() => None, + _ = rx_inbox_id.changed() => None, + } + } else { + debug!("incoming lock not held"); + tokio::select! { + _ = lock_held.changed() => None, + _ = rx_inbox_id.changed() => None, + } + }; + + let user = match Weak::upgrade(&user) { + Some(user) => user, + None => { + debug!("User no longer available, exiting incoming loop."); + break; + } + }; + debug!("User still available"); + + // If INBOX no longer is same mailbox, open new mailbox + let inbox_id = *rx_inbox_id.borrow(); + if let Some((id, uidvalidity)) = inbox_id { + if Some(id) != inbox.as_ref().map(|b| b.id) { + match user.open_mailbox_by_id(id, uidvalidity).await { + Ok(mb) => { + inbox = Some(mb); + } + Err(e) => { + inbox = None; + error!("Error when opening inbox ({}): {}", id, e); + tokio::time::sleep(Duration::from_secs(30)).await; + continue; + } + } + } + } + + // If we were able to open INBOX, and we have mail, + // fetch new mail + if let (Some(inbox), Some(updated_incoming_key)) = (&inbox, maybe_updated_incoming_key) { + match handle_incoming_mail(&user, &storage, inbox, &lock_held).await { + Ok(()) => { + incoming_key = updated_incoming_key; + } + Err(e) => { + error!("Could not fetch incoming mail: {}", e); + tokio::time::sleep(Duration::from_secs(30)).await; + } + } + } + } + drop(rx_inbox_id); + Ok(()) +} + +async fn handle_incoming_mail( + user: &Arc, + storage: &storage::Store, + inbox: &Arc, + lock_held: &watch::Receiver, +) -> Result<()> { + let mails_res = storage.blob_list("incoming/").await?; + + for object in mails_res { + if !*lock_held.borrow() { + break; + } + let key = object.0; + if let Some(mail_id) = key.strip_prefix("incoming/") { + if let Ok(mail_id) = mail_id.parse::() { + move_incoming_message(user, storage, inbox, mail_id).await?; + } + } + } + + Ok(()) +} + +async fn move_incoming_message( + user: &Arc, + storage: &storage::Store, + inbox: &Arc, + id: UniqueIdent, +) -> Result<()> { + info!("Moving incoming message: {}", id); + + let object_key = format!("incoming/{}", id); + + // 1. 
Fetch message from S3 + let object = storage.blob_fetch(&storage::BlobRef(object_key)).await?; + + // 1.a decrypt message key from headers + //info!("Object metadata: {:?}", get_result.metadata); + let key_encrypted_b64 = object + .meta + .get(MESSAGE_KEY) + .ok_or(anyhow!("Missing key in metadata"))?; + let key_encrypted = base64::engine::general_purpose::STANDARD.decode(key_encrypted_b64)?; + let message_key = sodiumoxide::crypto::sealedbox::open( + &key_encrypted, + &user.creds.keys.public, + &user.creds.keys.secret, + ) + .map_err(|_| anyhow!("Cannot decrypt message key"))?; + let message_key = + cryptoblob::Key::from_slice(&message_key).ok_or(anyhow!("Invalid message key"))?; + + // 1.b retrieve message body + let obj_body = object.value; + let plain_mail = cryptoblob::open(&obj_body, &message_key) + .map_err(|_| anyhow!("Cannot decrypt email content"))?; + + // 2 parse mail and add to inbox + let msg = IMF::try_from(&plain_mail[..]).map_err(|_| anyhow!("Invalid email body"))?; + inbox + .append_from_s3(msg, id, object.blob_ref.clone(), message_key) + .await?; + + // 3 delete from incoming + storage.blob_rm(&object.blob_ref).await?; + + Ok(()) +} + +// ---- UTIL: K2V locking loop, use this to try to grab a lock using a K2V entry as a signal ---- + +fn k2v_lock_loop(storage: storage::Store, row_ref: storage::RowRef) -> watch::Receiver { + let (held_tx, held_rx) = watch::channel(false); + + tokio::spawn(k2v_lock_loop_internal(storage, row_ref, held_tx)); + + held_rx +} + +#[derive(Clone, Debug)] +enum LockState { + Unknown, + Empty, + Held(UniqueIdent, u64, storage::RowRef), +} + +async fn k2v_lock_loop_internal( + storage: storage::Store, + row_ref: storage::RowRef, + held_tx: watch::Sender, +) { + let (state_tx, mut state_rx) = watch::channel::(LockState::Unknown); + let mut state_rx_2 = state_rx.clone(); + + let our_pid = gen_ident(); + + // Loop 1: watch state of lock in K2V, save that in corresponding watch channel + let watch_lock_loop: BoxFuture> = async { + let mut ct = row_ref.clone(); + loop { + debug!("k2v watch lock loop iter: ct = {:?}", ct); + match storage.row_poll(&ct).await { + Err(e) => { + error!( + "Error in k2v wait value changed: {} ; assuming we no longer hold lock.", + e + ); + state_tx.send(LockState::Unknown)?; + tokio::time::sleep(Duration::from_secs(30)).await; + } + Ok(cv) => { + let mut lock_state = None; + for v in cv.value.iter() { + if let storage::Alternative::Value(vbytes) = v { + if vbytes.len() == 32 { + let ts = u64::from_be_bytes(vbytes[..8].try_into().unwrap()); + let pid = UniqueIdent(vbytes[8..].try_into().unwrap()); + if lock_state + .map(|(pid2, ts2)| ts > ts2 || (ts == ts2 && pid > pid2)) + .unwrap_or(true) + { + lock_state = Some((pid, ts)); + } + } + } + } + let new_ct = cv.row_ref; + + debug!( + "k2v watch lock loop: changed, old ct = {:?}, new ct = {:?}, v = {:?}", + ct, new_ct, lock_state + ); + state_tx.send( + lock_state + .map(|(pid, ts)| LockState::Held(pid, ts, new_ct.clone())) + .unwrap_or(LockState::Empty), + )?; + ct = new_ct; + } + } + } + } + .boxed(); + + // Loop 2: notify user whether we are holding the lock or not + let lock_notify_loop: BoxFuture> = async { + loop { + let now = now_msec(); + let held_with_expiration_time = match &*state_rx.borrow_and_update() { + LockState::Held(pid, ts, _ct) if *pid == our_pid => { + let expiration_time = *ts - (LOCK_DURATION / 3).as_millis() as u64; + if now < expiration_time { + Some(expiration_time) + } else { + None + } + } + _ => None, + }; + let held = 
held_with_expiration_time.is_some(); + if held != *held_tx.borrow() { + held_tx.send(held)?; + } + + let await_expired = async { + match held_with_expiration_time { + None => futures::future::pending().await, + Some(expiration_time) => { + tokio::time::sleep(Duration::from_millis(expiration_time - now)).await + } + }; + }; + + tokio::select!( + r = state_rx.changed() => { + r?; + } + _ = held_tx.closed() => bail!("held_tx closed, don't need to hold lock anymore"), + _ = await_expired => continue, + ); + } + } + .boxed(); + + // Loop 3: acquire lock when relevant + let take_lock_loop: BoxFuture> = async { + loop { + let now = now_msec(); + let state: LockState = state_rx_2.borrow_and_update().clone(); + let (acquire_at, ct) = match state { + LockState::Unknown => { + // If state of the lock is unknown, don't try to acquire + state_rx_2.changed().await?; + continue; + } + LockState::Empty => (now, None), + LockState::Held(pid, ts, ct) => { + if pid == our_pid { + (ts - (2 * LOCK_DURATION / 3).as_millis() as u64, Some(ct)) + } else { + (ts, Some(ct)) + } + } + }; + + // Wait until it is time to acquire lock + if acquire_at > now { + tokio::select!( + r = state_rx_2.changed() => { + // If lock state changed in the meantime, don't acquire and loop around + r?; + continue; + } + _ = tokio::time::sleep(Duration::from_millis(acquire_at - now)) => () + ); + } + + // Acquire lock + let mut lock = vec![0u8; 32]; + lock[..8].copy_from_slice(&u64::to_be_bytes( + now_msec() + LOCK_DURATION.as_millis() as u64, + )); + lock[8..].copy_from_slice(&our_pid.0); + let row = match ct { + Some(existing) => existing, + None => row_ref.clone(), + }; + if let Err(e) = storage + .row_insert(vec![storage::RowVal::new(row, lock)]) + .await + { + error!("Could not take lock: {}", e); + tokio::time::sleep(Duration::from_secs(30)).await; + } + + // Wait for new information to loop back + state_rx_2.changed().await?; + } + } + .boxed(); + + let _ = futures::try_join!(watch_lock_loop, lock_notify_loop, take_lock_loop); + + debug!("lock loop exited, releasing"); + + if !held_tx.is_closed() { + warn!("weird..."); + let _ = held_tx.send(false); + } + + // If lock is ours, release it + let release = match &*state_rx.borrow() { + LockState::Held(pid, _, ct) if *pid == our_pid => Some(ct.clone()), + _ => None, + }; + if let Some(ct) = release { + match storage.row_rm(&storage::Selector::Single(&ct)).await { + Err(e) => warn!("Unable to release lock {:?}: {}", ct, e), + Ok(_) => (), + }; + } +} + +// ---- LMTP SIDE: storing messages encrypted with user's pubkey ---- + +pub struct EncryptedMessage { + key: cryptoblob::Key, + encrypted_body: Vec, +} + +impl EncryptedMessage { + pub fn new(body: Vec) -> Result { + let key = cryptoblob::gen_key(); + let encrypted_body = cryptoblob::seal(&body, &key)?; + Ok(Self { + key, + encrypted_body, + }) + } + + pub async fn deliver_to(self: Arc, creds: PublicCredentials) -> Result<()> { + let storage = creds.storage.build().await?; + + // Get causality token of previous watch key + let query = storage::RowRef::new(INCOMING_PK, INCOMING_WATCH_SK); + let watch_ct = match storage.row_fetch(&storage::Selector::Single(&query)).await { + Err(_) => query, + Ok(cv) => cv.into_iter().next().map(|v| v.row_ref).unwrap_or(query), + }; + + // Write mail to encrypted storage + let encrypted_key = + sodiumoxide::crypto::sealedbox::seal(self.key.as_ref(), &creds.public_key); + let key_header = base64::engine::general_purpose::STANDARD.encode(&encrypted_key); + + let blob_val = storage::BlobVal::new( + 
storage::BlobRef(format!("incoming/{}", gen_ident())), + self.encrypted_body.clone().into(), + ) + .with_meta(MESSAGE_KEY.to_string(), key_header); + storage.blob_insert(blob_val).await?; + + // Update watch key to signal new mail + let watch_val = storage::RowVal::new(watch_ct.clone(), gen_ident().0.to_vec()); + storage.row_insert(vec![watch_val]).await?; + Ok(()) + } +} diff --git a/aero-collections/mail/mailbox.rs b/aero-collections/mail/mailbox.rs new file mode 100644 index 0000000..d1a5473 --- /dev/null +++ b/aero-collections/mail/mailbox.rs @@ -0,0 +1,524 @@ +use anyhow::{anyhow, bail, Result}; +use serde::{Deserialize, Serialize}; +use tokio::sync::RwLock; + +use crate::bayou::Bayou; +use crate::cryptoblob::{self, gen_key, open_deserialize, seal_serialize, Key}; +use crate::login::Credentials; +use crate::mail::uidindex::*; +use crate::mail::unique_ident::*; +use crate::mail::IMF; +use crate::storage::{self, BlobRef, BlobVal, RowRef, RowVal, Selector, Store}; +use crate::timestamp::now_msec; + +pub struct Mailbox { + pub(super) id: UniqueIdent, + mbox: RwLock, +} + +impl Mailbox { + pub(crate) async fn open( + creds: &Credentials, + id: UniqueIdent, + min_uidvalidity: ImapUidvalidity, + ) -> Result { + let index_path = format!("index/{}", id); + let mail_path = format!("mail/{}", id); + + let mut uid_index = Bayou::::new(creds, index_path).await?; + uid_index.sync().await?; + + let uidvalidity = uid_index.state().uidvalidity; + if uidvalidity < min_uidvalidity { + uid_index + .push( + uid_index + .state() + .op_bump_uidvalidity(min_uidvalidity.get() - uidvalidity.get()), + ) + .await?; + } + + // @FIXME reporting through opentelemetry or some logs + // info on the "shape" of the mailbox would be welcomed + /* + dump(&uid_index); + */ + + let mbox = RwLock::new(MailboxInternal { + id, + encryption_key: creds.keys.master.clone(), + storage: creds.storage.build().await?, + uid_index, + mail_path, + }); + + Ok(Self { id, mbox }) + } + + /// Sync data with backing store + pub async fn force_sync(&self) -> Result<()> { + self.mbox.write().await.force_sync().await + } + + /// Sync data with backing store only if changes are detected + /// or last sync is too old + pub async fn opportunistic_sync(&self) -> Result<()> { + self.mbox.write().await.opportunistic_sync().await + } + + /// Block until a sync has been done (due to changes in the event log) + pub async fn notify(&self) -> std::sync::Weak { + self.mbox.read().await.notifier() + } + + // ---- Functions for reading the mailbox ---- + + /// Get a clone of the current UID Index of this mailbox + /// (cloning is cheap so don't hesitate to use this) + pub async fn current_uid_index(&self) -> UidIndex { + self.mbox.read().await.uid_index.state().clone() + } + + /// Fetch the metadata (headers + some more info) of the specified + /// mail IDs + pub async fn fetch_meta(&self, ids: &[UniqueIdent]) -> Result> { + self.mbox.read().await.fetch_meta(ids).await + } + + /// Fetch an entire e-mail + pub async fn fetch_full(&self, id: UniqueIdent, message_key: &Key) -> Result> { + self.mbox.read().await.fetch_full(id, message_key).await + } + + pub async fn frozen(self: &std::sync::Arc) -> super::snapshot::FrozenMailbox { + super::snapshot::FrozenMailbox::new(self.clone()).await + } + + // ---- Functions for changing the mailbox ---- + + /// Add flags to message + pub async fn add_flags<'a>(&self, id: UniqueIdent, flags: &[Flag]) -> Result<()> { + self.mbox.write().await.add_flags(id, flags).await + } + + /// Delete flags from message + pub async fn 
del_flags<'a>(&self, id: UniqueIdent, flags: &[Flag]) -> Result<()> { + self.mbox.write().await.del_flags(id, flags).await + } + + /// Define the new flags for this message + pub async fn set_flags<'a>(&self, id: UniqueIdent, flags: &[Flag]) -> Result<()> { + self.mbox.write().await.set_flags(id, flags).await + } + + /// Insert an email into the mailbox + pub async fn append<'a>( + &self, + msg: IMF<'a>, + ident: Option, + flags: &[Flag], + ) -> Result<(ImapUidvalidity, ImapUid, ModSeq)> { + self.mbox.write().await.append(msg, ident, flags).await + } + + /// Insert an email into the mailbox, copying it from an existing S3 object + pub async fn append_from_s3<'a>( + &self, + msg: IMF<'a>, + ident: UniqueIdent, + blob_ref: storage::BlobRef, + message_key: Key, + ) -> Result<()> { + self.mbox + .write() + .await + .append_from_s3(msg, ident, blob_ref, message_key) + .await + } + + /// Delete a message definitively from the mailbox + pub async fn delete<'a>(&self, id: UniqueIdent) -> Result<()> { + self.mbox.write().await.delete(id).await + } + + /// Copy an email from an other Mailbox to this mailbox + /// (use this when possible, as it allows for a certain number of storage optimizations) + pub async fn copy_from(&self, from: &Mailbox, uuid: UniqueIdent) -> Result { + if self.id == from.id { + bail!("Cannot copy into same mailbox"); + } + + let (mut selflock, fromlock); + if self.id < from.id { + selflock = self.mbox.write().await; + fromlock = from.mbox.write().await; + } else { + fromlock = from.mbox.write().await; + selflock = self.mbox.write().await; + }; + selflock.copy_from(&fromlock, uuid).await + } + + /// Move an email from an other Mailbox to this mailbox + /// (use this when possible, as it allows for a certain number of storage optimizations) + pub async fn move_from(&self, from: &Mailbox, uuid: UniqueIdent) -> Result<()> { + if self.id == from.id { + bail!("Cannot copy move same mailbox"); + } + + let (mut selflock, mut fromlock); + if self.id < from.id { + selflock = self.mbox.write().await; + fromlock = from.mbox.write().await; + } else { + fromlock = from.mbox.write().await; + selflock = self.mbox.write().await; + }; + selflock.move_from(&mut fromlock, uuid).await + } +} + +// ---- + +// Non standard but common flags: +// https://www.iana.org/assignments/imap-jmap-keywords/imap-jmap-keywords.xhtml +struct MailboxInternal { + // 2023-05-15 will probably be used later. 
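+    // `id` also fixes the storage layout: message blobs and metadata live
+    // under "mail/<id>/..." (see `mail_path`) and the Bayou uid index under
+    // "index/<id>".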
+ #[allow(dead_code)] + id: UniqueIdent, + mail_path: String, + encryption_key: Key, + storage: Store, + uid_index: Bayou, +} + +impl MailboxInternal { + async fn force_sync(&mut self) -> Result<()> { + self.uid_index.sync().await?; + Ok(()) + } + + async fn opportunistic_sync(&mut self) -> Result<()> { + self.uid_index.opportunistic_sync().await?; + Ok(()) + } + + fn notifier(&self) -> std::sync::Weak { + self.uid_index.notifier() + } + + // ---- Functions for reading the mailbox ---- + + async fn fetch_meta(&self, ids: &[UniqueIdent]) -> Result> { + let ids = ids.iter().map(|x| x.to_string()).collect::>(); + let ops = ids + .iter() + .map(|id| RowRef::new(self.mail_path.as_str(), id.as_str())) + .collect::>(); + let res_vec = self.storage.row_fetch(&Selector::List(ops)).await?; + + let mut meta_vec = vec![]; + for res in res_vec.into_iter() { + let mut meta_opt = None; + + // Resolve conflicts + for v in res.value.iter() { + match v { + storage::Alternative::Tombstone => (), + storage::Alternative::Value(v) => { + let meta = open_deserialize::(v, &self.encryption_key)?; + match meta_opt.as_mut() { + None => { + meta_opt = Some(meta); + } + Some(prevmeta) => { + prevmeta.try_merge(meta)?; + } + } + } + } + } + if let Some(meta) = meta_opt { + meta_vec.push(meta); + } else { + bail!("No valid meta value in k2v for {:?}", res.row_ref); + } + } + + Ok(meta_vec) + } + + async fn fetch_full(&self, id: UniqueIdent, message_key: &Key) -> Result> { + let obj_res = self + .storage + .blob_fetch(&BlobRef(format!("{}/{}", self.mail_path, id))) + .await?; + let body = obj_res.value; + cryptoblob::open(&body, message_key) + } + + // ---- Functions for changing the mailbox ---- + + async fn add_flags(&mut self, ident: UniqueIdent, flags: &[Flag]) -> Result<()> { + let add_flag_op = self.uid_index.state().op_flag_add(ident, flags.to_vec()); + self.uid_index.push(add_flag_op).await + } + + async fn del_flags(&mut self, ident: UniqueIdent, flags: &[Flag]) -> Result<()> { + let del_flag_op = self.uid_index.state().op_flag_del(ident, flags.to_vec()); + self.uid_index.push(del_flag_op).await + } + + async fn set_flags(&mut self, ident: UniqueIdent, flags: &[Flag]) -> Result<()> { + let set_flag_op = self.uid_index.state().op_flag_set(ident, flags.to_vec()); + self.uid_index.push(set_flag_op).await + } + + async fn append( + &mut self, + mail: IMF<'_>, + ident: Option, + flags: &[Flag], + ) -> Result<(ImapUidvalidity, ImapUid, ModSeq)> { + let ident = ident.unwrap_or_else(gen_ident); + let message_key = gen_key(); + + futures::try_join!( + async { + // Encrypt and save mail body + let message_blob = cryptoblob::seal(mail.raw, &message_key)?; + self.storage + .blob_insert(BlobVal::new( + BlobRef(format!("{}/{}", self.mail_path, ident)), + message_blob, + )) + .await?; + Ok::<_, anyhow::Error>(()) + }, + async { + // Save mail meta + let meta = MailMeta { + internaldate: now_msec(), + headers: mail.parsed.raw_headers.to_vec(), + message_key: message_key.clone(), + rfc822_size: mail.raw.len(), + }; + let meta_blob = seal_serialize(&meta, &self.encryption_key)?; + self.storage + .row_insert(vec![RowVal::new( + RowRef::new(&self.mail_path, &ident.to_string()), + meta_blob, + )]) + .await?; + Ok::<_, anyhow::Error>(()) + }, + self.uid_index.opportunistic_sync() + )?; + + // Add mail to Bayou mail index + let uid_state = self.uid_index.state(); + let add_mail_op = uid_state.op_mail_add(ident, flags.to_vec()); + + let uidvalidity = uid_state.uidvalidity; + let (uid, modseq) = match add_mail_op { + 
UidIndexOp::MailAdd(_, uid, modseq, _) => (uid, modseq), + _ => unreachable!(), + }; + + self.uid_index.push(add_mail_op).await?; + + Ok((uidvalidity, uid, modseq)) + } + + async fn append_from_s3<'a>( + &mut self, + mail: IMF<'a>, + ident: UniqueIdent, + blob_src: storage::BlobRef, + message_key: Key, + ) -> Result<()> { + futures::try_join!( + async { + // Copy mail body from previous location + let blob_dst = BlobRef(format!("{}/{}", self.mail_path, ident)); + self.storage.blob_copy(&blob_src, &blob_dst).await?; + Ok::<_, anyhow::Error>(()) + }, + async { + // Save mail meta + let meta = MailMeta { + internaldate: now_msec(), + headers: mail.parsed.raw_headers.to_vec(), + message_key: message_key.clone(), + rfc822_size: mail.raw.len(), + }; + let meta_blob = seal_serialize(&meta, &self.encryption_key)?; + self.storage + .row_insert(vec![RowVal::new( + RowRef::new(&self.mail_path, &ident.to_string()), + meta_blob, + )]) + .await?; + Ok::<_, anyhow::Error>(()) + }, + self.uid_index.opportunistic_sync() + )?; + + // Add mail to Bayou mail index + let add_mail_op = self.uid_index.state().op_mail_add(ident, vec![]); + self.uid_index.push(add_mail_op).await?; + + Ok(()) + } + + async fn delete(&mut self, ident: UniqueIdent) -> Result<()> { + if !self.uid_index.state().table.contains_key(&ident) { + bail!("Cannot delete mail that doesn't exit"); + } + + let del_mail_op = self.uid_index.state().op_mail_del(ident); + self.uid_index.push(del_mail_op).await?; + + futures::try_join!( + async { + // Delete mail body from S3 + self.storage + .blob_rm(&BlobRef(format!("{}/{}", self.mail_path, ident))) + .await?; + Ok::<_, anyhow::Error>(()) + }, + async { + // Delete mail meta from K2V + let sk = ident.to_string(); + let res = self + .storage + .row_fetch(&storage::Selector::Single(&RowRef::new( + &self.mail_path, + &sk, + ))) + .await?; + if let Some(row_val) = res.into_iter().next() { + self.storage + .row_rm(&storage::Selector::Single(&row_val.row_ref)) + .await?; + } + Ok::<_, anyhow::Error>(()) + } + )?; + Ok(()) + } + + async fn copy_from( + &mut self, + from: &MailboxInternal, + source_id: UniqueIdent, + ) -> Result { + let new_id = gen_ident(); + self.copy_internal(from, source_id, new_id).await?; + Ok(new_id) + } + + async fn move_from(&mut self, from: &mut MailboxInternal, id: UniqueIdent) -> Result<()> { + self.copy_internal(from, id, id).await?; + from.delete(id).await?; + Ok(()) + } + + async fn copy_internal( + &mut self, + from: &MailboxInternal, + source_id: UniqueIdent, + new_id: UniqueIdent, + ) -> Result<()> { + if self.encryption_key != from.encryption_key { + bail!("Message to be copied/moved does not belong to same account."); + } + + let flags = from + .uid_index + .state() + .table + .get(&source_id) + .ok_or(anyhow!("Source mail not found"))? 
+ .2 + .clone(); + + futures::try_join!( + async { + let dst = BlobRef(format!("{}/{}", self.mail_path, new_id)); + let src = BlobRef(format!("{}/{}", from.mail_path, source_id)); + self.storage.blob_copy(&src, &dst).await?; + Ok::<_, anyhow::Error>(()) + }, + async { + // Copy mail meta in K2V + let meta = &from.fetch_meta(&[source_id]).await?[0]; + let meta_blob = seal_serialize(meta, &self.encryption_key)?; + self.storage + .row_insert(vec![RowVal::new( + RowRef::new(&self.mail_path, &new_id.to_string()), + meta_blob, + )]) + .await?; + Ok::<_, anyhow::Error>(()) + }, + self.uid_index.opportunistic_sync(), + )?; + + // Add mail to Bayou mail index + let add_mail_op = self.uid_index.state().op_mail_add(new_id, flags); + self.uid_index.push(add_mail_op).await?; + + Ok(()) + } +} + +// Can be useful to debug so we want this code +// to be available to developers +#[allow(dead_code)] +fn dump(uid_index: &Bayou) { + let s = uid_index.state(); + println!("---- MAILBOX STATE ----"); + println!("UIDVALIDITY {}", s.uidvalidity); + println!("UIDNEXT {}", s.uidnext); + println!("INTERNALSEQ {}", s.internalseq); + for (uid, ident) in s.idx_by_uid.iter() { + println!( + "{} {} {}", + uid, + hex::encode(ident.0), + s.table.get(ident).cloned().unwrap().2.join(", ") + ); + } + println!(); +} + +// ---- + +/// The metadata of a message that is stored in K2V +/// at pk = mail/, sk = +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct MailMeta { + /// INTERNALDATE field (milliseconds since epoch) + pub internaldate: u64, + /// Headers of the message + pub headers: Vec, + /// Secret key for decrypting entire message + pub message_key: Key, + /// RFC822 size + pub rfc822_size: usize, +} + +impl MailMeta { + fn try_merge(&mut self, other: Self) -> Result<()> { + if self.headers != other.headers + || self.message_key != other.message_key + || self.rfc822_size != other.rfc822_size + { + bail!("Conflicting MailMeta values."); + } + self.internaldate = std::cmp::max(self.internaldate, other.internaldate); + Ok(()) + } +} diff --git a/aero-collections/mail/mod.rs b/aero-collections/mail/mod.rs new file mode 100644 index 0000000..03e85cd --- /dev/null +++ b/aero-collections/mail/mod.rs @@ -0,0 +1,27 @@ +use std::convert::TryFrom; + +pub mod incoming; +pub mod mailbox; +pub mod query; +pub mod snapshot; +pub mod uidindex; +pub mod unique_ident; +pub mod namespace; + +// Internet Message Format +// aka RFC 822 - RFC 2822 - RFC 5322 +// 2023-05-15 don't want to refactor this struct now. 
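+// An IMF value keeps both the raw bytes (stored verbatim, encrypted, in the
+// blob store) and the eml-codec parse of those bytes, which is used to
+// extract the headers recorded in MailMeta.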
+#[allow(clippy::upper_case_acronyms)] +pub struct IMF<'a> { + raw: &'a [u8], + parsed: eml_codec::part::composite::Message<'a>, +} + +impl<'a> TryFrom<&'a [u8]> for IMF<'a> { + type Error = (); + + fn try_from(body: &'a [u8]) -> Result, ()> { + let parsed = eml_codec::parse_message(body).or(Err(()))?.1; + Ok(Self { raw: body, parsed }) + } +} diff --git a/aero-collections/mail/namespace.rs b/aero-collections/mail/namespace.rs new file mode 100644 index 0000000..5e67173 --- /dev/null +++ b/aero-collections/mail/namespace.rs @@ -0,0 +1,209 @@ +use std::collections::{BTreeMap, HashMap}; +use std::sync::{Arc, Weak}; + +use anyhow::{anyhow, bail, Result}; +use lazy_static::lazy_static; +use serde::{Deserialize, Serialize}; +use tokio::sync::watch; + +use crate::cryptoblob::{open_deserialize, seal_serialize}; +use crate::login::Credentials; +use crate::mail::incoming::incoming_mail_watch_process; +use crate::mail::mailbox::Mailbox; +use crate::mail::uidindex::ImapUidvalidity; +use crate::mail::unique_ident::{gen_ident, UniqueIdent}; +use crate::storage; +use crate::timestamp::now_msec; + +pub const MAILBOX_HIERARCHY_DELIMITER: char = '.'; + +/// INBOX is the only mailbox that must always exist. +/// It is created automatically when the account is created. +/// IMAP allows the user to rename INBOX to something else, +/// in this case all messages from INBOX are moved to a mailbox +/// with the new name and the INBOX mailbox still exists and is empty. +/// In our implementation, we indeed move the underlying mailbox +/// to the new name (i.e. the new name has the same id as the previous +/// INBOX), and we create a new empty mailbox for INBOX. +pub const INBOX: &str = "INBOX"; + +/// For convenience purpose, we also create some special mailbox +/// that are described in RFC6154 SPECIAL-USE +/// @FIXME maybe it should be a configuration parameter +/// @FIXME maybe we should have a per-mailbox flag mechanism, either an enum or a string, so we +/// track which mailbox is used for what. +/// @FIXME Junk could be useful but we don't have any antispam solution yet so... +/// @FIXME IMAP supports virtual mailbox. \All or \Flagged are intended to be virtual mailboxes. +/// \Trash might be one, or not one. I don't know what we should do there. 
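+/// The names below correspond to the RFC 6154 special-use attributes
+/// \Drafts, \Archive, \Sent and \Trash respectively.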
+pub const DRAFTS: &str = "Drafts"; +pub const ARCHIVE: &str = "Archive"; +pub const SENT: &str = "Sent"; +pub const TRASH: &str = "Trash"; + +pub(crate) const MAILBOX_LIST_PK: &str = "mailboxes"; +pub(crate) const MAILBOX_LIST_SK: &str = "list"; + +// ---- User's mailbox list (serialized in K2V) ---- + +#[derive(Serialize, Deserialize)] +pub(crate) struct MailboxList(BTreeMap); + +#[derive(Serialize, Deserialize, Clone, Copy, Debug)] +pub(crate) struct MailboxListEntry { + id_lww: (u64, Option), + uidvalidity: ImapUidvalidity, +} + +impl MailboxListEntry { + fn merge(&mut self, other: &Self) { + // Simple CRDT merge rule + if other.id_lww.0 > self.id_lww.0 + || (other.id_lww.0 == self.id_lww.0 && other.id_lww.1 > self.id_lww.1) + { + self.id_lww = other.id_lww; + } + self.uidvalidity = std::cmp::max(self.uidvalidity, other.uidvalidity); + } +} + +impl MailboxList { + pub(crate) fn new() -> Self { + Self(BTreeMap::new()) + } + + pub(crate) fn merge(&mut self, list2: Self) { + for (k, v) in list2.0.into_iter() { + if let Some(e) = self.0.get_mut(&k) { + e.merge(&v); + } else { + self.0.insert(k, v); + } + } + } + + pub(crate) fn existing_mailbox_names(&self) -> Vec { + self.0 + .iter() + .filter(|(_, v)| v.id_lww.1.is_some()) + .map(|(k, _)| k.to_string()) + .collect() + } + + pub(crate) fn has_mailbox(&self, name: &str) -> bool { + matches!( + self.0.get(name), + Some(MailboxListEntry { + id_lww: (_, Some(_)), + .. + }) + ) + } + + pub(crate) fn get_mailbox(&self, name: &str) -> Option<(ImapUidvalidity, Option)> { + self.0.get(name).map( + |MailboxListEntry { + id_lww: (_, mailbox_id), + uidvalidity, + }| (*uidvalidity, *mailbox_id), + ) + } + + /// Ensures mailbox `name` maps to id `id`. + /// If it already mapped to that, returns None. + /// If a change had to be done, returns Some(new uidvalidity in mailbox). + pub(crate) fn set_mailbox(&mut self, name: &str, id: Option) -> Option { + let (ts, id, uidvalidity) = match self.0.get_mut(name) { + None => { + if id.is_none() { + return None; + } else { + (now_msec(), id, ImapUidvalidity::new(1).unwrap()) + } + } + Some(MailboxListEntry { + id_lww, + uidvalidity, + }) => { + if id_lww.1 == id { + return None; + } else { + ( + std::cmp::max(id_lww.0 + 1, now_msec()), + id, + ImapUidvalidity::new(uidvalidity.get() + 1).unwrap(), + ) + } + } + }; + + self.0.insert( + name.into(), + MailboxListEntry { + id_lww: (ts, id), + uidvalidity, + }, + ); + Some(uidvalidity) + } + + pub(crate) fn update_uidvalidity(&mut self, name: &str, new_uidvalidity: ImapUidvalidity) { + match self.0.get_mut(name) { + None => { + self.0.insert( + name.into(), + MailboxListEntry { + id_lww: (now_msec(), None), + uidvalidity: new_uidvalidity, + }, + ); + } + Some(MailboxListEntry { uidvalidity, .. 
}) => { + *uidvalidity = std::cmp::max(*uidvalidity, new_uidvalidity); + } + } + } + + pub(crate) fn create_mailbox(&mut self, name: &str) -> CreatedMailbox { + if let Some(MailboxListEntry { + id_lww: (_, Some(id)), + uidvalidity, + }) = self.0.get(name) + { + return CreatedMailbox::Existed(*id, *uidvalidity); + } + + let id = gen_ident(); + let uidvalidity = self.set_mailbox(name, Some(id)).unwrap(); + CreatedMailbox::Created(id, uidvalidity) + } + + pub(crate) fn rename_mailbox(&mut self, old_name: &str, new_name: &str) -> Result<()> { + if let Some((uidvalidity, Some(mbid))) = self.get_mailbox(old_name) { + if self.has_mailbox(new_name) { + bail!( + "Cannot rename {} into {}: {} already exists", + old_name, + new_name, + new_name + ); + } + + self.set_mailbox(old_name, None); + self.set_mailbox(new_name, Some(mbid)); + self.update_uidvalidity(new_name, uidvalidity); + Ok(()) + } else { + bail!( + "Cannot rename {} into {}: {} doesn't exist", + old_name, + new_name, + old_name + ); + } + } +} + +pub(crate) enum CreatedMailbox { + Created(UniqueIdent, ImapUidvalidity), + Existed(UniqueIdent, ImapUidvalidity), +} diff --git a/aero-collections/mail/query.rs b/aero-collections/mail/query.rs new file mode 100644 index 0000000..3e6fe99 --- /dev/null +++ b/aero-collections/mail/query.rs @@ -0,0 +1,137 @@ +use super::mailbox::MailMeta; +use super::snapshot::FrozenMailbox; +use super::unique_ident::UniqueIdent; +use anyhow::Result; +use futures::future::FutureExt; +use futures::stream::{BoxStream, Stream, StreamExt}; + +/// Query is in charge of fetching efficiently +/// requested data for a list of emails +pub struct Query<'a, 'b> { + pub frozen: &'a FrozenMailbox, + pub emails: &'b [UniqueIdent], + pub scope: QueryScope, +} + +#[derive(Debug)] +pub enum QueryScope { + Index, + Partial, + Full, +} +impl QueryScope { + pub fn union(&self, other: &QueryScope) -> QueryScope { + match (self, other) { + (QueryScope::Full, _) | (_, QueryScope::Full) => QueryScope::Full, + (QueryScope::Partial, _) | (_, QueryScope::Partial) => QueryScope::Partial, + (QueryScope::Index, QueryScope::Index) => QueryScope::Index, + } + } +} + +//type QueryResultStream = Box>>; + +impl<'a, 'b> Query<'a, 'b> { + pub fn fetch(&self) -> BoxStream> { + match self.scope { + QueryScope::Index => Box::pin( + futures::stream::iter(self.emails) + .map(|&uuid| Ok(QueryResult::IndexResult { uuid })), + ), + QueryScope::Partial => Box::pin(self.partial()), + QueryScope::Full => Box::pin(self.full()), + } + } + + // --- functions below are private *for reasons* + fn partial<'d>(&'d self) -> impl Stream> + 'd + Send { + async move { + let maybe_meta_list: Result> = + self.frozen.mailbox.fetch_meta(self.emails).await; + let list_res = maybe_meta_list + .map(|meta_list| { + meta_list + .into_iter() + .zip(self.emails) + .map(|(metadata, &uuid)| Ok(QueryResult::PartialResult { uuid, metadata })) + .collect() + }) + .unwrap_or_else(|e| vec![Err(e)]); + + futures::stream::iter(list_res) + } + .flatten_stream() + } + + fn full<'d>(&'d self) -> impl Stream> + 'd + Send { + self.partial().then(move |maybe_meta| async move { + let meta = maybe_meta?; + + let content = self + .frozen + .mailbox + .fetch_full( + *meta.uuid(), + &meta + .metadata() + .expect("meta to be PartialResult") + .message_key, + ) + .await?; + + Ok(meta.into_full(content).expect("meta to be PartialResult")) + }) + } +} + +#[derive(Debug, Clone)] +pub enum QueryResult { + IndexResult { + uuid: UniqueIdent, + }, + PartialResult { + uuid: UniqueIdent, + metadata: MailMeta, + 
}, + FullResult { + uuid: UniqueIdent, + metadata: MailMeta, + content: Vec, + }, +} +impl QueryResult { + pub fn uuid(&self) -> &UniqueIdent { + match self { + Self::IndexResult { uuid, .. } => uuid, + Self::PartialResult { uuid, .. } => uuid, + Self::FullResult { uuid, .. } => uuid, + } + } + + pub fn metadata(&self) -> Option<&MailMeta> { + match self { + Self::IndexResult { .. } => None, + Self::PartialResult { metadata, .. } => Some(metadata), + Self::FullResult { metadata, .. } => Some(metadata), + } + } + + #[allow(dead_code)] + pub fn content(&self) -> Option<&[u8]> { + match self { + Self::FullResult { content, .. } => Some(content), + _ => None, + } + } + + fn into_full(self, content: Vec) -> Option { + match self { + Self::PartialResult { uuid, metadata } => Some(Self::FullResult { + uuid, + metadata, + content, + }), + _ => None, + } + } +} diff --git a/aero-collections/mail/snapshot.rs b/aero-collections/mail/snapshot.rs new file mode 100644 index 0000000..ed756b5 --- /dev/null +++ b/aero-collections/mail/snapshot.rs @@ -0,0 +1,60 @@ +use std::sync::Arc; + +use anyhow::Result; + +use super::mailbox::Mailbox; +use super::query::{Query, QueryScope}; +use super::uidindex::UidIndex; +use super::unique_ident::UniqueIdent; + +/// A Frozen Mailbox has a snapshot of the current mailbox +/// state that is desynchronized with the real mailbox state. +/// It's up to the user to choose when their snapshot must be updated +/// to give useful information to their clients +pub struct FrozenMailbox { + pub mailbox: Arc, + pub snapshot: UidIndex, +} + +impl FrozenMailbox { + /// Create a snapshot from a mailbox, the mailbox + the snapshot + /// becomes the "Frozen Mailbox". + pub async fn new(mailbox: Arc) -> Self { + let state = mailbox.current_uid_index().await; + + Self { + mailbox, + snapshot: state, + } + } + + /// Force the synchronization of the inner mailbox + /// but do not update the local snapshot + pub async fn sync(&self) -> Result<()> { + self.mailbox.opportunistic_sync().await + } + + /// Peek snapshot without updating the frozen mailbox + /// Can be useful if you want to plan some writes + /// while sending a diff to the client later + pub async fn peek(&self) -> UidIndex { + self.mailbox.current_uid_index().await + } + + /// Update the FrozenMailbox local snapshot. + /// Returns the old snapshot, so you can build a diff + pub async fn update(&mut self) -> UidIndex { + let old_snapshot = self.snapshot.clone(); + self.snapshot = self.mailbox.current_uid_index().await; + + old_snapshot + } + + pub fn query<'a, 'b>(&'a self, uuids: &'b [UniqueIdent], scope: QueryScope) -> Query<'a, 'b> { + Query { + frozen: self, + emails: uuids, + scope, + } + } +} diff --git a/aero-collections/mail/uidindex.rs b/aero-collections/mail/uidindex.rs new file mode 100644 index 0000000..5a06670 --- /dev/null +++ b/aero-collections/mail/uidindex.rs @@ -0,0 +1,474 @@ +use std::num::{NonZeroU32, NonZeroU64}; + +use im::{HashMap, OrdMap, OrdSet}; +use serde::{Deserialize, Deserializer, Serialize, Serializer}; + +use crate::bayou::*; +use crate::mail::unique_ident::UniqueIdent; + +pub type ModSeq = NonZeroU64; +pub type ImapUid = NonZeroU32; +pub type ImapUidvalidity = NonZeroU32; +pub type Flag = String; +pub type IndexEntry = (ImapUid, ModSeq, Vec); + +/// A UidIndex handles the mutable part of a mailbox +/// It is built by running the event log on it +/// Each applied log generates a new UidIndex by cloning the previous one +/// and applying the event. 
This is why we use immutable datastructures: +/// they are cheap to clone. +#[derive(Clone)] +pub struct UidIndex { + // Source of trust + pub table: OrdMap, + + // Indexes optimized for queries + pub idx_by_uid: OrdMap, + pub idx_by_modseq: OrdMap, + pub idx_by_flag: FlagIndex, + + // "Public" Counters + pub uidvalidity: ImapUidvalidity, + pub uidnext: ImapUid, + pub highestmodseq: ModSeq, + + // "Internal" Counters + pub internalseq: ImapUid, + pub internalmodseq: ModSeq, +} + +#[derive(Clone, Serialize, Deserialize, Debug)] +pub enum UidIndexOp { + MailAdd(UniqueIdent, ImapUid, ModSeq, Vec), + MailDel(UniqueIdent), + FlagAdd(UniqueIdent, ModSeq, Vec), + FlagDel(UniqueIdent, ModSeq, Vec), + FlagSet(UniqueIdent, ModSeq, Vec), + BumpUidvalidity(u32), +} + +impl UidIndex { + #[must_use] + pub fn op_mail_add(&self, ident: UniqueIdent, flags: Vec) -> UidIndexOp { + UidIndexOp::MailAdd(ident, self.internalseq, self.internalmodseq, flags) + } + + #[must_use] + pub fn op_mail_del(&self, ident: UniqueIdent) -> UidIndexOp { + UidIndexOp::MailDel(ident) + } + + #[must_use] + pub fn op_flag_add(&self, ident: UniqueIdent, flags: Vec) -> UidIndexOp { + UidIndexOp::FlagAdd(ident, self.internalmodseq, flags) + } + + #[must_use] + pub fn op_flag_del(&self, ident: UniqueIdent, flags: Vec) -> UidIndexOp { + UidIndexOp::FlagDel(ident, self.internalmodseq, flags) + } + + #[must_use] + pub fn op_flag_set(&self, ident: UniqueIdent, flags: Vec) -> UidIndexOp { + UidIndexOp::FlagSet(ident, self.internalmodseq, flags) + } + + #[must_use] + pub fn op_bump_uidvalidity(&self, count: u32) -> UidIndexOp { + UidIndexOp::BumpUidvalidity(count) + } + + // INTERNAL functions to keep state consistent + + fn reg_email(&mut self, ident: UniqueIdent, uid: ImapUid, modseq: ModSeq, flags: &[Flag]) { + // Insert the email in our table + self.table.insert(ident, (uid, modseq, flags.to_owned())); + + // Update the indexes/caches + self.idx_by_uid.insert(uid, ident); + self.idx_by_flag.insert(uid, flags); + self.idx_by_modseq.insert(modseq, ident); + } + + fn unreg_email(&mut self, ident: &UniqueIdent) { + // We do nothing if the mail does not exist + let (uid, modseq, flags) = match self.table.get(ident) { + Some(v) => v, + None => return, + }; + + // Delete all cache entries + self.idx_by_uid.remove(uid); + self.idx_by_flag.remove(*uid, flags); + self.idx_by_modseq.remove(modseq); + + // Remove from source of trust + self.table.remove(ident); + } +} + +impl Default for UidIndex { + fn default() -> Self { + Self { + table: OrdMap::new(), + + idx_by_uid: OrdMap::new(), + idx_by_modseq: OrdMap::new(), + idx_by_flag: FlagIndex::new(), + + uidvalidity: NonZeroU32::new(1).unwrap(), + uidnext: NonZeroU32::new(1).unwrap(), + highestmodseq: NonZeroU64::new(1).unwrap(), + + internalseq: NonZeroU32::new(1).unwrap(), + internalmodseq: NonZeroU64::new(1).unwrap(), + } + } +} + +impl BayouState for UidIndex { + type Op = UidIndexOp; + + fn apply(&self, op: &UidIndexOp) -> Self { + let mut new = self.clone(); + match op { + UidIndexOp::MailAdd(ident, uid, modseq, flags) => { + // Change UIDValidity if there is a UID conflict or a MODSEQ conflict + // @FIXME Need to prove that summing work + // The intuition: we increase the UIDValidity by the number of possible conflicts + if *uid < new.internalseq || *modseq < new.internalmodseq { + let bump_uid = new.internalseq.get() - uid.get(); + let bump_modseq = (new.internalmodseq.get() - modseq.get()) as u32; + new.uidvalidity = + NonZeroU32::new(new.uidvalidity.get() + bump_uid + 
bump_modseq).unwrap(); + } + + // Assign the real uid of the email + let new_uid = new.internalseq; + + // Assign the real modseq of the email and its new flags + let new_modseq = new.internalmodseq; + + // Delete the previous entry if any. + // Our proof has no assumption on `ident` uniqueness, + // so we must handle this case even it is very unlikely + // In this case, we overwrite the email. + // Note: assigning a new UID is mandatory. + new.unreg_email(ident); + + // We record our email and update ou caches + new.reg_email(*ident, new_uid, new_modseq, flags); + + // Update counters + new.highestmodseq = new.internalmodseq; + + new.internalseq = NonZeroU32::new(new.internalseq.get() + 1).unwrap(); + new.internalmodseq = NonZeroU64::new(new.internalmodseq.get() + 1).unwrap(); + + new.uidnext = new.internalseq; + } + UidIndexOp::MailDel(ident) => { + // If the email is known locally, we remove its references in all our indexes + new.unreg_email(ident); + + // We update the counter + new.internalseq = NonZeroU32::new(new.internalseq.get() + 1).unwrap(); + } + UidIndexOp::FlagAdd(ident, candidate_modseq, new_flags) => { + if let Some((uid, email_modseq, existing_flags)) = new.table.get_mut(ident) { + // Bump UIDValidity if required + if *candidate_modseq < new.internalmodseq { + let bump_modseq = + (new.internalmodseq.get() - candidate_modseq.get()) as u32; + new.uidvalidity = + NonZeroU32::new(new.uidvalidity.get() + bump_modseq).unwrap(); + } + + // Add flags to the source of trust and the cache + let mut to_add: Vec = new_flags + .iter() + .filter(|f| !existing_flags.contains(f)) + .cloned() + .collect(); + new.idx_by_flag.insert(*uid, &to_add); + *email_modseq = new.internalmodseq; + new.idx_by_modseq.insert(new.internalmodseq, *ident); + existing_flags.append(&mut to_add); + + // Update counters + new.highestmodseq = new.internalmodseq; + new.internalmodseq = NonZeroU64::new(new.internalmodseq.get() + 1).unwrap(); + } + } + UidIndexOp::FlagDel(ident, candidate_modseq, rm_flags) => { + if let Some((uid, email_modseq, existing_flags)) = new.table.get_mut(ident) { + // Bump UIDValidity if required + if *candidate_modseq < new.internalmodseq { + let bump_modseq = + (new.internalmodseq.get() - candidate_modseq.get()) as u32; + new.uidvalidity = + NonZeroU32::new(new.uidvalidity.get() + bump_modseq).unwrap(); + } + + // Remove flags from the source of trust and the cache + existing_flags.retain(|x| !rm_flags.contains(x)); + new.idx_by_flag.remove(*uid, rm_flags); + + // Register that email has been modified + new.idx_by_modseq.insert(new.internalmodseq, *ident); + *email_modseq = new.internalmodseq; + + // Update counters + new.highestmodseq = new.internalmodseq; + new.internalmodseq = NonZeroU64::new(new.internalmodseq.get() + 1).unwrap(); + } + } + UidIndexOp::FlagSet(ident, candidate_modseq, new_flags) => { + if let Some((uid, email_modseq, existing_flags)) = new.table.get_mut(ident) { + // Bump UIDValidity if required + if *candidate_modseq < new.internalmodseq { + let bump_modseq = + (new.internalmodseq.get() - candidate_modseq.get()) as u32; + new.uidvalidity = + NonZeroU32::new(new.uidvalidity.get() + bump_modseq).unwrap(); + } + + // Remove flags from the source of trust and the cache + let (keep_flags, rm_flags): (Vec, Vec) = existing_flags + .iter() + .cloned() + .partition(|x| new_flags.contains(x)); + *existing_flags = keep_flags; + let mut to_add: Vec = new_flags + .iter() + .filter(|f| !existing_flags.contains(f)) + .cloned() + .collect(); + existing_flags.append(&mut to_add); 
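+ // For intuition, a small worked example with illustrative flag values:
+ // if the stored flags are ["\Seen", "\Answered"] and new_flags is
+ // ["\Seen", "\Flagged"], then keep_flags = ["\Seen"], rm_flags = ["\Answered"]
+ // and to_add = ["\Flagged"], so the entry ends up holding exactly the
+ // requested set ["\Seen", "\Flagged"] while the indexes below stay in sync.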
+ new.idx_by_flag.remove(*uid, &rm_flags); + new.idx_by_flag.insert(*uid, &to_add); + + // Register that email has been modified + new.idx_by_modseq.insert(new.internalmodseq, *ident); + *email_modseq = new.internalmodseq; + + // Update counters + new.highestmodseq = new.internalmodseq; + new.internalmodseq = NonZeroU64::new(new.internalmodseq.get() + 1).unwrap(); + } + } + UidIndexOp::BumpUidvalidity(count) => { + new.uidvalidity = ImapUidvalidity::new(new.uidvalidity.get() + *count) + .unwrap_or(ImapUidvalidity::new(u32::MAX).unwrap()); + } + } + new + } +} + +// ---- FlagIndex implementation ---- + +#[derive(Clone)] +pub struct FlagIndex(HashMap>); +pub type FlagIter<'a> = im::hashmap::Keys<'a, Flag, OrdSet>; + +impl FlagIndex { + fn new() -> Self { + Self(HashMap::new()) + } + fn insert(&mut self, uid: ImapUid, flags: &[Flag]) { + flags.iter().for_each(|flag| { + self.0 + .entry(flag.clone()) + .or_insert(OrdSet::new()) + .insert(uid); + }); + } + fn remove(&mut self, uid: ImapUid, flags: &[Flag]) { + for flag in flags.iter() { + if let Some(set) = self.0.get_mut(flag) { + set.remove(&uid); + if set.is_empty() { + self.0.remove(flag); + } + } + } + } + + pub fn get(&self, f: &Flag) -> Option<&OrdSet> { + self.0.get(f) + } + + pub fn flags(&self) -> FlagIter { + self.0.keys() + } +} + +// ---- CUSTOM SERIALIZATION AND DESERIALIZATION ---- + +#[derive(Serialize, Deserialize)] +struct UidIndexSerializedRepr { + mails: Vec<(ImapUid, ModSeq, UniqueIdent, Vec)>, + + uidvalidity: ImapUidvalidity, + uidnext: ImapUid, + highestmodseq: ModSeq, + + internalseq: ImapUid, + internalmodseq: ModSeq, +} + +impl<'de> Deserialize<'de> for UidIndex { + fn deserialize(d: D) -> Result + where + D: Deserializer<'de>, + { + let val: UidIndexSerializedRepr = UidIndexSerializedRepr::deserialize(d)?; + + let mut uidindex = UidIndex { + table: OrdMap::new(), + + idx_by_uid: OrdMap::new(), + idx_by_modseq: OrdMap::new(), + idx_by_flag: FlagIndex::new(), + + uidvalidity: val.uidvalidity, + uidnext: val.uidnext, + highestmodseq: val.highestmodseq, + + internalseq: val.internalseq, + internalmodseq: val.internalmodseq, + }; + + val.mails + .iter() + .for_each(|(uid, modseq, uuid, flags)| uidindex.reg_email(*uuid, *uid, *modseq, flags)); + + Ok(uidindex) + } +} + +impl Serialize for UidIndex { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + let mut mails = vec![]; + for (ident, (uid, modseq, flags)) in self.table.iter() { + mails.push((*uid, *modseq, *ident, flags.clone())); + } + + let val = UidIndexSerializedRepr { + mails, + uidvalidity: self.uidvalidity, + uidnext: self.uidnext, + highestmodseq: self.highestmodseq, + internalseq: self.internalseq, + internalmodseq: self.internalmodseq, + }; + + val.serialize(serializer) + } +} + +// ---- TESTS ---- + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_uidindex() { + let mut state = UidIndex::default(); + + // Add message 1 + { + let m = UniqueIdent([0x01; 24]); + let f = vec!["\\Recent".to_string(), "\\Archive".to_string()]; + let ev = state.op_mail_add(m, f); + state = state.apply(&ev); + + // Early checks + assert_eq!(state.table.len(), 1); + let (uid, modseq, flags) = state.table.get(&m).unwrap(); + assert_eq!(*uid, NonZeroU32::new(1).unwrap()); + assert_eq!(*modseq, NonZeroU64::new(1).unwrap()); + assert_eq!(flags.len(), 2); + let ident = state.idx_by_uid.get(&NonZeroU32::new(1).unwrap()).unwrap(); + assert_eq!(&m, ident); + let recent = state.idx_by_flag.0.get("\\Recent").unwrap(); + assert_eq!(recent.len(), 1); + 
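// The only UID carrying "\Recent" should be UID 1, uidnext should have
+ // advanced to 2, and uidvalidity should still be 1 since nothing conflicted. +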
assert_eq!(recent.iter().next().unwrap(), &NonZeroU32::new(1).unwrap()); + assert_eq!(state.uidnext, NonZeroU32::new(2).unwrap()); + assert_eq!(state.uidvalidity, NonZeroU32::new(1).unwrap()); + } + + // Add message 2 + { + let m = UniqueIdent([0x02; 24]); + let f = vec!["\\Seen".to_string(), "\\Archive".to_string()]; + let ev = state.op_mail_add(m, f); + state = state.apply(&ev); + + let archive = state.idx_by_flag.0.get("\\Archive").unwrap(); + assert_eq!(archive.len(), 2); + } + + // Add flags to message 1 + { + let m = UniqueIdent([0x01; 24]); + let f = vec!["Important".to_string(), "$cl_1".to_string()]; + let ev = state.op_flag_add(m, f); + state = state.apply(&ev); + } + + // Delete flags from message 1 + { + let m = UniqueIdent([0x01; 24]); + let f = vec!["\\Recent".to_string()]; + let ev = state.op_flag_del(m, f); + state = state.apply(&ev); + + let archive = state.idx_by_flag.0.get("\\Archive").unwrap(); + assert_eq!(archive.len(), 2); + } + + // Delete message 2 + { + let m = UniqueIdent([0x02; 24]); + let ev = state.op_mail_del(m); + state = state.apply(&ev); + + let archive = state.idx_by_flag.0.get("\\Archive").unwrap(); + assert_eq!(archive.len(), 1); + } + + // Add a message 3 concurrent to message 1 (trigger a uid validity change) + { + let m = UniqueIdent([0x03; 24]); + let f = vec!["\\Archive".to_string(), "\\Recent".to_string()]; + let ev = UidIndexOp::MailAdd( + m, + NonZeroU32::new(1).unwrap(), + NonZeroU64::new(1).unwrap(), + f, + ); + state = state.apply(&ev); + } + + // Checks + { + assert_eq!(state.table.len(), 2); + assert!(state.uidvalidity > NonZeroU32::new(1).unwrap()); + + let (last_uid, ident) = state.idx_by_uid.get_max().unwrap(); + assert_eq!(ident, &UniqueIdent([0x03; 24])); + + let archive = state.idx_by_flag.0.get("\\Archive").unwrap(); + assert_eq!(archive.len(), 2); + let mut iter = archive.iter(); + assert_eq!(iter.next().unwrap(), &NonZeroU32::new(1).unwrap()); + assert_eq!(iter.next().unwrap(), last_uid); + } + } +} diff --git a/aero-collections/mail/unique_ident.rs b/aero-collections/mail/unique_ident.rs new file mode 100644 index 0000000..0e629db --- /dev/null +++ b/aero-collections/mail/unique_ident.rs @@ -0,0 +1,101 @@ +use std::str::FromStr; +use std::sync::atomic::{AtomicU64, Ordering}; + +use lazy_static::lazy_static; +use rand::prelude::*; +use serde::{de::Error, Deserialize, Deserializer, Serialize, Serializer}; + +use crate::timestamp::now_msec; + +/// An internal Mail Identifier is composed of two components: +/// - a process identifier, 128 bits, itself composed of: +/// - the timestamp of when the process started, 64 bits +/// - a 64-bit random number +/// - a sequence number, 64 bits +/// They are not part of the protocol but an internal representation +/// required by Aerogramme. +/// Their main property is to be unique without having to rely +/// on synchronization between IMAP processes. +#[derive(Clone, Copy, PartialOrd, Ord, PartialEq, Eq, Hash)] +pub struct UniqueIdent(pub [u8; 24]); + +struct IdentGenerator { + pid: u128, + sn: AtomicU64, +} + +impl IdentGenerator { + fn new() -> Self { + let time = now_msec() as u128; + let rand = thread_rng().gen::() as u128; + Self { + pid: (time << 64) | rand, + sn: AtomicU64::new(0), + } + } + + fn gen(&self) -> UniqueIdent { + let sn = self.sn.fetch_add(1, Ordering::Relaxed); + let mut res = [0u8; 24]; + res[0..16].copy_from_slice(&u128::to_be_bytes(self.pid)); + res[16..24].copy_from_slice(&u64::to_be_bytes(sn)); + UniqueIdent(res) + } +} + +lazy_static! 
{ + static ref GENERATOR: IdentGenerator = IdentGenerator::new(); +} + +pub fn gen_ident() -> UniqueIdent { + GENERATOR.gen() +} + +// -- serde -- + +impl<'de> Deserialize<'de> for UniqueIdent { + fn deserialize(d: D) -> Result + where + D: Deserializer<'de>, + { + let v = String::deserialize(d)?; + UniqueIdent::from_str(&v).map_err(D::Error::custom) + } +} + +impl Serialize for UniqueIdent { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + serializer.serialize_str(&self.to_string()) + } +} + +impl std::fmt::Display for UniqueIdent { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", hex::encode(self.0)) + } +} + +impl std::fmt::Debug for UniqueIdent { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", hex::encode(self.0)) + } +} + +impl FromStr for UniqueIdent { + type Err = &'static str; + + fn from_str(s: &str) -> Result { + let bytes = hex::decode(s).map_err(|_| "invalid hex")?; + + if bytes.len() != 24 { + return Err("bad length"); + } + + let mut tmp = [0u8; 24]; + tmp[..].copy_from_slice(&bytes); + Ok(UniqueIdent(tmp)) + } +} diff --git a/aero-collections/user.rs b/aero-collections/user.rs new file mode 100644 index 0000000..a38b9c1 --- /dev/null +++ b/aero-collections/user.rs @@ -0,0 +1,313 @@ +use std::collections::{BTreeMap, HashMap}; +use std::sync::{Arc, Weak}; + +use anyhow::{anyhow, bail, Result}; +use lazy_static::lazy_static; +use serde::{Deserialize, Serialize}; +use tokio::sync::watch; + +use crate::cryptoblob::{open_deserialize, seal_serialize}; +use crate::login::Credentials; +use crate::mail::incoming::incoming_mail_watch_process; +use crate::mail::mailbox::Mailbox; +use crate::mail::uidindex::ImapUidvalidity; +use crate::mail::unique_ident::{gen_ident, UniqueIdent}; +use crate::storage; +use crate::timestamp::now_msec; + +use crate::mail::namespace::{MAILBOX_HIERARCHY_DELIMITER, INBOX, DRAFTS, ARCHIVE, SENT, TRASH, MAILBOX_LIST_PK, MAILBOX_LIST_SK,MailboxList,CreatedMailbox}; + +//@FIXME User should be totally rewriten +//to extract the local mailbox list +//to the mail/namespace.rs file (and mailbox list should be reworded as mail namespace) + +pub struct User { + pub username: String, + pub creds: Credentials, + pub storage: storage::Store, + pub mailboxes: std::sync::Mutex>>, + + tx_inbox_id: watch::Sender>, +} + +impl User { + pub async fn new(username: String, creds: Credentials) -> Result> { + let cache_key = (username.clone(), creds.storage.unique()); + + { + let cache = USER_CACHE.lock().unwrap(); + if let Some(u) = cache.get(&cache_key).and_then(Weak::upgrade) { + return Ok(u); + } + } + + let user = Self::open(username, creds).await?; + + let mut cache = USER_CACHE.lock().unwrap(); + if let Some(concurrent_user) = cache.get(&cache_key).and_then(Weak::upgrade) { + drop(user); + Ok(concurrent_user) + } else { + cache.insert(cache_key, Arc::downgrade(&user)); + Ok(user) + } + } + + /// Lists user's available mailboxes + pub async fn list_mailboxes(&self) -> Result> { + let (list, _ct) = self.load_mailbox_list().await?; + Ok(list.existing_mailbox_names()) + } + + /// Opens an existing mailbox given its IMAP name. + pub async fn open_mailbox(&self, name: &str) -> Result>> { + let (mut list, ct) = self.load_mailbox_list().await?; + + //@FIXME it could be a trace or an opentelemtry trace thing. 
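+ // Flow sketch for what follows: look the name up in the mailbox list; if an
+ // id is bound to it, open that mailbox, and when the mailbox's own uidvalidity
+ // has moved past the value recorded in the list, write the larger value back
+ // so later opens start from an up-to-date uidvalidity.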
+ // Be careful to not leak sensible data + /* + eprintln!("List of mailboxes:"); + for ent in list.0.iter() { + eprintln!(" - {:?}", ent); + } + */ + + if let Some((uidvalidity, Some(mbid))) = list.get_mailbox(name) { + let mb = self.open_mailbox_by_id(mbid, uidvalidity).await?; + let mb_uidvalidity = mb.current_uid_index().await.uidvalidity; + if mb_uidvalidity > uidvalidity { + list.update_uidvalidity(name, mb_uidvalidity); + self.save_mailbox_list(&list, ct).await?; + } + Ok(Some(mb)) + } else { + Ok(None) + } + } + + /// Check whether mailbox exists + pub async fn has_mailbox(&self, name: &str) -> Result { + let (list, _ct) = self.load_mailbox_list().await?; + Ok(list.has_mailbox(name)) + } + + /// Creates a new mailbox in the user's IMAP namespace. + pub async fn create_mailbox(&self, name: &str) -> Result<()> { + if name.ends_with(MAILBOX_HIERARCHY_DELIMITER) { + bail!("Invalid mailbox name: {}", name); + } + + let (mut list, ct) = self.load_mailbox_list().await?; + match list.create_mailbox(name) { + CreatedMailbox::Created(_, _) => { + self.save_mailbox_list(&list, ct).await?; + Ok(()) + } + CreatedMailbox::Existed(_, _) => Err(anyhow!("Mailbox {} already exists", name)), + } + } + + /// Deletes a mailbox in the user's IMAP namespace. + pub async fn delete_mailbox(&self, name: &str) -> Result<()> { + if name == INBOX { + bail!("Cannot delete INBOX"); + } + + let (mut list, ct) = self.load_mailbox_list().await?; + if list.has_mailbox(name) { + //@TODO: actually delete mailbox contents + list.set_mailbox(name, None); + self.save_mailbox_list(&list, ct).await?; + Ok(()) + } else { + bail!("Mailbox {} does not exist", name); + } + } + + /// Renames a mailbox in the user's IMAP namespace. + pub async fn rename_mailbox(&self, old_name: &str, new_name: &str) -> Result<()> { + let (mut list, ct) = self.load_mailbox_list().await?; + + if old_name.ends_with(MAILBOX_HIERARCHY_DELIMITER) { + bail!("Invalid mailbox name: {}", old_name); + } + if new_name.ends_with(MAILBOX_HIERARCHY_DELIMITER) { + bail!("Invalid mailbox name: {}", new_name); + } + + if old_name == INBOX { + list.rename_mailbox(old_name, new_name)?; + if !self.ensure_inbox_exists(&mut list, &ct).await? 
{ + self.save_mailbox_list(&list, ct).await?; + } + } else { + let names = list.existing_mailbox_names(); + + let old_name_w_delim = format!("{}{}", old_name, MAILBOX_HIERARCHY_DELIMITER); + let new_name_w_delim = format!("{}{}", new_name, MAILBOX_HIERARCHY_DELIMITER); + + if names + .iter() + .any(|x| x == new_name || x.starts_with(&new_name_w_delim)) + { + bail!("Mailbox {} already exists", new_name); + } + + for name in names.iter() { + if name == old_name { + list.rename_mailbox(name, new_name)?; + } else if let Some(tail) = name.strip_prefix(&old_name_w_delim) { + let nnew = format!("{}{}", new_name_w_delim, tail); + list.rename_mailbox(name, &nnew)?; + } + } + + self.save_mailbox_list(&list, ct).await?; + } + Ok(()) + } + + // ---- Internal user & mailbox management ---- + + async fn open(username: String, creds: Credentials) -> Result> { + let storage = creds.storage.build().await?; + + let (tx_inbox_id, rx_inbox_id) = watch::channel(None); + + let user = Arc::new(Self { + username, + creds: creds.clone(), + storage, + tx_inbox_id, + mailboxes: std::sync::Mutex::new(HashMap::new()), + }); + + // Ensure INBOX exists (done inside load_mailbox_list) + user.load_mailbox_list().await?; + + tokio::spawn(incoming_mail_watch_process( + Arc::downgrade(&user), + user.creds.clone(), + rx_inbox_id, + )); + + Ok(user) + } + + pub(super) async fn open_mailbox_by_id( + &self, + id: UniqueIdent, + min_uidvalidity: ImapUidvalidity, + ) -> Result> { + { + let cache = self.mailboxes.lock().unwrap(); + if let Some(mb) = cache.get(&id).and_then(Weak::upgrade) { + return Ok(mb); + } + } + + let mb = Arc::new(Mailbox::open(&self.creds, id, min_uidvalidity).await?); + + let mut cache = self.mailboxes.lock().unwrap(); + if let Some(concurrent_mb) = cache.get(&id).and_then(Weak::upgrade) { + drop(mb); // we worked for nothing but at least we didn't starve someone else + Ok(concurrent_mb) + } else { + cache.insert(id, Arc::downgrade(&mb)); + Ok(mb) + } + } + + // ---- Mailbox list management ---- + + async fn load_mailbox_list(&self) -> Result<(MailboxList, Option)> { + let row_ref = storage::RowRef::new(MAILBOX_LIST_PK, MAILBOX_LIST_SK); + let (mut list, row) = match self + .storage + .row_fetch(&storage::Selector::Single(&row_ref)) + .await + { + Err(storage::StorageError::NotFound) => (MailboxList::new(), None), + Err(e) => return Err(e.into()), + Ok(rv) => { + let mut list = MailboxList::new(); + let (row_ref, row_vals) = match rv.into_iter().next() { + Some(row_val) => (row_val.row_ref, row_val.value), + None => (row_ref, vec![]), + }; + + for v in row_vals { + if let storage::Alternative::Value(vbytes) = v { + let list2 = + open_deserialize::(&vbytes, &self.creds.keys.master)?; + list.merge(list2); + } + } + (list, Some(row_ref)) + } + }; + + let is_default_mbx_missing = [DRAFTS, ARCHIVE, SENT, TRASH] + .iter() + .map(|mbx| list.create_mailbox(mbx)) + .fold(false, |acc, r| { + acc || matches!(r, CreatedMailbox::Created(..)) + }); + let is_inbox_missing = self.ensure_inbox_exists(&mut list, &row).await?; + if is_default_mbx_missing && !is_inbox_missing { + // It's the only case where we created some mailboxes and not saved them + // So we save them! + self.save_mailbox_list(&list, row.clone()).await?; + } + + Ok((list, row)) + } + + async fn ensure_inbox_exists( + &self, + list: &mut MailboxList, + ct: &Option, + ) -> Result { + // If INBOX doesn't exist, create a new mailbox with that name + // and save new mailbox list. 
+ // Also, ensure that the mpsc::watch that keeps track of the + // inbox id is up-to-date. + let saved; + let (inbox_id, inbox_uidvalidity) = match list.create_mailbox(INBOX) { + CreatedMailbox::Created(i, v) => { + self.save_mailbox_list(list, ct.clone()).await?; + saved = true; + (i, v) + } + CreatedMailbox::Existed(i, v) => { + saved = false; + (i, v) + } + }; + let inbox_id = Some((inbox_id, inbox_uidvalidity)); + if *self.tx_inbox_id.borrow() != inbox_id { + self.tx_inbox_id.send(inbox_id).unwrap(); + } + + Ok(saved) + } + + async fn save_mailbox_list( + &self, + list: &MailboxList, + ct: Option, + ) -> Result<()> { + let list_blob = seal_serialize(list, &self.creds.keys.master)?; + let rref = ct.unwrap_or(storage::RowRef::new(MAILBOX_LIST_PK, MAILBOX_LIST_SK)); + let row_val = storage::RowVal::new(rref, list_blob); + self.storage.row_insert(vec![row_val]).await?; + Ok(()) + } +} + +// ---- User cache ---- + +lazy_static! { + static ref USER_CACHE: std::sync::Mutex>> = + std::sync::Mutex::new(HashMap::new()); +} diff --git a/aero-dav/.gitignore b/aero-dav/.gitignore new file mode 100644 index 0000000..2f7896d --- /dev/null +++ b/aero-dav/.gitignore @@ -0,0 +1 @@ +target/ diff --git a/aero-dav/Cargo.toml b/aero-dav/Cargo.toml new file mode 100644 index 0000000..92929b1 --- /dev/null +++ b/aero-dav/Cargo.toml @@ -0,0 +1,14 @@ +[package] +name = "aero-dav" +version = "0.3.0" +authors = ["Alex Auvolat ", "Quentin Dufour "] +edition = "2021" +license = "EUPL-1.2" +description = "A partial and standalone implementation of the WebDAV protocol and its extensions (eg. CalDAV or CardDAV)" + +[dependencies] +quick-xml.workspace = true +http.workspace = true +chrono.workspace = true +tokio.workspace = true +futures.workspace = true diff --git a/aero-dav/fuzz/.gitignore b/aero-dav/fuzz/.gitignore new file mode 100644 index 0000000..1a45eee --- /dev/null +++ b/aero-dav/fuzz/.gitignore @@ -0,0 +1,4 @@ +target +corpus +artifacts +coverage diff --git a/aero-dav/fuzz/Cargo.lock b/aero-dav/fuzz/Cargo.lock new file mode 100644 index 0000000..08fa951 --- /dev/null +++ b/aero-dav/fuzz/Cargo.lock @@ -0,0 +1,4249 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 3 + +[[package]] +name = "abnf-core" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec182d1f071b906a9f59269c89af101515a5cbe58f723eb6717e7fe7445c0dea" +dependencies = [ + "nom 7.1.3", +] + +[[package]] +name = "addr2line" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" + +[[package]] +name = "aerogramme" +version = "0.3.0" +dependencies = [ + "anyhow", + "argon2", + "async-trait", + "aws-config", + "aws-sdk-s3", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "backtrace", + "base64 0.21.7", + "chrono", + "clap", + "console-subscriber", + "duplexify", + "eml-codec", + "futures", + "hex", + "http 1.1.0", + "http-body-util", + "hyper 1.2.0", + "hyper-rustls 0.26.0", + "hyper-util", + "im", + "imap-codec", + "imap-flow", + "itertools 0.10.5", + "k2v-client", + "lazy_static", + "ldap3", + "log", + "nix", + "nom 7.1.3", + "quick-xml", + "rand", + "rmp-serde", + "rpassword", + "rustls 0.22.2", + "rustls-pemfile 2.1.1", + "serde", + "smtp-message", + "smtp-server", + "sodiumoxide", + "thiserror", + "tokio", + "tokio-rustls 0.25.0", + "tokio-util", + "toml", + "tracing", + "tracing-subscriber", + "zstd", +] + +[[package]] +name = "aerogramme-fuzz" +version = "0.0.0" +dependencies = [ + "aerogramme", + "libfuzzer-sys", + "quick-xml", + "tokio", +] + +[[package]] +name = "aho-corasick" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0" +dependencies = [ + "memchr", +] + +[[package]] +name = "android-tzdata" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "anyhow" +version = "1.0.80" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ad32ce52e4161730f7098c077cd2ed6229b5804ccf99e5366be1ab72a98b4e1" + +[[package]] +name = "arbitrary" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d5a26814d8dcb93b0e5a0ff3c6d80a8843bafb21b39e8e18a6f05471870e110" + +[[package]] +name = "argon2" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c3610892ee6e0cbce8ae2700349fcf8f98adb0dbfbee85aec3c9179d29cc072" +dependencies = [ + "base64ct", + "blake2", + "cpufeatures", + "password-hash", +] + +[[package]] +name = "arrayvec" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b" + +[[package]] +name = "asn1-rs" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30ff05a702273012438132f449575dbc804e27b2f3cbe3069aa237d26c98fa33" +dependencies = [ + "asn1-rs-derive", + "asn1-rs-impl", + "displaydoc", + "nom 7.1.3", + "num-traits", + "rusticata-macros", + 
"thiserror", + "time", +] + +[[package]] +name = "asn1-rs-derive" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db8b7511298d5b7784b40b092d9e9dcd3a627a5707e4b5e507931ab0d44eeebf" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", + "synstructure", +] + +[[package]] +name = "asn1-rs-impl" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2777730b2039ac0f95f093556e61b6d26cebed5393ca6f152717777cec3a42ed" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "async-channel" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35" +dependencies = [ + "concurrent-queue", + "event-listener 2.5.3", + "futures-core", +] + +[[package]] +name = "async-channel" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f28243a43d821d11341ab73c80bed182dc015c514b951616cf79bd4af39af0c3" +dependencies = [ + "concurrent-queue", + "event-listener 5.2.0", + "event-listener-strategy 0.5.0", + "futures-core", + "pin-project-lite 0.2.13", +] + +[[package]] +name = "async-executor" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17ae5ebefcc48e7452b4987947920dac9450be1110cadf34d1b8c116bdbaf97c" +dependencies = [ + "async-lock 3.3.0", + "async-task", + "concurrent-queue", + "fastrand 2.0.1", + "futures-lite 2.2.0", + "slab", +] + +[[package]] +name = "async-fs" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "279cf904654eeebfa37ac9bb1598880884924aab82e290aa65c9e77a0e142e06" +dependencies = [ + "async-lock 2.8.0", + "autocfg", + "blocking", + "futures-lite 1.13.0", +] + +[[package]] +name = "async-global-executor" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05b1b633a2115cd122d73b955eadd9916c18c8f510ec9cd1686404c60ad1c29c" +dependencies = [ + "async-channel 2.2.0", + "async-executor", + "async-io 2.3.1", + "async-lock 3.3.0", + "blocking", + "futures-lite 2.2.0", + "once_cell", +] + +[[package]] +name = "async-io" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fc5b45d93ef0529756f812ca52e44c221b35341892d3dcc34132ac02f3dd2af" +dependencies = [ + "async-lock 2.8.0", + "autocfg", + "cfg-if", + "concurrent-queue", + "futures-lite 1.13.0", + "log", + "parking", + "polling 2.8.0", + "rustix 0.37.27", + "slab", + "socket2 0.4.10", + "waker-fn", +] + +[[package]] +name = "async-io" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f97ab0c5b00a7cdbe5a371b9a782ee7be1316095885c8a4ea1daf490eb0ef65" +dependencies = [ + "async-lock 3.3.0", + "cfg-if", + "concurrent-queue", + "futures-io", + "futures-lite 2.2.0", + "parking", + "polling 3.5.0", + "rustix 0.38.31", + "slab", + "tracing", + "windows-sys 0.52.0", +] + +[[package]] +name = "async-lock" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "287272293e9d8c41773cec55e365490fe034813a2f172f502d6ddcf75b2f582b" +dependencies = [ + "event-listener 2.5.3", +] + +[[package]] +name = "async-lock" +version = "3.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d034b430882f8381900d3fe6f0aaa3ad94f2cb4ac519b429692a1bc2dda4ae7b" +dependencies = [ + "event-listener 4.0.3", 
+ "event-listener-strategy 0.4.0", + "pin-project-lite 0.2.13", +] + +[[package]] +name = "async-net" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0434b1ed18ce1cf5769b8ac540e33f01fa9471058b5e89da9e06f3c882a8c12f" +dependencies = [ + "async-io 1.13.0", + "blocking", + "futures-lite 1.13.0", +] + +[[package]] +name = "async-process" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea6438ba0a08d81529c69b36700fa2f95837bfe3e776ab39cde9c14d9149da88" +dependencies = [ + "async-io 1.13.0", + "async-lock 2.8.0", + "async-signal", + "blocking", + "cfg-if", + "event-listener 3.1.0", + "futures-lite 1.13.0", + "rustix 0.38.31", + "windows-sys 0.48.0", +] + +[[package]] +name = "async-signal" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e47d90f65a225c4527103a8d747001fc56e375203592b25ad103e1ca13124c5" +dependencies = [ + "async-io 2.3.1", + "async-lock 2.8.0", + "atomic-waker", + "cfg-if", + "futures-core", + "futures-io", + "rustix 0.38.31", + "signal-hook-registry", + "slab", + "windows-sys 0.48.0", +] + +[[package]] +name = "async-std" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62565bb4402e926b29953c785397c6dc0391b7b446e45008b0049eb43cec6f5d" +dependencies = [ + "async-channel 1.9.0", + "async-global-executor", + "async-io 1.13.0", + "async-lock 2.8.0", + "crossbeam-utils", + "futures-channel", + "futures-core", + "futures-io", + "futures-lite 1.13.0", + "gloo-timers", + "kv-log-macro", + "log", + "memchr", + "once_cell", + "pin-project-lite 0.2.13", + "pin-utils", + "slab", + "wasm-bindgen-futures", +] + +[[package]] +name = "async-stream" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd56dd203fef61ac097dd65721a419ddccb106b2d2b70ba60a6b529f03961a51" +dependencies = [ + "async-stream-impl", + "futures-core", + "pin-project-lite 0.2.13", +] + +[[package]] +name = "async-stream-impl" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "async-task" +version = "4.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbb36e985947064623dbd357f727af08ffd077f93d696782f3c56365fa2e2799" + +[[package]] +name = "async-trait" +version = "0.1.77" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c980ee35e870bd1a4d2c8294d4c04d0499e67bca1e4b5cefcc693c2fa00caea9" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "atomic-waker" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" + +[[package]] +name = "atty" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" +dependencies = [ + "hermit-abi 0.1.19", + "libc", + "winapi", +] + +[[package]] +name = "auto_enums" +version = "0.7.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe0dfe45d75158751e195799f47ea02e81f570aa24bc5ef999cdd9e888c4b5c3" +dependencies = [ + "auto_enums_core", + "auto_enums_derive", +] + +[[package]] +name = "auto_enums_core" +version = "0.7.12" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da47c46001293a2c4b744d731958be22cff408a2ab76e2279328f9713b1267b4" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "auto_enums_derive" +version = "0.7.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41aed1da83ecdc799503b7cb94da1b45a34d72b49caf40a61d9cf5b88ec07cfd" +dependencies = [ + "autocfg", + "derive_utils", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "autocfg" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" + +[[package]] +name = "aws-config" +version = "1.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b96342ea8948ab9bef3e6234ea97fc32e2d8a88d8fb6a084e52267317f94b6b" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-sdk-sso", + "aws-sdk-ssooidc", + "aws-sdk-sts", + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-types", + "bytes", + "fastrand 2.0.1", + "hex", + "http 0.2.12", + "hyper 0.14.28", + "ring 0.17.8", + "time", + "tokio", + "tracing", + "zeroize", +] + +[[package]] +name = "aws-credential-types" +version = "1.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "273fa47dafc9ef14c2c074ddddbea4561ff01b7f68d5091c0e9737ced605c01d" +dependencies = [ + "aws-smithy-async", + "aws-smithy-runtime-api", + "aws-smithy-types", + "zeroize", +] + +[[package]] +name = "aws-runtime" +version = "1.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e38bab716c8bf07da24be07ecc02e0f5656ce8f30a891322ecdcb202f943b85" +dependencies = [ + "aws-credential-types", + "aws-sigv4", + "aws-smithy-async", + "aws-smithy-eventstream", + "aws-smithy-http", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-types", + "bytes", + "fastrand 2.0.1", + "http 0.2.12", + "http-body 0.4.6", + "percent-encoding", + "pin-project-lite 0.2.13", + "tracing", + "uuid", +] + +[[package]] +name = "aws-sdk-config" +version = "1.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07979fd68679736ba306d6ea2a4dc2fd835ac4d454942c5d8920ef83ed2f979f" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-types", + "bytes", + "http 0.2.12", + "once_cell", + "regex-lite", + "tracing", +] + +[[package]] +name = "aws-sdk-s3" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93d35d39379445970fc3e4ddf7559fff2c32935ce0b279f9cb27080d6b7c6d94" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-sigv4", + "aws-smithy-async", + "aws-smithy-checksums", + "aws-smithy-eventstream", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-smithy-xml", + "aws-types", + "bytes", + "http 0.2.12", + "http-body 0.4.6", + "once_cell", + "percent-encoding", + "regex-lite", + "tracing", + "url", +] + +[[package]] +name = "aws-sdk-sso" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d84bd3925a17c9adbf6ec65d52104a44a09629d8f70290542beeee69a95aee7f" +dependencies = [ + "aws-credential-types", 
+ "aws-runtime", + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-types", + "bytes", + "http 0.2.12", + "once_cell", + "regex-lite", + "tracing", +] + +[[package]] +name = "aws-sdk-ssooidc" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c2dae39e997f58bc4d6292e6244b26ba630c01ab671b6f9f44309de3eb80ab8" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-types", + "bytes", + "http 0.2.12", + "once_cell", + "regex-lite", + "tracing", +] + +[[package]] +name = "aws-sdk-sts" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17fd9a53869fee17cea77e352084e1aa71e2c5e323d974c13a9c2bcfd9544c7f" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-query", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-smithy-xml", + "aws-types", + "http 0.2.12", + "once_cell", + "regex-lite", + "tracing", +] + +[[package]] +name = "aws-sigv4" +version = "1.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ada00a4645d7d89f296fe0ddbc3fe3554f03035937c849a05d37ddffc1f29a1" +dependencies = [ + "aws-credential-types", + "aws-smithy-eventstream", + "aws-smithy-http", + "aws-smithy-runtime-api", + "aws-smithy-types", + "bytes", + "crypto-bigint 0.5.5", + "form_urlencoded", + "hex", + "hmac", + "http 0.2.12", + "http 1.1.0", + "once_cell", + "p256", + "percent-encoding", + "ring 0.17.8", + "sha2", + "subtle", + "time", + "tracing", + "zeroize", +] + +[[package]] +name = "aws-smithy-async" +version = "1.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcf7f09a27286d84315dfb9346208abb3b0973a692454ae6d0bc8d803fcce3b4" +dependencies = [ + "futures-util", + "pin-project-lite 0.2.13", + "tokio", +] + +[[package]] +name = "aws-smithy-checksums" +version = "0.60.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fd4b66f2a8e7c84d7e97bda2666273d41d2a2e25302605bcf906b7b2661ae5e" +dependencies = [ + "aws-smithy-http", + "aws-smithy-types", + "bytes", + "crc32c", + "crc32fast", + "hex", + "http 0.2.12", + "http-body 0.4.6", + "md-5", + "pin-project-lite 0.2.13", + "sha1", + "sha2", + "tracing", +] + +[[package]] +name = "aws-smithy-eventstream" +version = "0.60.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6363078f927f612b970edf9d1903ef5cef9a64d1e8423525ebb1f0a1633c858" +dependencies = [ + "aws-smithy-types", + "bytes", + "crc32fast", +] + +[[package]] +name = "aws-smithy-http" +version = "0.60.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6ca214a6a26f1b7ebd63aa8d4f5e2194095643023f9608edf99a58247b9d80d" +dependencies = [ + "aws-smithy-eventstream", + "aws-smithy-runtime-api", + "aws-smithy-types", + "bytes", + "bytes-utils", + "futures-core", + "http 0.2.12", + "http-body 0.4.6", + "once_cell", + "percent-encoding", + "pin-project-lite 0.2.13", + "pin-utils", + "tracing", +] + +[[package]] +name = "aws-smithy-json" +version = "0.60.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1af80ecf3057fb25fe38d1687e94c4601a7817c6a1e87c1b0635f7ecb644ace5" +dependencies = [ + 
"aws-smithy-types", +] + +[[package]] +name = "aws-smithy-query" +version = "0.60.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb27084f72ea5fc20033efe180618677ff4a2f474b53d84695cfe310a6526cbc" +dependencies = [ + "aws-smithy-types", + "urlencoding", +] + +[[package]] +name = "aws-smithy-runtime" +version = "1.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbb5fca54a532a36ff927fbd7407a7c8eb9c3b4faf72792ba2965ea2cad8ed55" +dependencies = [ + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-runtime-api", + "aws-smithy-types", + "bytes", + "fastrand 2.0.1", + "h2 0.3.24", + "http 0.2.12", + "http-body 0.4.6", + "hyper 0.14.28", + "hyper-rustls 0.24.2", + "once_cell", + "pin-project-lite 0.2.13", + "pin-utils", + "rustls 0.21.10", + "tokio", + "tracing", +] + +[[package]] +name = "aws-smithy-runtime-api" +version = "1.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22389cb6f7cac64f266fb9f137745a9349ced7b47e0d2ba503e9e40ede4f7060" +dependencies = [ + "aws-smithy-async", + "aws-smithy-types", + "bytes", + "http 0.2.12", + "http 1.1.0", + "pin-project-lite 0.2.13", + "tokio", + "tracing", + "zeroize", +] + +[[package]] +name = "aws-smithy-types" +version = "1.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f081da5481210523d44ffd83d9f0740320050054006c719eae0232d411f024d3" +dependencies = [ + "base64-simd", + "bytes", + "bytes-utils", + "futures-core", + "http 0.2.12", + "http-body 0.4.6", + "itoa", + "num-integer", + "pin-project-lite 0.2.13", + "pin-utils", + "ryu", + "serde", + "time", + "tokio", + "tokio-util", +] + +[[package]] +name = "aws-smithy-xml" +version = "0.60.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fccd8f595d0ca839f9f2548e66b99514a85f92feb4c01cf2868d93eb4888a42" +dependencies = [ + "xmlparser", +] + +[[package]] +name = "aws-types" +version = "1.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d07c63521aa1ea9a9f92a701f1a08ce3fd20b46c6efc0d5c8947c1fd879e3df1" +dependencies = [ + "aws-credential-types", + "aws-smithy-async", + "aws-smithy-runtime-api", + "aws-smithy-types", + "http 0.2.12", + "rustc_version", + "tracing", +] + +[[package]] +name = "axum" +version = "0.6.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b829e4e32b91e643de6eafe82b1d90675f5874230191a4ffbc1b336dec4d6bf" +dependencies = [ + "async-trait", + "axum-core", + "bitflags 1.3.2", + "bytes", + "futures-util", + "http 0.2.12", + "http-body 0.4.6", + "hyper 0.14.28", + "itoa", + "matchit", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite 0.2.13", + "rustversion", + "serde", + "sync_wrapper", + "tower", + "tower-layer", + "tower-service", +] + +[[package]] +name = "axum-core" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c" +dependencies = [ + "async-trait", + "bytes", + "futures-util", + "http 0.2.12", + "http-body 0.4.6", + "mime", + "rustversion", + "tower-layer", + "tower-service", +] + +[[package]] +name = "backtrace" +version = "0.3.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2089b7e3f35b9dd2d0ed921ead4f6d318c27680d4a5bd167b3ee120edb105837" +dependencies = [ + "addr2line", + "cc", + "cfg-if", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", +] + +[[package]] +name = "base16ct" 
+version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "349a06037c7bf932dd7e7d1f653678b2038b9ad46a74102f1fc7bd7872678cce" + +[[package]] +name = "base64" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" + +[[package]] +name = "base64" +version = "0.21.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" + +[[package]] +name = "base64-simd" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "339abbe78e73178762e23bea9dfd08e697eb3f3301cd4be981c0f78ba5859195" +dependencies = [ + "outref", + "vsimd", +] + +[[package]] +name = "base64ct" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed570934406eb16438a4e976b1b4500774099c13b8cb96eec99f620f05090ddf" + +[[package]] +name = "bitmaps" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "031043d04099746d8db04daf1fa424b2bc8bd69d92b25962dcde24da39ab64a2" +dependencies = [ + "typenum", +] + +[[package]] +name = "bitvec" +version = "0.19.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55f93d0ef3363c364d5976646a38f04cf67cfe1d4c8d160cdea02cab2c116b33" +dependencies = [ + "funty", + "radium", + "tap", + "wyz", +] + +[[package]] +name = "blake2" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe" +dependencies = [ + "digest", +] + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "blocking" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a37913e8dc4ddcc604f0c6d3bf2887c995153af3611de9e23c352b44c1b9118" +dependencies = [ + "async-channel 2.2.0", + "async-lock 3.3.0", + "async-task", + "fastrand 2.0.1", + "futures-io", + "futures-lite 2.2.0", + "piper", + "tracing", +] + +[[package]] +name = "bounded-static" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2325bd33fa7e3018e7e37f5b0591ba009124963b5a3f8b7cae6d0a8c1028ed4" +dependencies = [ + "bounded-static-derive", +] + +[[package]] +name = "bounded-static-derive" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f10dd247355bf631d98d2753d87ae62c84c8dcb996ad9b24a4168e0aec29bd6b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "bumpalo" +version = "3.15.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ea184aa71bb362a1157c896979544cc23974e08fd265f29ea96b59f0b4a555b" + +[[package]] +name = "byteorder" +version = "1.5.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" + +[[package]] +name = "bytes" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223" + +[[package]] +name = "bytes-utils" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7dafe3a8757b027e2be6e4e5601ed563c55989fcf1546e933c66c8eb3a058d35" +dependencies = [ + "bytes", + "either", +] + +[[package]] +name = "cc" +version = "1.0.90" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8cd6604a82acf3039f1144f54b8eb34e91ffba622051189e71b781822d5ee1f5" +dependencies = [ + "jobserver", + "libc", +] + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "chrono" +version = "0.4.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8eaf5903dcbc0a39312feb77df2ff4c76387d591b9fc7b04a238dcf8bb62639a" +dependencies = [ + "android-tzdata", + "iana-time-zone", + "js-sys", + "num-traits", + "wasm-bindgen", + "windows-targets 0.52.4", +] + +[[package]] +name = "clap" +version = "3.2.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ea181bf566f71cb9a5d17a59e1871af638180a18fb0035c92ae62b705207123" +dependencies = [ + "atty", + "bitflags 1.3.2", + "clap_derive", + "clap_lex", + "indexmap 1.9.3", + "once_cell", + "strsim", + "termcolor", + "textwrap", +] + +[[package]] +name = "clap_derive" +version = "3.2.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae6371b8bdc8b7d3959e9cf7b22d4435ef3e79e138688421ec654acf8c81b008" +dependencies = [ + "heck", + "proc-macro-error", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "clap_lex" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2850f2f5a82cbf437dd5af4d49848fbdfc27c157c3d010345776f952765261c5" +dependencies = [ + "os_str_bytes", +] + +[[package]] +name = "concurrent-queue" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d16048cd947b08fa32c24458a22f5dc5e835264f689f4f5653210c69fd107363" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "console-api" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd326812b3fd01da5bb1af7d340d0d555fd3d4b641e7f1dfcf5962a902952787" +dependencies = [ + "futures-core", + "prost", + "prost-types", + "tonic", + "tracing-core", +] + +[[package]] +name = "console-subscriber" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7481d4c57092cd1c19dd541b92bdce883de840df30aa5d03fd48a3935c01842e" +dependencies = [ + "console-api", + "crossbeam-channel", + "crossbeam-utils", + "futures-task", + "hdrhistogram", + "humantime", + "prost-types", + "serde", + "serde_json", + "thread_local", + "tokio", + "tokio-stream", + "tonic", + "tracing", + "tracing-core", + "tracing-subscriber", +] + +[[package]] +name = "const-oid" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" + +[[package]] +name = "core-foundation" +version = "0.9.4" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" + +[[package]] +name = "cpufeatures" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504" +dependencies = [ + "libc", +] + +[[package]] +name = "crc32c" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89254598aa9b9fa608de44b3ae54c810f0f06d755e24c50177f1f8f31ff50ce2" +dependencies = [ + "rustc_version", +] + +[[package]] +name = "crc32fast" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3855a8a784b474f333699ef2bbca9db2c4a1f6d9088a90a2d25b1eb53111eaa" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "crossbeam-channel" +version = "0.5.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab3db02a9c5b5121e1e42fbdb1aeb65f5e02624cc58c43f2884c6ccac0b82f95" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345" + +[[package]] +name = "crypto-bigint" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef2b4b23cddf68b89b8f8069890e8c270d54e2d5fe1b143820234805e4cb17ef" +dependencies = [ + "generic-array", + "rand_core", + "subtle", + "zeroize", +] + +[[package]] +name = "crypto-bigint" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" +dependencies = [ + "rand_core", + "subtle", +] + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "data-encoding" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e962a19be5cfc3f3bf6dd8f61eb50107f356ad6270fbb3ed41476571db78be5" + +[[package]] +name = "der" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1a467a65c5e759bce6e65eaf91cc29f466cdc57cb65777bd646872a8a1fd4de" +dependencies = [ + "const-oid", + "zeroize", +] + +[[package]] +name = "der-parser" +version = "7.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe398ac75057914d7d07307bf67dc7f3f574a26783b4fc7805a20ffa9f506e82" +dependencies = [ + "asn1-rs", + "displaydoc", + "nom 7.1.3", + "num-bigint", + "num-traits", + "rusticata-macros", +] + +[[package]] +name = "deranged" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" +dependencies = [ + "powerfmt", +] + +[[package]] +name = "derive_utils" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "532b4c15dccee12c7044f1fcad956e98410860b22231e44a3b827464797ca7bf" 
+dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer", + "crypto-common", + "subtle", +] + +[[package]] +name = "displaydoc" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "487585f4d0c6655fe74905e2504d8ad6908e4db67f744eb140876906c2f3175d" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "duplexify" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1cc346cd6db38ceab2d33f59b26024c3ddb8e75f047c6cafbcbc016ea8065d5" +dependencies = [ + "async-std", + "pin-project-lite 0.1.12", +] + +[[package]] +name = "ecdsa" +version = "0.14.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "413301934810f597c1d19ca71c8710e99a3f1ba28a0d2ebc01551a2daeea3c5c" +dependencies = [ + "der", + "elliptic-curve", + "rfc6979", + "signature", +] + +[[package]] +name = "ed25519" +version = "1.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91cff35c70bba8a626e3185d8cd48cc11b5437e1a5bcd15b9b5fa3c64b6dfee7" +dependencies = [ + "signature", +] + +[[package]] +name = "either" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11157ac094ffbdde99aa67b23417ebdd801842852b500e395a45a9c0aac03e4a" + +[[package]] +name = "elliptic-curve" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7bb888ab5300a19b8e5bceef25ac745ad065f3c9f7efc6de1b91958110891d3" +dependencies = [ + "base16ct", + "crypto-bigint 0.4.9", + "der", + "digest", + "ff", + "generic-array", + "group", + "pkcs8", + "rand_core", + "sec1", + "subtle", + "zeroize", +] + +[[package]] +name = "eml-codec" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4499124d87abce26a57ef96ece800fa8babc38fbedd81c607c340ae83d46d2e" +dependencies = [ + "base64 0.21.7", + "chrono", + "encoding_rs", + "nom 7.1.3", +] + +[[package]] +name = "encoding_rs" +version = "0.8.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7268b386296a025e474d5140678f75d6de9493ae55a5d709eeb9dd08149945e1" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "equivalent" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" + +[[package]] +name = "errno" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "event-listener" +version = "2.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" + +[[package]] +name = "event-listener" +version = "3.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d93877bcde0eb80ca09131a08d23f0a5c18a620b01db137dba666d18cd9b30c2" +dependencies = [ + "concurrent-queue", + "parking", + "pin-project-lite 0.2.13", +] + +[[package]] +name = "event-listener" +version = "4.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"67b215c49b2b248c855fb73579eb1f4f26c38ffdc12973e20e07b91d78d5646e" +dependencies = [ + "concurrent-queue", + "parking", + "pin-project-lite 0.2.13", +] + +[[package]] +name = "event-listener" +version = "5.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b5fb89194fa3cad959b833185b3063ba881dbfc7030680b314250779fb4cc91" +dependencies = [ + "concurrent-queue", + "parking", + "pin-project-lite 0.2.13", +] + +[[package]] +name = "event-listener-strategy" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "958e4d70b6d5e81971bebec42271ec641e7ff4e170a6fa605f2b8a8b65cb97d3" +dependencies = [ + "event-listener 4.0.3", + "pin-project-lite 0.2.13", +] + +[[package]] +name = "event-listener-strategy" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "feedafcaa9b749175d5ac357452a9d41ea2911da598fde46ce1fe02c37751291" +dependencies = [ + "event-listener 5.2.0", + "pin-project-lite 0.2.13", +] + +[[package]] +name = "fastrand" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be" +dependencies = [ + "instant", +] + +[[package]] +name = "fastrand" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" + +[[package]] +name = "ff" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d013fc25338cc558c5c2cfbad646908fb23591e2404481826742b651c9af7160" +dependencies = [ + "rand_core", + "subtle", +] + +[[package]] +name = "flate2" +version = "1.0.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46303f565772937ffe1d394a4fac6f411c6013172fadde9dcdb1e147a086940e" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "form_urlencoded" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "funty" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fed34cd105917e91daa4da6b3728c47b068749d6a62c59811f06ed2ac71d9da7" + +[[package]] +name = "futures" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" + +[[package]] +name = "futures-executor" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" + +[[package]] +name = "futures-lite" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49a9d51ce47660b1e808d3c990b4709f2f415d928835a17dfd16991515c46bce" +dependencies = [ + "fastrand 1.9.0", + "futures-core", + "futures-io", + "memchr", + "parking", + "pin-project-lite 0.2.13", + "waker-fn", +] + +[[package]] +name = "futures-lite" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "445ba825b27408685aaecefd65178908c36c6e96aaf6d8599419d46e624192ba" +dependencies = [ + "fastrand 2.0.1", + "futures-core", + "futures-io", + "parking", + "pin-project-lite 0.2.13", +] + +[[package]] +name = "futures-macro" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "futures-sink" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" + +[[package]] +name = "futures-task" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" + +[[package]] +name = "futures-util" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite 0.2.13", + "pin-utils", + "slab", +] + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "getrandom" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "190092ea657667030ac6a35e305e62fc4dd69fd98ac98631e5d3a2b1575a12b5" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "gimli" +version = "0.28.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" + +[[package]] +name = "gloo-timers" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b995a66bb87bebce9a0f4a95aed01daca4872c050bfcb21653361c03bc35e5c" +dependencies = [ + "futures-channel", + "futures-core", + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "group" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5dfbfb3a6cfbd390d5c9564ab283a0349b9b9fcd46a706c1eb10e0db70bfbac7" +dependencies = [ + "ff", + "rand_core", + "subtle", +] + +[[package]] +name = "h2" +version = "0.3.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb2c4422095b67ee78da96fbb51a4cc413b3b25883c7717ff7ca1ab31022c9c9" +dependencies = [ + "bytes", 
+ "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http 0.2.12", + "indexmap 2.2.5", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "h2" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31d030e59af851932b72ceebadf4a2b5986dba4c3b99dd2493f8273a0f151943" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http 1.1.0", + "indexmap 2.2.5", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + +[[package]] +name = "hashbrown" +version = "0.14.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" + +[[package]] +name = "hdrhistogram" +version = "7.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "765c9198f173dd59ce26ff9f95ef0aafd0a0fe01fb9d72841bc5066a4c06511d" +dependencies = [ + "base64 0.21.7", + "byteorder", + "flate2", + "nom 7.1.3", + "num-traits", +] + +[[package]] +name = "heck" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" + +[[package]] +name = "hermit-abi" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" +dependencies = [ + "libc", +] + +[[package]] +name = "hermit-abi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "hmac" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" +dependencies = [ + "digest", +] + +[[package]] +name = "http" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" +dependencies = [ + "bytes", + "http 0.2.12", + "pin-project-lite 0.2.13", +] + +[[package]] +name = "http-body" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643" +dependencies = [ + "bytes", + "http 1.1.0", +] + +[[package]] +name = "http-body-util" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41cb79eb393015dadd30fc252023adb0b2400a0caee0fa2a077e6e21a551e840" +dependencies = [ + "bytes", + "futures-util", + "http 1.1.0", 
+ "http-body 1.0.0", + "pin-project-lite 0.2.13", +] + +[[package]] +name = "httparse" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" + +[[package]] +name = "httpdate" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + +[[package]] +name = "humantime" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" + +[[package]] +name = "hyper" +version = "0.14.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf96e135eb83a2a8ddf766e426a841d8ddd7449d5f00d34ea02b41d2f19eef80" +dependencies = [ + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "h2 0.3.24", + "http 0.2.12", + "http-body 0.4.6", + "httparse", + "httpdate", + "itoa", + "pin-project-lite 0.2.13", + "socket2 0.5.6", + "tokio", + "tower-service", + "tracing", + "want", +] + +[[package]] +name = "hyper" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "186548d73ac615b32a73aafe38fb4f56c0d340e110e5a200bcadbaf2e199263a" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "h2 0.4.2", + "http 1.1.0", + "http-body 1.0.0", + "httparse", + "httpdate", + "itoa", + "pin-project-lite 0.2.13", + "smallvec", + "tokio", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.24.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" +dependencies = [ + "futures-util", + "http 0.2.12", + "hyper 0.14.28", + "log", + "rustls 0.21.10", + "rustls-native-certs 0.6.3", + "tokio", + "tokio-rustls 0.24.1", +] + +[[package]] +name = "hyper-rustls" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0bea761b46ae2b24eb4aef630d8d1c398157b6fc29e6350ecf090a0b70c952c" +dependencies = [ + "futures-util", + "http 1.1.0", + "hyper 1.2.0", + "hyper-util", + "log", + "rustls 0.22.2", + "rustls-native-certs 0.7.0", + "rustls-pki-types", + "tokio", + "tokio-rustls 0.25.0", + "tower-service", +] + +[[package]] +name = "hyper-timeout" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" +dependencies = [ + "hyper 0.14.28", + "pin-project-lite 0.2.13", + "tokio", + "tokio-io-timeout", +] + +[[package]] +name = "hyper-util" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca38ef113da30126bbff9cd1705f9273e15d45498615d138b0c20279ac7a76aa" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "http 1.1.0", + "http-body 1.0.0", + "hyper 1.2.0", + "pin-project-lite 0.2.13", + "socket2 0.5.6", + "tokio", + "tower", + "tower-service", + "tracing", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.60" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7ffbb5a1b541ea2561f8c41c087286cc091e21e556a4f09a8f6cbf17b69b141" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "wasm-bindgen", + "windows-core", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + +[[package]] +name = "idna" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "418a0a6fab821475f634efe3ccc45c013f742efe03d853e8d3355d5cb850ecf8" +dependencies = [ + "matches", + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "idna" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "im" +version = "15.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0acd33ff0285af998aaf9b57342af478078f53492322fafc47450e09397e0e9" +dependencies = [ + "bitmaps", + "rand_core", + "rand_xoshiro", + "sized-chunks", + "typenum", + "version_check", +] + +[[package]] +name = "imap-codec" +version = "2.0.0" +source = "git+https://github.com/superboum/imap-codec?branch=custom/aerogramme#d8a5afc03fb771232e94c73af6a05e79dc80bbed" +dependencies = [ + "abnf-core", + "base64 0.21.7", + "bounded-static", + "chrono", + "imap-types", + "log", + "nom 7.1.3", + "thiserror", +] + +[[package]] +name = "imap-flow" +version = "0.1.0" +source = "git+https://github.com/duesee/imap-flow.git?branch=main#dce759a8531f317e8d7311fb032b366db6698e38" +dependencies = [ + "bounded-static", + "bytes", + "imap-codec", + "imap-types", + "thiserror", + "tokio", + "tracing", +] + +[[package]] +name = "imap-types" +version = "2.0.0" +source = "git+https://github.com/superboum/imap-codec?branch=custom/aerogramme#d8a5afc03fb771232e94c73af6a05e79dc80bbed" +dependencies = [ + "base64 0.21.7", + "bounded-static", + "chrono", + "thiserror", +] + +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown 0.12.3", +] + +[[package]] +name = "indexmap" +version = "2.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b0b929d511467233429c45a44ac1dcaa21ba0f5ba11e4879e6ed28ddb4f9df4" +dependencies = [ + "equivalent", + "hashbrown 0.14.3", +] + +[[package]] +name = "instant" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "io-lifetimes" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" +dependencies = [ + "hermit-abi 0.3.9", + "libc", + "windows-sys 0.48.0", +] + +[[package]] +name = "itertools" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +dependencies = [ + "either", +] + +[[package]] +name = "itertools" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c" + +[[package]] +name = "jobserver" +version = "0.1.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab46a6e9526ddef3ae7f787c06f0f2600639ba80ea3eade3d8e670a2230f51d6" +dependencies = [ + "libc", +] + +[[package]] +name = "js-sys" +version = "0.3.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "k2v-client" +version = "0.0.4" +source = "git+https://git.deuxfleurs.fr/Deuxfleurs/garage.git?branch=k2v/shared_http_client#8b35a946d9f6b31b26b9783acbfab984316051f4" +dependencies = [ + "aws-sdk-config", + "aws-sigv4", + "base64 0.21.7", + "hex", + "http 1.1.0", + "http-body-util", + "hyper 1.2.0", + "hyper-rustls 0.26.0", + "hyper-util", + "log", + "percent-encoding", + "serde", + "serde_json", + "sha2", + "thiserror", + "tokio", +] + +[[package]] +name = "kv-log-macro" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f" +dependencies = [ + "log", +] + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "lber" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a99b520993b21a6faab32643cf4726573dc18ca4cf2d48cbeb24d248c86c930" +dependencies = [ + "byteorder", + "bytes", + "nom 2.2.1", +] + +[[package]] +name = "ldap3" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce38dafca0608c64cc0146fb782b06abb8d946dae7a3af23c89a95da24f6b84d" +dependencies = [ + "async-trait", + "bytes", + "futures", + "futures-util", + "lazy_static", + "lber", + "log", + "nom 2.2.1", + "percent-encoding", + "ring 0.16.20", + "rustls 0.20.9", + "rustls-native-certs 0.6.3", + "thiserror", + "tokio", + "tokio-rustls 0.23.4", + "tokio-stream", + "tokio-util", + "url", + "x509-parser", +] + +[[package]] +name = "lexical-core" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6607c62aa161d23d17a9072cc5da0be67cdfc89d3afb1e8d9c842bebc2525ffe" +dependencies = [ + "arrayvec", + "bitflags 1.3.2", + "cfg-if", + "ryu", + "static_assertions", +] + +[[package]] +name = "libc" +version = "0.2.153" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd" + +[[package]] +name = "libfuzzer-sys" +version = "0.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a96cfd5557eb82f2b83fed4955246c988d331975a002961b07c81584d107e7f7" +dependencies = [ + "arbitrary", + "cc", + "once_cell", +] + +[[package]] +name = "libsodium-sys" +version = "0.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b779387cd56adfbc02ea4a668e704f729be8d6a6abd2c27ca5ee537849a92fd" +dependencies = [ + "cc", + "libc", + "pkg-config", + "walkdir", +] + +[[package]] +name = "linux-raw-sys" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" + +[[package]] +name = "linux-raw-sys" +version = "0.4.13" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c" + +[[package]] +name = "log" +version = "0.4.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c" +dependencies = [ + "value-bag", +] + +[[package]] +name = "matchers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata 0.1.10", +] + +[[package]] +name = "matches" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" + +[[package]] +name = "matchit" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" + +[[package]] +name = "md-5" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" +dependencies = [ + "cfg-if", + "digest", +] + +[[package]] +name = "memchr" +version = "2.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149" + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "minimal-lexical" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" + +[[package]] +name = "miniz_oxide" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d811f3e15f28568be3407c8e7fdb6514c1cda3cb30683f15b6a1a1dc4ea14a7" +dependencies = [ + "adler", +] + +[[package]] +name = "mio" +version = "0.8.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" +dependencies = [ + "libc", + "wasi", + "windows-sys 0.48.0", +] + +[[package]] +name = "nix" +version = "0.27.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2eb04e9c688eff1c89d72b407f168cf79bb9e867a9d3323ed6c01519eb9cc053" +dependencies = [ + "bitflags 2.4.2", + "cfg-if", + "libc", +] + +[[package]] +name = "nom" +version = "2.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf51a729ecf40266a2368ad335a5fdde43471f545a967109cd62146ecf8b66ff" + +[[package]] +name = "nom" +version = "6.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7413f999671bd4745a7b624bd370a569fb6bc574b23c83a3c5ed2e453f3d5e2" +dependencies = [ + "bitvec", + "funty", + "lexical-core", + "memchr", + "version_check", +] + +[[package]] +name = "nom" +version = "7.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" +dependencies = [ + "memchr", + "minimal-lexical", +] + +[[package]] +name = "nu-ansi-term" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + 
"winapi", +] + +[[package]] +name = "num-bigint" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "608e7659b5c3d7cba262d894801b9ec9d00de989e8a82bd4bef91d08da45cdc0" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-conv" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" + +[[package]] +name = "num-integer" +version = "0.1.46" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" +dependencies = [ + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da0df0e5185db44f69b44f26786fe401b6c293d1907744beaa7fa62b2e5a517a" +dependencies = [ + "autocfg", +] + +[[package]] +name = "num_cpus" +version = "1.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" +dependencies = [ + "hermit-abi 0.3.9", + "libc", +] + +[[package]] +name = "object" +version = "0.32.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441" +dependencies = [ + "memchr", +] + +[[package]] +name = "oid-registry" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38e20717fa0541f39bd146692035c37bedfa532b3e5071b35761082407546b2a" +dependencies = [ + "asn1-rs", +] + +[[package]] +name = "once_cell" +version = "1.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" + +[[package]] +name = "openssl-probe" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" + +[[package]] +name = "os_str_bytes" +version = "6.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2355d85b9a3786f481747ced0e0ff2ba35213a1f9bd406ed906554d7af805a1" + +[[package]] +name = "outref" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4030760ffd992bef45b0ae3f10ce1aba99e33464c90d14dd7c039884963ddc7a" + +[[package]] +name = "overload" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + +[[package]] +name = "p256" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51f44edd08f51e2ade572f141051021c5af22677e42b7dd28a88155151c33594" +dependencies = [ + "ecdsa", + "elliptic-curve", + "sha2", +] + +[[package]] +name = "parking" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb813b8af86854136c6922af0598d719255ecb2179515e6e7730d468f05c9cae" + +[[package]] +name = "password-hash" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "346f04948ba92c43e8469c1ee6736c7563d71012b17d40745260fe106aac2166" +dependencies = [ + "base64ct", + "rand_core", + "subtle", +] + +[[package]] +name = "paste" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" + +[[package]] +name = "percent-encoding" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" + +[[package]] +name = "pin-project" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "pin-project-lite" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "257b64915a082f7811703966789728173279bdebb956b143dbcd23f6f970a777" + +[[package]] +name = "pin-project-lite" +version = "0.2.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "piper" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "668d31b1c4eba19242f2088b2bf3316b82ca31082a8335764db4e083db7485d4" +dependencies = [ + "atomic-waker", + "fastrand 2.0.1", + "futures-io", +] + +[[package]] +name = "pkcs8" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9eca2c590a5f85da82668fa685c09ce2888b9430e83299debf1f34b65fd4a4ba" +dependencies = [ + "der", + "spki", +] + +[[package]] +name = "pkg-config" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" + +[[package]] +name = "polling" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b2d323e8ca7996b3e23126511a523f7e62924d93ecd5ae73b333815b0eb3dce" +dependencies = [ + "autocfg", + "bitflags 1.3.2", + "cfg-if", + "concurrent-queue", + "libc", + "log", + "pin-project-lite 0.2.13", + "windows-sys 0.48.0", +] + +[[package]] +name = "polling" +version = "3.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24f040dee2588b4963afb4e420540439d126f73fdacf4a9c486a96d840bac3c9" +dependencies = [ + "cfg-if", + "concurrent-queue", + "pin-project-lite 0.2.13", + "rustix 0.38.31", + "tracing", + "windows-sys 0.52.0", +] + +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + +[[package]] +name = "ppv-lite86" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" + +[[package]] +name = "proc-macro-error" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" +dependencies = [ + "proc-macro-error-attr", + "proc-macro2", + "quote", + "syn 1.0.109", + "version_check", +] + +[[package]] +name = 
"proc-macro-error-attr" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" +dependencies = [ + "proc-macro2", + "quote", + "version_check", +] + +[[package]] +name = "proc-macro2" +version = "1.0.78" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2422ad645d89c99f8f3e6b88a9fdeca7fabeac836b1002371c4367c8f984aae" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "prost" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "146c289cda302b98a28d40c8b3b90498d6e526dd24ac2ecea73e4e491685b94a" +dependencies = [ + "bytes", + "prost-derive", +] + +[[package]] +name = "prost-derive" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "efb6c9a1dd1def8e2124d17e83a20af56f1570d6c2d2bd9e266ccb768df3840e" +dependencies = [ + "anyhow", + "itertools 0.11.0", + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "prost-types" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "193898f59edcf43c26227dcd4c8427f00d99d61e95dcde58dabd49fa291d470e" +dependencies = [ + "prost", +] + +[[package]] +name = "quick-xml" +version = "0.31.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1004a344b30a54e2ee58d66a71b32d2db2feb0a31f9a2d302bf0536f15de2a33" +dependencies = [ + "memchr", + "tokio", +] + +[[package]] +name = "quote" +version = "1.0.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "radium" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "941ba9d78d8e2f7ce474c015eea4d9c6d25b6a3327f9832ee29a4de27f91bbb8" + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom", +] + +[[package]] +name = "rand_xoshiro" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f97cdb2a36ed4183de61b2f824cc45c9f1037f28afe0a322e9fff4c108b5aaa" +dependencies = [ + "rand_core", +] + +[[package]] +name = "regex" +version = "1.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b62dbe01f0b06f9d8dc7d49e05a0785f153b00b2c227856282f671e0318c9b15" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata 0.4.6", + "regex-syntax 0.8.2", +] + +[[package]] +name = "regex-automata" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" +dependencies = [ + "regex-syntax 0.6.29", +] + +[[package]] +name = "regex-automata" +version = "0.4.6" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "86b83b8b9847f9bf95ef68afb0b8e6cdb80f498442f5179a29fad448fcc1eaea" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax 0.8.2", +] + +[[package]] +name = "regex-lite" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30b661b2f27137bdbc16f00eda72866a92bb28af1753ffbd56744fb6e2e9cd8e" + +[[package]] +name = "regex-syntax" +version = "0.6.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" + +[[package]] +name = "regex-syntax" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" + +[[package]] +name = "rfc6979" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7743f17af12fa0b03b803ba12cd6a8d9483a587e89c69445e3909655c0b9fabb" +dependencies = [ + "crypto-bigint 0.4.9", + "hmac", + "zeroize", +] + +[[package]] +name = "ring" +version = "0.16.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +dependencies = [ + "cc", + "libc", + "once_cell", + "spin 0.5.2", + "untrusted 0.7.1", + "web-sys", + "winapi", +] + +[[package]] +name = "ring" +version = "0.17.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" +dependencies = [ + "cc", + "cfg-if", + "getrandom", + "libc", + "spin 0.9.8", + "untrusted 0.9.0", + "windows-sys 0.52.0", +] + +[[package]] +name = "rmp" +version = "0.8.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f9860a6cc38ed1da53456442089b4dfa35e7cedaa326df63017af88385e6b20" +dependencies = [ + "byteorder", + "num-traits", + "paste", +] + +[[package]] +name = "rmp-serde" +version = "0.15.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "723ecff9ad04f4ad92fe1c8ca6c20d2196d9286e9c60727c4cb5511629260e9d" +dependencies = [ + "byteorder", + "rmp", + "serde", +] + +[[package]] +name = "rpassword" +version = "7.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80472be3c897911d0137b2d2b9055faf6eeac5b14e324073d83bc17b191d7e3f" +dependencies = [ + "libc", + "rtoolbox", + "windows-sys 0.48.0", +] + +[[package]] +name = "rtoolbox" +version = "0.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c247d24e63230cdb56463ae328478bd5eac8b8faa8c69461a77e8e323afac90e" +dependencies = [ + "libc", + "windows-sys 0.48.0", +] + +[[package]] +name = "rustc-demangle" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" + +[[package]] +name = "rustc_version" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +dependencies = [ + "semver", +] + +[[package]] +name = "rusticata-macros" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "faf0c4a6ece9950b9abdb62b1cfcf2a68b3b67a10ba445b3bb85be2a293d0632" +dependencies = [ + "nom 7.1.3", +] + +[[package]] +name = "rustix" +version = "0.37.27" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "fea8ca367a3a01fe35e6943c400addf443c0f57670e6ec51196f71a4b8762dd2" +dependencies = [ + "bitflags 1.3.2", + "errno", + "io-lifetimes", + "libc", + "linux-raw-sys 0.3.8", + "windows-sys 0.48.0", +] + +[[package]] +name = "rustix" +version = "0.38.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ea3e1a662af26cd7a3ba09c0297a31af215563ecf42817c98df621387f4e949" +dependencies = [ + "bitflags 2.4.2", + "errno", + "libc", + "linux-raw-sys 0.4.13", + "windows-sys 0.52.0", +] + +[[package]] +name = "rustls" +version = "0.20.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b80e3dec595989ea8510028f30c408a4630db12c9cbb8de34203b89d6577e99" +dependencies = [ + "log", + "ring 0.16.20", + "sct", + "webpki", +] + +[[package]] +name = "rustls" +version = "0.21.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9d5a6813c0759e4609cd494e8e725babae6a2ca7b62a5536a13daaec6fcb7ba" +dependencies = [ + "log", + "ring 0.17.8", + "rustls-webpki 0.101.7", + "sct", +] + +[[package]] +name = "rustls" +version = "0.22.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e87c9956bd9807afa1f77e0f7594af32566e830e088a5576d27c5b6f30f49d41" +dependencies = [ + "log", + "ring 0.17.8", + "rustls-pki-types", + "rustls-webpki 0.102.2", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls-native-certs" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9aace74cb666635c918e9c12bc0d348266037aa8eb599b5cba565709a8dff00" +dependencies = [ + "openssl-probe", + "rustls-pemfile 1.0.4", + "schannel", + "security-framework", +] + +[[package]] +name = "rustls-native-certs" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f1fb85efa936c42c6d5fc28d2629bb51e4b2f4b8a5211e297d599cc5a093792" +dependencies = [ + "openssl-probe", + "rustls-pemfile 2.1.1", + "rustls-pki-types", + "schannel", + "security-framework", +] + +[[package]] +name = "rustls-pemfile" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" +dependencies = [ + "base64 0.21.7", +] + +[[package]] +name = "rustls-pemfile" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f48172685e6ff52a556baa527774f61fcaa884f59daf3375c62a3f1cd2549dab" +dependencies = [ + "base64 0.21.7", + "rustls-pki-types", +] + +[[package]] +name = "rustls-pki-types" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ede67b28608b4c60685c7d54122d4400d90f62b40caee7700e700380a390fa8" + +[[package]] +name = "rustls-webpki" +version = "0.101.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" +dependencies = [ + "ring 0.17.8", + "untrusted 0.9.0", +] + +[[package]] +name = "rustls-webpki" +version = "0.102.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "faaa0a62740bedb9b2ef5afa303da42764c012f743917351dc9a237ea1663610" +dependencies = [ + "ring 0.17.8", + "rustls-pki-types", + "untrusted 0.9.0", +] + +[[package]] +name = "rustversion" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4" + +[[package]] +name = "ryu" +version = "1.0.17" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1" + +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "schannel" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbc91545643bcf3a0bbb6569265615222618bdf33ce4ffbbd13c4bbd4c093534" +dependencies = [ + "windows-sys 0.52.0", +] + +[[package]] +name = "sct" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" +dependencies = [ + "ring 0.17.8", + "untrusted 0.9.0", +] + +[[package]] +name = "sec1" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3be24c1842290c45df0a7bf069e0c268a747ad05a192f2fd7dcfdbc1cba40928" +dependencies = [ + "base16ct", + "der", + "generic-array", + "pkcs8", + "subtle", + "zeroize", +] + +[[package]] +name = "security-framework" +version = "2.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05b64fb303737d99b81884b2c63433e9ae28abebe5eb5045dcdd175dc2ecf4de" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e932934257d3b408ed8f30db49d85ea163bfe74961f017f405b025af298f0c7a" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "semver" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92d43fe69e652f3df9bdc2b85b2854a0825b86e4fb76bc44d945137d053639ca" + +[[package]] +name = "serde" +version = "1.0.197" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.197" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "serde_json" +version = "1.0.114" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c5f09b1bd632ef549eaa9f60a1f8de742bdbc698e6cee2095fc84dde5f549ae0" +dependencies = [ + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "sha1" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "sha2" +version = "0.10.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "sharded-slab" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "signal-hook-registry" +version = "1.4.1" +source 
= "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8229b473baa5980ac72ef434c4415e70c4b5e71b423043adb4ba059f89c99a1" +dependencies = [ + "libc", +] + +[[package]] +name = "signature" +version = "1.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74233d3b3b2f6d4b006dc19dee745e73e2a6bfb6f93607cd3b02bd5b00797d7c" +dependencies = [ + "digest", + "rand_core", +] + +[[package]] +name = "sized-chunks" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16d69225bde7a69b235da73377861095455d298f2b970996eec25ddbb42b3d1e" +dependencies = [ + "bitmaps", + "typenum", +] + +[[package]] +name = "slab" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" +dependencies = [ + "autocfg", +] + +[[package]] +name = "smallvec" +version = "1.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6ecd384b10a64542d77071bd64bd7b231f4ed5940fba55e98c3de13824cf3d7" + +[[package]] +name = "smol" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13f2b548cd8447f8de0fdf1c592929f70f4fc7039a05e47404b0d096ec6987a1" +dependencies = [ + "async-channel 1.9.0", + "async-executor", + "async-fs", + "async-io 1.13.0", + "async-lock 2.8.0", + "async-net", + "async-process", + "blocking", + "futures-lite 1.13.0", +] + +[[package]] +name = "smtp-message" +version = "0.1.0" +source = "git+http://github.com/Alexis211/kannader?branch=feature/lmtp#0560e7c46af752344a3095add5f84b02400b1111" +dependencies = [ + "auto_enums", + "futures", + "idna 0.2.3", + "lazy_static", + "nom 6.1.2", + "pin-project", + "regex-automata 0.1.10", + "serde", +] + +[[package]] +name = "smtp-server" +version = "0.1.0" +source = "git+http://github.com/Alexis211/kannader?branch=feature/lmtp#0560e7c46af752344a3095add5f84b02400b1111" +dependencies = [ + "async-trait", + "chrono", + "duplexify", + "futures", + "smol", + "smtp-message", + "smtp-server-types", +] + +[[package]] +name = "smtp-server-types" +version = "0.1.0" +source = "git+http://github.com/Alexis211/kannader?branch=feature/lmtp#0560e7c46af752344a3095add5f84b02400b1111" +dependencies = [ + "serde", + "smtp-message", +] + +[[package]] +name = "socket2" +version = "0.4.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d" +dependencies = [ + "libc", + "winapi", +] + +[[package]] +name = "socket2" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05ffd9c0a93b7543e062e759284fcf5f5e3b098501104bfbdde4d404db792871" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "sodiumoxide" +version = "0.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e26be3acb6c2d9a7aac28482586a7856436af4cfe7100031d219de2d2ecb0028" +dependencies = [ + "ed25519", + "libc", + "libsodium-sys", + "serde", +] + +[[package]] +name = "spin" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" + +[[package]] +name = "spin" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" + +[[package]] +name = "spki" +version = "0.6.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "67cf02bbac7a337dc36e4f5a693db6c21e7863f45070f7064577eb4367a3212b" +dependencies = [ + "base64ct", + "der", +] + +[[package]] +name = "static_assertions" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" + +[[package]] +name = "strsim" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" + +[[package]] +name = "subtle" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" + +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.52" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b699d15b36d1f02c3e7c69f8ffef53de37aefae075d8488d4ba1a7788d574a07" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "sync_wrapper" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" + +[[package]] +name = "synstructure" +version = "0.12.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", + "unicode-xid", +] + +[[package]] +name = "tap" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" + +[[package]] +name = "termcolor" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "textwrap" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23d434d3f8967a09480fb04132ebe0a3e088c173e6d0ee7897abbdf4eab0f8b9" + +[[package]] +name = "thiserror" +version = "1.0.57" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e45bcbe8ed29775f228095caf2cd67af7a4ccf756ebff23a306bf3e8b47b24b" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.57" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a953cb265bef375dae3de6663da4d3804eee9682ea80d8e2542529b73c531c81" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "thread_local" +version = "1.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" +dependencies = [ + "cfg-if", + "once_cell", +] + +[[package]] +name = "time" +version = "0.3.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8248b6521bb14bc45b4067159b9b6ad792e2d6d754d6c41fb50e29fefe38749" +dependencies = [ + "deranged", + "itoa", + "num-conv", + "powerfmt", + "serde", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.2" +source 
= "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" + +[[package]] +name = "time-macros" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ba3a3ef41e6672a2f0f001392bb5dcd3ff0a9992d618ca761a11c3121547774" +dependencies = [ + "num-conv", + "time-core", +] + +[[package]] +name = "tinyvec" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tokio" +version = "1.36.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61285f6515fa018fb2d1e46eb21223fff441ee8db5d0f1435e8ab4f5cdb80931" +dependencies = [ + "backtrace", + "bytes", + "libc", + "mio", + "num_cpus", + "pin-project-lite 0.2.13", + "signal-hook-registry", + "socket2 0.5.6", + "tokio-macros", + "tracing", + "windows-sys 0.48.0", +] + +[[package]] +name = "tokio-io-timeout" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30b74022ada614a1b4834de765f9bb43877f910cc8ce4be40e89042c9223a8bf" +dependencies = [ + "pin-project-lite 0.2.13", + "tokio", +] + +[[package]] +name = "tokio-macros" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "tokio-rustls" +version = "0.23.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59" +dependencies = [ + "rustls 0.20.9", + "tokio", + "webpki", +] + +[[package]] +name = "tokio-rustls" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" +dependencies = [ + "rustls 0.21.10", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.25.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "775e0c0f0adb3a2f22a00c4745d728b479985fc15ee7ca6a2608388c5569860f" +dependencies = [ + "rustls 0.22.2", + "rustls-pki-types", + "tokio", +] + +[[package]] +name = "tokio-stream" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "397c988d37662c7dda6d2208364a706264bf3d6138b11d436cbac0ad38832842" +dependencies = [ + "futures-core", + "pin-project-lite 0.2.13", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.7.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15" +dependencies = [ + "bytes", + "futures-core", + "futures-io", + "futures-sink", + "pin-project-lite 0.2.13", + "tokio", + "tracing", +] + +[[package]] +name = "toml" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234" +dependencies = [ + "serde", +] + +[[package]] +name = "tonic" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d560933a0de61cf715926b9cac824d4c883c2c43142f787595e48280c40a1d0e" +dependencies = [ + "async-stream", + "async-trait", + "axum", + "base64 0.21.7", + "bytes", + "h2 0.3.24", + "http 0.2.12", + "http-body 0.4.6", + "hyper 0.14.28", + "hyper-timeout", + "percent-encoding", + "pin-project", + "prost", + "tokio", + "tokio-stream", + "tower", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" +dependencies = [ + "futures-core", + "futures-util", + "indexmap 1.9.3", + "pin-project", + "pin-project-lite 0.2.13", + "rand", + "slab", + "tokio", + "tokio-util", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-layer" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" + +[[package]] +name = "tower-service" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" + +[[package]] +name = "tracing" +version = "0.1.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +dependencies = [ + "log", + "pin-project-lite 0.2.13", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "tracing-core" +version = "0.1.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" +dependencies = [ + "matchers", + "nu-ansi-term", + "once_cell", + "regex", + "sharded-slab", + "smallvec", + "thread_local", + "tracing", + "tracing-core", + "tracing-log", +] + +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "typenum" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" + +[[package]] +name = "unicode-bidi" +version = "0.3.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" + +[[package]] +name = "unicode-ident" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" + +[[package]] +name = "unicode-normalization" +version = 
"0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-xid" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" + +[[package]] +name = "untrusted" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" + +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + +[[package]] +name = "url" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633" +dependencies = [ + "form_urlencoded", + "idna 0.5.0", + "percent-encoding", +] + +[[package]] +name = "urlencoding" +version = "2.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" + +[[package]] +name = "uuid" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f00cc9702ca12d3c81455259621e676d0f7251cec66a21e98fe2e9a37db93b2a" + +[[package]] +name = "valuable" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" + +[[package]] +name = "value-bag" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "126e423afe2dd9ac52142e7e9d5ce4135d7e13776c529d27fd6bc49f19e3280b" + +[[package]] +name = "version_check" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" + +[[package]] +name = "vsimd" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c3082ca00d5a5ef149bb8b555a72ae84c9c59f7250f013ac822ac2e49b19c64" + +[[package]] +name = "waker-fn" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3c4517f54858c779bbcbf228f4fca63d121bf85fbecb2dc578cdf4a39395690" + +[[package]] +name = "walkdir" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" +dependencies = [ + "same-file", + "winapi-util", +] + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" +dependencies = [ + "cfg-if", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2", + "quote", + "syn 2.0.52", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76bc14366121efc8dbb487ab05bcc9d346b3b5ec0eaa76e46594cabbe51762c0" +dependencies = [ + "cfg-if", + "js-sys", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96" + +[[package]] +name = "web-sys" +version = "0.3.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77afa9a11836342370f4817622a2f0f418b134426d91a82dfb48f532d2ec13ef" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "webpki" +version = "0.22.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed63aea5ce73d0ff405984102c42de94fc55a6b75765d621c65262469b3c9b53" +dependencies = [ + "ring 0.17.8", + "untrusted 0.9.0", +] + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596" +dependencies = [ + "winapi", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows-core" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" +dependencies = [ + "windows-targets 0.52.4", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.5", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.4", +] + +[[package]] +name = "windows-targets" 
+version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", +] + +[[package]] +name = "windows-targets" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7dd37b7e5ab9018759f893a1952c9420d060016fc19a472b4bb20d1bdd694d1b" +dependencies = [ + "windows_aarch64_gnullvm 0.52.4", + "windows_aarch64_msvc 0.52.4", + "windows_i686_gnu 0.52.4", + "windows_i686_msvc 0.52.4", + "windows_x86_64_gnu 0.52.4", + "windows_x86_64_gnullvm 0.52.4", + "windows_x86_64_msvc 0.52.4", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bcf46cf4c365c6f2d1cc93ce535f2c8b244591df96ceee75d8e83deb70a9cac9" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da9f259dd3bcf6990b55bffd094c4f7235817ba4ceebde8e6d11cd0c5633b675" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b474d8268f99e0995f25b9f095bc7434632601028cf86590aea5c8a5cb7801d3" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1515e9a29e5bed743cb4415a9ecf5dfca648ce85ee42e15873c3cd8610ff8e02" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5eee091590e89cc02ad514ffe3ead9eb6b660aedca2183455434b93546371a03" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77ca79f2451b49fa9e2af39f0747fe999fcda4f5e241b2898624dca97a1f2177" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8" + +[[package]] +name = "wyz" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85e60b0d1b5f99db2556934e21937020776a5d31520bf169e851ac44e6420214" + +[[package]] +name = "x509-parser" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fb9bace5b5589ffead1afb76e43e34cff39cd0f3ce7e170ae0c29e53b88eb1c" +dependencies = [ + "asn1-rs", + "base64 0.13.1", + "data-encoding", + "der-parser", + "lazy_static", + "nom 7.1.3", + "oid-registry", + "rusticata-macros", + "thiserror", + "time", +] + +[[package]] +name = "xmlparser" +version = "0.13.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "66fee0b777b0f5ac1c69bb06d361268faafa61cd4682ae064a171c16c433e9e4" + +[[package]] +name = "zeroize" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d" + +[[package]] +name = "zstd" +version = "0.9.2+zstd.1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2390ea1bf6c038c39674f22d95f0564725fc06034a47129179810b2fc58caa54" +dependencies = [ + "zstd-safe", +] + +[[package]] +name = "zstd-safe" +version = "4.1.3+zstd.1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e99d81b99fb3c2c2c794e3fe56c305c63d5173a16a46b5850b07c935ffc7db79" +dependencies = [ + "libc", + "zstd-sys", +] + +[[package]] +name = "zstd-sys" +version = "1.6.2+zstd.1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2daf2f248d9ea44454bfcb2516534e8b8ad2fc91bf818a1885495fc42bc8ac9f" +dependencies = [ + "cc", + "libc", +] diff --git a/aero-dav/fuzz/Cargo.toml b/aero-dav/fuzz/Cargo.toml new file mode 100644 index 0000000..a450853 --- /dev/null +++ b/aero-dav/fuzz/Cargo.toml @@ -0,0 +1,24 @@ +[package] +name = "aerogramme-fuzz" +version = "0.0.0" +publish = false +edition = "2021" + +[package.metadata] +cargo-fuzz = true + +[dependencies] +arbitrary = { version = "1", optional = true, features = ["derive"] } +libfuzzer-sys = { version = "0.4", features = ["arbitrary-derive"] } +tokio = { version = "1.18", default-features = false, features = ["rt", "rt-multi-thread", "io-util", "net", "time", "macros", "sync", "signal", "fs"] } +quick-xml = { version = "0.31", features = ["async-tokio"] } + +[dependencies.aero-dav] +path = ".." + +[[bin]] +name = "dav" +path = "fuzz_targets/dav.rs" +test = false +doc = false +bench = false diff --git a/aero-dav/fuzz/dav.dict b/aero-dav/fuzz/dav.dict new file mode 100644 index 0000000..3ef5b69 --- /dev/null +++ b/aero-dav/fuzz/dav.dict @@ -0,0 +1,126 @@ +# +# AFL dictionary for XML +# ---------------------- +# +# Several basic syntax elements and attributes, modeled on libxml2. 
+# +# Created by Michal Zalewski +# + +attr_encoding=" encoding=\"1\"" +attr_generic=" a=\"1\"" +attr_href=" href=\"1\"" +attr_standalone=" standalone=\"no\"" +attr_version=" version=\"1\"" +attr_xml_base=" xml:base=\"1\"" +attr_xml_id=" xml:id=\"1\"" +attr_xml_lang=" xml:lang=\"1\"" +attr_xml_space=" xml:space=\"1\"" +attr_xmlns=" xmlns=\"1\"" + +entity_builtin="<" +entity_decimal="" +entity_external="&a;" +entity_hex="" + +string_any="ANY" +string_brackets="[]" +string_cdata="CDATA" +string_col_fallback=":fallback" +string_col_generic=":a" +string_col_include=":include" +string_dashes="--" +string_empty="EMPTY" +string_empty_dblquotes="\"\"" +string_empty_quotes="''" +string_entities="ENTITIES" +string_entity="ENTITY" +string_fixed="#FIXED" +string_id="ID" +string_idref="IDREF" +string_idrefs="IDREFS" +string_implied="#IMPLIED" +string_nmtoken="NMTOKEN" +string_nmtokens="NMTOKENS" +string_notation="NOTATION" +string_parentheses="()" +string_pcdata="#PCDATA" +string_percent="%a" +string_public="PUBLIC" +string_required="#REQUIRED" +string_schema=":schema" +string_system="SYSTEM" +string_ucs4="UCS-4" +string_utf16="UTF-16" +string_utf8="UTF-8" +string_xmlns="xmlns:" + +tag_attlist="" +tag_doctype="" +tag_open_close="" +tag_open_exclamation="" +tag_xml_q="" + +"0" +"1" +"activelock" +"allprop" +"cannot-modify-protected-property" +"collection" +"creationdate" +"DAV:" +"depth" +"displayname" +"error" +"exclusive" +"getcontentlanguage" +"getcontentlength" +"getcontenttype" +"getetag" +"getlastmodified" +"href" +"include" +"Infinite" +"infinity" +"location" +"lockdiscovery" +"lockentry" +"lockinfo" +"lockroot" +"lockscope" +"locktoken" +"lock-token-matches-request-uri" +"lock-token-submitted" +"locktype" +"multistatus" +"no-conflicting-lock" +"no-external-entities" +"owner" +"preserved-live-properties" +"prop" +"propertyupdate" +"propfind" +"propfind-finite-depth" +"propname" +"propstat" +"remove" +"resourcetype" +"response" +"responsedescription" +"set" +"shared" +"status" +"supportedlock" +"text/html" +"timeout" +"write" diff --git a/aero-dav/fuzz/fuzz_targets/dav.rs b/aero-dav/fuzz/fuzz_targets/dav.rs new file mode 100644 index 0000000..a3c6ece --- /dev/null +++ b/aero-dav/fuzz/fuzz_targets/dav.rs @@ -0,0 +1,196 @@ +#![no_main] + +use libfuzzer_sys::fuzz_target; +use libfuzzer_sys::arbitrary; +use libfuzzer_sys::arbitrary::Arbitrary; + +use aero_dav::{types, realization, xml}; +use quick_xml::reader::NsReader; +use tokio::runtime::Runtime; +use tokio::io::AsyncWriteExt; + +const tokens: [&str; 63] = [ +"0", +"1", +"activelock", +"allprop", +"encoding", +"utf-8", +"http://ns.example.com/boxschema/", +"HTTP/1.1 200 OK", +"1997-12-01T18:27:21-08:00", +"Mon, 12 Jan 1998 09:25:56 GMT", +"\"abcdef\"", +"cannot-modify-protected-property", +"collection", +"creationdate", +"DAV:", +"D", +"C", +"xmlns:D", +"depth", +"displayname", +"error", +"exclusive", +"getcontentlanguage", +"getcontentlength", +"getcontenttype", +"getetag", +"getlastmodified", +"href", +"include", +"Infinite", +"infinity", +"location", +"lockdiscovery", +"lockentry", +"lockinfo", +"lockroot", +"lockscope", +"locktoken", +"lock-token-matches-request-uri", +"lock-token-submitted", +"locktype", +"multistatus", +"no-conflicting-lock", +"no-external-entities", +"owner", +"preserved-live-properties", +"prop", +"propertyupdate", +"propfind", +"propfind-finite-depth", +"propname", +"propstat", +"remove", +"resourcetype", +"response", +"responsedescription", +"set", +"shared", +"status", +"supportedlock", +"text/html", +"timeout", 
+"write", +]; + +#[derive(Arbitrary)] +enum Token { + Known(usize), + //Unknown(String), +} +impl Token { + fn serialize(&self) -> String { + match self { + Self::Known(i) => tokens[i % tokens.len()].to_string(), + //Self::Unknown(v) => v.to_string(), + } + } +} + +#[derive(Arbitrary)] +struct Tag { + //prefix: Option, + name: Token, + attr: Option<(Token, Token)>, +} +impl Tag { + fn start(&self) -> String { + let mut acc = String::new(); + /*if let Some(p) = &self.prefix { + acc.push_str(p.serialize().as_str()); + acc.push_str(":"); + }*/ + acc.push_str("D:"); + acc.push_str(self.name.serialize().as_str()); + + if let Some((k,v)) = &self.attr { + acc.push_str(" "); + acc.push_str(k.serialize().as_str()); + acc.push_str("=\""); + acc.push_str(v.serialize().as_str()); + acc.push_str("\""); + } + acc + } + fn end(&self) -> String { + let mut acc = String::new(); + acc.push_str("D:"); + acc.push_str(self.name.serialize().as_str()); + acc + } +} + + +#[derive(Arbitrary)] +enum XmlNode { + Node(Tag, Vec), + Number(u64), + Text(Token), +} +impl std::fmt::Debug for XmlNode { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.serialize()) + } +} +impl XmlNode { + fn serialize(&self) -> String { + match self { + Self::Node(tag, children) => { + let stag = tag.start(); + match children.is_empty() { + true => format!("<{}/>", stag), + false => format!("<{}>{}", stag, children.iter().map(|v| v.serialize()).collect::(), tag.end()), + } + }, + Self::Number(v) => format!("{}", v), + Self::Text(v) => v.serialize(), + } + } +} + +async fn serialize(elem: &impl xml::QWrite) -> Vec { + let mut buffer = Vec::new(); + let mut tokio_buffer = tokio::io::BufWriter::new(&mut buffer); + let q = quick_xml::writer::Writer::new_with_indent(&mut tokio_buffer, b' ', 4); + let ns_to_apply = vec![ ("xmlns:D".into(), "DAV:".into()) ]; + let mut writer = xml::Writer { q, ns_to_apply }; + + elem.qwrite(&mut writer).await.expect("xml serialization"); + tokio_buffer.flush().await.expect("tokio buffer flush"); + + return buffer +} + +type Object = types::Multistatus>; + +fuzz_target!(|nodes: XmlNode| { + let gen = format!("{}", nodes.serialize()); + //println!("--------\n{}", gen); + let data = gen.as_bytes(); + + let rt = Runtime::new().expect("tokio runtime initialization"); + + rt.block_on(async { + // 1. Setup fuzzing by finding an input that seems correct, do not crash yet then. + let mut rdr = match xml::Reader::new(NsReader::from_reader(data)).await { + Err(_) => return, + Ok(r) => r, + }; + let reference = match rdr.find::().await { + Err(_) => return, + Ok(m) => m, + }; + + // 2. Re-serialize the input + let my_serialization = serialize(&reference).await; + + // 3. De-serialize my serialization + let mut rdr2 = xml::Reader::new(NsReader::from_reader(my_serialization.as_slice())).await.expect("XML Reader init"); + let comparison = rdr2.find::().await.expect("Deserialize again"); + + // 4. Both the first decoding and last decoding must be identical + assert_eq!(reference, comparison); + }) +}); diff --git a/aero-dav/src/acltypes.rs b/aero-dav/src/acltypes.rs new file mode 100644 index 0000000..f356813 --- /dev/null +++ b/aero-dav/src/acltypes.rs @@ -0,0 +1,4 @@ +//@FIXME required for a full DAV implementation +// See section 6. of the CalDAV RFC +// It seems mainly required for free-busy that I will not implement now. +// It can also be used for discovering main calendar, not sure it is used. 
diff --git a/aero-dav/src/caldecoder.rs b/aero-dav/src/caldecoder.rs new file mode 100644 index 0000000..5f40c4b --- /dev/null +++ b/aero-dav/src/caldecoder.rs @@ -0,0 +1,33 @@ +use super::types as dav; +use super::caltypes::*; +use super::xml; +use super::error; + +// ---- ROOT ELEMENTS --- + +// ---- EXTENSIONS --- +impl xml::QRead for Violation { + async fn qread(xml: &mut xml::Reader) -> Result { + unreachable!(); + } +} + +impl xml::QRead for Property { + async fn qread(xml: &mut xml::Reader) -> Result { + unreachable!(); + } +} + +impl xml::QRead for PropertyRequest { + async fn qread(xml: &mut xml::Reader) -> Result { + unreachable!(); + } +} + +impl xml::QRead for ResourceType { + async fn qread(xml: &mut xml::Reader) -> Result { + unreachable!(); + } +} + +// ---- INNER XML ---- diff --git a/aero-dav/src/calencoder.rs b/aero-dav/src/calencoder.rs new file mode 100644 index 0000000..ff6eb24 --- /dev/null +++ b/aero-dav/src/calencoder.rs @@ -0,0 +1,886 @@ +use quick_xml::Error as QError; +use quick_xml::events::{Event, BytesEnd, BytesStart, BytesText}; +use quick_xml::name::PrefixDeclaration; +use tokio::io::AsyncWrite; + +use super::caltypes::*; +use super::xml::{Node, QWrite, IWrite, Writer}; +use super::types::Extension; + +const ICAL_DATETIME_FMT: &str = "%Y%m%dT%H%M%SZ"; + +// ==================== Calendar Types Serialization ========================= + +// -------------------- MKCALENDAR METHOD ------------------------------------ +impl QWrite for MkCalendar { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_cal_element("mkcalendar"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + self.0.qwrite(xml).await?; + xml.q.write_event_async(Event::End(end)).await + } +} + +impl> QWrite for MkCalendarResponse { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_cal_element("mkcalendar-response"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + for propstat in self.0.iter() { + propstat.qwrite(xml).await?; + } + xml.q.write_event_async(Event::End(end)).await + } +} + +// ----------------------- REPORT METHOD ------------------------------------- + +impl QWrite for CalendarQuery { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_cal_element("calendar-query"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + if let Some(selector) = &self.selector { + selector.qwrite(xml).await?; + } + self.filter.qwrite(xml).await?; + if let Some(tz) = &self.timezone { + tz.qwrite(xml).await?; + } + xml.q.write_event_async(Event::End(end)).await + } +} + +impl QWrite for CalendarMultiget { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_cal_element("calendar-multiget"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + if let Some(selector) = &self.selector { + selector.qwrite(xml).await?; + } + for href in self.href.iter() { + href.qwrite(xml).await?; + } + xml.q.write_event_async(Event::End(end)).await + } +} + +impl QWrite for FreeBusyQuery { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_cal_element("free-busy-query"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + self.0.qwrite(xml).await?; + xml.q.write_event_async(Event::End(end)).await + } +} + +// 
-------------------------- DAV::prop -------------------------------------- +impl QWrite for PropertyRequest { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let mut atom = async |c| { + let empty_tag = xml.create_cal_element(c); + xml.q.write_event_async(Event::Empty(empty_tag)).await + }; + + match self { + Self::CalendarDescription => atom("calendar-description").await, + Self::CalendarTimezone => atom("calendar-timezone").await, + Self::SupportedCalendarComponentSet => atom("supported-calendar-component-set").await, + Self::SupportedCalendarData => atom("supported-calendar-data").await, + Self::MaxResourceSize => atom("max-resource-size").await, + Self::MinDateTime => atom("min-date-time").await, + Self::MaxDateTime => atom("max-date-time").await, + Self::MaxInstances => atom("max-instances").await, + Self::MaxAttendeesPerInstance => atom("max-attendees-per-instance").await, + Self::SupportedCollationSet => atom("supported-collation-set").await, + Self::CalendarData(req) => req.qwrite(xml).await, + } + } +} +impl QWrite for Property { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + match self { + Self::CalendarDescription { lang, text } => { + let mut start = xml.create_cal_element("calendar-description"); + if let Some(the_lang) = lang { + start.push_attribute(("xml:lang", the_lang.as_str())); + } + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Text(BytesText::new(text))).await?; + xml.q.write_event_async(Event::End(end)).await + }, + Self::CalendarTimezone(payload) => { + let start = xml.create_cal_element("calendar-timezone"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Text(BytesText::new(payload))).await?; + xml.q.write_event_async(Event::End(end)).await + }, + Self::SupportedCalendarComponentSet(many_comp) => { + let start = xml.create_cal_element("supported-calendar-component-set"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + for comp in many_comp.iter() { + comp.qwrite(xml).await?; + } + xml.q.write_event_async(Event::End(end)).await + }, + Self::SupportedCalendarData(many_mime) => { + let start = xml.create_cal_element("supported-calendar-data"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + for mime in many_mime.iter() { + mime.qwrite(xml).await?; + } + xml.q.write_event_async(Event::End(end)).await + }, + Self::MaxResourceSize(bytes) => { + let start = xml.create_cal_element("max-resource-size"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Text(BytesText::new(bytes.to_string().as_str()))).await?; + xml.q.write_event_async(Event::End(end)).await + }, + Self::MinDateTime(dt) => { + let start = xml.create_cal_element("min-date-time"); + let end = start.to_end(); + + let dtstr = format!("{}", dt.format(ICAL_DATETIME_FMT)); + xml.q.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Text(BytesText::new(dtstr.as_str()))).await?; + xml.q.write_event_async(Event::End(end)).await + }, + Self::MaxDateTime(dt) => { + let start = xml.create_cal_element("max-date-time"); + let end = start.to_end(); + + let dtstr = format!("{}", dt.format(ICAL_DATETIME_FMT)); + xml.q.write_event_async(Event::Start(start.clone())).await?; + 
xml.q.write_event_async(Event::Text(BytesText::new(dtstr.as_str()))).await?; + xml.q.write_event_async(Event::End(end)).await + }, + Self::MaxInstances(count) => { + let start = xml.create_cal_element("max-instances"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Text(BytesText::new(count.to_string().as_str()))).await?; + xml.q.write_event_async(Event::End(end)).await + }, + Self::MaxAttendeesPerInstance(count) => { + let start = xml.create_cal_element("max-attendees-per-instance"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Text(BytesText::new(count.to_string().as_str()))).await?; + xml.q.write_event_async(Event::End(end)).await + }, + Self::SupportedCollationSet(many_collations) => { + let start = xml.create_cal_element("supported-collation-set"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + for collation in many_collations.iter() { + collation.qwrite(xml).await?; + } + xml.q.write_event_async(Event::End(end)).await + }, + Self::CalendarData(inner) => inner.qwrite(xml).await, + } + } +} + +// ---------------------- DAV::resourcetype ---------------------------------- +impl QWrite for ResourceType { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + match self { + Self::Calendar => { + let empty_tag = xml.create_dav_element("calendar"); + xml.q.write_event_async(Event::Empty(empty_tag)).await + }, + } + } +} + +// --------------------------- DAV::error ------------------------------------ +impl QWrite for Violation { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let mut atom = async |c| { + let empty_tag = xml.create_cal_element(c); + xml.q.write_event_async(Event::Empty(empty_tag)).await + }; + + match self { + //@FIXME + // DAV elements, should not be here but in RFC3744 on ACLs + // (we do not use atom as this error is in the DAV namespace, not the caldav one) + Self::NeedPrivileges => { + let empty_tag = xml.create_dav_element("need-privileges"); + xml.q.write_event_async(Event::Empty(empty_tag)).await + }, + + // Regular CalDAV errors + Self::ResourceMustBeNull => atom("resource-must-be-null").await, + Self::CalendarCollectionLocationOk => atom("calendar-collection-location-ok").await, + Self::ValidCalendarData => atom("valid-calendar-data").await, + Self::InitializeCalendarCollection => atom("initialize-calendar-collection").await, + Self::SupportedCalendarData => atom("supported-calendar-data").await, + Self::ValidCalendarObjectResource => atom("valid-calendar-object-resource").await, + Self::SupportedCalendarComponent => atom("supported-calendar-component").await, + Self::NoUidConflict(href) => { + let start = xml.create_cal_element("no-uid-conflict"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + href.qwrite(xml).await?; + xml.q.write_event_async(Event::End(end)).await + }, + Self::MaxResourceSize => atom("max-resource-size").await, + Self::MinDateTime => atom("min-date-time").await, + Self::MaxDateTime => atom("max-date-time").await, + Self::MaxInstances => atom("max-instances").await, + Self::MaxAttendeesPerInstance => atom("max-attendees-per-instance").await, + Self::ValidFilter => atom("valid-filter").await, + Self::SupportedFilter { comp, prop, param } => { + let start = xml.create_cal_element("supported-filter"); + let end = start.to_end(); + + 
xml.q.write_event_async(Event::Start(start.clone())).await?; + for comp_item in comp.iter() { + comp_item.qwrite(xml).await?; + } + for prop_item in prop.iter() { + prop_item.qwrite(xml).await?; + } + for param_item in param.iter() { + param_item.qwrite(xml).await?; + } + xml.q.write_event_async(Event::End(end)).await + }, + Self::NumberOfMatchesWithinLimits => atom("number-of-matches-within-limits").await, + } + } +} + + +// ---------------------------- Inner XML ------------------------------------ +impl QWrite for SupportedCollation { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_cal_element("supported-collation"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + self.0.qwrite(xml).await?; + xml.q.write_event_async(Event::End(end)).await + + } +} + +impl QWrite for Collation { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let col = match self { + Self::AsciiCaseMap => "i;ascii-casemap", + Self::Octet => "i;octet", + Self::Unknown(v) => v.as_str(), + }; + + xml.q.write_event_async(Event::Text(BytesText::new(col))).await + } +} + +impl QWrite for CalendarDataPayload { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let mut start = xml.create_cal_element("calendar-data"); + if let Some(mime) = &self.mime { + start.push_attribute(("content-type", mime.content_type.as_str())); + start.push_attribute(("version", mime.version.as_str())); + } + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Text(BytesText::new(self.payload.as_str()))).await?; + xml.q.write_event_async(Event::End(end)).await + } +} + +impl QWrite for CalendarDataRequest { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let mut start = xml.create_cal_element("calendar-data"); + if let Some(mime) = &self.mime { + start.push_attribute(("content-type", mime.content_type.as_str())); + start.push_attribute(("version", mime.version.as_str())); + } + let end = start.to_end(); + xml.q.write_event_async(Event::Start(start.clone())).await?; + if let Some(comp) = &self.comp { + comp.qwrite(xml).await?; + } + if let Some(recurrence) = &self.recurrence { + recurrence.qwrite(xml).await?; + } + if let Some(freebusy) = &self.limit_freebusy_set { + freebusy.qwrite(xml).await?; + } + xml.q.write_event_async(Event::End(end)).await + } +} + +impl QWrite for CalendarDataEmpty { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let mut empty = xml.create_cal_element("calendar-data"); + if let Some(mime) = &self.0 { + empty.push_attribute(("content-type", mime.content_type.as_str())); + empty.push_attribute(("version", mime.version.as_str())); + } + xml.q.write_event_async(Event::Empty(empty)).await + } +} + +impl QWrite for Comp { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let mut start = xml.create_cal_element("comp"); + start.push_attribute(("name", self.name.as_str())); + match &self.additional_rules { + None => xml.q.write_event_async(Event::Empty(start)).await, + Some(rules) => { + let end = start.to_end(); + xml.q.write_event_async(Event::Start(start.clone())).await?; + rules.prop_kind.qwrite(xml).await?; + rules.comp_kind.qwrite(xml).await?; + xml.q.write_event_async(Event::End(end)).await + }, + } + } +} + +impl QWrite for CompSupport { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let mut empty = xml.create_cal_element("comp"); + 
empty.push_attribute(("name", self.0.as_str())); + xml.q.write_event_async(Event::Empty(empty)).await + } +} + +impl QWrite for CompKind { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + match self { + Self::AllComp => { + let empty_tag = xml.create_cal_element("allcomp"); + xml.q.write_event_async(Event::Empty(empty_tag)).await + }, + Self::Comp(many_comp) => { + for comp in many_comp.iter() { + // Required: recursion in an async fn requires boxing + // rustc --explain E0733 + Box::pin(comp.qwrite(xml)).await?; + } + Ok(()) + } + } + } +} + +impl QWrite for PropKind { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + match self { + Self::AllProp => { + let empty_tag = xml.create_cal_element("allprop"); + xml.q.write_event_async(Event::Empty(empty_tag)).await + }, + Self::Prop(many_prop) => { + for prop in many_prop.iter() { + prop.qwrite(xml).await?; + } + Ok(()) + } + } + } +} + +impl QWrite for CalProp { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let mut empty = xml.create_cal_element("prop"); + empty.push_attribute(("name", self.name.0.as_str())); + match self.novalue { + None => (), + Some(true) => empty.push_attribute(("novalue", "yes")), + Some(false) => empty.push_attribute(("novalue", "no")), + } + xml.q.write_event_async(Event::Empty(empty)).await + } +} + +impl QWrite for RecurrenceModifier { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + match self { + Self::Expand(exp) => exp.qwrite(xml).await, + Self::LimitRecurrenceSet(lrs) => lrs.qwrite(xml).await, + } + } +} + +impl QWrite for Expand { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let mut empty = xml.create_cal_element("expand"); + empty.push_attribute(("start", format!("{}", self.0.format(ICAL_DATETIME_FMT)).as_str())); + empty.push_attribute(("end", format!("{}", self.1.format(ICAL_DATETIME_FMT)).as_str())); + xml.q.write_event_async(Event::Empty(empty)).await + } +} + +impl QWrite for LimitRecurrenceSet { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let mut empty = xml.create_cal_element("limit-recurrence-set"); + empty.push_attribute(("start", format!("{}", self.0.format(ICAL_DATETIME_FMT)).as_str())); + empty.push_attribute(("end", format!("{}", self.1.format(ICAL_DATETIME_FMT)).as_str())); + xml.q.write_event_async(Event::Empty(empty)).await + } +} + +impl QWrite for LimitFreebusySet { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let mut empty = xml.create_cal_element("limit-freebusy-set"); + empty.push_attribute(("start", format!("{}", self.0.format(ICAL_DATETIME_FMT)).as_str())); + empty.push_attribute(("end", format!("{}", self.1.format(ICAL_DATETIME_FMT)).as_str())); + xml.q.write_event_async(Event::Empty(empty)).await + } +} + +impl QWrite for CalendarSelector { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + match self { + Self::AllProp => { + let empty_tag = xml.create_dav_element("allprop"); + xml.q.write_event_async(Event::Empty(empty_tag)).await + }, + Self::PropName => { + let empty_tag = xml.create_dav_element("propname"); + xml.q.write_event_async(Event::Empty(empty_tag)).await + }, + Self::Prop(prop) => prop.qwrite(xml).await, + } + } +} + +impl QWrite for CompFilter { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let mut start = xml.create_cal_element("comp-filter"); + start.push_attribute(("name", self.name.as_str())); + + match &self.additional_rules { + None => 
xml.q.write_event_async(Event::Empty(start)).await, + Some(rules) => { + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + rules.qwrite(xml).await?; + xml.q.write_event_async(Event::End(end)).await + } + } + } +} + +impl QWrite for CompFilterRules { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + match self { + Self::IsNotDefined => { + let empty_tag = xml.create_dav_element("is-not-defined"); + xml.q.write_event_async(Event::Empty(empty_tag)).await + }, + Self::Matches(cfm) => cfm.qwrite(xml).await, + } + } +} + +impl QWrite for CompFilterMatch { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + if let Some(time_range) = &self.time_range { + time_range.qwrite(xml).await?; + } + + for prop_item in self.prop_filter.iter() { + prop_item.qwrite(xml).await?; + } + for comp_item in self.comp_filter.iter() { + // Required: recursion in an async fn requires boxing + // rustc --explain E0733 + Box::pin(comp_item.qwrite(xml)).await?; + } + Ok(()) + } +} + +impl QWrite for PropFilter { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let mut start = xml.create_cal_element("prop-filter"); + start.push_attribute(("name", self.name.as_str())); + + match &self.additional_rules { + None => xml.q.write_event_async(Event::Empty(start.clone())).await, + Some(rules) => { + let end = start.to_end(); + xml.q.write_event_async(Event::Start(start.clone())).await?; + rules.qwrite(xml).await?; + xml.q.write_event_async(Event::End(end)).await + } + } + } +} + +impl QWrite for PropFilterRules { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + match self { + Self::IsNotDefined => { + let empty_tag = xml.create_dav_element("is-not-defined"); + xml.q.write_event_async(Event::Empty(empty_tag)).await + }, + Self::Match(prop_match) => prop_match.qwrite(xml).await, + } + } +} + +impl QWrite for PropFilterMatch { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + if let Some(time_range) = &self.time_range { + time_range.qwrite(xml).await?; + } + if let Some(time_or_text) = &self.time_or_text { + time_or_text.qwrite(xml).await?; + } + for param_item in self.param_filter.iter() { + param_item.qwrite(xml).await?; + } + Ok(()) + } +} + +impl QWrite for TimeOrText { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + match self { + Self::Time(time) => time.qwrite(xml).await, + Self::Text(txt) => txt.qwrite(xml).await, + } + } +} + +impl QWrite for TextMatch { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let mut start = xml.create_cal_element("text-match"); + if let Some(collation) = &self.collation { + start.push_attribute(("collation", collation.as_str())); + } + match self.negate_condition { + None => (), + Some(true) => start.push_attribute(("negate-condition", "yes")), + Some(false) => start.push_attribute(("negate-condition", "no")), + } + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Text(BytesText::new(self.text.as_str()))).await?; + xml.q.write_event_async(Event::End(end)).await + } +} + +impl QWrite for ParamFilter { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let mut start = xml.create_cal_element("param-filter"); + start.push_attribute(("name", self.name.as_str())); + + match &self.additional_rules { + None => xml.q.write_event_async(Event::Empty(start)).await, + Some(rules) => { + let end = start.to_end(); + 
xml.q.write_event_async(Event::Start(start.clone())).await?; + rules.qwrite(xml).await?; + xml.q.write_event_async(Event::End(end)).await + } + } + } +} + +impl QWrite for ParamFilterMatch { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + match self { + Self::IsNotDefined => { + let empty_tag = xml.create_dav_element("is-not-defined"); + xml.q.write_event_async(Event::Empty(empty_tag)).await + }, + Self::Match(tm) => tm.qwrite(xml).await, + } + } +} + +impl QWrite for TimeZone { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let mut start = xml.create_cal_element("timezone"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Text(BytesText::new(self.0.as_str()))).await?; + xml.q.write_event_async(Event::End(end)).await + } +} + +impl QWrite for Filter { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let mut start = xml.create_cal_element("filter"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + self.0.qwrite(xml).await?; + xml.q.write_event_async(Event::End(end)).await + } +} + +impl QWrite for TimeRange { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let mut empty = xml.create_cal_element("time-range"); + match self { + Self::OnlyStart(start) => empty.push_attribute(("start", format!("{}", start.format(ICAL_DATETIME_FMT)).as_str())), + Self::OnlyEnd(end) => empty.push_attribute(("end", format!("{}", end.format(ICAL_DATETIME_FMT)).as_str())), + Self::FullRange(start, end) => { + empty.push_attribute(("start", format!("{}", start.format(ICAL_DATETIME_FMT)).as_str())); + empty.push_attribute(("end", format!("{}", end.format(ICAL_DATETIME_FMT)).as_str())); + } + } + xml.q.write_event_async(Event::Empty(empty)).await + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::types as dav; + use crate::realization::Calendar; + use tokio::io::AsyncWriteExt; + use chrono::{Utc,TimeZone,DateTime}; + + async fn serialize(elem: &impl QWrite) -> String { + let mut buffer = Vec::new(); + let mut tokio_buffer = tokio::io::BufWriter::new(&mut buffer); + let q = quick_xml::writer::Writer::new_with_indent(&mut tokio_buffer, b' ', 4); + let ns_to_apply = vec![ + ("xmlns:D".into(), "DAV:".into()), + ("xmlns:C".into(), "urn:ietf:params:xml:ns:caldav".into()), + ]; + let mut writer = Writer { q, ns_to_apply }; + + elem.qwrite(&mut writer).await.expect("xml serialization"); + tokio_buffer.flush().await.expect("tokio buffer flush"); + let got = std::str::from_utf8(buffer.as_slice()).unwrap(); + + return got.into() + } + + #[tokio::test] + async fn basic_violation() { + let got = serialize( + &dav::Error::(vec![ + dav::Violation::Extension(Violation::ResourceMustBeNull), + ]) + ).await; + + let expected = r#" + +"#; + + assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + } + + #[tokio::test] + async fn rfc_calendar_query1_req() { + let got = serialize( + &CalendarQuery:: { + selector: Some(CalendarSelector::Prop(dav::PropName(vec![ + dav::PropertyRequest::GetEtag, + dav::PropertyRequest::Extension(PropertyRequest::CalendarData(CalendarDataRequest { + mime: None, + comp: Some(Comp { + name: Component::VCalendar, + additional_rules: Some(CompInner { + prop_kind: PropKind::Prop(vec![ + CalProp { + name: ComponentProperty("VERSION".into()), + novalue: None, + } + ]), + comp_kind: CompKind::Comp(vec![ + Comp { + name: Component::VEvent, + additional_rules: Some(CompInner { + 
prop_kind: PropKind::Prop(vec![ + CalProp { name: ComponentProperty("SUMMARY".into()), novalue: None }, + CalProp { name: ComponentProperty("UID".into()), novalue: None }, + CalProp { name: ComponentProperty("DTSTART".into()), novalue: None }, + CalProp { name: ComponentProperty("DTEND".into()), novalue: None }, + CalProp { name: ComponentProperty("DURATION".into()), novalue: None }, + CalProp { name: ComponentProperty("RRULE".into()), novalue: None }, + CalProp { name: ComponentProperty("RDATE".into()), novalue: None }, + CalProp { name: ComponentProperty("EXRULE".into()), novalue: None }, + CalProp { name: ComponentProperty("EXDATE".into()), novalue: None }, + CalProp { name: ComponentProperty("RECURRENCE-ID".into()), novalue: None }, + ]), + comp_kind: CompKind::Comp(vec![]), + }), + }, + Comp { + name: Component::VTimeZone, + additional_rules: None, + } + ]), + }), + }), + recurrence: None, + limit_freebusy_set: None, + })), + ]))), + filter: Filter(CompFilter { + name: Component::VCalendar, + additional_rules: Some(CompFilterRules::Matches(CompFilterMatch { + time_range: None, + prop_filter: vec![], + comp_filter: vec![ + CompFilter { + name: Component::VEvent, + additional_rules: Some(CompFilterRules::Matches(CompFilterMatch { + time_range: Some(TimeRange::FullRange( + Utc.with_ymd_and_hms(2006,1,4,0,0,0).unwrap(), + Utc.with_ymd_and_hms(2006,1,5,0,0,0).unwrap(), + )), + prop_filter: vec![], + comp_filter: vec![], + })), + }, + ], + })), + }), + timezone: None, + } + ).await; + + let expected = r#" + + + + + + + + + + + + + + + + + + + + + + + + + + + + +"#; + + assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + } + + #[tokio::test] + async fn rfc_calendar_query1_res() { + let got = serialize( + &dav::Multistatus::> { + responses: vec![ + dav::Response { + status_or_propstat: dav::StatusOrPropstat::PropStat( + dav::Href("http://cal.example.com/bernard/work/abcd2.ics".into()), + vec![dav::PropStat { + prop: dav::PropValue(vec![ + dav::Property::GetEtag("\"fffff-abcd2\"".into()), + dav::Property::Extension(Property::CalendarData(CalendarDataPayload { + mime: None, + payload: "PLACEHOLDER".into() + })), + ]), + status: dav::Status(http::status::StatusCode::OK), + error: None, + responsedescription: None, + }] + ), + location: None, + error: None, + responsedescription: None, + }, + dav::Response { + status_or_propstat: dav::StatusOrPropstat::PropStat( + dav::Href("http://cal.example.com/bernard/work/abcd3.ics".into()), + vec![dav::PropStat { + prop: dav::PropValue(vec![ + dav::Property::GetEtag("\"fffff-abcd3\"".into()), + dav::Property::Extension(Property::CalendarData(CalendarDataPayload{ + mime: None, + payload: "PLACEHOLDER".into(), + })), + ]), + status: dav::Status(http::status::StatusCode::OK), + error: None, + responsedescription: None, + }] + ), + location: None, + error: None, + responsedescription: None, + }, + ], + responsedescription: None, + }, + ).await; + + let expected = r#" + + http://cal.example.com/bernard/work/abcd2.ics + + + "fffff-abcd2" + PLACEHOLDER + + HTTP/1.1 200 OK + + + + http://cal.example.com/bernard/work/abcd3.ics + + + "fffff-abcd3" + PLACEHOLDER + + HTTP/1.1 200 OK + + +"#; + + + assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + } +} diff --git a/aero-dav/src/caltypes.rs b/aero-dav/src/caltypes.rs new file mode 100644 index 0000000..9b9091e --- /dev/null +++ b/aero-dav/src/caltypes.rs @@ -0,0 +1,1453 @@ +#![allow(dead_code)] + +use chrono::{DateTime,Utc}; +use super::types as dav; +use super::xml; + 
+//@FIXME ACL (rfc3744) is missing, required +//@FIXME Versioning (rfc3253) is missing, required +//@FIXME WebDAV sync (rfc6578) is missing, optional +// For reference, SabreDAV guide gives high-level & real-world overview: +// https://sabre.io/dav/building-a-caldav-client/ +// For reference, non-official extensions documented by SabreDAV: +// https://github.com/apple/ccs-calendarserver/tree/master/doc/Extensions + + +// ----- Root elements ----- + +// --- (MKCALENDAR PART) --- + +/// If a request body is included, it MUST be a CALDAV:mkcalendar XML +/// element. Instruction processing MUST occur in the order +/// instructions are received (i.e., from top to bottom). +/// Instructions MUST either all be executed or none executed. Thus, +/// if any error occurs during processing, all executed instructions +/// MUST be undone and a proper error result returned. Instruction +/// processing details can be found in the definition of the DAV:set +/// instruction in Section 12.13.2 of [RFC2518]. +/// +/// ```xmlschema +/// +/// ``` +#[derive(Debug, PartialEq)] +pub struct MkCalendar(pub dav::Set); + + +/// If a response body for a successful request is included, it MUST +/// be a CALDAV:mkcalendar-response XML element. +/// +/// +/// +/// ---- +/// +/// ANY is not satisfying, so looking at RFC5689 +/// https://www.rfc-editor.org/rfc/rfc5689.html#section-5.2 +/// +/// Definition: +/// +/// +#[derive(Debug, PartialEq)] +pub struct MkCalendarResponse>(pub Vec>); + +// --- (REPORT PART) --- + +/// Name: calendar-query +/// +/// Namespace: urn:ietf:params:xml:ns:caldav +/// +/// Purpose: Defines a report for querying calendar object resources. +/// +/// Description: See Section 7.8. +/// +/// Definition: +/// +/// +#[derive(Debug, PartialEq)] +pub struct CalendarQuery { + pub selector: Option>, + pub filter: Filter, + pub timezone: Option, +} + +/// Name: calendar-multiget +/// +/// Namespace: urn:ietf:params:xml:ns:caldav +/// +/// Purpose: CalDAV report used to retrieve specific calendar object +/// resources. +/// +/// Description: See Section 7.9. +/// +/// Definition: +/// +/// +#[derive(Debug, PartialEq)] +pub struct CalendarMultiget { + pub selector: Option>, + pub href: Vec, +} + +/// Name: free-busy-query +/// +/// Namespace: urn:ietf:params:xml:ns:caldav +/// +/// Purpose: CalDAV report used to generate a VFREEBUSY to determine +/// busy time over a specific time range. +/// +/// Description: See Section 7.10. +/// +/// Definition: +/// +#[derive(Debug, PartialEq)] +pub struct FreeBusyQuery(pub TimeRange); + +// ----- Hooks ----- +#[derive(Debug, PartialEq)] +pub enum ResourceType { + Calendar, +} + +/// Check the matching Property object for documentation +#[derive(Debug, PartialEq)] +pub enum PropertyRequest { + CalendarDescription, + CalendarTimezone, + SupportedCalendarComponentSet, + SupportedCalendarData, + MaxResourceSize, + MinDateTime, + MaxDateTime, + MaxInstances, + MaxAttendeesPerInstance, + SupportedCollationSet, + CalendarData(CalendarDataRequest), +} + +#[derive(Debug, PartialEq)] +pub enum Property { + /// Name: calendar-description + /// + /// Namespace: urn:ietf:params:xml:ns:caldav + /// + /// Purpose: Provides a human-readable description of the calendar + /// collection. + /// + /// Conformance: This property MAY be defined on any calendar + /// collection. If defined, it MAY be protected and SHOULD NOT be + /// returned by a PROPFIND DAV:allprop request (as defined in Section + /// 12.14.1 of [RFC2518]). 
An xml:lang attribute indicating the human + /// language of the description SHOULD be set for this property by + /// clients or through server provisioning. Servers MUST return any + /// xml:lang attribute if set for the property. + /// + /// Description: If present, the property contains a description of the + /// calendar collection that is suitable for presentation to a user. + /// If not present, the client should assume no description for the + /// calendar collection. + /// + /// Definition: + /// + /// + /// PCDATA value: string + /// + /// Example: + /// + /// Calendrier de Mathilde Desruisseaux + CalendarDescription { + lang: Option, + text: String, + }, + + /// 5.2.2. CALDAV:calendar-timezone Property + /// + /// Name: calendar-timezone + /// + /// Namespace: urn:ietf:params:xml:ns:caldav + /// + /// Purpose: Specifies a time zone on a calendar collection. + /// + /// Conformance: This property SHOULD be defined on all calendar + /// collections. If defined, it SHOULD NOT be returned by a PROPFIND + /// DAV:allprop request (as defined in Section 12.14.1 of [RFC2518]). + /// + /// Description: The CALDAV:calendar-timezone property is used to + /// specify the time zone the server should rely on to resolve "date" + /// values and "date with local time" values (i.e., floating time) to + /// "date with UTC time" values. The server will require this + /// information to determine if a calendar component scheduled with + /// "date" values or "date with local time" values overlaps a CALDAV: + /// time-range specified in a CALDAV:calendar-query REPORT. The + /// server will also require this information to compute the proper + /// FREEBUSY time period as "date with UTC time" in the VFREEBUSY + /// component returned in a response to a CALDAV:free-busy-query + /// REPORT request that takes into account calendar components + /// scheduled with "date" values or "date with local time" values. In + /// the absence of this property, the server MAY rely on the time zone + /// of their choice. + /// + /// Note: The iCalendar data embedded within the CALDAV:calendar- + /// timezone XML element MUST follow the standard XML character data + /// encoding rules, including use of <, >, & etc. entity + /// encoding or the use of a construct. In the + /// later case, the iCalendar data cannot contain the character + /// sequence "]]>", which is the end delimiter for the CDATA section. + /// + /// Definition: + /// + /// ```xmlschema + /// + /// PCDATA value: an iCalendar object with exactly one VTIMEZONE component. + /// ``` + /// + /// Example: + /// + /// ```xmlschema + /// BEGIN:VCALENDAR + /// PRODID:-//Example Corp.//CalDAV Client//EN + /// VERSION:2.0 + /// BEGIN:VTIMEZONE + /// TZID:US-Eastern + /// LAST-MODIFIED:19870101T000000Z + /// BEGIN:STANDARD + /// DTSTART:19671029T020000 + /// RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10 + /// TZOFFSETFROM:-0400 + /// TZOFFSETTO:-0500 + /// TZNAME:Eastern Standard Time (US & Canada) + /// END:STANDARD + /// BEGIN:DAYLIGHT + /// DTSTART:19870405T020000 + /// RRULE:FREQ=YEARLY;BYDAY=1SU;BYMONTH=4 + /// TZOFFSETFROM:-0500 + /// TZOFFSETTO:-0400 + /// TZNAME:Eastern Daylight Time (US & Canada) + /// END:DAYLIGHT + /// END:VTIMEZONE + /// END:VCALENDAR + /// + /// ``` + //@FIXME we might want to put a buffer here or an iCal parsed object + CalendarTimezone(String), + + /// Name: supported-calendar-component-set + /// + /// Namespace: urn:ietf:params:xml:ns:caldav + /// + /// Purpose: Specifies the calendar component types (e.g., VEVENT, + /// VTODO, etc.) 
that calendar object resources can contain in the + /// calendar collection. + /// + /// Conformance: This property MAY be defined on any calendar + /// collection. If defined, it MUST be protected and SHOULD NOT be + /// returned by a PROPFIND DAV:allprop request (as defined in Section + /// 12.14.1 of [RFC2518]). + /// + /// Description: The CALDAV:supported-calendar-component-set property is + /// used to specify restrictions on the calendar component types that + /// calendar object resources may contain in a calendar collection. + /// Any attempt by the client to store calendar object resources with + /// component types not listed in this property, if it exists, MUST + /// result in an error, with the CALDAV:supported-calendar-component + /// precondition (Section 5.3.2.1) being violated. Since this + /// property is protected, it cannot be changed by clients using a + /// PROPPATCH request. However, clients can initialize the value of + /// this property when creating a new calendar collection with + /// MKCALENDAR. The empty-element tag MUST + /// only be specified if support for calendar object resources that + /// only contain VTIMEZONE components is provided or desired. Support + /// for VTIMEZONE components in calendar object resources that contain + /// VEVENT or VTODO components is always assumed. In the absence of + /// this property, the server MUST accept all component types, and the + /// client can assume that all component types are accepted. + /// + /// Definition: + /// + /// + /// + /// Example: + /// + /// + /// + /// + /// + SupportedCalendarComponentSet(Vec), + + /// Name: supported-calendar-data + /// + /// Namespace: urn:ietf:params:xml:ns:caldav + /// + /// Purpose: Specifies what media types are allowed for calendar object + /// resources in a calendar collection. + /// + /// Conformance: This property MAY be defined on any calendar + /// collection. If defined, it MUST be protected and SHOULD NOT be + /// returned by a PROPFIND DAV:allprop request (as defined in Section + /// 12.14.1 of [RFC2518]). + /// + /// Description: The CALDAV:supported-calendar-data property is used to + /// specify the media type supported for the calendar object resources + /// contained in a given calendar collection (e.g., iCalendar version + /// 2.0). Any attempt by the client to store calendar object + /// resources with a media type not listed in this property MUST + /// result in an error, with the CALDAV:supported-calendar-data + /// precondition (Section 5.3.2.1) being violated. In the absence of + /// this property, the server MUST only accept data with the media + /// type "text/calendar" and iCalendar version 2.0, and clients can + /// assume that the server will only accept this data. + /// + /// Definition: + /// + /// + /// + /// Example: + /// + /// + /// + /// + /// + /// ----- + /// + /// + /// + /// when nested in the CALDAV:supported-calendar-data property + /// to specify a supported media type for calendar object + /// resources; + SupportedCalendarData(Vec), + + /// Name: max-resource-size + /// + /// Namespace: urn:ietf:params:xml:ns:caldav + /// + /// Purpose: Provides a numeric value indicating the maximum size of a + /// resource in octets that the server is willing to accept when a + /// calendar object resource is stored in a calendar collection. + /// + /// Conformance: This property MAY be defined on any calendar + /// collection. 
If defined, it MUST be protected and SHOULD NOT be + /// returned by a PROPFIND DAV:allprop request (as defined in Section + /// 12.14.1 of [RFC2518]). + /// + /// Description: The CALDAV:max-resource-size is used to specify a + /// numeric value that represents the maximum size in octets that the + /// server is willing to accept when a calendar object resource is + /// stored in a calendar collection. Any attempt to store a calendar + /// object resource exceeding this size MUST result in an error, with + /// the CALDAV:max-resource-size precondition (Section 5.3.2.1) being + /// violated. In the absence of this property, the client can assume + /// that the server will allow storing a resource of any reasonable + /// size. + /// + /// Definition: + /// + /// + /// PCDATA value: a numeric value (positive integer) + /// + /// Example: + /// + /// + /// 102400 + /// + MaxResourceSize(u64), + + /// CALDAV:min-date-time Property + /// + /// Name: min-date-time + /// + /// Namespace: urn:ietf:params:xml:ns:caldav + /// + /// Purpose: Provides a DATE-TIME value indicating the earliest date and + /// time (in UTC) that the server is willing to accept for any DATE or + /// DATE-TIME value in a calendar object resource stored in a calendar + /// collection. + /// + /// Conformance: This property MAY be defined on any calendar + /// collection. If defined, it MUST be protected and SHOULD NOT be + /// returned by a PROPFIND DAV:allprop request (as defined in Section + /// 12.14.1 of [RFC2518]). + /// + /// Description: The CALDAV:min-date-time is used to specify an + /// iCalendar DATE-TIME value in UTC that indicates the earliest + /// inclusive date that the server is willing to accept for any + /// explicit DATE or DATE-TIME value in a calendar object resource + /// stored in a calendar collection. Any attempt to store a calendar + /// object resource using a DATE or DATE-TIME value earlier than this + /// value MUST result in an error, with the CALDAV:min-date-time + /// precondition (Section 5.3.2.1) being violated. Note that servers + /// MUST accept recurring components that specify instances beyond + /// this limit, provided none of those instances have been overridden. + /// In that case, the server MAY simply ignore those instances outside + /// of the acceptable range when processing reports on the calendar + /// object resource. In the absence of this property, the client can + /// assume any valid iCalendar date may be used at least up to the + /// CALDAV:max-date-time value, if that is defined. + /// + /// Definition: + /// + /// + /// PCDATA value: an iCalendar format DATE-TIME value in UTC + /// + /// Example: + /// + /// + /// 19000101T000000Z + /// + MinDateTime(DateTime), + + /// CALDAV:max-date-time Property + /// + /// Name: max-date-time + /// + /// Namespace: urn:ietf:params:xml:ns:caldav + /// + /// Purpose: Provides a DATE-TIME value indicating the latest date and + /// time (in UTC) that the server is willing to accept for any DATE or + /// DATE-TIME value in a calendar object resource stored in a calendar + /// collection. + /// + /// Conformance: This property MAY be defined on any calendar + /// collection. If defined, it MUST be protected and SHOULD NOT be + /// returned by a PROPFIND DAV:allprop request (as defined in Section + /// 12.14.1 of [RFC2518]). 
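+    ///
+    /// As a hedged illustration (not part of the RFC text), the iCalendar
+    /// "date with UTC time" form used by this property can be parsed with
+    /// chrono, assuming the MinDateTime/MaxDateTime variants carry a chrono
+    /// DateTime in UTC:
+    ///
+    /// ```rust
+    /// use chrono::{NaiveDateTime, TimeZone, Utc};
+    ///
+    /// // "20491231T235959Z" is the example value quoted below.
+    /// let naive = NaiveDateTime::parse_from_str("20491231T235959Z", "%Y%m%dT%H%M%SZ")
+    ///     .expect("valid iCalendar UTC date-time");
+    /// let max = Utc.from_utc_datetime(&naive); // value to store in MaxDateTime(max)
+    /// ```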
+ /// + /// Description: The CALDAV:max-date-time is used to specify an + /// iCalendar DATE-TIME value in UTC that indicates the inclusive + /// latest date that the server is willing to accept for any date or + /// time value in a calendar object resource stored in a calendar + /// collection. Any attempt to store a calendar object resource using + /// a DATE or DATE-TIME value later than this value MUST result in an + /// error, with the CALDAV:max-date-time precondition + /// (Section 5.3.2.1) being violated. Note that servers MUST accept + /// recurring components that specify instances beyond this limit, + /// provided none of those instances have been overridden. In that + /// case, the server MAY simply ignore those instances outside of the + /// acceptable range when processing reports on the calendar object + /// resource. In the absence of this property, the client can assume + /// any valid iCalendar date may be used at least down to the CALDAV: + /// min-date-time value, if that is defined. + /// + /// Definition: + /// + /// + /// PCDATA value: an iCalendar format DATE-TIME value in UTC + /// + /// Example: + /// + /// + /// 20491231T235959Z + /// + MaxDateTime(DateTime), + + /// CALDAV:max-instances Property + /// + /// Name: max-instances + /// + /// Namespace: urn:ietf:params:xml:ns:caldav + /// + /// Purpose: Provides a numeric value indicating the maximum number of + /// recurrence instances that a calendar object resource stored in a + /// calendar collection can generate. + /// + /// Conformance: This property MAY be defined on any calendar + /// collection. If defined, it MUST be protected and SHOULD NOT be + /// returned by a PROPFIND DAV:allprop request (as defined in Section + /// 12.14.1 of [RFC2518]). + /// + /// Description: The CALDAV:max-instances is used to specify a numeric + /// value that indicates the maximum number of recurrence instances + /// that a calendar object resource stored in a calendar collection + /// can generate. Any attempt to store a calendar object resource + /// with a recurrence pattern that generates more instances than this + /// value MUST result in an error, with the CALDAV:max-instances + /// precondition (Section 5.3.2.1) being violated. In the absence of + /// this property, the client can assume that the server has no limits + /// on the number of recurrence instances it can handle or expand. + /// + /// Definition: + /// + /// + /// PCDATA value: a numeric value (integer greater than zero) + /// + /// Example: + /// + /// + /// 100 + /// + MaxInstances(u64), + + /// CALDAV:max-attendees-per-instance Property + /// + /// Name: max-attendees-per-instance + /// + /// Namespace: urn:ietf:params:xml:ns:caldav + /// + /// Purpose: Provides a numeric value indicating the maximum number of + /// ATTENDEE properties in any instance of a calendar object resource + /// stored in a calendar collection. + /// + /// Conformance: This property MAY be defined on any calendar + /// collection. If defined, it MUST be protected and SHOULD NOT be + /// returned by a PROPFIND DAV:allprop request (as defined in Section + /// 12.14.1 of [RFC2518]). + /// + /// Description: The CALDAV:max-attendees-per-instance is used to + /// specify a numeric value that indicates the maximum number of + /// iCalendar ATTENDEE properties on any one instance of a calendar + /// object resource stored in a calendar collection. 
Any attempt to + /// store a calendar object resource with more ATTENDEE properties per + /// instance than this value MUST result in an error, with the CALDAV: + /// max-attendees-per-instance precondition (Section 5.3.2.1) being + /// violated. In the absence of this property, the client can assume + /// that the server can handle any number of ATTENDEE properties in a + /// calendar component. + /// + /// Definition: + /// + /// + /// PCDATA value: a numeric value (integer greater than zero) + /// + /// Example: + /// + /// + /// 25 + /// + MaxAttendeesPerInstance(u64), + + /// Name: supported-collation-set + /// + /// Namespace: urn:ietf:params:xml:ns:caldav + /// + /// Purpose: Identifies the set of collations supported by the server + /// for text matching operations. + /// + /// Conformance: This property MUST be defined on any resource that + /// supports a report that does text matching. If defined, it MUST be + /// protected and SHOULD NOT be returned by a PROPFIND DAV:allprop + /// request (as defined in Section 12.14.1 of [RFC2518]). + /// + /// Description: The CALDAV:supported-collation-set property contains + /// zero or more CALDAV:supported-collation elements, which specify + /// the collection identifiers of the collations supported by the + /// server. + /// + /// Definition: + /// + /// + /// + /// + /// Example: + /// + /// + /// i;ascii-casemap + /// i;octet + /// + SupportedCollationSet(Vec), + + /// Name: calendar-data + /// + /// Namespace: urn:ietf:params:xml:ns:caldav + /// + /// Purpose: Specified one of the following: + /// + /// 1. A supported media type for calendar object resources when + /// nested in the CALDAV:supported-calendar-data property; + /// + /// 2. The parts of a calendar object resource should be returned by + /// a calendaring report; + /// + /// 3. The content of a calendar object resource in a response to a + /// calendaring report. + /// + /// Description: When nested in the CALDAV:supported-calendar-data + /// property, the CALDAV:calendar-data XML element specifies a media + /// type supported by the CalDAV server for calendar object resources. + /// + /// When used in a calendaring REPORT request, the CALDAV:calendar- + /// data XML element specifies which parts of calendar object + /// resources need to be returned in the response. If the CALDAV: + /// calendar-data XML element doesn't contain any CALDAV:comp element, + /// calendar object resources will be returned in their entirety. + /// + /// Finally, when used in a calendaring REPORT response, the CALDAV: + /// calendar-data XML element specifies the content of a calendar + /// object resource. Given that XML parsers normalize the two- + /// character sequence CRLF (US-ASCII decimal 13 and US-ASCII decimal + /// 10) to a single LF character (US-ASCII decimal 10), the CR + /// character (US-ASCII decimal 13) MAY be omitted in calendar object + /// resources specified in the CALDAV:calendar-data XML element. + /// Furthermore, calendar object resources specified in the CALDAV: + /// calendar-data XML element MAY be invalid per their media type + /// specification if the CALDAV:calendar-data XML element part of the + /// calendaring REPORT request did not specify required properties + /// (e.g., UID, DTSTAMP, etc.), or specified a CALDAV:prop XML element + /// with the "novalue" attribute set to "yes". + /// + /// Note: The CALDAV:calendar-data XML element is specified in requests + /// and responses inside the DAV:prop XML element as if it were a + /// WebDAV property. 
However, the CALDAV:calendar-data XML element is + /// not a WebDAV property and, as such, is not returned in PROPFIND + /// responses, nor used in PROPPATCH requests. + /// + /// Note: The iCalendar data embedded within the CALDAV:calendar-data + /// XML element MUST follow the standard XML character data encoding + /// rules, including use of <, >, & etc. entity encoding or + /// the use of a construct. In the later case, the + /// iCalendar data cannot contain the character sequence "]]>", which + /// is the end delimiter for the CDATA section. + CalendarData(CalendarDataPayload), +} + +#[derive(Debug, PartialEq)] +pub enum Violation { + /// (DAV:resource-must-be-null): A resource MUST NOT exist at the + /// Request-URI; + ResourceMustBeNull, + + /// (CALDAV:calendar-collection-location-ok): The Request-URI MUST + /// identify a location where a calendar collection can be created; + CalendarCollectionLocationOk, + + /// (CALDAV:valid-calendar-data): The time zone specified in CALDAV: + /// calendar-timezone property MUST be a valid iCalendar object + /// containing a single valid VTIMEZONE component. + ValidCalendarData, + + ///@FIXME should not be here but in RFC3744 + /// !!! ERRATA 1002 !!! + /// (DAV:need-privileges): The DAV:bind privilege MUST be granted to + /// the current user on the parent collection of the Request-URI. + NeedPrivileges, + + /// (CALDAV:initialize-calendar-collection): A new calendar collection + /// exists at the Request-URI. The DAV:resourcetype of the calendar + /// collection MUST contain both DAV:collection and CALDAV:calendar + /// XML elements. + InitializeCalendarCollection, + + /// (CALDAV:supported-calendar-data): The resource submitted in the + /// PUT request, or targeted by a COPY or MOVE request, MUST be a + /// supported media type (i.e., iCalendar) for calendar object + /// resources; + SupportedCalendarData, + + /// (CALDAV:valid-calendar-object-resource): The resource submitted in + /// the PUT request, or targeted by a COPY or MOVE request, MUST obey + /// all restrictions specified in Section 4.1 (e.g., calendar object + /// resources MUST NOT contain more than one type of calendar + /// component, calendar object resources MUST NOT specify the + /// iCalendar METHOD property, etc.); + ValidCalendarObjectResource, + + /// (CALDAV:supported-calendar-component): The resource submitted in + /// the PUT request, or targeted by a COPY or MOVE request, MUST + /// contain a type of calendar component that is supported in the + /// targeted calendar collection; + SupportedCalendarComponent, + + /// (CALDAV:no-uid-conflict): The resource submitted in the PUT + /// request, or targeted by a COPY or MOVE request, MUST NOT specify + /// an iCalendar UID property value already in use in the targeted + /// calendar collection or overwrite an existing calendar object + /// resource with one that has a different UID property value. 
+ /// Servers SHOULD report the URL of the resource that is already + /// making use of the same UID property value in the DAV:href element; + /// + /// + NoUidConflict(dav::Href), + + /// (CALDAV:max-resource-size): The resource submitted in the PUT + /// request, or targeted by a COPY or MOVE request, MUST have an octet + /// size less than or equal to the value of the CALDAV:max-resource- + /// size property value (Section 5.2.5) on the calendar collection + /// where the resource will be stored; + MaxResourceSize, + + /// (CALDAV:min-date-time): The resource submitted in the PUT request, + /// or targeted by a COPY or MOVE request, MUST have all of its + /// iCalendar DATE or DATE-TIME property values (for each recurring + /// instance) greater than or equal to the value of the CALDAV:min- + /// date-time property value (Section 5.2.6) on the calendar + /// collection where the resource will be stored; + MinDateTime, + + /// (CALDAV:max-date-time): The resource submitted in the PUT request, + /// or targeted by a COPY or MOVE request, MUST have all of its + /// iCalendar DATE or DATE-TIME property values (for each recurring + /// instance) less than the value of the CALDAV:max-date-time property + /// value (Section 5.2.7) on the calendar collection where the + /// resource will be stored; + MaxDateTime, + + /// (CALDAV:max-instances): The resource submitted in the PUT request, + /// or targeted by a COPY or MOVE request, MUST generate a number of + /// recurring instances less than or equal to the value of the CALDAV: + /// max-instances property value (Section 5.2.8) on the calendar + /// collection where the resource will be stored; + MaxInstances, + + /// (CALDAV:max-attendees-per-instance): The resource submitted in the + /// PUT request, or targeted by a COPY or MOVE request, MUST have a + /// number of ATTENDEE properties on any one instance less than or + /// equal to the value of the CALDAV:max-attendees-per-instance + /// property value (Section 5.2.9) on the calendar collection where + /// the resource will be stored; + MaxAttendeesPerInstance, + + /// (CALDAV:valid-filter): The CALDAV:filter XML element (see + /// Section 9.7) specified in the REPORT request MUST be valid. For + /// instance, a CALDAV:filter cannot nest a + /// element in a element, and a CALDAV:filter + /// cannot nest a element in a + /// element. + ValidFilter, + + /// (CALDAV:supported-filter): The CALDAV:comp-filter (see + /// Section 9.7.1), CALDAV:prop-filter (see Section 9.7.2), and + /// CALDAV:param-filter (see Section 9.7.3) XML elements used in the + /// CALDAV:filter XML element (see Section 9.7) in the REPORT request + /// only make reference to components, properties, and parameters for + /// which queries are supported by the server, i.e., if the CALDAV: + /// filter element attempts to reference an unsupported component, + /// property, or parameter, this precondition is violated. Servers + /// SHOULD report the CALDAV:comp-filter, CALDAV:prop-filter, or + /// CALDAV:param-filter for which it does not provide support. + /// + /// + SupportedFilter { + comp: Vec, + prop: Vec, + param: Vec, + }, + + /// (DAV:number-of-matches-within-limits): The number of matching + /// calendar object resources must fall within server-specific, + /// predefined limits. For example, this condition might be triggered + /// if a search specification would cause the return of an extremely + /// large number of responses. 
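+    ///
+    /// A hedged, illustrative sketch of mapping these precondition
+    /// violations to an HTTP status when building an error response.
+    /// The RFCs leave the exact status to the server (403 and 409 are the
+    /// usual choices), so the mapping below is only an example:
+    ///
+    /// ```rust,ignore
+    /// fn status_for(violation: &Violation) -> http::StatusCode {
+    ///     match violation {
+    ///         // A missing DAV:bind privilege is an authorization problem.
+    ///         Violation::NeedPrivileges => http::StatusCode::FORBIDDEN,
+    ///         // The other preconditions describe a conflict with the collection state.
+    ///         _ => http::StatusCode::CONFLICT,
+    ///     }
+    /// }
+    /// ```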
+ NumberOfMatchesWithinLimits, +} + +// -------- Inner XML elements --------- + +/// Some of the reports defined in this section do text matches of +/// character strings provided by the client and are compared to stored +/// calendar data. Since iCalendar data is, by default, encoded in the +/// UTF-8 charset and may include characters outside the US-ASCII charset +/// range in some property and parameter values, there is a need to +/// ensure that text matching follows well-defined rules. +/// +/// To deal with this, this specification makes use of the IANA Collation +/// Registry defined in [RFC4790] to specify collations that may be used +/// to carry out the text comparison operations with a well-defined rule. +/// +/// The comparisons used in CalDAV are all "substring" matches, as per +/// [RFC4790], Section 4.2. Collations supported by the server MUST +/// support "substring" match operations. +/// +/// CalDAV servers are REQUIRED to support the "i;ascii-casemap" and +/// "i;octet" collations, as described in [RFC4790], and MAY support +/// other collations. +/// +/// Servers MUST advertise the set of collations that they support via +/// the CALDAV:supported-collation-set property defined on any resource +/// that supports reports that use collations. +/// +/// Clients MUST only use collations from the list advertised by the +/// server. +/// +/// In the absence of a collation explicitly specified by the client, or +/// if the client specifies the "default" collation identifier (as +/// defined in [RFC4790], Section 3.1), the server MUST default to using +/// "i;ascii-casemap" as the collation. +/// +/// Wildcards (as defined in [RFC4790], Section 3.2) MUST NOT be used in +/// the collation identifier. +/// +/// If the client chooses a collation not supported by the server, the +/// server MUST respond with a CALDAV:supported-collation precondition +/// error response. +#[derive(Debug, PartialEq)] +pub struct SupportedCollation(pub Collation); + +/// +/// PCDATA value: iCalendar object +/// +/// when nested in the DAV:prop XML element in a calendaring +/// REPORT response to specify the content of a returned +/// calendar object resource. +#[derive(Debug, PartialEq)] +pub struct CalendarDataPayload { + pub mime: Option, + pub payload: String, +} + +/// +/// +/// when nested in the DAV:prop XML element in a calendaring +/// REPORT request to specify which parts of calendar object +/// resources should be returned in the response; +#[derive(Debug, PartialEq)] +pub struct CalendarDataRequest { + pub mime: Option, + pub comp: Option, + pub recurrence: Option, + pub limit_freebusy_set: Option, +} + +/// calendar-data specialization for Property +/// +/// +/// +/// when nested in the CALDAV:supported-calendar-data property +/// to specify a supported media type for calendar object +/// resources; +#[derive(Debug, PartialEq)] +pub struct CalendarDataEmpty(pub Option); + +/// +/// content-type value: a MIME media type +/// version value: a version string +/// attributes can be used on all three variants of the +/// CALDAV:calendar-data XML element. +#[derive(Debug, PartialEq)] +pub struct CalendarDataSupport { + pub content_type: String, + pub version: String, +} + +/// Name: comp +/// +/// Namespace: urn:ietf:params:xml:ns:caldav +/// +/// Purpose: Defines which component types to return. +/// +/// Description: The name value is a calendar component name (e.g., +/// VEVENT). 
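+///
+/// As a hedged illustration (not part of the RFC text), a request for only
+/// the SUMMARY and UID properties of VEVENT components maps onto the types
+/// of this module roughly as follows:
+///
+/// ```rust,ignore
+/// let comp = Comp {
+///     name: Component::VEvent,
+///     additional_rules: Some(CompInner {
+///         prop_kind: PropKind::Prop(vec![
+///             CalProp { name: ComponentProperty("SUMMARY".into()), novalue: None },
+///             CalProp { name: ComponentProperty("UID".into()), novalue: None },
+///         ]),
+///         // No nested comp elements are requested here.
+///         comp_kind: CompKind::Comp(vec![]),
+///     }),
+/// };
+/// ```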
+/// +/// Definition: +/// +/// +/// +/// name value: a calendar component name +/// +/// Note: The CALDAV:prop and CALDAV:allprop elements have the same name +/// as the DAV:prop and DAV:allprop elements defined in [RFC2518]. +/// However, the CALDAV:prop and CALDAV:allprop elements are defined +/// in the "urn:ietf:params:xml:ns:caldav" namespace instead of the +/// "DAV:" namespace. +#[derive(Debug, PartialEq)] +pub struct Comp { + pub name: Component, + pub additional_rules: Option, +} + +#[derive(Debug, PartialEq)] +pub struct CompInner { + pub prop_kind: PropKind, + pub comp_kind: CompKind, +} + +/// For SupportedCalendarComponentSet +/// +/// Definition: +/// +/// +/// +/// Example: +/// +/// +/// +/// +/// +#[derive(Debug, PartialEq)] +pub struct CompSupport(pub Component); + +/// Name: allcomp +/// +/// Namespace: urn:ietf:params:xml:ns:caldav +/// +/// Purpose: Specifies that all components shall be returned. +/// +/// Description: The CALDAV:allcomp XML element can be used when the +/// client wants all types of components returned by a calendaring +/// REPORT request. +/// +/// Definition: +/// +/// +#[derive(Debug, PartialEq)] +pub enum CompKind { + AllComp, + Comp(Vec), +} + +/// Name: allprop +/// +/// Namespace: urn:ietf:params:xml:ns:caldav +/// +/// Purpose: Specifies that all properties shall be returned. +/// +/// Description: The CALDAV:allprop XML element can be used when the +/// client wants all properties of components returned by a +/// calendaring REPORT request. +/// +/// Definition: +/// +/// +/// +/// Note: The CALDAV:allprop element has the same name as the DAV: +/// allprop element defined in [RFC2518]. However, the CALDAV:allprop +/// element is defined in the "urn:ietf:params:xml:ns:caldav" +/// namespace instead of the "DAV:" namespace. +#[derive(Debug, PartialEq)] +pub enum PropKind { + AllProp, + Prop(Vec), +} + +/// Name: prop +/// +/// Namespace: urn:ietf:params:xml:ns:caldav +/// +/// Purpose: Defines which properties to return in the response. +/// +/// Description: The "name" attribute specifies the name of the calendar +/// property to return (e.g., ATTENDEE). The "novalue" attribute can +/// be used by clients to request that the actual value of the +/// property not be returned (if the "novalue" attribute is set to +/// "yes"). In that case, the server will return just the iCalendar +/// property name and any iCalendar parameters and a trailing ":" +/// without the subsequent value data. +/// +/// Definition: +/// +/// +/// name value: a calendar property name +/// novalue value: "yes" or "no" +/// +/// Note: The CALDAV:prop element has the same name as the DAV:prop +/// element defined in [RFC2518]. However, the CALDAV:prop element is +/// defined in the "urn:ietf:params:xml:ns:caldav" namespace instead +/// of the "DAV:" namespace. +#[derive(Debug, PartialEq)] +pub struct CalProp { + pub name: ComponentProperty, + pub novalue: Option, +} + +#[derive(Debug, PartialEq)] +pub enum RecurrenceModifier { + Expand(Expand), + LimitRecurrenceSet(LimitRecurrenceSet), +} + +/// Name: expand +/// +/// Namespace: urn:ietf:params:xml:ns:caldav +/// +/// Purpose: Forces the server to expand recurring components into +/// individual recurrence instances. 
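+///
+/// As a hedged sketch (assuming the tuple fields are chrono DateTime values
+/// in UTC, as suggested by the "date with UTC time" wording below), asking
+/// the server to expand recurrences over two days could look like:
+///
+/// ```rust,ignore
+/// use chrono::{TimeZone, Utc};
+///
+/// let expand = Expand(
+///     Utc.with_ymd_and_hms(2006, 1, 2, 0, 0, 0).unwrap(),
+///     Utc.with_ymd_and_hms(2006, 1, 4, 0, 0, 0).unwrap(),
+/// );
+/// // Plugged into a calendar-data request through the RecurrenceModifier enum:
+/// let recurrence = Some(RecurrenceModifier::Expand(expand));
+/// ```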
+/// +/// Description: The CALDAV:expand XML element specifies that for a +/// given calendaring REPORT request, the server MUST expand the +/// recurrence set into calendar components that define exactly one +/// recurrence instance, and MUST return only those whose scheduled +/// time intersect a specified time range. +/// +/// The "start" attribute specifies the inclusive start of the time +/// range, and the "end" attribute specifies the non-inclusive end of +/// the time range. Both attributes are specified as date with UTC +/// time value. The value of the "end" attribute MUST be greater than +/// the value of the "start" attribute. +/// +/// The server MUST use the same logic as defined for CALDAV:time- +/// range to determine if a recurrence instance intersects the +/// specified time range. +/// +/// Recurring components, other than the initial instance, MUST +/// include a RECURRENCE-ID property indicating which instance they +/// refer to. +/// +/// The returned calendar components MUST NOT use recurrence +/// properties (i.e., EXDATE, EXRULE, RDATE, and RRULE) and MUST NOT +/// have reference to or include VTIMEZONE components. Date and local +/// time with reference to time zone information MUST be converted +/// into date with UTC time. +/// +/// Definition: +/// +/// +/// +/// start value: an iCalendar "date with UTC time" +/// end value: an iCalendar "date with UTC time" +#[derive(Debug, PartialEq)] +pub struct Expand(pub DateTime, pub DateTime); + +/// CALDAV:limit-recurrence-set XML Element +/// +/// Name: limit-recurrence-set +/// +/// Namespace: urn:ietf:params:xml:ns:caldav +/// +/// Purpose: Specifies a time range to limit the set of "overridden +/// components" returned by the server. +/// +/// Description: The CALDAV:limit-recurrence-set XML element specifies +/// that for a given calendaring REPORT request, the server MUST +/// return, in addition to the "master component", only the +/// "overridden components" that impact a specified time range. An +/// overridden component impacts a time range if its current start and +/// end times overlap the time range, or if the original start and end +/// times -- the ones that would have been used if the instance were +/// not overridden -- overlap the time range. +/// +/// The "start" attribute specifies the inclusive start of the time +/// range, and the "end" attribute specifies the non-inclusive end of +/// the time range. Both attributes are specified as date with UTC +/// time value. The value of the "end" attribute MUST be greater than +/// the value of the "start" attribute. +/// +/// The server MUST use the same logic as defined for CALDAV:time- +/// range to determine if the current or original scheduled time of an +/// "overridden" recurrence instance intersects the specified time +/// range. +/// +/// Overridden components that have a RANGE parameter on their +/// RECURRENCE-ID property may specify one or more instances in the +/// recurrence set, and some of those instances may fall within the +/// specified time range or may have originally fallen within the +/// specified time range prior to being overridden. If that is the +/// case, the overridden component MUST be included in the results, as +/// it has a direct impact on the interpretation of instances within +/// the specified time range. 
+/// +/// Definition: +/// +/// +/// +/// start value: an iCalendar "date with UTC time" +/// end value: an iCalendar "date with UTC time" +#[derive(Debug, PartialEq)] +pub struct LimitRecurrenceSet(pub DateTime, pub DateTime); + +/// Name: limit-freebusy-set +/// +/// Namespace: urn:ietf:params:xml:ns:caldav +/// +/// Purpose: Specifies a time range to limit the set of FREEBUSY values +/// returned by the server. +/// +/// Description: The CALDAV:limit-freebusy-set XML element specifies +/// that for a given calendaring REPORT request, the server MUST only +/// return the FREEBUSY property values of a VFREEBUSY component that +/// intersects a specified time range. +/// +/// The "start" attribute specifies the inclusive start of the time +/// range, and the "end" attribute specifies the non-inclusive end of +/// the time range. Both attributes are specified as "date with UTC +/// time" value. The value of the "end" attribute MUST be greater +/// than the value of the "start" attribute. +/// +/// The server MUST use the same logic as defined for CALDAV:time- +/// range to determine if a FREEBUSY property value intersects the +/// specified time range. +/// +/// Definition: +/// +/// +/// start value: an iCalendar "date with UTC time" +/// end value: an iCalendar "date with UTC time" +#[derive(Debug, PartialEq)] +pub struct LimitFreebusySet(pub DateTime, pub DateTime); + +/// Used by CalendarQuery & CalendarMultiget +#[derive(Debug, PartialEq)] +pub enum CalendarSelector { + AllProp, + PropName, + Prop(dav::PropName), +} + +/// Name: comp-filter +/// +/// Namespace: urn:ietf:params:xml:ns:caldav +/// +/// Purpose: Specifies search criteria on calendar components. +/// +/// Description: The CALDAV:comp-filter XML element specifies a query +/// targeted at the calendar object (i.e., VCALENDAR) or at a specific +/// calendar component type (e.g., VEVENT). The scope of the +/// CALDAV:comp-filter XML element is the calendar object when used as +/// a child of the CALDAV:filter XML element. The scope of the +/// CALDAV:comp-filter XML element is the enclosing calendar component +/// when used as a child of another CALDAV:comp-filter XML element. A +/// CALDAV:comp-filter is said to match if: +/// +/// * The CALDAV:comp-filter XML element is empty and the calendar +/// object or calendar component type specified by the "name" +/// attribute exists in the current scope; +/// +/// or: +/// +/// * The CALDAV:comp-filter XML element contains a CALDAV:is-not- +/// defined XML element and the calendar object or calendar +/// component type specified by the "name" attribute does not exist +/// in the current scope; +/// +/// or: +/// +/// * The CALDAV:comp-filter XML element contains a CALDAV:time-range +/// XML element and at least one recurrence instance in the +/// targeted calendar component is scheduled to overlap the +/// specified time range, and all specified CALDAV:prop-filter and +/// CALDAV:comp-filter child XML elements also match the targeted +/// calendar component; +/// +/// or: +/// +/// * The CALDAV:comp-filter XML element only contains CALDAV:prop- +/// filter and CALDAV:comp-filter child XML elements that all match +/// the targeted calendar component. 
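+///
+/// As a hedged illustration, mirroring the CALDAV:calendar-query example of
+/// RFC 4791 (Section 7.8.1) and assuming the date values are chrono UTC
+/// timestamps, "VEVENT components overlapping a given time range" maps onto
+/// these types roughly as follows:
+///
+/// ```rust,ignore
+/// use chrono::{TimeZone, Utc};
+///
+/// let filter = Filter(CompFilter {
+///     // The outermost comp-filter targets the calendar object itself.
+///     name: Component::VCalendar,
+///     additional_rules: Some(CompFilterRules::Matches(CompFilterMatch {
+///         time_range: None,
+///         prop_filter: vec![],
+///         comp_filter: vec![CompFilter {
+///             name: Component::VEvent,
+///             additional_rules: Some(CompFilterRules::Matches(CompFilterMatch {
+///                 time_range: Some(TimeRange::FullRange(
+///                     Utc.with_ymd_and_hms(2006, 1, 4, 0, 0, 0).unwrap(),
+///                     Utc.with_ymd_and_hms(2006, 1, 5, 0, 0, 0).unwrap(),
+///                 )),
+///                 prop_filter: vec![],
+///                 comp_filter: vec![],
+///             })),
+///         }],
+///     })),
+/// });
+/// ```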
+/// +/// Definition: +/// +/// ```xmlschema +/// +/// +/// +/// name value: a calendar object or calendar component +/// type (e.g., VEVENT) +/// ``` +#[derive(Debug, PartialEq)] +pub struct CompFilter { + pub name: Component, + // Option 1 = None, Option 2, 3, 4 = Some + pub additional_rules: Option, +} +#[derive(Debug, PartialEq)] +pub enum CompFilterRules { + // Option 2 + IsNotDefined, + // Options 3 & 4 + Matches(CompFilterMatch), +} +#[derive(Debug, PartialEq)] +pub struct CompFilterMatch { + pub time_range: Option, + pub prop_filter: Vec, + pub comp_filter: Vec, +} + +/// Name: prop-filter +/// +/// Namespace: urn:ietf:params:xml:ns:caldav +/// +/// Purpose: Specifies search criteria on calendar properties. +/// +/// Description: The CALDAV:prop-filter XML element specifies a query +/// targeted at a specific calendar property (e.g., CATEGORIES) in the +/// scope of the enclosing calendar component. A calendar property is +/// said to match a CALDAV:prop-filter if: +/// +/// * The CALDAV:prop-filter XML element is empty and a property of +/// the type specified by the "name" attribute exists in the +/// enclosing calendar component; +/// +/// or: +/// +/// * The CALDAV:prop-filter XML element contains a CALDAV:is-not- +/// defined XML element and no property of the type specified by +/// the "name" attribute exists in the enclosing calendar +/// component; +/// +/// or: +/// +/// * The CALDAV:prop-filter XML element contains a CALDAV:time-range +/// XML element and the property value overlaps the specified time +/// range, and all specified CALDAV:param-filter child XML elements +/// also match the targeted property; +/// +/// or: +/// +/// * The CALDAV:prop-filter XML element contains a CALDAV:text-match +/// XML element and the property value matches it, and all +/// specified CALDAV:param-filter child XML elements also match the +/// targeted property; +/// +/// Definition: +/// +/// ```xmlschema +/// +/// +/// +/// name value: a calendar property name (e.g., ATTENDEE) +/// ``` +#[derive(Debug, PartialEq)] +pub struct PropFilter { + pub name: Component, + // None = Option 1, Some() = Option 2, 3 & 4 + pub additional_rules: Option, +} +#[derive(Debug, PartialEq)] +pub enum PropFilterRules { + // Option 2 + IsNotDefined, + // Options 3 & 4 + Match(PropFilterMatch), +} +#[derive(Debug, PartialEq)] +pub struct PropFilterMatch { + pub time_range: Option, + pub time_or_text: Option, + pub param_filter: Vec, +} +#[derive(Debug, PartialEq)] +pub enum TimeOrText { + Time(TimeRange), + Text(TextMatch), +} + +/// Name: text-match +/// +/// Namespace: urn:ietf:params:xml:ns:caldav +/// +/// Purpose: Specifies a substring match on a property or parameter +/// value. +/// +/// Description: The CALDAV:text-match XML element specifies text used +/// for a substring match against the property or parameter value +/// specified in a calendaring REPORT request. +/// +/// The "collation" attribute is used to select the collation that the +/// server MUST use for character string matching. In the absence of +/// this attribute, the server MUST use the "i;ascii-casemap" +/// collation. +/// +/// The "negate-condition" attribute is used to indicate that this +/// test returns a match if the text matches when the attribute value +/// is set to "no", or return a match if the text does not match, if +/// the attribute value is set to "yes". For example, this can be +/// used to match components with a STATUS property not set to +/// CANCELLED. 
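+///
+/// As a hedged illustration of that last case, "components whose STATUS is
+/// not CANCELLED" could be expressed with the types of this module as:
+///
+/// ```rust,ignore
+/// let not_cancelled = TextMatch {
+///     collation: None,               // the server then defaults to "i;ascii-casemap"
+///     negate_condition: Some(true),  // invert the match
+///     text: "CANCELLED".into(),
+/// };
+/// let rules = PropFilterRules::Match(PropFilterMatch {
+///     time_range: None,
+///     time_or_text: Some(TimeOrText::Text(not_cancelled)),
+///     param_filter: vec![],
+/// });
+/// ```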
+/// +/// Definition: +/// +/// PCDATA value: string +/// +#[derive(Debug, PartialEq)] +pub struct TextMatch { + pub collation: Option, + pub negate_condition: Option, + pub text: String, +} + +/// Name: param-filter +/// +/// Namespace: urn:ietf:params:xml:ns:caldav +/// +/// Purpose: Limits the search to specific parameter values. +/// +/// Description: The CALDAV:param-filter XML element specifies a query +/// targeted at a specific calendar property parameter (e.g., +/// PARTSTAT) in the scope of the calendar property on which it is +/// defined. A calendar property parameter is said to match a CALDAV: +/// param-filter if: +/// +/// * The CALDAV:param-filter XML element is empty and a parameter of +/// the type specified by the "name" attribute exists on the +/// calendar property being examined; +/// +/// or: +/// +/// * The CALDAV:param-filter XML element contains a CALDAV:is-not- +/// defined XML element and no parameter of the type specified by +/// the "name" attribute exists on the calendar property being +/// examined; +/// +/// Definition: +/// +/// ```xmlschema +/// +/// +/// +/// name value: a property parameter name (e.g., PARTSTAT) +/// ``` +#[derive(Debug, PartialEq)] +pub struct ParamFilter { + pub name: PropertyParameter, + pub additional_rules: Option, +} +#[derive(Debug, PartialEq)] +pub enum ParamFilterMatch { + IsNotDefined, + Match(TextMatch), +} + +/// CALDAV:is-not-defined XML Element +/// +/// Name: is-not-defined +/// +/// Namespace: urn:ietf:params:xml:ns:caldav +/// +/// Purpose: Specifies that a match should occur if the enclosing +/// component, property, or parameter does not exist. +/// +/// Description: The CALDAV:is-not-defined XML element specifies that a +/// match occurs if the enclosing component, property, or parameter +/// value specified in a calendaring REPORT request does not exist in +/// the calendar data being tested. +/// +/// Definition: +/// +/* CURRENTLY INLINED */ + + + +/// Name: timezone +/// +/// Namespace: urn:ietf:params:xml:ns:caldav +/// +/// Purpose: Specifies the time zone component to use when determining +/// the results of a report. +/// +/// Description: The CALDAV:timezone XML element specifies that for a +/// given calendaring REPORT request, the server MUST rely on the +/// specified VTIMEZONE component instead of the CALDAV:calendar- +/// timezone property of the calendar collection, in which the +/// calendar object resource is contained to resolve "date" values and +/// "date with local time" values (i.e., floating time) to "date with +/// UTC time" values. The server will require this information to +/// determine if a calendar component scheduled with "date" values or +/// "date with local time" values intersects a CALDAV:time-range +/// specified in a CALDAV:calendar-query REPORT. +/// +/// Note: The iCalendar data embedded within the CALDAV:timezone XML +/// element MUST follow the standard XML character data encoding +/// rules, including use of <, >, & etc. entity encoding or +/// the use of a construct. In the later case, the +/// +/// iCalendar data cannot contain the character sequence "]]>", which +/// is the end delimiter for the CDATA section. +/// +/// Definition: +/// +/// +/// PCDATA value: an iCalendar object with exactly one VTIMEZONE +#[derive(Debug, PartialEq)] +pub struct TimeZone(pub String); + +/// Name: filter +/// +/// Namespace: urn:ietf:params:xml:ns:caldav +/// +/// Purpose: Specifies a filter to limit the set of calendar components +/// returned by the server. 
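+///
+/// As a hedged sketch, the least restrictive filter simply wraps a
+/// comp-filter naming VCALENDAR with no further rules, which matches every
+/// calendar object resource in the collection:
+///
+/// ```rust,ignore
+/// let match_everything = Filter(CompFilter {
+///     name: Component::VCalendar,
+///     additional_rules: None,
+/// });
+/// ```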
+/// +/// Description: The CALDAV:filter XML element specifies the search +/// filter used to limit the calendar components returned by a +/// calendaring REPORT request. +/// +/// Definition: +/// +#[derive(Debug, PartialEq)] +pub struct Filter(pub CompFilter); + +/// Name: time-range +/// +/// Definition: +/// +/// +/// +/// start value: an iCalendar "date with UTC time" +/// end value: an iCalendar "date with UTC time" +#[derive(Debug, PartialEq)] +pub enum TimeRange { + OnlyStart(DateTime), + OnlyEnd(DateTime), + FullRange(DateTime, DateTime), +} + +// ----------------------- ENUM ATTRIBUTES --------------------- + +/// Known components +#[derive(Debug, PartialEq)] +pub enum Component { + VCalendar, + VJournal, + VFreeBusy, + VEvent, + VTodo, + VAlarm, + VTimeZone, + Unknown(String), +} +impl Component { + pub fn as_str<'a>(&'a self) -> &'a str { + match self { + Self::VCalendar => "VCALENDAR", + Self::VJournal => "VJOURNAL", + Self::VFreeBusy => "VFREEBUSY", + Self::VEvent => "VEVENT", + Self::VTodo => "VTODO", + Self::VAlarm => "VALARM", + Self::VTimeZone => "VTIMEZONE", + Self::Unknown(c) => c, + } + } +} + +/// name="VERSION", name="SUMMARY", etc. +/// Can be set on different objects: VCalendar, VEvent, etc. +/// Might be replaced by an enum later +#[derive(Debug, PartialEq)] +pub struct ComponentProperty(pub String); + +/// like PARSTAT +#[derive(Debug, PartialEq)] +pub struct PropertyParameter(pub String); +impl PropertyParameter { + pub fn as_str<'a>(&'a self) -> &'a str { + self.0.as_str() + } +} + +#[derive(Default,Debug,PartialEq)] +pub enum Collation { + #[default] + AsciiCaseMap, + Octet, + Unknown(String), +} +impl Collation { + pub fn as_str<'a>(&'a self) -> &'a str { + match self { + Self::AsciiCaseMap => "i;ascii-casemap", + Self::Octet => "i;octet", + Self::Unknown(c) => c.as_str(), + } + } +} diff --git a/aero-dav/src/decoder.rs b/aero-dav/src/decoder.rs new file mode 100644 index 0000000..65cb712 --- /dev/null +++ b/aero-dav/src/decoder.rs @@ -0,0 +1,947 @@ +use std::future::Future; + +use quick_xml::events::Event; +use quick_xml::events::attributes::AttrError; +use quick_xml::name::{Namespace, QName, PrefixDeclaration, ResolveResult, ResolveResult::*}; +use quick_xml::reader::NsReader; +use tokio::io::AsyncBufRead; + +use super::types::*; +use super::error::ParsingError; +use super::xml::{Node, QRead, Reader, IRead, DAV_URN, CAL_URN}; + +//@TODO (1) Rewrite all objects as Href, +// where we return Ok(None) instead of trying to find the object at any cost. +// Add a xml.find() -> Result, ParsingError> or similar for the cases we +// really need the object +// (2) Rewrite QRead and replace Result, _> with Result<_, _>, not found being a possible +// error. +// (3) Rewrite vectors with xml.collect() -> Result, _> +// (4) Something for alternatives would be great but no idea yet + +// ---- ROOT ---- + +/// Propfind request +impl QRead> for PropFind { + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "propfind").await?; + let propfind: PropFind = loop { + // allprop + if let Some(_) = xml.maybe_open(DAV_URN, "allprop").await? { + let includ = xml.maybe_find::>().await?; + xml.close().await?; + break PropFind::AllProp(includ) + } + + // propname + if let Some(_) = xml.maybe_open(DAV_URN, "propname").await? 
{ + xml.close().await?; + break PropFind::PropName + } + + // prop + let (mut maybe_prop, mut dirty) = (None, false); + xml.maybe_read::>(&mut maybe_prop, &mut dirty).await?; + if let Some(prop) = maybe_prop { + break PropFind::Prop(prop) + } + + // not found, skipping + xml.skip().await?; + }; + xml.close().await?; + + Ok(propfind) + } +} + +/// PROPPATCH request +impl QRead> for PropertyUpdate { + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "propertyupdate").await?; + let collected_items = xml.collect::>().await?; + xml.close().await?; + Ok(PropertyUpdate(collected_items)) + } +} + +/// Generic response +impl> QRead> for Multistatus { + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "multistatus").await?; + let mut responses = Vec::new(); + let mut responsedescription = None; + + loop { + let mut dirty = false; + xml.maybe_push(&mut responses, &mut dirty).await?; + xml.maybe_read(&mut responsedescription, &mut dirty).await?; + if !dirty { + match xml.peek() { + Event::End(_) => break, + _ => xml.skip().await?, + }; + } + } + + xml.close().await?; + Ok(Multistatus { responses, responsedescription }) + } +} + +// LOCK REQUEST +impl QRead for LockInfo { + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "lockinfo").await?; + let (mut m_scope, mut m_type, mut owner) = (None, None, None); + loop { + let mut dirty = false; + xml.maybe_read::(&mut m_scope, &mut dirty).await?; + xml.maybe_read::(&mut m_type, &mut dirty).await?; + xml.maybe_read::(&mut owner, &mut dirty).await?; + + if !dirty { + match xml.peek() { + Event::End(_) => break, + _ => xml.skip().await?, + }; + } + } + xml.close().await?; + match (m_scope, m_type) { + (Some(lockscope), Some(locktype)) => Ok(LockInfo { lockscope, locktype, owner }), + _ => Err(ParsingError::MissingChild), + } + } +} + +// LOCK RESPONSE +impl QRead> for PropValue { + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "prop").await?; + let mut acc = xml.collect::>().await?; + xml.close().await?; + Ok(PropValue(acc)) + } +} + + +/// Error response +impl QRead> for Error { + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "error").await?; + let violations = xml.collect::>().await?; + xml.close().await?; + Ok(Error(violations)) + } +} + + + +// ---- INNER XML +impl> QRead> for Response { + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "response").await?; + let (mut status, mut error, mut responsedescription, mut location) = (None, None, None, None); + let mut href = Vec::new(); + let mut propstat = Vec::new(); + + loop { + let mut dirty = false; + xml.maybe_read::(&mut status, &mut dirty).await?; + xml.maybe_push::(&mut href, &mut dirty).await?; + xml.maybe_push::>(&mut propstat, &mut dirty).await?; + xml.maybe_read::>(&mut error, &mut dirty).await?; + xml.maybe_read::(&mut responsedescription, &mut dirty).await?; + xml.maybe_read::(&mut location, &mut dirty).await?; + + if !dirty { + match xml.peek() { + Event::End(_) => break, + _ => { xml.skip().await? 
}, + }; + } + } + + xml.close().await?; + match (status, &propstat[..], &href[..]) { + (Some(status), &[], &[_, ..]) => Ok(Response { + status_or_propstat: StatusOrPropstat::Status(href, status), + error, responsedescription, location, + }), + (None, &[_, ..], &[_, ..]) => Ok(Response { + status_or_propstat: StatusOrPropstat::PropStat(href.into_iter().next().unwrap(), propstat), + error, responsedescription, location, + }), + (Some(_), &[_, ..], _) => Err(ParsingError::InvalidValue), + _ => Err(ParsingError::MissingChild), + } + } +} + +impl> QRead> for PropStat { + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "propstat").await?; + + let (mut m_prop, mut m_status, mut error, mut responsedescription) = (None, None, None, None); + + loop { + let mut dirty = false; + xml.maybe_read::(&mut m_prop, &mut dirty).await?; + xml.maybe_read::(&mut m_status, &mut dirty).await?; + xml.maybe_read::>(&mut error, &mut dirty).await?; + xml.maybe_read::(&mut responsedescription, &mut dirty).await?; + + if !dirty { + match xml.peek() { + Event::End(_) => break, + _ => xml.skip().await?, + }; + } + } + + xml.close().await?; + match (m_prop, m_status) { + (Some(prop), Some(status)) => Ok(PropStat { prop, status, error, responsedescription }), + _ => Err(ParsingError::MissingChild), + } + } +} + +impl QRead for Status { + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "status").await?; + let fullcode = xml.tag_string().await?; + let txtcode = fullcode.splitn(3, ' ').nth(1).ok_or(ParsingError::InvalidValue)?; + let code = http::status::StatusCode::from_bytes(txtcode.as_bytes()).or(Err(ParsingError::InvalidValue))?; + xml.close().await?; + Ok(Status(code)) + } +} + +impl QRead for ResponseDescription { + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "responsedescription").await?; + let cnt = xml.tag_string().await?; + xml.close().await?; + Ok(ResponseDescription(cnt)) + } +} + +impl QRead for Location { + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "location").await?; + let href = xml.find::().await?; + xml.close().await?; + Ok(Location(href)) + } +} + +impl QRead> for PropertyUpdateItem { + async fn qread(xml: &mut Reader) -> Result { + match Remove::qread(xml).await { + Err(ParsingError::Recoverable) => (), + otherwise => return otherwise.map(PropertyUpdateItem::Remove), + } + Set::qread(xml).await.map(PropertyUpdateItem::Set) + } +} + +impl QRead> for Remove { + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "remove").await?; + let propname = xml.find::>().await?; + xml.close().await?; + Ok(Remove(propname)) + } +} + +impl QRead> for Set { + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "set").await?; + let propvalue = xml.find::>().await?; + xml.close().await?; + Ok(Set(propvalue)) + } +} + +impl QRead> for Violation { + async fn qread(xml: &mut Reader) -> Result { + if xml.maybe_open(DAV_URN, "lock-token-matches-request-uri").await?.is_some() { + xml.close().await?; + Ok(Violation::LockTokenMatchesRequestUri) + } else if xml.maybe_open(DAV_URN, "lock-token-submitted").await?.is_some() { + let links = xml.collect::().await?; + xml.close().await?; + Ok(Violation::LockTokenSubmitted(links)) + } else if xml.maybe_open(DAV_URN, "no-conflicting-lock").await?.is_some() { + let links = xml.collect::().await?; + xml.close().await?; + Ok(Violation::NoConflictingLock(links)) + } else if xml.maybe_open(DAV_URN, "no-external-entities").await?.is_some() { + xml.close().await?; + 
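+            // DAV:no-external-entities is an empty marker element, so closing
+            // the tag above is all the parsing it needs.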
Ok(Violation::NoExternalEntities) + } else if xml.maybe_open(DAV_URN, "preserved-live-properties").await?.is_some() { + xml.close().await?; + Ok(Violation::PreservedLiveProperties) + } else if xml.maybe_open(DAV_URN, "propfind-finite-depth").await?.is_some() { + xml.close().await?; + Ok(Violation::PropfindFiniteDepth) + } else if xml.maybe_open(DAV_URN, "cannot-modify-protected-property").await?.is_some() { + xml.close().await?; + Ok(Violation::CannotModifyProtectedProperty) + } else { + E::Error::qread(xml).await.map(Violation::Extension) + } + } +} + +impl QRead> for Include { + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "include").await?; + let acc = xml.collect::>().await?; + xml.close().await?; + Ok(Include(acc)) + } +} + +impl QRead> for PropName { + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "prop").await?; + let acc = xml.collect::>().await?; + xml.close().await?; + Ok(PropName(acc)) + } +} + +impl QRead> for PropertyRequest { + async fn qread(xml: &mut Reader) -> Result { + let maybe = if xml.maybe_open(DAV_URN, "creationdate").await?.is_some() { + Some(PropertyRequest::CreationDate) + } else if xml.maybe_open(DAV_URN, "displayname").await?.is_some() { + Some(PropertyRequest::DisplayName) + } else if xml.maybe_open(DAV_URN, "getcontentlanguage").await?.is_some() { + Some(PropertyRequest::GetContentLanguage) + } else if xml.maybe_open(DAV_URN, "getcontentlength").await?.is_some() { + Some(PropertyRequest::GetContentLength) + } else if xml.maybe_open(DAV_URN, "getcontenttype").await?.is_some() { + Some(PropertyRequest::GetContentType) + } else if xml.maybe_open(DAV_URN, "getetag").await?.is_some() { + Some(PropertyRequest::GetEtag) + } else if xml.maybe_open(DAV_URN, "getlastmodified").await?.is_some() { + Some(PropertyRequest::GetLastModified) + } else if xml.maybe_open(DAV_URN, "lockdiscovery").await?.is_some() { + Some(PropertyRequest::LockDiscovery) + } else if xml.maybe_open(DAV_URN, "resourcetype").await?.is_some() { + Some(PropertyRequest::ResourceType) + } else if xml.maybe_open(DAV_URN, "supportedlock").await?.is_some() { + Some(PropertyRequest::SupportedLock) + } else { + None + }; + + match maybe { + Some(pr) => { + xml.close().await?; + Ok(pr) + }, + None => E::PropertyRequest::qread(xml).await.map(PropertyRequest::Extension), + } + } +} + +impl QRead> for Property { + async fn qread(xml: &mut Reader) -> Result { + use chrono::{DateTime, FixedOffset, TimeZone}; + + // Core WebDAV properties + if xml.maybe_open(DAV_URN, "creationdate").await?.is_some() { + let datestr = xml.tag_string().await?; + xml.close().await?; + return Ok(Property::CreationDate(DateTime::parse_from_rfc3339(datestr.as_str())?)) + } else if xml.maybe_open(DAV_URN, "displayname").await?.is_some() { + let name = xml.tag_string().await?; + xml.close().await?; + return Ok(Property::DisplayName(name)) + } else if xml.maybe_open(DAV_URN, "getcontentlanguage").await?.is_some() { + let lang = xml.tag_string().await?; + xml.close().await?; + return Ok(Property::GetContentLanguage(lang)) + } else if xml.maybe_open(DAV_URN, "getcontentlength").await?.is_some() { + let cl = xml.tag_string().await?.parse::()?; + xml.close().await?; + return Ok(Property::GetContentLength(cl)) + } else if xml.maybe_open(DAV_URN, "getcontenttype").await?.is_some() { + let ct = xml.tag_string().await?; + xml.close().await?; + return Ok(Property::GetContentType(ct)) + } else if xml.maybe_open(DAV_URN, "getetag").await?.is_some() { + let etag = xml.tag_string().await?; + xml.close().await?; + 
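+            // The value is the entity-tag exactly as it appears in the ETag
+            // header, double quotes included (see the "zzyzx" test case below).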
return Ok(Property::GetEtag(etag)) + } else if xml.maybe_open(DAV_URN, "getlastmodified").await?.is_some() { + let datestr = xml.tag_string().await?; + xml.close().await?; + return Ok(Property::GetLastModified(DateTime::parse_from_rfc2822(datestr.as_str())?)) + } else if xml.maybe_open(DAV_URN, "lockdiscovery").await?.is_some() { + let acc = xml.collect::().await?; + xml.close().await?; + return Ok(Property::LockDiscovery(acc)) + } else if xml.maybe_open(DAV_URN, "resourcetype").await?.is_some() { + let acc = xml.collect::>().await?; + xml.close().await?; + return Ok(Property::ResourceType(acc)) + } else if xml.maybe_open(DAV_URN, "supportedlock").await?.is_some() { + let acc = xml.collect::().await?; + xml.close().await?; + return Ok(Property::SupportedLock(acc)) + } + + // Option 2: an extension property, delegating + E::Property::qread(xml).await.map(Property::Extension) + } +} + +impl QRead for ActiveLock { + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "activelock").await?; + let (mut m_scope, mut m_type, mut m_depth, mut owner, mut timeout, mut locktoken, mut m_root) = + (None, None, None, None, None, None, None); + + loop { + let mut dirty = false; + xml.maybe_read::(&mut m_scope, &mut dirty).await?; + xml.maybe_read::(&mut m_type, &mut dirty).await?; + xml.maybe_read::(&mut m_depth, &mut dirty).await?; + xml.maybe_read::(&mut owner, &mut dirty).await?; + xml.maybe_read::(&mut timeout, &mut dirty).await?; + xml.maybe_read::(&mut locktoken, &mut dirty).await?; + xml.maybe_read::(&mut m_root, &mut dirty).await?; + + if !dirty { + match xml.peek() { + Event::End(_) => break, + _ => { xml.skip().await?; }, + } + } + } + + xml.close().await?; + match (m_scope, m_type, m_depth, m_root) { + (Some(lockscope), Some(locktype), Some(depth), Some(lockroot)) => + Ok(ActiveLock { lockscope, locktype, depth, owner, timeout, locktoken, lockroot }), + _ => Err(ParsingError::MissingChild), + } + } +} + +impl QRead for Depth { + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "depth").await?; + let depth_str = xml.tag_string().await?; + xml.close().await?; + match depth_str.as_str() { + "0" => Ok(Depth::Zero), + "1" => Ok(Depth::One), + "infinity" => Ok(Depth::Infinity), + _ => Err(ParsingError::WrongToken), + } + } +} + +impl QRead for Owner { + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "owner").await?; + + let mut owner = Owner::Unknown; + loop { + match xml.peek() { + Event::Text(_) | Event::CData(_) => { + let txt = xml.tag_string().await?; + if matches!(owner, Owner::Unknown) { + owner = Owner::Txt(txt); + } + } + Event::Start(_) | Event::Empty(_) => { + match Href::qread(xml).await { + Ok(href) => { owner = Owner::Href(href); }, + Err(ParsingError::Recoverable) => { xml.skip().await?; }, + Err(e) => return Err(e), + } + } + Event::End(_) => break, + _ => { xml.skip().await?; }, + } + }; + xml.close().await?; + Ok(owner) + } +} + +impl QRead for Timeout { + async fn qread(xml: &mut Reader) -> Result { + const SEC_PFX: &str = "SEC_PFX"; + xml.open(DAV_URN, "timeout").await?; + + let timeout = match xml.tag_string().await?.as_str() { + "Infinite" => Timeout::Infinite, + seconds => match seconds.strip_prefix(SEC_PFX) { + Some(secs) => Timeout::Seconds(secs.parse::()?), + None => return Err(ParsingError::InvalidValue), + }, + }; + + xml.close().await?; + Ok(timeout) + } +} + +impl QRead for LockToken { + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "locktoken").await?; + let href = Href::qread(xml).await?; + 
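+        // Per RFC 4918, DAV:locktoken holds exactly one DAV:href carrying the
+        // lock token URI, so a single Href read is sufficient.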
xml.close().await?; + Ok(LockToken(href)) + } +} + +impl QRead for LockRoot { + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "lockroot").await?; + let href = Href::qread(xml).await?; + xml.close().await?; + Ok(LockRoot(href)) + } +} + +impl QRead> for ResourceType { + async fn qread(xml: &mut Reader) -> Result { + if xml.maybe_open(DAV_URN, "collection").await?.is_some() { + xml.close().await?; + return Ok(ResourceType::Collection) + } + + E::ResourceType::qread(xml).await.map(ResourceType::Extension) + } +} + +impl QRead for LockEntry { + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "lockentry").await?; + let (mut maybe_scope, mut maybe_type) = (None, None); + + loop { + let mut dirty = false; + xml.maybe_read::(&mut maybe_scope, &mut dirty).await?; + xml.maybe_read::(&mut maybe_type, &mut dirty).await?; + if !dirty { + match xml.peek() { + Event::End(_) => break, + _ => xml.skip().await?, + }; + } + } + + xml.close().await?; + match (maybe_scope, maybe_type) { + (Some(lockscope), Some(locktype)) => Ok(LockEntry { lockscope, locktype }), + _ => Err(ParsingError::MissingChild), + } + } +} + +impl QRead for LockScope { + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "lockscope").await?; + + let lockscope = loop { + if xml.maybe_open(DAV_URN, "exclusive").await?.is_some() { + xml.close().await?; + break LockScope::Exclusive + } else if xml.maybe_open(DAV_URN, "shared").await?.is_some() { + xml.close().await?; + break LockScope::Shared + } + + xml.skip().await?; + }; + + xml.close().await?; + Ok(lockscope) + } +} + +impl QRead for LockType { + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "locktype").await?; + + let locktype = loop { + if xml.maybe_open(DAV_URN, "write").await?.is_some() { + xml.close().await?; + break LockType::Write + } + + xml.skip().await?; + }; + + xml.close().await?; + Ok(locktype) + } +} + +impl QRead for Href { + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "href").await?; + let mut url = xml.tag_string().await?; + xml.close().await?; + Ok(Href(url)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use chrono::{FixedOffset, DateTime, TimeZone, Utc}; + use crate::realization::Core; + + #[tokio::test] + async fn basic_propfind_propname() { + let src = r#" + + + + + +"#; + + let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); + let got = rdr.find::>().await.unwrap(); + + assert_eq!(got, PropFind::::PropName); + } + + #[tokio::test] + async fn basic_propfind_prop() { + let src = r#" + + + + + + + + + + + + + +"#; + + let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); + let got = rdr.find::>().await.unwrap(); + + assert_eq!(got, PropFind::Prop(PropName(vec![ + PropertyRequest::DisplayName, + PropertyRequest::GetContentLength, + PropertyRequest::GetContentType, + PropertyRequest::GetEtag, + PropertyRequest::GetLastModified, + PropertyRequest::ResourceType, + PropertyRequest::SupportedLock, + ]))); + } + + #[tokio::test] + async fn rfc_lock_error() { + let src = r#" + + + /locked/ + + "#; + + let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); + let got = rdr.find::>().await.unwrap(); + + assert_eq!(got, Error(vec![ + Violation::LockTokenSubmitted(vec![ + Href("/locked/".into()) + ]) + ])); + } + + + #[tokio::test] + async fn rfc_propertyupdate() { + let src = r#" + + + + + Jim Whitehead + Roy Fielding + + + + + + + "#; + + let mut rdr = 
Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); + let got = rdr.find::>().await.unwrap(); + + assert_eq!(got, PropertyUpdate(vec![ + PropertyUpdateItem::Set(Set(PropValue(vec![]))), + PropertyUpdateItem::Remove(Remove(PropName(vec![]))), + ])); + } + + #[tokio::test] + async fn rfc_lockinfo() { + let src = r#" + + + + + + http://example.org/~ejw/contact.html + + +"#; + + let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); + let got = rdr.find::().await.unwrap(); + + assert_eq!(got, LockInfo { + lockscope: LockScope::Exclusive, + locktype: LockType::Write, + owner: Some(Owner::Href(Href("http://example.org/~ejw/contact.html".into()))), + }); + } + + #[tokio::test] + async fn rfc_multistatus_name() { + let src = r#" + + + + http://www.example.com/container/ + + + + + + + + + + HTTP/1.1 200 OK + + + + http://www.example.com/container/front.html + + + + + + + + + + + + + HTTP/1.1 200 OK + + + +"#; + + let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); + let got = rdr.find::>>().await.unwrap(); + + assert_eq!(got, Multistatus { + responses: vec![ + Response { + status_or_propstat: StatusOrPropstat::PropStat( + Href("http://www.example.com/container/".into()), + vec![PropStat { + prop: PropName(vec![ + PropertyRequest::CreationDate, + PropertyRequest::DisplayName, + PropertyRequest::ResourceType, + PropertyRequest::SupportedLock, + ]), + status: Status(http::status::StatusCode::OK), + error: None, + responsedescription: None, + }], + ), + error: None, + responsedescription: None, + location: None, + }, + Response { + status_or_propstat: StatusOrPropstat::PropStat( + Href("http://www.example.com/container/front.html".into()), + vec![PropStat { + prop: PropName(vec![ + PropertyRequest::CreationDate, + PropertyRequest::DisplayName, + PropertyRequest::GetContentLength, + PropertyRequest::GetContentType, + PropertyRequest::GetEtag, + PropertyRequest::GetLastModified, + PropertyRequest::ResourceType, + PropertyRequest::SupportedLock, + ]), + status: Status(http::status::StatusCode::OK), + error: None, + responsedescription: None, + }], + ), + error: None, + responsedescription: None, + location: None, + }, + ], + responsedescription: None, + }); + } + + + #[tokio::test] + async fn rfc_multistatus_value() { + let src = r#" + + + + /container/ + + + Box type A + Hadrian + 1997-12-01T17:42:21-08:00 + Example collection + + + + + + + + + + + + + HTTP/1.1 200 OK + + + + /container/front.html + + + Box type B + + 1997-12-01T18:27:21-08:00 + Example HTML resource + 4525 + text/html + "zzyzx" + Mon, 12 Jan 1998 09:25:56 GMT + + + + + + + + + + + + + HTTP/1.1 200 OK + + + "#; + + let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); + let got = rdr.find::>>().await.unwrap(); + + assert_eq!(got, Multistatus { + responses: vec![ + Response { + status_or_propstat: StatusOrPropstat::PropStat( + Href("/container/".into()), + vec![PropStat { + prop: PropValue(vec![ + Property::CreationDate(FixedOffset::west_opt(8 * 3600).unwrap().with_ymd_and_hms(1997, 12, 01, 17, 42, 21).unwrap()), + Property::DisplayName("Example collection".into()), + Property::ResourceType(vec![ResourceType::Collection]), + Property::SupportedLock(vec![ + LockEntry { + lockscope: LockScope::Exclusive, + locktype: LockType::Write, + }, + LockEntry { + lockscope: LockScope::Shared, + locktype: LockType::Write, + }, + ]), + ]), + status: Status(http::status::StatusCode::OK), + error: None, + responsedescription: None, + }], + ), + error: None, + 
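+                        // Response-level error/responsedescription stay empty in this
+                        // RFC example: the per-property status is carried by the
+                        // propstat entries above.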
responsedescription: None, + location: None, + + }, + Response { + status_or_propstat: StatusOrPropstat::PropStat( + Href("/container/front.html".into()), + vec![PropStat { + prop: PropValue(vec![ + Property::CreationDate(FixedOffset::west_opt(8 * 3600).unwrap().with_ymd_and_hms(1997, 12, 01, 18, 27, 21).unwrap()), + Property::DisplayName("Example HTML resource".into()), + Property::GetContentLength(4525), + Property::GetContentType("text/html".into()), + Property::GetEtag(r#""zzyzx""#.into()), + Property::GetLastModified(FixedOffset::west_opt(0).unwrap().with_ymd_and_hms(1998, 01, 12, 09, 25, 56).unwrap()), + //Property::ResourceType(vec![]), + Property::SupportedLock(vec![ + LockEntry { + lockscope: LockScope::Exclusive, + locktype: LockType::Write, + }, + LockEntry { + lockscope: LockScope::Shared, + locktype: LockType::Write, + }, + ]), + ]), + status: Status(http::status::StatusCode::OK), + error: None, + responsedescription: None, + }], + ), + error: None, + responsedescription: None, + location: None, + + }, + ], + responsedescription: None, + }); + } + +} diff --git a/aero-dav/src/encoder.rs b/aero-dav/src/encoder.rs new file mode 100644 index 0000000..fd2f9ca --- /dev/null +++ b/aero-dav/src/encoder.rs @@ -0,0 +1,1112 @@ +use quick_xml::Error as QError; +use quick_xml::events::{Event, BytesText}; +use super::types::*; +use super::xml::{Node, Writer,QWrite,IWrite}; + + +// --- XML ROOTS + +/// PROPFIND REQUEST +impl QWrite for PropFind { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("propfind"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + match self { + Self::PropName => { + let empty_propname = xml.create_dav_element("propname"); + xml.q.write_event_async(Event::Empty(empty_propname)).await? 
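+                // propname has no children, so it is written as a single
+                // Event::Empty rather than a Start/End pair.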
+ }, + Self::AllProp(maybe_include) => { + let empty_allprop = xml.create_dav_element("allprop"); + xml.q.write_event_async(Event::Empty(empty_allprop)).await?; + if let Some(include) = maybe_include { + include.qwrite(xml).await?; + } + }, + Self::Prop(propname) => propname.qwrite(xml).await?, + } + xml.q.write_event_async(Event::End(end)).await + } +} + +/// PROPPATCH REQUEST +impl QWrite for PropertyUpdate { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("propertyupdate"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + for update in self.0.iter() { + update.qwrite(xml).await?; + } + xml.q.write_event_async(Event::End(end)).await + } +} + + +/// PROPFIND RESPONSE, PROPPATCH RESPONSE, COPY RESPONSE, MOVE RESPONSE +/// DELETE RESPONSE, +impl> QWrite for Multistatus { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("multistatus"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + for response in self.responses.iter() { + response.qwrite(xml).await?; + } + if let Some(description) = &self.responsedescription { + description.qwrite(xml).await?; + } + + xml.q.write_event_async(Event::End(end)).await?; + Ok(()) + } +} + +/// LOCK REQUEST +impl QWrite for LockInfo { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("lockinfo"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + self.lockscope.qwrite(xml).await?; + self.locktype.qwrite(xml).await?; + if let Some(owner) = &self.owner { + owner.qwrite(xml).await?; + } + xml.q.write_event_async(Event::End(end)).await + } +} + +/// SOME LOCK RESPONSES +impl QWrite for PropValue { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("prop"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + for propval in &self.0 { + propval.qwrite(xml).await?; + } + xml.q.write_event_async(Event::End(end)).await + } +} + +/// Error response +impl QWrite for Error { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("error"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + for violation in &self.0 { + violation.qwrite(xml).await?; + } + xml.q.write_event_async(Event::End(end)).await + } +} + +// --- XML inner elements +impl QWrite for PropertyUpdateItem { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + match self { + Self::Set(set) => set.qwrite(xml).await, + Self::Remove(rm) => rm.qwrite(xml).await, + } + } +} + +impl QWrite for Set { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("set"); + let end = start.to_end(); + xml.q.write_event_async(Event::Start(start.clone())).await?; + self.0.qwrite(xml).await?; + xml.q.write_event_async(Event::End(end)).await + } +} + +impl QWrite for Remove { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("remove"); + let end = start.to_end(); + xml.q.write_event_async(Event::Start(start.clone())).await?; + self.0.qwrite(xml).await?; + xml.q.write_event_async(Event::End(end)).await + } +} + + +impl QWrite for PropName { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = 
xml.create_dav_element("prop"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + for propname in &self.0 { + propname.qwrite(xml).await?; + } + xml.q.write_event_async(Event::End(end)).await + } +} + + +impl QWrite for Href { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("href"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Text(BytesText::new(&self.0))).await?; + xml.q.write_event_async(Event::End(end)).await + } +} + +impl> QWrite for Response { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("response"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + self.status_or_propstat.qwrite(xml).await?; + if let Some(error) = &self.error { + error.qwrite(xml).await?; + } + if let Some(responsedescription) = &self.responsedescription { + responsedescription.qwrite(xml).await?; + } + if let Some(location) = &self.location { + location.qwrite(xml).await?; + } + xml.q.write_event_async(Event::End(end)).await + } +} + +impl> QWrite for StatusOrPropstat { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + match self { + Self::Status(many_href, status) => { + for href in many_href.iter() { + href.qwrite(xml).await?; + } + status.qwrite(xml).await + }, + Self::PropStat(href, propstat_list) => { + href.qwrite(xml).await?; + for propstat in propstat_list.iter() { + propstat.qwrite(xml).await?; + } + Ok(()) + } + } + } +} + +impl QWrite for Status { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("status"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + + let txt = format!("HTTP/1.1 {} {}", self.0.as_str(), self.0.canonical_reason().unwrap_or("No reason")); + xml.q.write_event_async(Event::Text(BytesText::new(&txt))).await?; + + xml.q.write_event_async(Event::End(end)).await?; + + Ok(()) + } +} + +impl QWrite for ResponseDescription { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("responsedescription"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Text(BytesText::new(&self.0))).await?; + xml.q.write_event_async(Event::End(end)).await + } +} + +impl QWrite for Location { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("location"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + self.0.qwrite(xml).await?; + xml.q.write_event_async(Event::End(end)).await + } +} + +impl> QWrite for PropStat { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("propstat"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + self.prop.qwrite(xml).await?; + self.status.qwrite(xml).await?; + if let Some(error) = &self.error { + error.qwrite(xml).await?; + } + if let Some(description) = &self.responsedescription { + description.qwrite(xml).await?; + } + xml.q.write_event_async(Event::End(end)).await?; + + Ok(()) + } +} + +impl QWrite for Property { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + use Property::*; + match self { + CreationDate(date) => { + // 
1997-12-01T17:42:21-08:00 + let start = xml.create_dav_element("creationdate"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Text(BytesText::new(&date.to_rfc3339()))).await?; + xml.q.write_event_async(Event::End(end)).await?; + }, + DisplayName(name) => { + // Example collection + let start = xml.create_dav_element("displayname"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Text(BytesText::new(name))).await?; + xml.q.write_event_async(Event::End(end)).await?; + }, + GetContentLanguage(lang) => { + let start = xml.create_dav_element("getcontentlanguage"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Text(BytesText::new(lang))).await?; + xml.q.write_event_async(Event::End(end)).await?; + }, + GetContentLength(len) => { + // 4525 + let start = xml.create_dav_element("getcontentlength"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Text(BytesText::new(&len.to_string()))).await?; + xml.q.write_event_async(Event::End(end)).await?; + }, + GetContentType(ct) => { + // text/html + let start = xml.create_dav_element("getcontenttype"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Text(BytesText::new(&ct))).await?; + xml.q.write_event_async(Event::End(end)).await?; + }, + GetEtag(et) => { + // "zzyzx" + let start = xml.create_dav_element("getetag"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Text(BytesText::new(et))).await?; + xml.q.write_event_async(Event::End(end)).await?; + }, + GetLastModified(date) => { + // Mon, 12 Jan 1998 09:25:56 GMT + let start = xml.create_dav_element("getlastmodified"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Text(BytesText::new(&date.to_rfc2822()))).await?; + xml.q.write_event_async(Event::End(end)).await?; + }, + LockDiscovery(many_locks) => { + // ... + let start = xml.create_dav_element("lockdiscovery"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + for lock in many_locks.iter() { + lock.qwrite(xml).await?; + } + xml.q.write_event_async(Event::End(end)).await?; + }, + ResourceType(many_types) => { + // + + // + + // + // + // + // + + let start = xml.create_dav_element("resourcetype"); + if many_types.is_empty() { + xml.q.write_event_async(Event::Empty(start)).await?; + } else { + let end = start.to_end(); + xml.q.write_event_async(Event::Start(start.clone())).await?; + for restype in many_types.iter() { + restype.qwrite(xml).await?; + } + xml.q.write_event_async(Event::End(end)).await?; + } + }, + SupportedLock(many_entries) => { + // + + // ... 
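+                // Like resourcetype above, an empty list collapses to a
+                // self-closing supportedlock element; otherwise each LockEntry
+                // is serialized as a child.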
+ + let start = xml.create_dav_element("supportedlock"); + if many_entries.is_empty() { + xml.q.write_event_async(Event::Empty(start)).await?; + } else { + let end = start.to_end(); + xml.q.write_event_async(Event::Start(start.clone())).await?; + for entry in many_entries.iter() { + entry.qwrite(xml).await?; + } + xml.q.write_event_async(Event::End(end)).await?; + } + }, + Extension(inner) => inner.qwrite(xml).await?, + }; + Ok(()) + } +} + +impl QWrite for ResourceType { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + match self { + Self::Collection => { + let empty_collection = xml.create_dav_element("collection"); + xml.q.write_event_async(Event::Empty(empty_collection)).await + }, + Self::Extension(inner) => inner.qwrite(xml).await, + } + } +} + +impl QWrite for Include { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("include"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + for prop in self.0.iter() { + prop.qwrite(xml).await?; + } + xml.q.write_event_async(Event::End(end)).await + } +} + +impl QWrite for PropertyRequest { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + use PropertyRequest::*; + let mut atom = async |c| { + let empty_tag = xml.create_dav_element(c); + xml.q.write_event_async(Event::Empty(empty_tag)).await + }; + + match self { + CreationDate => atom("creationdate").await, + DisplayName => atom("displayname").await, + GetContentLanguage => atom("getcontentlanguage").await, + GetContentLength => atom("getcontentlength").await, + GetContentType => atom("getcontenttype").await, + GetEtag => atom("getetag").await, + GetLastModified => atom("getlastmodified").await, + LockDiscovery => atom("lockdiscovery").await, + ResourceType => atom("resourcetype").await, + SupportedLock => atom("supportedlock").await, + Extension(inner) => inner.qwrite(xml).await, + } + } +} + +impl QWrite for ActiveLock { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + // + // + // + // infinity + // + // http://example.org/~ejw/contact.html + // + // Second-604800 + // + // urn:uuid:e71d4fae-5dec-22d6-fea5-00a0c91e6be4 + // + // + // http://example.com/workspace/webdav/proposal.doc + // + // + let start = xml.create_dav_element("activelock"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + self.locktype.qwrite(xml).await?; + self.lockscope.qwrite(xml).await?; + self.depth.qwrite(xml).await?; + if let Some(owner) = &self.owner { + owner.qwrite(xml).await?; + } + if let Some(timeout) = &self.timeout { + timeout.qwrite(xml).await?; + } + if let Some(locktoken) = &self.locktoken { + locktoken.qwrite(xml).await?; + } + self.lockroot.qwrite(xml).await?; + xml.q.write_event_async(Event::End(end)).await + } +} + +impl QWrite for LockType { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("locktype"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + match self { + Self::Write => { + let empty_write = xml.create_dav_element("write"); + xml.q.write_event_async(Event::Empty(empty_write)).await? 
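+                // write is the only lock type defined by RFC 4918 (see the
+                // LockType documentation in types.rs), hence the single variant.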
+ }, + }; + xml.q.write_event_async(Event::End(end)).await + } +} + +impl QWrite for LockScope { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("lockscope"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + match self { + Self::Exclusive => { + let empty_tag = xml.create_dav_element("exclusive"); + xml.q.write_event_async(Event::Empty(empty_tag)).await? + }, + Self::Shared => { + let empty_tag = xml.create_dav_element("shared"); + xml.q.write_event_async(Event::Empty(empty_tag)).await? + }, + }; + xml.q.write_event_async(Event::End(end)).await + } +} + +impl QWrite for Owner { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("owner"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + match self { + Self::Txt(txt) => xml.q.write_event_async(Event::Text(BytesText::new(&txt))).await?, + Self::Href(href) => href.qwrite(xml).await?, + Self::Unknown => (), + } + xml.q.write_event_async(Event::End(end)).await + } +} + +impl QWrite for Depth { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("depth"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + match self { + Self::Zero => xml.q.write_event_async(Event::Text(BytesText::new("0"))).await?, + Self::One => xml.q.write_event_async(Event::Text(BytesText::new("1"))).await?, + Self::Infinity => xml.q.write_event_async(Event::Text(BytesText::new("infinity"))).await?, + }; + xml.q.write_event_async(Event::End(end)).await + } +} + +impl QWrite for Timeout { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("timeout"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + match self { + Self::Seconds(count) => { + let txt = format!("Second-{}", count); + xml.q.write_event_async(Event::Text(BytesText::new(&txt))).await? + }, + Self::Infinite => xml.q.write_event_async(Event::Text(BytesText::new("Infinite"))).await? 
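+            // Wire format follows the TimeType grammar quoted in types.rs:
+            // either "Infinite" or "Second-" followed by a number of seconds,
+            // which is also the prefix the decoder is expected to strip on read.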
+ }; + xml.q.write_event_async(Event::End(end)).await + } +} + +impl QWrite for LockToken { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("locktoken"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + self.0.qwrite(xml).await?; + xml.q.write_event_async(Event::End(end)).await + } +} + +impl QWrite for LockRoot { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("lockroot"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + self.0.qwrite(xml).await?; + xml.q.write_event_async(Event::End(end)).await + } +} + +impl QWrite for LockEntry { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("lockentry"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + self.lockscope.qwrite(xml).await?; + self.locktype.qwrite(xml).await?; + xml.q.write_event_async(Event::End(end)).await + } +} + +impl QWrite for Violation { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let mut atom = async |c| { + let empty_tag = xml.create_dav_element(c); + xml.q.write_event_async(Event::Empty(empty_tag)).await + }; + + match self { + Violation::LockTokenMatchesRequestUri => atom("lock-token-matches-request-uri").await, + Violation::LockTokenSubmitted(hrefs) if hrefs.is_empty() => atom("lock-token-submitted").await, + Violation::LockTokenSubmitted(hrefs) => { + let start = xml.create_dav_element("lock-token-submitted"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + for href in hrefs { + href.qwrite(xml).await?; + } + xml.q.write_event_async(Event::End(end)).await + }, + Violation::NoConflictingLock(hrefs) if hrefs.is_empty() => atom("no-conflicting-lock").await, + Violation::NoConflictingLock(hrefs) => { + let start = xml.create_dav_element("no-conflicting-lock"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + for href in hrefs { + href.qwrite(xml).await?; + } + xml.q.write_event_async(Event::End(end)).await + }, + Violation::NoExternalEntities => atom("no-external-entities").await, + Violation::PreservedLiveProperties => atom("preserved-live-properties").await, + Violation::PropfindFiniteDepth => atom("propfind-finite-depth").await, + Violation::CannotModifyProtectedProperty => atom("cannot-modify-protected-property").await, + Violation::Extension(inner) => inner.qwrite(xml).await, + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::realization::Core; + use tokio::io::AsyncWriteExt; + + /// To run only the unit tests and avoid the behavior ones: + /// cargo test --bin aerogramme + + async fn serialize(elem: &impl QWrite) -> String { + let mut buffer = Vec::new(); + let mut tokio_buffer = tokio::io::BufWriter::new(&mut buffer); + let q = quick_xml::writer::Writer::new_with_indent(&mut tokio_buffer, b' ', 4); + let ns_to_apply = vec![ ("xmlns:D".into(), "DAV:".into()) ]; + let mut writer = Writer { q, ns_to_apply }; + + elem.qwrite(&mut writer).await.expect("xml serialization"); + tokio_buffer.flush().await.expect("tokio buffer flush"); + let got = std::str::from_utf8(buffer.as_slice()).unwrap(); + + return got.into() + } + + #[tokio::test] + async fn basic_href() { + + let got = serialize( + &Href("/SOGo/dav/so/".into()) + ).await; + let expected = r#"/SOGo/dav/so/"#; + + assert_eq!(&got, 
expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + } + + #[tokio::test] + async fn basic_multistatus() { + let got = serialize( + &Multistatus::> { + responses: vec![], + responsedescription: Some(ResponseDescription("Hello world".into())) + }, + ).await; + + let expected = r#" + Hello world +"#; + + assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + } + + + #[tokio::test] + async fn rfc_error_delete_locked() { + let got = serialize( + &Error::(vec![ + Violation::LockTokenSubmitted(vec![ + Href("/locked/".into()) + ]) + ]), + ).await; + + let expected = r#" + + /locked/ + +"#; + + assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + } + + #[tokio::test] + async fn rfc_propname_req() { + let got = serialize( + &PropFind::::PropName, + ).await; + + let expected = r#" + +"#; + + assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + } + + #[tokio::test] + async fn rfc_propname_res() { + let got = serialize( + &Multistatus::> { + responses: vec![ + Response { + status_or_propstat: StatusOrPropstat::PropStat( + Href("http://www.example.com/container/".into()), + vec![PropStat { + prop: PropName(vec![ + PropertyRequest::CreationDate, + PropertyRequest::DisplayName, + PropertyRequest::ResourceType, + PropertyRequest::SupportedLock, + ]), + status: Status(http::status::StatusCode::OK), + error: None, + responsedescription: None, + }] + ), + error: None, + responsedescription: None, + location: None, + }, + Response { + status_or_propstat: StatusOrPropstat::PropStat( + Href("http://www.example.com/container/front.html".into()), + vec![PropStat { + prop: PropName(vec![ + PropertyRequest::CreationDate, + PropertyRequest::DisplayName, + PropertyRequest::GetContentLength, + PropertyRequest::GetContentType, + PropertyRequest::GetEtag, + PropertyRequest::GetLastModified, + PropertyRequest::ResourceType, + PropertyRequest::SupportedLock, + ]), + status: Status(http::status::StatusCode::OK), + error: None, + responsedescription: None, + } + ]), + error: None, + responsedescription: None, + location: None, + }, + ], + responsedescription: None, + }, + ).await; + + let expected = r#" + + http://www.example.com/container/ + + + + + + + + HTTP/1.1 200 OK + + + + http://www.example.com/container/front.html + + + + + + + + + + + + HTTP/1.1 200 OK + + +"#; + + + assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + } + + #[tokio::test] + async fn rfc_allprop_req() { + let got = serialize( + &PropFind::::AllProp(None), + ).await; + + let expected = r#" + +"#; + + assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + } + + #[tokio::test] + async fn rfc_allprop_res() { + use chrono::{DateTime,FixedOffset,TimeZone}; + let got = serialize( + &Multistatus::> { + responses: vec![ + Response { + status_or_propstat: StatusOrPropstat::PropStat( + Href("/container/".into()), + vec![PropStat { + prop: PropValue(vec![ + Property::CreationDate(FixedOffset::west_opt(8 * 3600) + .unwrap() + .with_ymd_and_hms(1997, 12, 1, 17, 42, 21) + .unwrap()), + Property::DisplayName("Example collection".into()), + Property::ResourceType(vec![ResourceType::Collection]), + Property::SupportedLock(vec![ + LockEntry { + lockscope: LockScope::Exclusive, + locktype: LockType::Write, + }, + LockEntry { + lockscope: LockScope::Shared, + locktype: LockType::Write, + }, + ]), + ]), + status: Status(http::status::StatusCode::OK), + error: None, + responsedescription: None, + }] + ), + error: None, + responsedescription: None, + 
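+                        // location, when set, wraps the href that would be sent in an
+                        // HTTP Location header (see the Location type); it is unused
+                        // in this RFC example.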
location: None, + }, + Response { + status_or_propstat: StatusOrPropstat::PropStat( + Href("/container/front.html".into()), + vec![PropStat { + prop: PropValue(vec![ + Property::CreationDate(FixedOffset::west_opt(8 * 3600) + .unwrap() + .with_ymd_and_hms(1997, 12, 1, 18, 27, 21) + .unwrap()), + Property::DisplayName("Example HTML resource".into()), + Property::GetContentLength(4525), + Property::GetContentType("text/html".into()), + Property::GetEtag(r#""zzyzx""#.into()), + Property::GetLastModified(FixedOffset::east_opt(0) + .unwrap() + .with_ymd_and_hms(1998, 1, 12, 9, 25, 56) + .unwrap()), + Property::ResourceType(vec![]), + Property::SupportedLock(vec![ + LockEntry { + lockscope: LockScope::Exclusive, + locktype: LockType::Write, + }, + LockEntry { + lockscope: LockScope::Shared, + locktype: LockType::Write, + }, + ]), + ]), + status: Status(http::status::StatusCode::OK), + error: None, + responsedescription: None, + }] + ), + error: None, + responsedescription: None, + location: None, + }, + ], + responsedescription: None, + } + ).await; + + let expected = r#" + + /container/ + + + 1997-12-01T17:42:21-08:00 + Example collection + + + + + + + + + + + + + + + + + + + + + + + HTTP/1.1 200 OK + + + + /container/front.html + + + 1997-12-01T18:27:21-08:00 + Example HTML resource + 4525 + text/html + "zzyzx" + Mon, 12 Jan 1998 09:25:56 +0000 + + + + + + + + + + + + + + + + + + + + + HTTP/1.1 200 OK + + +"#; + + assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + } + + #[tokio::test] + async fn rfc_allprop_include() { + let got = serialize( + &PropFind::::AllProp(Some(Include(vec![ + PropertyRequest::DisplayName, + PropertyRequest::ResourceType, + ]))), + ).await; + + let expected = r#" + + + + + +"#; + + assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + } + + #[tokio::test] + async fn rfc_propertyupdate() { + let got = serialize( + &PropertyUpdate::(vec![ + PropertyUpdateItem::Set(Set(PropValue(vec![ + Property::GetContentLanguage("fr-FR".into()), + ]))), + PropertyUpdateItem::Remove(Remove(PropName(vec![ + PropertyRequest::DisplayName, + ]))), + ]), + ).await; + + let expected = r#" + + + fr-FR + + + + + + + +"#; + + assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + } + + #[tokio::test] + async fn rfc_delete_locked2() { + let got = serialize( + &Multistatus::> { + responses: vec![Response { + status_or_propstat: StatusOrPropstat::Status( + vec![Href("http://www.example.com/container/resource3".into())], + Status(http::status::StatusCode::from_u16(423).unwrap()) + ), + error: Some(Error(vec![Violation::LockTokenSubmitted(vec![])])), + responsedescription: None, + location: None, + }], + responsedescription: None, + }, + ).await; + + let expected = r#" + + http://www.example.com/container/resource3 + HTTP/1.1 423 Locked + + + + +"#; + + assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + } + + #[tokio::test] + async fn rfc_simple_lock_request() { + let got = serialize( + &LockInfo { + lockscope: LockScope::Exclusive, + locktype: LockType::Write, + owner: Some(Owner::Href(Href("http://example.org/~ejw/contact.html".into()))), + }, + ).await; + + let expected = r#" + + + + + + + + http://example.org/~ejw/contact.html + +"#; + + assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + } + + #[tokio::test] + async fn rfc_simple_lock_response() { + let got = serialize( + &PropValue::(vec![ + Property::LockDiscovery(vec![ActiveLock { + lockscope: LockScope::Exclusive, + 
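+                        // Mirrors the activelock example quoted in ActiveLock::qwrite:
+                        // an exclusive write lock, infinite depth, one-week timeout.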
locktype: LockType::Write, + depth: Depth::Infinity, + owner: Some(Owner::Href(Href("http://example.org/~ejw/contact.html".into()))), + timeout: Some(Timeout::Seconds(604800)), + locktoken: Some(LockToken(Href("urn:uuid:e71d4fae-5dec-22d6-fea5-00a0c91e6be4".into()))), + lockroot: LockRoot(Href("http://example.com/workspace/webdav/proposal.doc".into())), + }]), + ]), + ).await; + + let expected = r#" + + + + + + + + + infinity + + http://example.org/~ejw/contact.html + + Second-604800 + + urn:uuid:e71d4fae-5dec-22d6-fea5-00a0c91e6be4 + + + http://example.com/workspace/webdav/proposal.doc + + + +"#; + + assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + } +} diff --git a/aero-dav/src/error.rs b/aero-dav/src/error.rs new file mode 100644 index 0000000..78c6d6b --- /dev/null +++ b/aero-dav/src/error.rs @@ -0,0 +1,42 @@ +use quick_xml::events::attributes::AttrError; + +#[derive(Debug)] +pub enum ParsingError { + Recoverable, + MissingChild, + NamespacePrefixAlreadyUsed, + WrongToken, + TagNotFound, + InvalidValue, + Utf8Error(std::str::Utf8Error), + QuickXml(quick_xml::Error), + Chrono(chrono::format::ParseError), + Int(std::num::ParseIntError), + Eof +} +impl From for ParsingError { + fn from(value: AttrError) -> Self { + Self::QuickXml(value.into()) + } +} +impl From for ParsingError { + fn from(value: quick_xml::Error) -> Self { + Self::QuickXml(value) + } +} +impl From for ParsingError { + fn from(value: std::str::Utf8Error) -> Self { + Self::Utf8Error(value) + } +} +impl From for ParsingError { + fn from(value: chrono::format::ParseError) -> Self { + Self::Chrono(value) + } +} + +impl From for ParsingError { + fn from(value: std::num::ParseIntError) -> Self { + Self::Int(value) + } +} diff --git a/aero-dav/src/lib.rs b/aero-dav/src/lib.rs new file mode 100644 index 0000000..6bfbf62 --- /dev/null +++ b/aero-dav/src/lib.rs @@ -0,0 +1,25 @@ +#![feature(type_alias_impl_trait)] +#![feature(async_fn_in_trait)] +#![feature(async_closure)] +#![feature(trait_alias)] + +// utils +pub mod error; +pub mod xml; + +// webdav +pub mod types; +pub mod encoder; +pub mod decoder; + +// calendar +pub mod caltypes; +pub mod calencoder; +pub mod caldecoder; + +// wip +mod acltypes; +mod versioningtypes; + +// final type +pub mod realization; diff --git a/aero-dav/src/realization.rs b/aero-dav/src/realization.rs new file mode 100644 index 0000000..33a556e --- /dev/null +++ b/aero-dav/src/realization.rs @@ -0,0 +1,42 @@ +use super::types as dav; +use super::caltypes as cal; +use super::xml; +use super::error; + +#[derive(Debug, PartialEq)] +pub struct Disabled(()); +impl xml::QRead for Disabled { + async fn qread(xml: &mut xml::Reader) -> Result { + Err(error::ParsingError::Recoverable) + } +} +impl xml::QWrite for Disabled { + async fn qwrite(&self, xml: &mut xml::Writer) -> Result<(), quick_xml::Error> { + unreachable!(); + } +} + +/// The base WebDAV +/// +/// Any extension is kooh is disabled through an object we can't build +/// due to a private inner element. 
+#[derive(Debug, PartialEq)] +pub struct Core {} +impl dav::Extension for Core { + type Error = Disabled; + type Property = Disabled; + type PropertyRequest = Disabled; + type ResourceType = Disabled; +} + +// WebDAV with the base Calendar implementation (RFC4791) +#[derive(Debug, PartialEq)] +pub struct Calendar {} +impl dav::Extension for Calendar +{ + type Error = cal::Violation; + type Property = cal::Property; + type PropertyRequest = cal::PropertyRequest; + type ResourceType = cal::ResourceType; +} + diff --git a/aero-dav/src/types.rs b/aero-dav/src/types.rs new file mode 100644 index 0000000..2489c0a --- /dev/null +++ b/aero-dav/src/types.rs @@ -0,0 +1,949 @@ +#![allow(dead_code)] +use std::fmt::Debug; + +use chrono::{DateTime,FixedOffset}; +use super::xml; + +/// It's how we implement a DAV extension +/// (That's the dark magic part...) +pub trait Extension: std::fmt::Debug + PartialEq { + type Error: xml::Node; + type Property: xml::Node; + type PropertyRequest: xml::Node; + type ResourceType: xml::Node; +} + +/// 14.1. activelock XML Element +/// +/// Name: activelock +/// +/// Purpose: Describes a lock on a resource. +/// +#[derive(Debug, PartialEq)] +pub struct ActiveLock { + pub lockscope: LockScope, + pub locktype: LockType, + pub depth: Depth, + pub owner: Option, + pub timeout: Option, + pub locktoken: Option, + pub lockroot: LockRoot, +} + +/// 14.3 collection XML Element +/// +/// Name: collection +/// +/// Purpose: Identifies the associated resource as a collection. The +/// DAV:resourcetype property of a collection resource MUST contain +/// this element. It is normally empty but extensions may add sub- +/// elements. +/// +/// +#[derive(Debug, PartialEq)] +pub struct Collection{} + +/// 14.4 depth XML Element +/// +/// Name: depth +/// +/// Purpose: Used for representing depth values in XML content (e.g., +/// in lock information). +/// +/// Value: "0" | "1" | "infinity" +/// +/// +#[derive(Debug, PartialEq)] +pub enum Depth { + Zero, + One, + Infinity +} + +/// 14.5 error XML Element +/// +/// Name: error +/// +/// Purpose: Error responses, particularly 403 Forbidden and 409 +/// Conflict, sometimes need more information to indicate what went +/// wrong. In these cases, servers MAY return an XML response body +/// with a document element of 'error', containing child elements +/// identifying particular condition codes. +/// +/// Description: Contains at least one XML element, and MUST NOT +/// contain text or mixed content. Any element that is a child of the +/// 'error' element is considered to be a precondition or +/// postcondition code. Unrecognized elements MUST be ignored. +/// +/// +#[derive(Debug, PartialEq)] +pub struct Error(pub Vec>); +#[derive(Debug, PartialEq)] +pub enum Violation { + /// Name: lock-token-matches-request-uri + /// + /// Use with: 409 Conflict + /// + /// Purpose: (precondition) -- A request may include a Lock-Token header + /// to identify a lock for the UNLOCK method. However, if the + /// Request-URI does not fall within the scope of the lock identified + /// by the token, the server SHOULD use this error. The lock may have + /// a scope that does not include the Request-URI, or the lock could + /// have disappeared, or the token may be invalid. + LockTokenMatchesRequestUri, + + /// Name: lock-token-submitted (precondition) + /// + /// Use with: 423 Locked + /// + /// Purpose: The request could not succeed because a lock token should + /// have been submitted. 
This element, if present, MUST contain at + /// least one URL of a locked resource that prevented the request. In + /// cases of MOVE, COPY, and DELETE where collection locks are + /// involved, it can be difficult for the client to find out which + /// locked resource made the request fail -- but the server is only + /// responsible for returning one such locked resource. The server + /// MAY return every locked resource that prevented the request from + /// succeeding if it knows them all. + /// + /// + LockTokenSubmitted(Vec), + + /// Name: no-conflicting-lock (precondition) + /// + /// Use with: Typically 423 Locked + /// + /// Purpose: A LOCK request failed due the presence of an already + /// existing conflicting lock. Note that a lock can be in conflict + /// although the resource to which the request was directed is only + /// indirectly locked. In this case, the precondition code can be + /// used to inform the client about the resource that is the root of + /// the conflicting lock, avoiding a separate lookup of the + /// "lockdiscovery" property. + /// + /// + NoConflictingLock(Vec), + + /// Name: no-external-entities + /// + /// Use with: 403 Forbidden + /// + /// Purpose: (precondition) -- If the server rejects a client request + /// because the request body contains an external entity, the server + /// SHOULD use this error. + NoExternalEntities, + + /// Name: preserved-live-properties + /// + /// Use with: 409 Conflict + /// + /// Purpose: (postcondition) -- The server received an otherwise-valid + /// MOVE or COPY request, but cannot maintain the live properties with + /// the same behavior at the destination. It may be that the server + /// only supports some live properties in some parts of the + /// repository, or simply has an internal error. + PreservedLiveProperties, + + /// Name: propfind-finite-depth + /// + /// Use with: 403 Forbidden + /// + /// Purpose: (precondition) -- This server does not allow infinite-depth + /// PROPFIND requests on collections. + PropfindFiniteDepth, + + + /// Name: cannot-modify-protected-property + /// + /// Use with: 403 Forbidden + /// + /// Purpose: (precondition) -- The client attempted to set a protected + /// property in a PROPPATCH (such as DAV:getetag). See also + /// [RFC3253], Section 3.12. + CannotModifyProtectedProperty, + + /// Specific errors + Extension(E::Error), +} + +/// 14.6. exclusive XML Element +/// +/// Name: exclusive +/// +/// Purpose: Specifies an exclusive lock. +/// +/// +#[derive(Debug, PartialEq)] +pub struct Exclusive {} + +/// 14.7. href XML Element +/// +/// Name: href +/// +/// Purpose: MUST contain a URI or a relative reference. +/// +/// Description: There may be limits on the value of 'href' depending +/// on the context of its use. Refer to the specification text where +/// 'href' is used to see what limitations apply in each case. +/// +/// Value: Simple-ref +/// +/// +#[derive(Debug, PartialEq)] +pub struct Href(pub String); + + +/// 14.8. include XML Element +/// +/// Name: include +/// +/// Purpose: Any child element represents the name of a property to be +/// included in the PROPFIND response. All elements inside an +/// 'include' XML element MUST define properties related to the +/// resource, although possible property names are in no way limited +/// to those property names defined in this document or other +/// standards. This element MUST NOT contain text or mixed content. +/// +/// +#[derive(Debug, PartialEq)] +pub struct Include(pub Vec>); + +/// 14.9. 
location XML Element +/// +/// Name: location +/// +/// Purpose: HTTP defines the "Location" header (see [RFC2616], Section +/// 14.30) for use with some status codes (such as 201 and the 300 +/// series codes). When these codes are used inside a 'multistatus' +/// element, the 'location' element can be used to provide the +/// accompanying Location header value. +/// +/// Description: Contains a single href element with the same value +/// that would be used in a Location header. +/// +/// +#[derive(Debug, PartialEq)] +pub struct Location(pub Href); + +/// 14.10. lockentry XML Element +/// +/// Name: lockentry +/// +/// Purpose: Defines the types of locks that can be used with the +/// resource. +/// +/// +#[derive(Debug, PartialEq)] +pub struct LockEntry { + pub lockscope: LockScope, + pub locktype: LockType, +} + +/// 14.11. lockinfo XML Element +/// +/// Name: lockinfo +/// +/// Purpose: The 'lockinfo' XML element is used with a LOCK method to +/// specify the type of lock the client wishes to have created. +/// +/// +#[derive(Debug, PartialEq)] +pub struct LockInfo { + pub lockscope: LockScope, + pub locktype: LockType, + pub owner: Option, +} + +/// 14.12. lockroot XML Element +/// +/// Name: lockroot +/// +/// Purpose: Contains the root URL of the lock, which is the URL +/// through which the resource was addressed in the LOCK request. +/// +/// Description: The href element contains the root of the lock. The +/// server SHOULD include this in all DAV:lockdiscovery property +/// values and the response to LOCK requests. +/// +/// +#[derive(Debug, PartialEq)] +pub struct LockRoot(pub Href); + +/// 14.13. lockscope XML Element +/// +/// Name: lockscope +/// +/// Purpose: Specifies whether a lock is an exclusive lock, or a shared +/// lock. +/// +#[derive(Debug, PartialEq)] +pub enum LockScope { + Exclusive, + Shared +} + +/// 14.14. locktoken XML Element +/// +/// Name: locktoken +/// +/// Purpose: The lock token associated with a lock. +/// +/// Description: The href contains a single lock token URI, which +/// refers to the lock. +/// +/// +#[derive(Debug, PartialEq)] +pub struct LockToken(pub Href); + +/// 14.15. locktype XML Element +/// +/// Name: locktype +/// +/// Purpose: Specifies the access type of a lock. At present, this +/// specification only defines one lock type, the write lock. +/// +/// +#[derive(Debug, PartialEq)] +pub enum LockType { + /// 14.30. write XML Element + /// + /// Name: write + /// + /// Purpose: Specifies a write lock. + /// + /// + /// + Write +} + +/// 14.16. multistatus XML Element +/// +/// Name: multistatus +/// +/// Purpose: Contains multiple response messages. +/// +/// Description: The 'responsedescription' element at the top level is +/// used to provide a general message describing the overarching +/// nature of the response. If this value is available, an +/// application may use it instead of presenting the individual +/// response descriptions contained within the responses. +/// +/// +#[derive(Debug, PartialEq)] +pub struct Multistatus> { + pub responses: Vec>, + pub responsedescription: Option, +} + +/// 14.17. owner XML Element +/// +/// Name: owner +/// +/// Purpose: Holds client-supplied information about the creator of a +/// lock. +/// +/// Description: Allows a client to provide information sufficient for +/// either directly contacting a principal (such as a telephone number +/// or Email URI), or for discovering the principal (such as the URL +/// of a homepage) who created a lock. 
The value provided MUST be +/// treated as a dead property in terms of XML Information Item +/// preservation. The server MUST NOT alter the value unless the +/// owner value provided by the client is empty. For a certain amount +/// of interoperability between different client implementations, if +/// clients have URI-formatted contact information for the lock +/// creator suitable for user display, then clients SHOULD put those +/// URIs in 'href' child elements of the 'owner' element. +/// +/// Extensibility: MAY be extended with child elements, mixed content, +/// text content or attributes. +/// +/// +//@FIXME might need support for an extension +#[derive(Debug, PartialEq)] +pub enum Owner { + Txt(String), + Href(Href), + Unknown, +} + +/// 14.18. prop XML Element +/// +/// Name: prop +/// +/// Purpose: Contains properties related to a resource. +/// +/// Description: A generic container for properties defined on +/// resources. All elements inside a 'prop' XML element MUST define +/// properties related to the resource, although possible property +/// names are in no way limited to those property names defined in +/// this document or other standards. This element MUST NOT contain +/// text or mixed content. +/// +/// +#[derive(Debug, PartialEq)] +pub struct PropName(pub Vec>); + +#[derive(Debug, PartialEq)] +pub struct PropValue(pub Vec>); + +/// 14.19. propertyupdate XML Element +/// +/// Name: propertyupdate +/// +/// Purpose: Contains a request to alter the properties on a resource. +/// +/// Description: This XML element is a container for the information +/// required to modify the properties on the resource. +/// +/// +#[derive(Debug, PartialEq)] +pub struct PropertyUpdate(pub Vec>); + +#[derive(Debug, PartialEq)] +pub enum PropertyUpdateItem { + Remove(Remove), + Set(Set), +} + +/// 14.2 allprop XML Element +/// +/// Name: allprop +/// +/// Purpose: Specifies that all names and values of dead properties and +/// the live properties defined by this document existing on the +/// resource are to be returned. +/// +/// +/// +/// --- +/// +/// 14.21. propname XML Element +/// +/// Name: propname +/// +/// Purpose: Specifies that only a list of property names on the +/// resource is to be returned. +/// +/// +/// +/// --- +/// +/// 14.20. propfind XML Element +/// +/// Name: propfind +/// +/// Purpose: Specifies the properties to be returned from a PROPFIND +/// method. Four special elements are specified for use with +/// 'propfind': 'prop', 'allprop', 'include', and 'propname'. If +/// 'prop' is used inside 'propfind', it MUST NOT contain property +/// values. +/// +/// +#[derive(Debug, PartialEq)] +pub enum PropFind { + PropName, + AllProp(Option>), + Prop(PropName), +} + +/// 14.22 propstat XML Element +/// +/// Name: propstat +/// +/// Purpose: Groups together a prop and status element that is +/// associated with a particular 'href' element. +/// +/// Description: The propstat XML element MUST contain one prop XML +/// element and one status XML element. The contents of the prop XML +/// element MUST only list the names of properties to which the result +/// in the status element applies. The optional precondition/ +/// postcondition element and 'responsedescription' text also apply to +/// the properties named in 'prop'. +/// +/// +#[derive(Debug, PartialEq)] +pub struct PropStat> { + pub prop: N, + pub status: Status, + pub error: Option>, + pub responsedescription: Option, +} + +/// 14.23. 
remove XML Element +/// +/// Name: remove +/// +/// Purpose: Lists the properties to be removed from a resource. +/// +/// Description: Remove instructs that the properties specified in prop +/// should be removed. Specifying the removal of a property that does +/// not exist is not an error. All the XML elements in a 'prop' XML +/// element inside of a 'remove' XML element MUST be empty, as only +/// the names of properties to be removed are required. +/// +/// +#[derive(Debug, PartialEq)] +pub struct Remove(pub PropName); + +/// 14.24. response XML Element +/// +/// Name: response +/// +/// Purpose: Holds a single response describing the effect of a method +/// on resource and/or its properties. +/// +/// Description: The 'href' element contains an HTTP URL pointing to a +/// WebDAV resource when used in the 'response' container. A +/// particular 'href' value MUST NOT appear more than once as the +/// child of a 'response' XML element under a 'multistatus' XML +/// element. This requirement is necessary in order to keep +/// processing costs for a response to linear time. Essentially, this +/// prevents having to search in order to group together all the +/// responses by 'href'. There are, however, no requirements +/// regarding ordering based on 'href' values. The optional +/// precondition/postcondition element and 'responsedescription' text +/// can provide additional information about this resource relative to +/// the request or result. +/// +/// +/// +/// --- rewritten as --- +/// +#[derive(Debug, PartialEq)] +pub enum StatusOrPropstat> { + // One status, multiple hrefs... + Status(Vec, Status), + // A single href, multiple properties... + PropStat(Href, Vec>), +} + +#[derive(Debug, PartialEq)] +pub struct Response> { + pub status_or_propstat: StatusOrPropstat, + pub error: Option>, + pub responsedescription: Option, + pub location: Option, +} + +/// 14.25. responsedescription XML Element +/// +/// Name: responsedescription +/// +/// Purpose: Contains information about a status response within a +/// Multi-Status. +/// +/// Description: Provides information suitable to be presented to a +/// user. +/// +/// +#[derive(Debug, PartialEq)] +pub struct ResponseDescription(pub String); + +/// 14.26. set XML Element +/// +/// Name: set +/// +/// Purpose: Lists the property values to be set for a resource. +/// +/// Description: The 'set' element MUST contain only a 'prop' element. +/// The elements contained by the 'prop' element inside the 'set' +/// element MUST specify the name and value of properties that are set +/// on the resource identified by Request-URI. If a property already +/// exists, then its value is replaced. Language tagging information +/// appearing in the scope of the 'prop' element (in the "xml:lang" +/// attribute, if present) MUST be persistently stored along with the +/// property, and MUST be subsequently retrievable using PROPFIND. +/// +/// +#[derive(Debug, PartialEq)] +pub struct Set(pub PropValue); + +/// 14.27. shared XML Element +/// +/// Name: shared +/// +/// Purpose: Specifies a shared lock. +/// +/// +/// +#[derive(Debug, PartialEq)] +pub struct Shared {} + + +/// 14.28. status XML Element +/// +/// Name: status +/// +/// Purpose: Holds a single HTTP status-line. +/// +/// Value: status-line (defined in Section 6.1 of [RFC2616]) +/// +/// +//@FIXME: Better typing is possible with an enum for example +#[derive(Debug, PartialEq)] +pub struct Status(pub http::status::StatusCode); + +/// 14.29. 
timeout XML Element +/// +/// Name: timeout +/// +/// Purpose: The number of seconds remaining before a lock expires. +/// +/// Value: TimeType (defined in Section 10.7) +/// +/// +/// +/// +/// TimeOut = "Timeout" ":" 1#TimeType +/// TimeType = ("Second-" DAVTimeOutVal | "Infinite") +/// ; No LWS allowed within TimeType +/// DAVTimeOutVal = 1*DIGIT +/// +/// Clients MAY include Timeout request headers in their LOCK requests. +/// However, the server is not required to honor or even consider these +/// requests. Clients MUST NOT submit a Timeout request header with any +/// method other than a LOCK method. +/// +/// The "Second" TimeType specifies the number of seconds that will +/// elapse between granting of the lock at the server, and the automatic +/// removal of the lock. The timeout value for TimeType "Second" MUST +/// NOT be greater than 2^32-1. +#[derive(Debug, PartialEq)] +pub enum Timeout { + Seconds(u32), + Infinite, +} + + +/// 15. DAV Properties +/// +/// For DAV properties, the name of the property is also the same as the +/// name of the XML element that contains its value. In the section +/// below, the final line of each section gives the element type +/// declaration using the format defined in [REC-XML]. The "Value" +/// field, where present, specifies further restrictions on the allowable +/// contents of the XML element using BNF (i.e., to further restrict the +/// values of a PCDATA element). +/// +/// A protected property is one that cannot be changed with a PROPPATCH +/// request. There may be other requests that would result in a change +/// to a protected property (as when a LOCK request affects the value of +/// DAV:lockdiscovery). Note that a given property could be protected on +/// one type of resource, but not protected on another type of resource. +/// +/// A computed property is one with a value defined in terms of a +/// computation (based on the content and other properties of that +/// resource, or even of some other resource). A computed property is +/// always a protected property. +/// +/// COPY and MOVE behavior refers to local COPY and MOVE operations. +/// +/// For properties defined based on HTTP GET response headers (DAV:get*), +/// the header value could include LWS as defined in [RFC2616], Section +/// 4.2. Server implementors SHOULD strip LWS from these values before +/// using as WebDAV property values. +#[derive(Debug, PartialEq)] +pub enum PropertyRequest { + CreationDate, + DisplayName, + GetContentLanguage, + GetContentLength, + GetContentType, + GetEtag, + GetLastModified, + LockDiscovery, + ResourceType, + SupportedLock, + Extension(E::PropertyRequest), +} + +#[derive(Debug, PartialEq)] +pub enum Property { + /// 15.1. creationdate Property + /// + /// Name: creationdate + /// + /// Purpose: Records the time and date the resource was created. + /// + /// Value: date-time (defined in [RFC3339], see the ABNF in Section + /// 5.6.) + /// + /// Protected: MAY be protected. Some servers allow DAV:creationdate + /// to be changed to reflect the time the document was created if that + /// is more meaningful to the user (rather than the time it was + /// uploaded). Thus, clients SHOULD NOT use this property in + /// synchronization logic (use DAV:getetag instead). + /// + /// COPY/MOVE behavior: This property value SHOULD be kept during a + /// MOVE operation, but is normally re-initialized when a resource is + /// created with a COPY. It should not be set in a COPY. 
+ /// + /// Description: The DAV:creationdate property SHOULD be defined on all + /// DAV compliant resources. If present, it contains a timestamp of + /// the moment when the resource was created. Servers that are + /// incapable of persistently recording the creation date SHOULD + /// instead leave it undefined (i.e. report "Not Found"). + /// + /// + CreationDate(DateTime), + + /// 15.2. displayname Property + /// + /// Name: displayname + /// + /// Purpose: Provides a name for the resource that is suitable for + /// presentation to a user. + /// + /// Value: Any text. + /// + /// Protected: SHOULD NOT be protected. Note that servers implementing + /// [RFC2518] might have made this a protected property as this is a + /// new requirement. + /// + /// COPY/MOVE behavior: This property value SHOULD be preserved in COPY + /// and MOVE operations. + /// + /// Description: Contains a description of the resource that is + /// suitable for presentation to a user. This property is defined on + /// the resource, and hence SHOULD have the same value independent of + /// the Request-URI used to retrieve it (thus, computing this property + /// based on the Request-URI is deprecated). While generic clients + /// might display the property value to end users, client UI designers + /// must understand that the method for identifying resources is still + /// the URL. Changes to DAV:displayname do not issue moves or copies + /// to the server, but simply change a piece of meta-data on the + /// individual resource. Two resources can have the same DAV: + /// displayname value even within the same collection. + /// + /// + DisplayName(String), + + + /// 15.3. getcontentlanguage Property + /// + /// Name: getcontentlanguage + /// + /// Purpose: Contains the Content-Language header value (from Section + /// 14.12 of [RFC2616]) as it would be returned by a GET without + /// accept headers. + /// + /// Value: language-tag (language-tag is defined in Section 3.10 of + /// [RFC2616]) + /// + /// Protected: SHOULD NOT be protected, so that clients can reset the + /// language. Note that servers implementing [RFC2518] might have + /// made this a protected property as this is a new requirement. + /// + /// COPY/MOVE behavior: This property value SHOULD be preserved in COPY + /// and MOVE operations. + /// + /// Description: The DAV:getcontentlanguage property MUST be defined on + /// any DAV-compliant resource that returns the Content-Language + /// header on a GET. + /// + /// + GetContentLanguage(String), + + /// 15.4. getcontentlength Property + /// + /// Name: getcontentlength + /// + /// Purpose: Contains the Content-Length header returned by a GET + /// without accept headers. + /// + /// Value: See Section 14.13 of [RFC2616]. + /// + /// Protected: This property is computed, therefore protected. + /// + /// Description: The DAV:getcontentlength property MUST be defined on + /// any DAV-compliant resource that returns the Content-Length header + /// in response to a GET. + /// + /// COPY/MOVE behavior: This property value is dependent on the size of + /// the destination resource, not the value of the property on the + /// source resource. + /// + /// + GetContentLength(u64), + + /// 15.5. getcontenttype Property + /// + /// Name: getcontenttype + /// + /// Purpose: Contains the Content-Type header value (from Section 14.17 + /// of [RFC2616]) as it would be returned by a GET without accept + /// headers. 
+ /// + /// Value: media-type (defined in Section 3.7 of [RFC2616]) + /// + /// Protected: Potentially protected if the server prefers to assign + /// content types on its own (see also discussion in Section 9.7.1). + /// + /// COPY/MOVE behavior: This property value SHOULD be preserved in COPY + /// and MOVE operations. + /// + /// Description: This property MUST be defined on any DAV-compliant + /// resource that returns the Content-Type header in response to a + /// GET. + /// + /// + GetContentType(String), + + /// 15.6. getetag Property + /// + /// Name: getetag + /// + /// Purpose: Contains the ETag header value (from Section 14.19 of + /// [RFC2616]) as it would be returned by a GET without accept + /// headers. + /// + /// Value: entity-tag (defined in Section 3.11 of [RFC2616]) + /// + /// Protected: MUST be protected because this value is created and + /// controlled by the server. + /// + /// COPY/MOVE behavior: This property value is dependent on the final + /// state of the destination resource, not the value of the property + /// on the source resource. Also note the considerations in + /// Section 8.8. + /// + /// Description: The getetag property MUST be defined on any DAV- + /// compliant resource that returns the Etag header. Refer to Section + /// 3.11 of RFC 2616 for a complete definition of the semantics of an + /// ETag, and to Section 8.6 for a discussion of ETags in WebDAV. + /// + /// + GetEtag(String), + + /// 15.7. getlastmodified Property + /// + /// Name: getlastmodified + /// + /// Purpose: Contains the Last-Modified header value (from Section + /// 14.29 of [RFC2616]) as it would be returned by a GET method + /// without accept headers. + /// + /// Value: rfc1123-date (defined in Section 3.3.1 of [RFC2616]) + /// + /// Protected: SHOULD be protected because some clients may rely on the + /// value for appropriate caching behavior, or on the value of the + /// Last-Modified header to which this property is linked. + /// + /// COPY/MOVE behavior: This property value is dependent on the last + /// modified date of the destination resource, not the value of the + /// property on the source resource. Note that some server + /// implementations use the file system date modified value for the + /// DAV:getlastmodified value, and this can be preserved in a MOVE + /// even when the HTTP Last-Modified value SHOULD change. Note that + /// since [RFC2616] requires clients to use ETags where provided, a + /// server implementing ETags can count on clients using a much better + /// mechanism than modification dates for offline synchronization or + /// cache control. Also note the considerations in Section 8.8. + /// + /// Description: The last-modified date on a resource SHOULD only + /// reflect changes in the body (the GET responses) of the resource. + /// A change in a property only SHOULD NOT cause the last-modified + /// date to change, because clients MAY rely on the last-modified date + /// to know when to overwrite the existing body. The DAV: + /// getlastmodified property MUST be defined on any DAV-compliant + /// resource that returns the Last-Modified header in response to a + /// GET. + /// + /// + GetLastModified(DateTime), + + /// 15.8. lockdiscovery Property + /// + /// Name: lockdiscovery + /// + /// Purpose: Describes the active locks on a resource + /// + /// Protected: MUST be protected. Clients change the list of locks + /// through LOCK and UNLOCK, not through PROPPATCH. 
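DAV:getlastmodified carries an rfc1123-date, i.e. the classic HTTP date format, always in GMT. A minimal sketch with chrono (already a dependency of this crate; the helper name is ours and the fixed timestamp is only there to make the example checkable):

    use chrono::{DateTime, TimeZone, Utc};

    // Render a timestamp the way Last-Modified / DAV:getlastmodified expects it.
    fn to_rfc1123(dt: &DateTime<Utc>) -> String {
        dt.format("%a, %d %b %Y %H:%M:%S GMT").to_string()
    }

    fn main() {
        let dt = Utc.with_ymd_and_hms(2024, 2, 26, 22, 59, 29).unwrap();
        assert_eq!(to_rfc1123(&dt), "Mon, 26 Feb 2024 22:59:29 GMT");
    }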
+ /// + /// COPY/MOVE behavior: The value of this property depends on the lock + /// state of the destination, not on the locks of the source resource. + /// Recall that locks are not moved in a MOVE operation. + /// + /// Description: Returns a listing of who has a lock, what type of lock + /// he has, the timeout type and the time remaining on the timeout, + /// and the associated lock token. Owner information MAY be omitted + /// if it is considered sensitive. If there are no locks, but the + /// server supports locks, the property will be present but contain + /// zero 'activelock' elements. If there are one or more locks, an + /// 'activelock' element appears for each lock on the resource. This + /// property is NOT lockable with respect to write locks (Section 7). + /// + /// + LockDiscovery(Vec), + + + /// 15.9. resourcetype Property + /// + /// Name: resourcetype + /// + /// Purpose: Specifies the nature of the resource. + /// + /// Protected: SHOULD be protected. Resource type is generally decided + /// through the operation creating the resource (MKCOL vs PUT), not by + /// PROPPATCH. + /// + /// COPY/MOVE behavior: Generally a COPY/MOVE of a resource results in + /// the same type of resource at the destination. + /// + /// Description: MUST be defined on all DAV-compliant resources. Each + /// child element identifies a specific type the resource belongs to, + /// such as 'collection', which is the only resource type defined by + /// this specification (see Section 14.3). If the element contains + /// the 'collection' child element plus additional unrecognized + /// elements, it should generally be treated as a collection. If the + /// element contains no recognized child elements, it should be + /// treated as a non-collection resource. The default value is empty. + /// This element MUST NOT contain text or mixed content. Any custom + /// child element is considered to be an identifier for a resource + /// type. + /// + /// Example: (fictional example to show extensibility) + /// + /// + /// + /// + /// + ResourceType(Vec>), + + /// 15.10. supportedlock Property + /// + /// Name: supportedlock + /// + /// Purpose: To provide a listing of the lock capabilities supported by + /// the resource. + /// + /// Protected: MUST be protected. Servers, not clients, determine what + /// lock mechanisms are supported. + /// COPY/MOVE behavior: This property value is dependent on the kind of + /// locks supported at the destination, not on the value of the + /// property at the source resource. Servers attempting to COPY to a + /// destination should not attempt to set this property at the + /// destination. + /// + /// Description: Returns a listing of the combinations of scope and + /// access types that may be specified in a lock request on the + /// resource. Note that the actual contents are themselves controlled + /// by access controls, so a server is not required to provide + /// information the client is not authorized to see. This property is + /// NOT lockable with respect to write locks (Section 7). 
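In practice the DAV:resourcetype property described above takes one of two shapes, which a trivial sketch makes concrete (the helper is ours; custom extension elements like the fictional example in the RFC text are ignored here):

    // Collections advertise <D:collection/>; plain resources serialize as an empty element.
    fn resourcetype_xml(is_collection: bool) -> &'static str {
        if is_collection {
            "<D:resourcetype><D:collection/></D:resourcetype>"
        } else {
            "<D:resourcetype/>"
        }
    }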
+ /// + /// + SupportedLock(Vec), + + /// Any extension + Extension(E::Property), +} + +#[derive(Debug, PartialEq)] +pub enum ResourceType { + Collection, + Extension(E::ResourceType), +} diff --git a/aero-dav/src/versioningtypes.rs b/aero-dav/src/versioningtypes.rs new file mode 100644 index 0000000..6c1c204 --- /dev/null +++ b/aero-dav/src/versioningtypes.rs @@ -0,0 +1,3 @@ +//@FIXME required for a full DAV implementation +// See section 7.1 of the CalDAV RFC +// It seems it's mainly due to the fact that the REPORT method is re-used. diff --git a/aero-dav/src/xml.rs b/aero-dav/src/xml.rs new file mode 100644 index 0000000..98037ac --- /dev/null +++ b/aero-dav/src/xml.rs @@ -0,0 +1,274 @@ +use futures::Future; +use quick_xml::events::{Event, BytesStart}; +use quick_xml::name::ResolveResult; +use quick_xml::reader::NsReader; +use tokio::io::{AsyncWrite, AsyncBufRead}; + +use super::error::ParsingError; + +// Constants +pub const DAV_URN: &[u8] = b"DAV:"; +pub const CAL_URN: &[u8] = b"urn:ietf:params:xml:ns:caldav"; +pub const CARD_URN: &[u8] = b"urn:ietf:params:xml:ns:carddav"; + +// Async traits +pub trait IWrite = AsyncWrite + Unpin; +pub trait IRead = AsyncBufRead + Unpin; + +// Serialization/Deserialization traits +pub trait QWrite { + fn qwrite(&self, xml: &mut Writer) -> impl Future>; +} +pub trait QRead { + fn qread(xml: &mut Reader) -> impl Future>; +} + +// The representation of an XML node in Rust +pub trait Node = QRead + QWrite + std::fmt::Debug + PartialEq; + +// --------------- + +/// Transform a Rust object into an XML stream of characters +pub struct Writer { + pub q: quick_xml::writer::Writer, + pub ns_to_apply: Vec<(String, String)>, +} +impl Writer { + pub fn create_dav_element(&mut self, name: &str) -> BytesStart<'static> { + self.create_ns_element("D", name) + } + pub fn create_cal_element(&mut self, name: &str) -> BytesStart<'static> { + self.create_ns_element("C", name) + } + + fn create_ns_element(&mut self, ns: &str, name: &str) -> BytesStart<'static> { + let mut start = BytesStart::new(format!("{}:{}", ns, name)); + if !self.ns_to_apply.is_empty() { + start.extend_attributes(self.ns_to_apply.iter().map(|(k, n)| (k.as_str(), n.as_str()))); + self.ns_to_apply.clear() + } + start + } +} + +/// Transform an XML stream of characters into a Rust object +pub struct Reader { + pub rdr: NsReader, + cur: Event<'static>, + parents: Vec>, + buf: Vec, +} +impl Reader { + pub async fn new(mut rdr: NsReader) -> Result { + let mut buf: Vec = vec![]; + let cur = rdr.read_event_into_async(&mut buf).await?.into_owned(); + let parents = vec![]; + buf.clear(); + Ok(Self { cur, parents, rdr, buf }) + } + + /// read one more tag + /// do not expose it publicly + async fn next(&mut self) -> Result, ParsingError> { + let evt = self.rdr.read_event_into_async(&mut self.buf).await?.into_owned(); + self.buf.clear(); + let old_evt = std::mem::replace(&mut self.cur, evt); + Ok(old_evt) + } + + /// skip a node at current level + /// I would like to make this one private but not ready + pub async fn skip(&mut self) -> Result, ParsingError> { + //println!("skipping inside node {:?}", self.parents.last()); + match &self.cur { + Event::Start(b) => { + let _span = self.rdr.read_to_end_into_async(b.to_end().name(), &mut self.buf).await?; + self.next().await + }, + Event::End(_) => Err(ParsingError::WrongToken), + Event::Eof => Err(ParsingError::Eof), + _ => self.next().await, + } + } + + /// check if this is the desired tag + fn is_tag(&self, ns: &[u8], key: &str) -> bool { + let qname = match 
self.peek() { + Event::Start(bs) | Event::Empty(bs) => bs.name(), + Event::End(be) => be.name(), + _ => return false, + }; + + let (extr_ns, local) = self.rdr.resolve_element(qname); + + if local.into_inner() != key.as_bytes() { + return false + } + + match extr_ns { + ResolveResult::Bound(v) => v.into_inner() == ns, + _ => false, + } + } + + fn parent_has_child(&self) -> bool { + matches!(self.parents.last(), Some(Event::Start(_)) | None) + } + + fn ensure_parent_has_child(&self) -> Result<(), ParsingError> { + match self.parent_has_child() { + true => Ok(()), + false => Err(ParsingError::Recoverable), + } + } + + pub fn peek(&self) -> &Event<'static> { + &self.cur + } + + // NEW API + pub async fn tag_string(&mut self) -> Result { + self.ensure_parent_has_child()?; + + let mut acc = String::new(); + loop { + match self.peek() { + Event::CData(unescaped) => { + acc.push_str(std::str::from_utf8(unescaped.as_ref())?); + self.next().await? + }, + Event::Text(escaped) => { + acc.push_str(escaped.unescape()?.as_ref()); + self.next().await? + } + Event::End(_) | Event::Start(_) | Event::Empty(_) => return Ok(acc), + _ => self.next().await?, + }; + } + } + + pub async fn maybe_read>(&mut self, t: &mut Option, dirty: &mut bool) -> Result<(), ParsingError> { + if !self.parent_has_child() { + return Ok(()) + } + + match N::qread(self).await { + Ok(v) => { + *t = Some(v); + *dirty = true; + Ok(()) + }, + Err(ParsingError::Recoverable) => Ok(()), + Err(e) => Err(e), + } + } + + pub async fn maybe_push>(&mut self, t: &mut Vec, dirty: &mut bool) -> Result<(), ParsingError> { + if !self.parent_has_child() { + return Ok(()) + } + + match N::qread(self).await { + Ok(v) => { + t.push(v); + *dirty = true; + Ok(()) + }, + Err(ParsingError::Recoverable) => Ok(()), + Err(e) => Err(e), + } + } + + pub async fn find>(&mut self) -> Result { + self.ensure_parent_has_child()?; + + loop { + // Try parse + match N::qread(self).await { + Err(ParsingError::Recoverable) => (), + otherwise => return otherwise, + } + + // If recovered, skip the element + self.skip().await?; + } + } + + pub async fn maybe_find>(&mut self) -> Result, ParsingError> { + self.ensure_parent_has_child()?; + + loop { + // Try parse + match N::qread(self).await { + Err(ParsingError::Recoverable) => (), + otherwise => return otherwise.map(Some), + } + + match self.peek() { + Event::End(_) => return Ok(None), + _ => self.skip().await?, + }; + } + } + + pub async fn collect>(&mut self) -> Result, ParsingError> { + self.ensure_parent_has_child()?; + let mut acc = Vec::new(); + + loop { + match N::qread(self).await { + Err(ParsingError::Recoverable) => match self.peek() { + Event::End(_) => return Ok(acc), + _ => { + self.skip().await?; + }, + }, + Ok(v) => acc.push(v), + Err(e) => return Err(e), + } + } + } + + pub async fn open(&mut self, ns: &[u8], key: &str) -> Result, ParsingError> { + let evt = match self.peek() { + Event::Empty(_) if self.is_tag(ns, key) => self.cur.clone(), + Event::Start(_) if self.is_tag(ns, key) => self.next().await?, + _ => return Err(ParsingError::Recoverable), + }; + + //println!("open tag {:?}", evt); + self.parents.push(evt.clone()); + Ok(evt) + } + + pub async fn maybe_open(&mut self, ns: &[u8], key: &str) -> Result>, ParsingError> { + match self.open(ns, key).await { + Ok(v) => Ok(Some(v)), + Err(ParsingError::Recoverable) => Ok(None), + Err(e) => Err(e), + } + } + + // find stop tag + pub async fn close(&mut self) -> Result, ParsingError> { + //println!("close tag {:?}", self.parents.last()); + + // Handle the empty 
case + if !self.parent_has_child() { + self.parents.pop(); + return self.next().await + } + + // Handle the start/end case + loop { + match self.peek() { + Event::End(_) => { + self.parents.pop(); + return self.next().await + }, + _ => self.skip().await?, + }; + } + } +} + diff --git a/aero-proto/dav.rs b/aero-proto/dav.rs new file mode 100644 index 0000000..fa2023a --- /dev/null +++ b/aero-proto/dav.rs @@ -0,0 +1,145 @@ +use std::net::SocketAddr; + +use anyhow::{anyhow, Result}; +use base64::Engine; +use hyper::service::service_fn; +use hyper::{Request, Response, body::Bytes}; +use hyper::server::conn::http1 as http; +use hyper_util::rt::TokioIo; +use http_body_util::Full; +use futures::stream::{FuturesUnordered, StreamExt}; +use tokio::net::TcpListener; +use tokio::sync::watch; + +use crate::config::DavUnsecureConfig; +use crate::login::ArcLoginProvider; +use crate::user::User; + +pub struct Server { + bind_addr: SocketAddr, + login_provider: ArcLoginProvider, +} + +pub fn new_unsecure(config: DavUnsecureConfig, login: ArcLoginProvider) -> Server { + Server { + bind_addr: config.bind_addr, + login_provider: login, + } +} + +impl Server { + pub async fn run(self: Self, mut must_exit: watch::Receiver) -> Result<()> { + let tcp = TcpListener::bind(self.bind_addr).await?; + tracing::info!("DAV server listening on {:#}", self.bind_addr); + + let mut connections = FuturesUnordered::new(); + while !*must_exit.borrow() { + let wait_conn_finished = async { + if connections.is_empty() { + futures::future::pending().await + } else { + connections.next().await + } + }; + let (socket, remote_addr) = tokio::select! { + a = tcp.accept() => a?, + _ = wait_conn_finished => continue, + _ = must_exit.changed() => continue, + }; + tracing::info!("Accepted connection from {}", remote_addr); + let stream = TokioIo::new(socket); + let login = self.login_provider.clone(); + let conn = tokio::spawn(async move { + //@FIXME should create a generic "public web" server on which "routers" could be + //abitrarily bound + //@FIXME replace with a handler supporting http2 and TLS + match http::Builder::new().serve_connection(stream, service_fn(|req: Request| { + let login = login.clone(); + async move { + auth(login, req).await + } + })).await { + Err(e) => tracing::warn!(err=?e, "connection failed"), + Ok(()) => tracing::trace!("connection terminated with success"), + } + }); + connections.push(conn); + } + drop(tcp); + + tracing::info!("Server shutting down, draining remaining connections..."); + while connections.next().await.is_some() {} + + Ok(()) + } +} + +//@FIXME We should not support only BasicAuth +async fn auth( + login: ArcLoginProvider, + req: Request, +) -> Result>> { + + let auth_val = match req.headers().get("Authorization") { + Some(hv) => hv.to_str()?, + None => return Ok(Response::builder() + .status(401) + .body(Full::new(Bytes::from("Missing Authorization field")))?), + }; + + let b64_creds_maybe_padded = match auth_val.split_once(" ") { + Some(("Basic", b64)) => b64, + _ => return Ok(Response::builder() + .status(400) + .body(Full::new(Bytes::from("Unsupported Authorization field")))?), + }; + + // base64urlencoded may have trailing equals, base64urlsafe has not + // theoretically authorization is padded but "be liberal in what you accept" + let b64_creds_clean = b64_creds_maybe_padded.trim_end_matches('='); + + // Decode base64 + let creds = base64::engine::general_purpose::STANDARD_NO_PAD.decode(b64_creds_clean)?; + let str_creds = std::str::from_utf8(&creds)?; + + // Split username and password + 
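A note on the padding handling just above: Authorization: Basic values are normally padded base64, so trailing '=' is stripped before decoding with the NO_PAD engine, which then accepts both padded and unpadded input. A self-contained sketch of the same idea (the decode_basic helper is ours, not part of this patch):

    use base64::Engine;

    // Decode "Basic" credentials, tolerating both padded and unpadded base64.
    fn decode_basic(b64: &str) -> Option<(String, String)> {
        let raw = base64::engine::general_purpose::STANDARD_NO_PAD
            .decode(b64.trim_end_matches('='))
            .ok()?;
        let text = String::from_utf8(raw).ok()?;
        let (user, pass) = text.split_once(':')?;
        Some((user.to_string(), pass.to_string()))
    }

    // decode_basic("YWxpY2U6aHVudGVyMg==") == Some(("alice".into(), "hunter2".into()))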
let (username, password) = str_creds
+        .split_once(':')
+        .ok_or(anyhow!("Missing colon in Authorization, can't split decoded value into a username/password pair"))?;
+
+    // Call login provider
+    let creds = match login.login(username, password).await {
+        Ok(c) => c,
+        Err(_) => return Ok(Response::builder()
+            .status(401)
+            .body(Full::new(Bytes::from("Wrong credentials")))?),
+    };
+
+    // Build a user
+    let user = User::new(username.into(), creds).await?;
+
+    // Call router with user
+    router(user, req).await
+}
+
+async fn router(user: std::sync::Arc<User>, req: Request<hyper::body::Incoming>) -> Result<Response<Full<Bytes>>> {
+    let path_segments: Vec<_> = req.uri().path().split("/").filter(|s| *s != "").collect();
+    match path_segments.as_slice() {
+        [] => tracing::info!("root"),
+        [ username, ..] if *username != user.username => return Ok(Response::builder()
+            .status(403)
+            .body(Full::new(Bytes::from("Accessing other user resources is not allowed")))?),
+        [ _ ] => tracing::info!("user home"),
+        [ _, "calendar" ] => tracing::info!("user calendars"),
+        [ _, "calendar", colname ] => tracing::info!(name=colname, "selected calendar"),
+        [ _, "calendar", colname, member ] => tracing::info!(name=colname, obj=member, "selected event"),
+        _ => return Ok(Response::builder()
+            .status(404)
+            .body(Full::new(Bytes::from("Resource not found")))?),
+    }
+    Ok(Response::new(Full::new(Bytes::from("Hello World!"))))
+}
+
+async fn collections(user: std::sync::Arc<User>, req: Request<hyper::body::Incoming>) -> Result<Response<Full<Bytes>>> {
+    unimplemented!();
+}
diff --git a/aero-proto/imap/attributes.rs b/aero-proto/imap/attributes.rs
new file mode 100644
index 0000000..89446a8
--- /dev/null
+++ b/aero-proto/imap/attributes.rs
@@ -0,0 +1,77 @@
+use imap_codec::imap_types::command::FetchModifier;
+use imap_codec::imap_types::fetch::{MacroOrMessageDataItemNames, MessageDataItemName, Section};
+
+/// Internal decisions based on fetched attributes
+/// passed by the client
+
+pub struct AttributesProxy {
+    pub attrs: Vec<MessageDataItemName<'static>>,
+}
+impl AttributesProxy {
+    pub fn new(
+        attrs: &MacroOrMessageDataItemNames<'static>,
+        modifiers: &[FetchModifier],
+        is_uid_fetch: bool,
+    ) -> Self {
+        // Expand macros
+        let mut fetch_attrs = match attrs {
+            MacroOrMessageDataItemNames::Macro(m) => {
+                use imap_codec::imap_types::fetch::Macro;
+                use MessageDataItemName::*;
+                match m {
+                    Macro::All => vec![Flags, InternalDate, Rfc822Size, Envelope],
+                    Macro::Fast => vec![Flags, InternalDate, Rfc822Size],
+                    Macro::Full => vec![Flags, InternalDate, Rfc822Size, Envelope, Body],
+                    _ => {
+                        tracing::error!("unimplemented macro");
+                        vec![]
+                    }
+                }
+            }
+            MacroOrMessageDataItemNames::MessageDataItemNames(a) => a.clone(),
+        };
+
+        // Handle uids
+        if is_uid_fetch && !fetch_attrs.contains(&MessageDataItemName::Uid) {
+            fetch_attrs.push(MessageDataItemName::Uid);
+        }
+
+        // Handle inferred MODSEQ tag
+        let is_changed_since = modifiers
+            .iter()
+            .any(|m| matches!(m, FetchModifier::ChangedSince(..)));
+        if is_changed_since && !fetch_attrs.contains(&MessageDataItemName::ModSeq) {
+            fetch_attrs.push(MessageDataItemName::ModSeq);
+        }
+
+        Self { attrs: fetch_attrs }
+    }
+
+    pub fn is_enabling_condstore(&self) -> bool {
+        self.attrs
+            .iter()
+            .any(|x| matches!(x, MessageDataItemName::ModSeq))
+    }
+
+    pub fn need_body(&self) -> bool {
+        self.attrs.iter().any(|x| match x {
+            MessageDataItemName::Body
+            | MessageDataItemName::Rfc822
+            | MessageDataItemName::Rfc822Text
+            | MessageDataItemName::BodyStructure => true,
+
+            MessageDataItemName::BodyExt {
+                section: Some(section),
+                partial: _,
+                peek: _,
+            } => match section {
Section::Header(None) + | Section::HeaderFields(None, _) + | Section::HeaderFieldsNot(None, _) => false, + _ => true, + }, + MessageDataItemName::BodyExt { .. } => true, + _ => false, + }) + } +} diff --git a/aero-proto/imap/capability.rs b/aero-proto/imap/capability.rs new file mode 100644 index 0000000..c76b51c --- /dev/null +++ b/aero-proto/imap/capability.rs @@ -0,0 +1,159 @@ +use imap_codec::imap_types::command::{FetchModifier, SelectExamineModifier, StoreModifier}; +use imap_codec::imap_types::core::Vec1; +use imap_codec::imap_types::extensions::enable::{CapabilityEnable, Utf8Kind}; +use imap_codec::imap_types::response::Capability; +use std::collections::HashSet; + +use crate::imap::attributes::AttributesProxy; + +fn capability_unselect() -> Capability<'static> { + Capability::try_from("UNSELECT").unwrap() +} + +fn capability_condstore() -> Capability<'static> { + Capability::try_from("CONDSTORE").unwrap() +} + +fn capability_uidplus() -> Capability<'static> { + Capability::try_from("UIDPLUS").unwrap() +} + +fn capability_liststatus() -> Capability<'static> { + Capability::try_from("LIST-STATUS").unwrap() +} + +/* +fn capability_qresync() -> Capability<'static> { + Capability::try_from("QRESYNC").unwrap() +} +*/ + +#[derive(Debug, Clone)] +pub struct ServerCapability(HashSet>); + +impl Default for ServerCapability { + fn default() -> Self { + Self(HashSet::from([ + Capability::Imap4Rev1, + Capability::Enable, + Capability::Move, + Capability::LiteralPlus, + Capability::Idle, + capability_unselect(), + capability_condstore(), + capability_uidplus(), + capability_liststatus(), + //capability_qresync(), + ])) + } +} + +impl ServerCapability { + pub fn to_vec(&self) -> Vec1> { + self.0 + .iter() + .map(|v| v.clone()) + .collect::>() + .try_into() + .unwrap() + } + + #[allow(dead_code)] + pub fn support(&self, cap: &Capability<'static>) -> bool { + self.0.contains(cap) + } +} + +#[derive(Clone)] +pub enum ClientStatus { + NotSupportedByServer, + Disabled, + Enabled, +} +impl ClientStatus { + pub fn is_enabled(&self) -> bool { + matches!(self, Self::Enabled) + } + + pub fn enable(&self) -> Self { + match self { + Self::Disabled => Self::Enabled, + other => other.clone(), + } + } +} + +pub struct ClientCapability { + pub condstore: ClientStatus, + pub utf8kind: Option, +} + +impl ClientCapability { + pub fn new(sc: &ServerCapability) -> Self { + Self { + condstore: match sc.0.contains(&capability_condstore()) { + true => ClientStatus::Disabled, + _ => ClientStatus::NotSupportedByServer, + }, + utf8kind: None, + } + } + + pub fn enable_condstore(&mut self) { + self.condstore = self.condstore.enable(); + } + + pub fn attributes_enable(&mut self, ap: &AttributesProxy) { + if ap.is_enabling_condstore() { + self.enable_condstore() + } + } + + pub fn fetch_modifiers_enable(&mut self, mods: &[FetchModifier]) { + if mods + .iter() + .any(|x| matches!(x, FetchModifier::ChangedSince(..))) + { + self.enable_condstore() + } + } + + pub fn store_modifiers_enable(&mut self, mods: &[StoreModifier]) { + if mods + .iter() + .any(|x| matches!(x, StoreModifier::UnchangedSince(..))) + { + self.enable_condstore() + } + } + + pub fn select_enable(&mut self, mods: &[SelectExamineModifier]) { + for m in mods.iter() { + match m { + SelectExamineModifier::Condstore => self.enable_condstore(), + } + } + } + + pub fn try_enable( + &mut self, + caps: &[CapabilityEnable<'static>], + ) -> Vec> { + let mut enabled = vec![]; + for cap in caps { + match cap { + CapabilityEnable::CondStore if matches!(self.condstore, 
ClientStatus::Disabled) => { + self.condstore = ClientStatus::Enabled; + enabled.push(cap.clone()); + } + CapabilityEnable::Utf8(kind) if Some(kind) != self.utf8kind.as_ref() => { + self.utf8kind = Some(kind.clone()); + enabled.push(cap.clone()); + } + _ => (), + } + } + + enabled + } +} diff --git a/aero-proto/imap/command/anonymous.rs b/aero-proto/imap/command/anonymous.rs new file mode 100644 index 0000000..811d1e4 --- /dev/null +++ b/aero-proto/imap/command/anonymous.rs @@ -0,0 +1,83 @@ +use anyhow::Result; +use imap_codec::imap_types::command::{Command, CommandBody}; +use imap_codec::imap_types::core::AString; +use imap_codec::imap_types::response::Code; +use imap_codec::imap_types::secret::Secret; + +use crate::imap::capability::ServerCapability; +use crate::imap::command::anystate; +use crate::imap::flow; +use crate::imap::response::Response; +use crate::login::ArcLoginProvider; +use crate::user::User; + +//--- dispatching + +pub struct AnonymousContext<'a> { + pub req: &'a Command<'static>, + pub server_capabilities: &'a ServerCapability, + pub login_provider: &'a ArcLoginProvider, +} + +pub async fn dispatch(ctx: AnonymousContext<'_>) -> Result<(Response<'static>, flow::Transition)> { + match &ctx.req.body { + // Any State + CommandBody::Noop => anystate::noop_nothing(ctx.req.tag.clone()), + CommandBody::Capability => { + anystate::capability(ctx.req.tag.clone(), ctx.server_capabilities) + } + CommandBody::Logout => anystate::logout(), + + // Specific to anonymous context (3 commands) + CommandBody::Login { username, password } => ctx.login(username, password).await, + CommandBody::Authenticate { .. } => { + anystate::not_implemented(ctx.req.tag.clone(), "authenticate") + } + //StartTLS is not implemented for now, we will probably go full TLS. 
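Stepping back to ClientCapability::try_enable above: only capabilities whose state actually changes are echoed back, so a repeated ENABLE is a no-op. A sketch of the expected behaviour, using the crate paths introduced by this patch (the asserts and the demo function are ours):

    use imap_codec::imap_types::extensions::enable::CapabilityEnable;

    use crate::imap::capability::{ClientCapability, ServerCapability};

    fn demo_enable() {
        let server = ServerCapability::default();
        let mut client = ClientCapability::new(&server);

        // First ENABLE CONDSTORE: Disabled -> Enabled, reported back to the client.
        assert_eq!(client.try_enable(&[CapabilityEnable::CondStore]).len(), 1);
        // Second ENABLE CONDSTORE: already enabled, nothing new to report.
        assert!(client.try_enable(&[CapabilityEnable::CondStore]).is_empty());
    }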
+ + // Collect other commands + _ => anystate::wrong_state(ctx.req.tag.clone()), + } +} + +//--- Command controllers, private + +impl<'a> AnonymousContext<'a> { + async fn login( + self, + username: &AString<'a>, + password: &Secret>, + ) -> Result<(Response<'static>, flow::Transition)> { + let (u, p) = ( + std::str::from_utf8(username.as_ref())?, + std::str::from_utf8(password.declassify().as_ref())?, + ); + tracing::info!(user = %u, "command.login"); + + let creds = match self.login_provider.login(&u, &p).await { + Err(e) => { + tracing::debug!(error=%e, "authentication failed"); + return Ok(( + Response::build() + .to_req(self.req) + .message("Authentication failed") + .no()?, + flow::Transition::None, + )); + } + Ok(c) => c, + }; + + let user = User::new(u.to_string(), creds).await?; + + tracing::info!(username=%u, "connected"); + Ok(( + Response::build() + .to_req(self.req) + .code(Code::Capability(self.server_capabilities.to_vec())) + .message("Completed") + .ok()?, + flow::Transition::Authenticate(user), + )) + } +} diff --git a/aero-proto/imap/command/anystate.rs b/aero-proto/imap/command/anystate.rs new file mode 100644 index 0000000..718ba3f --- /dev/null +++ b/aero-proto/imap/command/anystate.rs @@ -0,0 +1,54 @@ +use anyhow::Result; +use imap_codec::imap_types::core::Tag; +use imap_codec::imap_types::response::Data; + +use crate::imap::capability::ServerCapability; +use crate::imap::flow; +use crate::imap::response::Response; + +pub(crate) fn capability( + tag: Tag<'static>, + cap: &ServerCapability, +) -> Result<(Response<'static>, flow::Transition)> { + let res = Response::build() + .tag(tag) + .message("Server capabilities") + .data(Data::Capability(cap.to_vec())) + .ok()?; + + Ok((res, flow::Transition::None)) +} + +pub(crate) fn noop_nothing(tag: Tag<'static>) -> Result<(Response<'static>, flow::Transition)> { + Ok(( + Response::build().tag(tag).message("Noop completed.").ok()?, + flow::Transition::None, + )) +} + +pub(crate) fn logout() -> Result<(Response<'static>, flow::Transition)> { + Ok((Response::bye()?, flow::Transition::Logout)) +} + +pub(crate) fn not_implemented<'a>( + tag: Tag<'a>, + what: &str, +) -> Result<(Response<'a>, flow::Transition)> { + Ok(( + Response::build() + .tag(tag) + .message(format!("Command not implemented {}", what)) + .bad()?, + flow::Transition::None, + )) +} + +pub(crate) fn wrong_state(tag: Tag<'static>) -> Result<(Response<'static>, flow::Transition)> { + Ok(( + Response::build() + .tag(tag) + .message("Command not authorized in this state") + .bad()?, + flow::Transition::None, + )) +} diff --git a/aero-proto/imap/command/authenticated.rs b/aero-proto/imap/command/authenticated.rs new file mode 100644 index 0000000..3d332ec --- /dev/null +++ b/aero-proto/imap/command/authenticated.rs @@ -0,0 +1,683 @@ +use std::collections::BTreeMap; +use std::sync::Arc; +use thiserror::Error; + +use anyhow::{anyhow, bail, Result}; +use imap_codec::imap_types::command::{ + Command, CommandBody, ListReturnItem, SelectExamineModifier, +}; +use imap_codec::imap_types::core::{Atom, Literal, QuotedChar, Vec1}; +use imap_codec::imap_types::datetime::DateTime; +use imap_codec::imap_types::extensions::enable::CapabilityEnable; +use imap_codec::imap_types::flag::{Flag, FlagNameAttribute}; +use imap_codec::imap_types::mailbox::{ListMailbox, Mailbox as MailboxCodec}; +use imap_codec::imap_types::response::{Code, CodeOther, Data}; +use imap_codec::imap_types::status::{StatusDataItem, StatusDataItemName}; + +use crate::imap::capability::{ClientCapability, 
ServerCapability}; +use crate::imap::command::{anystate, MailboxName}; +use crate::imap::flow; +use crate::imap::mailbox_view::{MailboxView, UpdateParameters}; +use crate::imap::response::Response; +use crate::imap::Body; + +use crate::mail::uidindex::*; +use crate::user::User; +use crate::mail::IMF; +use crate::mail::namespace::MAILBOX_HIERARCHY_DELIMITER as MBX_HIER_DELIM_RAW; + +pub struct AuthenticatedContext<'a> { + pub req: &'a Command<'static>, + pub server_capabilities: &'a ServerCapability, + pub client_capabilities: &'a mut ClientCapability, + pub user: &'a Arc, +} + +pub async fn dispatch<'a>( + mut ctx: AuthenticatedContext<'a>, +) -> Result<(Response<'static>, flow::Transition)> { + match &ctx.req.body { + // Any state + CommandBody::Noop => anystate::noop_nothing(ctx.req.tag.clone()), + CommandBody::Capability => { + anystate::capability(ctx.req.tag.clone(), ctx.server_capabilities) + } + CommandBody::Logout => anystate::logout(), + + // Specific to this state (11 commands) + CommandBody::Create { mailbox } => ctx.create(mailbox).await, + CommandBody::Delete { mailbox } => ctx.delete(mailbox).await, + CommandBody::Rename { from, to } => ctx.rename(from, to).await, + CommandBody::Lsub { + reference, + mailbox_wildcard, + } => ctx.list(reference, mailbox_wildcard, &[], true).await, + CommandBody::List { + reference, + mailbox_wildcard, + r#return, + } => ctx.list(reference, mailbox_wildcard, r#return, false).await, + CommandBody::Status { + mailbox, + item_names, + } => ctx.status(mailbox, item_names).await, + CommandBody::Subscribe { mailbox } => ctx.subscribe(mailbox).await, + CommandBody::Unsubscribe { mailbox } => ctx.unsubscribe(mailbox).await, + CommandBody::Select { mailbox, modifiers } => ctx.select(mailbox, modifiers).await, + CommandBody::Examine { mailbox, modifiers } => ctx.examine(mailbox, modifiers).await, + CommandBody::Append { + mailbox, + flags, + date, + message, + } => ctx.append(mailbox, flags, date, message).await, + + // rfc5161 ENABLE + CommandBody::Enable { capabilities } => ctx.enable(capabilities), + + // Collect other commands + _ => anystate::wrong_state(ctx.req.tag.clone()), + } +} + +// --- PRIVATE --- +impl<'a> AuthenticatedContext<'a> { + async fn create( + self, + mailbox: &MailboxCodec<'a>, + ) -> Result<(Response<'static>, flow::Transition)> { + let name = match mailbox { + MailboxCodec::Inbox => { + return Ok(( + Response::build() + .to_req(self.req) + .message("Cannot create INBOX") + .bad()?, + flow::Transition::None, + )); + } + MailboxCodec::Other(aname) => std::str::from_utf8(aname.as_ref())?, + }; + + match self.user.create_mailbox(&name).await { + Ok(()) => Ok(( + Response::build() + .to_req(self.req) + .message("CREATE complete") + .ok()?, + flow::Transition::None, + )), + Err(e) => Ok(( + Response::build() + .to_req(self.req) + .message(&e.to_string()) + .no()?, + flow::Transition::None, + )), + } + } + + async fn delete( + self, + mailbox: &MailboxCodec<'a>, + ) -> Result<(Response<'static>, flow::Transition)> { + let name: &str = MailboxName(mailbox).try_into()?; + + match self.user.delete_mailbox(&name).await { + Ok(()) => Ok(( + Response::build() + .to_req(self.req) + .message("DELETE complete") + .ok()?, + flow::Transition::None, + )), + Err(e) => Ok(( + Response::build() + .to_req(self.req) + .message(e.to_string()) + .no()?, + flow::Transition::None, + )), + } + } + + async fn rename( + self, + from: &MailboxCodec<'a>, + to: &MailboxCodec<'a>, + ) -> Result<(Response<'static>, flow::Transition)> { + let name: &str = 
MailboxName(from).try_into()?; + let new_name: &str = MailboxName(to).try_into()?; + + match self.user.rename_mailbox(&name, &new_name).await { + Ok(()) => Ok(( + Response::build() + .to_req(self.req) + .message("RENAME complete") + .ok()?, + flow::Transition::None, + )), + Err(e) => Ok(( + Response::build() + .to_req(self.req) + .message(e.to_string()) + .no()?, + flow::Transition::None, + )), + } + } + + async fn list( + &mut self, + reference: &MailboxCodec<'a>, + mailbox_wildcard: &ListMailbox<'a>, + must_return: &[ListReturnItem], + is_lsub: bool, + ) -> Result<(Response<'static>, flow::Transition)> { + let mbx_hier_delim: QuotedChar = QuotedChar::unvalidated(MBX_HIER_DELIM_RAW); + + let reference: &str = MailboxName(reference).try_into()?; + if !reference.is_empty() { + return Ok(( + Response::build() + .to_req(self.req) + .message("References not supported") + .bad()?, + flow::Transition::None, + )); + } + + let status_item_names = must_return.iter().find_map(|m| match m { + ListReturnItem::Status(v) => Some(v), + _ => None, + }); + + // @FIXME would probably need a rewrite to better use the imap_codec library + let wildcard = match mailbox_wildcard { + ListMailbox::Token(v) => std::str::from_utf8(v.as_ref())?, + ListMailbox::String(v) => std::str::from_utf8(v.as_ref())?, + }; + if wildcard.is_empty() { + if is_lsub { + return Ok(( + Response::build() + .to_req(self.req) + .message("LSUB complete") + .data(Data::Lsub { + items: vec![], + delimiter: Some(mbx_hier_delim), + mailbox: "".try_into().unwrap(), + }) + .ok()?, + flow::Transition::None, + )); + } else { + return Ok(( + Response::build() + .to_req(self.req) + .message("LIST complete") + .data(Data::List { + items: vec![], + delimiter: Some(mbx_hier_delim), + mailbox: "".try_into().unwrap(), + }) + .ok()?, + flow::Transition::None, + )); + } + } + + let mailboxes = self.user.list_mailboxes().await?; + let mut vmailboxes = BTreeMap::new(); + for mb in mailboxes.iter() { + for (i, _) in mb.match_indices(MBX_HIER_DELIM_RAW) { + if i > 0 { + let smb = &mb[..i]; + vmailboxes.entry(smb).or_insert(false); + } + } + vmailboxes.insert(mb, true); + } + + let mut ret = vec![]; + for (mb, is_real) in vmailboxes.iter() { + if matches_wildcard(&wildcard, mb) { + let mailbox: MailboxCodec = mb + .to_string() + .try_into() + .map_err(|_| anyhow!("invalid mailbox name"))?; + let mut items = vec![FlagNameAttribute::from(Atom::unvalidated("Subscribed"))]; + + // Decoration + if !*is_real { + items.push(FlagNameAttribute::Noselect); + } else { + match *mb { + "Drafts" => items.push(Atom::unvalidated("Drafts").into()), + "Archive" => items.push(Atom::unvalidated("Archive").into()), + "Sent" => items.push(Atom::unvalidated("Sent").into()), + "Trash" => items.push(Atom::unvalidated("Trash").into()), + _ => (), + }; + } + + // Result type + if is_lsub { + ret.push(Data::Lsub { + items, + delimiter: Some(mbx_hier_delim), + mailbox: mailbox.clone(), + }); + } else { + ret.push(Data::List { + items, + delimiter: Some(mbx_hier_delim), + mailbox: mailbox.clone(), + }); + } + + // Also collect status + if let Some(sin) = status_item_names { + let ret_attrs = match self.status_items(mb, sin).await { + Ok(a) => a, + Err(e) => { + tracing::error!(err=?e, mailbox=%mb, "Unable to fetch status for mailbox"); + continue; + } + }; + + let data = Data::Status { + mailbox, + items: ret_attrs.into(), + }; + + ret.push(data); + } + } + } + + let msg = if is_lsub { + "LSUB completed" + } else { + "LIST completed" + }; + Ok(( + Response::build() + .to_req(self.req) + 
.message(msg) + .many_data(ret) + .ok()?, + flow::Transition::None, + )) + } + + async fn status( + &mut self, + mailbox: &MailboxCodec<'static>, + attributes: &[StatusDataItemName], + ) -> Result<(Response<'static>, flow::Transition)> { + let name: &str = MailboxName(mailbox).try_into()?; + + let ret_attrs = match self.status_items(name, attributes).await { + Ok(v) => v, + Err(e) => match e.downcast_ref::() { + Some(CommandError::MailboxNotFound) => { + return Ok(( + Response::build() + .to_req(self.req) + .message("Mailbox does not exist") + .no()?, + flow::Transition::None, + )) + } + _ => return Err(e.into()), + }, + }; + + let data = Data::Status { + mailbox: mailbox.clone(), + items: ret_attrs.into(), + }; + + Ok(( + Response::build() + .to_req(self.req) + .message("STATUS completed") + .data(data) + .ok()?, + flow::Transition::None, + )) + } + + async fn status_items( + &mut self, + name: &str, + attributes: &[StatusDataItemName], + ) -> Result> { + let mb_opt = self.user.open_mailbox(name).await?; + let mb = match mb_opt { + Some(mb) => mb, + None => return Err(CommandError::MailboxNotFound.into()), + }; + + let view = MailboxView::new(mb, self.client_capabilities.condstore.is_enabled()).await; + + let mut ret_attrs = vec![]; + for attr in attributes.iter() { + ret_attrs.push(match attr { + StatusDataItemName::Messages => StatusDataItem::Messages(view.exists()?), + StatusDataItemName::Unseen => StatusDataItem::Unseen(view.unseen_count() as u32), + StatusDataItemName::Recent => StatusDataItem::Recent(view.recent()?), + StatusDataItemName::UidNext => StatusDataItem::UidNext(view.uidnext()), + StatusDataItemName::UidValidity => { + StatusDataItem::UidValidity(view.uidvalidity()) + } + StatusDataItemName::Deleted => { + bail!("quota not implemented, can't return deleted elements waiting for EXPUNGE"); + }, + StatusDataItemName::DeletedStorage => { + bail!("quota not implemented, can't return freed storage after EXPUNGE will be run"); + }, + StatusDataItemName::HighestModSeq => { + self.client_capabilities.enable_condstore(); + StatusDataItem::HighestModSeq(view.highestmodseq().get()) + }, + }); + } + Ok(ret_attrs) + } + + async fn subscribe( + self, + mailbox: &MailboxCodec<'a>, + ) -> Result<(Response<'static>, flow::Transition)> { + let name: &str = MailboxName(mailbox).try_into()?; + + if self.user.has_mailbox(&name).await? { + Ok(( + Response::build() + .to_req(self.req) + .message("SUBSCRIBE complete") + .ok()?, + flow::Transition::None, + )) + } else { + Ok(( + Response::build() + .to_req(self.req) + .message(format!("Mailbox {} does not exist", name)) + .bad()?, + flow::Transition::None, + )) + } + } + + async fn unsubscribe( + self, + mailbox: &MailboxCodec<'a>, + ) -> Result<(Response<'static>, flow::Transition)> { + let name: &str = MailboxName(mailbox).try_into()?; + + if self.user.has_mailbox(&name).await? 
{ + Ok(( + Response::build() + .to_req(self.req) + .message(format!( + "Cannot unsubscribe from mailbox {}: not supported by Aerogramme", + name + )) + .bad()?, + flow::Transition::None, + )) + } else { + Ok(( + Response::build() + .to_req(self.req) + .message(format!("Mailbox {} does not exist", name)) + .no()?, + flow::Transition::None, + )) + } + } + + /* + * TRACE BEGIN --- + + + Example: C: A142 SELECT INBOX + S: * 172 EXISTS + S: * 1 RECENT + S: * OK [UNSEEN 12] Message 12 is first unseen + S: * OK [UIDVALIDITY 3857529045] UIDs valid + S: * OK [UIDNEXT 4392] Predicted next UID + S: * FLAGS (\Answered \Flagged \Deleted \Seen \Draft) + S: * OK [PERMANENTFLAGS (\Deleted \Seen \*)] Limited + S: A142 OK [READ-WRITE] SELECT completed + + --- a mailbox with no unseen message -> no unseen entry + NOTES: + RFC3501 (imap4rev1) says if there is no OK [UNSEEN] response, client must make no assumption, + it is therefore correct to not return it even if there are unseen messages + RFC9051 (imap4rev2) says that OK [UNSEEN] responses are deprecated after SELECT and EXAMINE + For Aerogramme, we just don't send the OK [UNSEEN], it's correct to do in both specifications. + + + 20 select "INBOX.achats" + * FLAGS (\Answered \Flagged \Deleted \Seen \Draft $Forwarded JUNK $label1) + * OK [PERMANENTFLAGS (\Answered \Flagged \Deleted \Seen \Draft $Forwarded JUNK $label1 \*)] Flags permitted. + * 88 EXISTS + * 0 RECENT + * OK [UIDVALIDITY 1347986788] UIDs valid + * OK [UIDNEXT 91] Predicted next UID + * OK [HIGHESTMODSEQ 72] Highest + 20 OK [READ-WRITE] Select completed (0.001 + 0.000 secs). + + * TRACE END --- + */ + async fn select( + self, + mailbox: &MailboxCodec<'a>, + modifiers: &[SelectExamineModifier], + ) -> Result<(Response<'static>, flow::Transition)> { + self.client_capabilities.select_enable(modifiers); + + let name: &str = MailboxName(mailbox).try_into()?; + + let mb_opt = self.user.open_mailbox(&name).await?; + let mb = match mb_opt { + Some(mb) => mb, + None => { + return Ok(( + Response::build() + .to_req(self.req) + .message("Mailbox does not exist") + .no()?, + flow::Transition::None, + )) + } + }; + tracing::info!(username=%self.user.username, mailbox=%name, "mailbox.selected"); + + let mb = MailboxView::new(mb, self.client_capabilities.condstore.is_enabled()).await; + let data = mb.summary()?; + + Ok(( + Response::build() + .message("Select completed") + .to_req(self.req) + .code(Code::ReadWrite) + .set_body(data) + .ok()?, + flow::Transition::Select(mb, flow::MailboxPerm::ReadWrite), + )) + } + + async fn examine( + self, + mailbox: &MailboxCodec<'a>, + modifiers: &[SelectExamineModifier], + ) -> Result<(Response<'static>, flow::Transition)> { + self.client_capabilities.select_enable(modifiers); + + let name: &str = MailboxName(mailbox).try_into()?; + + let mb_opt = self.user.open_mailbox(&name).await?; + let mb = match mb_opt { + Some(mb) => mb, + None => { + return Ok(( + Response::build() + .to_req(self.req) + .message("Mailbox does not exist") + .no()?, + flow::Transition::None, + )) + } + }; + tracing::info!(username=%self.user.username, mailbox=%name, "mailbox.examined"); + + let mb = MailboxView::new(mb, self.client_capabilities.condstore.is_enabled()).await; + let data = mb.summary()?; + + Ok(( + Response::build() + .to_req(self.req) + .message("Examine completed") + .code(Code::ReadOnly) + .set_body(data) + .ok()?, + flow::Transition::Select(mb, flow::MailboxPerm::ReadOnly), + )) + } + + //@FIXME we should write a specific version for the "selected" state + //that returns some 
unsollicited responses + async fn append( + self, + mailbox: &MailboxCodec<'a>, + flags: &[Flag<'a>], + date: &Option, + message: &Literal<'a>, + ) -> Result<(Response<'static>, flow::Transition)> { + let append_tag = self.req.tag.clone(); + match self.append_internal(mailbox, flags, date, message).await { + Ok((_mb_view, uidvalidity, uid, _modseq)) => Ok(( + Response::build() + .tag(append_tag) + .message("APPEND completed") + .code(Code::Other(CodeOther::unvalidated( + format!("APPENDUID {} {}", uidvalidity, uid).into_bytes(), + ))) + .ok()?, + flow::Transition::None, + )), + Err(e) => Ok(( + Response::build() + .tag(append_tag) + .message(e.to_string()) + .no()?, + flow::Transition::None, + )), + } + } + + fn enable( + self, + cap_enable: &Vec1>, + ) -> Result<(Response<'static>, flow::Transition)> { + let mut response_builder = Response::build().to_req(self.req); + let capabilities = self.client_capabilities.try_enable(cap_enable.as_ref()); + if capabilities.len() > 0 { + response_builder = response_builder.data(Data::Enabled { capabilities }); + } + Ok(( + response_builder.message("ENABLE completed").ok()?, + flow::Transition::None, + )) + } + + //@FIXME should be refactored and integrated to the mailbox view + pub(crate) async fn append_internal( + self, + mailbox: &MailboxCodec<'a>, + flags: &[Flag<'a>], + date: &Option, + message: &Literal<'a>, + ) -> Result<(MailboxView, ImapUidvalidity, ImapUid, ModSeq)> { + let name: &str = MailboxName(mailbox).try_into()?; + + let mb_opt = self.user.open_mailbox(&name).await?; + let mb = match mb_opt { + Some(mb) => mb, + None => bail!("Mailbox does not exist"), + }; + let mut view = MailboxView::new(mb, self.client_capabilities.condstore.is_enabled()).await; + + if date.is_some() { + tracing::warn!("Cannot set date when appending message"); + } + + let msg = + IMF::try_from(message.data()).map_err(|_| anyhow!("Could not parse e-mail message"))?; + let flags = flags.iter().map(|x| x.to_string()).collect::>(); + // TODO: filter allowed flags? 
ping @Quentin + + let (uidvalidity, uid, modseq) = + view.internal.mailbox.append(msg, None, &flags[..]).await?; + //let unsollicited = view.update(UpdateParameters::default()).await?; + + Ok((view, uidvalidity, uid, modseq)) + } +} + +fn matches_wildcard(wildcard: &str, name: &str) -> bool { + let wildcard = wildcard.chars().collect::>(); + let name = name.chars().collect::>(); + + let mut matches = vec![vec![false; wildcard.len() + 1]; name.len() + 1]; + + for i in 0..=name.len() { + for j in 0..=wildcard.len() { + matches[i][j] = (i == 0 && j == 0) + || (j > 0 + && matches[i][j - 1] + && (wildcard[j - 1] == '%' || wildcard[j - 1] == '*')) + || (i > 0 + && j > 0 + && matches[i - 1][j - 1] + && wildcard[j - 1] == name[i - 1] + && wildcard[j - 1] != '%' + && wildcard[j - 1] != '*') + || (i > 0 + && j > 0 + && matches[i - 1][j] + && (wildcard[j - 1] == '*' + || (wildcard[j - 1] == '%' && name[i - 1] != MBX_HIER_DELIM_RAW))); + } + } + + matches[name.len()][wildcard.len()] +} + +#[derive(Error, Debug)] +pub enum CommandError { + #[error("Mailbox not found")] + MailboxNotFound, +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_wildcard_matches() { + assert!(matches_wildcard("INBOX", "INBOX")); + assert!(matches_wildcard("*", "INBOX")); + assert!(matches_wildcard("%", "INBOX")); + assert!(!matches_wildcard("%", "Test.Azerty")); + assert!(!matches_wildcard("INBOX.*", "INBOX")); + assert!(matches_wildcard("Sent.*", "Sent.A")); + assert!(matches_wildcard("Sent.*", "Sent.A.B")); + assert!(!matches_wildcard("Sent.%", "Sent.A.B")); + } +} diff --git a/aero-proto/imap/command/mod.rs b/aero-proto/imap/command/mod.rs new file mode 100644 index 0000000..f201eb6 --- /dev/null +++ b/aero-proto/imap/command/mod.rs @@ -0,0 +1,20 @@ +pub mod anonymous; +pub mod anystate; +pub mod authenticated; +pub mod selected; + +use crate::mail::namespace::INBOX; +use imap_codec::imap_types::mailbox::Mailbox as MailboxCodec; + +/// Convert an IMAP mailbox name/identifier representation +/// to an utf-8 string that is used internally in Aerogramme +struct MailboxName<'a>(&'a MailboxCodec<'a>); +impl<'a> TryInto<&'a str> for MailboxName<'a> { + type Error = std::str::Utf8Error; + fn try_into(self) -> Result<&'a str, Self::Error> { + match self.0 { + MailboxCodec::Inbox => Ok(INBOX), + MailboxCodec::Other(aname) => Ok(std::str::from_utf8(aname.as_ref())?), + } + } +} diff --git a/aero-proto/imap/command/selected.rs b/aero-proto/imap/command/selected.rs new file mode 100644 index 0000000..eedfbd6 --- /dev/null +++ b/aero-proto/imap/command/selected.rs @@ -0,0 +1,424 @@ +use std::num::NonZeroU64; +use std::sync::Arc; + +use anyhow::Result; +use imap_codec::imap_types::command::{Command, CommandBody, FetchModifier, StoreModifier}; +use imap_codec::imap_types::core::{Charset, Vec1}; +use imap_codec::imap_types::fetch::MacroOrMessageDataItemNames; +use imap_codec::imap_types::flag::{Flag, StoreResponse, StoreType}; +use imap_codec::imap_types::mailbox::Mailbox as MailboxCodec; +use imap_codec::imap_types::response::{Code, CodeOther}; +use imap_codec::imap_types::search::SearchKey; +use imap_codec::imap_types::sequence::SequenceSet; + +use crate::imap::attributes::AttributesProxy; +use crate::imap::capability::{ClientCapability, ServerCapability}; +use crate::imap::command::{anystate, authenticated, MailboxName}; +use crate::imap::flow; +use crate::imap::mailbox_view::{MailboxView, UpdateParameters}; +use crate::imap::response::Response; +use crate::user::User; + +pub struct SelectedContext<'a> { + pub req: 
&'a Command<'static>, + pub user: &'a Arc, + pub mailbox: &'a mut MailboxView, + pub server_capabilities: &'a ServerCapability, + pub client_capabilities: &'a mut ClientCapability, + pub perm: &'a flow::MailboxPerm, +} + +pub async fn dispatch<'a>( + ctx: SelectedContext<'a>, +) -> Result<(Response<'static>, flow::Transition)> { + match &ctx.req.body { + // Any State + // noop is specific to this state + CommandBody::Capability => { + anystate::capability(ctx.req.tag.clone(), ctx.server_capabilities) + } + CommandBody::Logout => anystate::logout(), + + // Specific to this state (7 commands + NOOP) + CommandBody::Close => match ctx.perm { + flow::MailboxPerm::ReadWrite => ctx.close().await, + flow::MailboxPerm::ReadOnly => ctx.examine_close().await, + }, + CommandBody::Noop | CommandBody::Check => ctx.noop().await, + CommandBody::Fetch { + sequence_set, + macro_or_item_names, + modifiers, + uid, + } => { + ctx.fetch(sequence_set, macro_or_item_names, modifiers, uid) + .await + } + //@FIXME SearchKey::And is a legacy hack, should be refactored + CommandBody::Search { + charset, + criteria, + uid, + } => { + ctx.search(charset, &SearchKey::And(criteria.clone()), uid) + .await + } + CommandBody::Expunge { + // UIDPLUS (rfc4315) + uid_sequence_set, + } => ctx.expunge(uid_sequence_set).await, + CommandBody::Store { + sequence_set, + kind, + response, + flags, + modifiers, + uid, + } => { + ctx.store(sequence_set, kind, response, flags, modifiers, uid) + .await + } + CommandBody::Copy { + sequence_set, + mailbox, + uid, + } => ctx.copy(sequence_set, mailbox, uid).await, + CommandBody::Move { + sequence_set, + mailbox, + uid, + } => ctx.r#move(sequence_set, mailbox, uid).await, + + // UNSELECT extension (rfc3691) + CommandBody::Unselect => ctx.unselect().await, + + // In selected mode, we fallback to authenticated when needed + _ => { + authenticated::dispatch(authenticated::AuthenticatedContext { + req: ctx.req, + server_capabilities: ctx.server_capabilities, + client_capabilities: ctx.client_capabilities, + user: ctx.user, + }) + .await + } + } +} + +// --- PRIVATE --- + +impl<'a> SelectedContext<'a> { + async fn close(self) -> Result<(Response<'static>, flow::Transition)> { + // We expunge messages, + // but we don't send the untagged EXPUNGE responses + let tag = self.req.tag.clone(); + self.expunge(&None).await?; + Ok(( + Response::build().tag(tag).message("CLOSE completed").ok()?, + flow::Transition::Unselect, + )) + } + + /// CLOSE in examined state is not the same as in selected state + /// (in selected state it also does an EXPUNGE, here it doesn't) + async fn examine_close(self) -> Result<(Response<'static>, flow::Transition)> { + Ok(( + Response::build() + .to_req(self.req) + .message("CLOSE completed") + .ok()?, + flow::Transition::Unselect, + )) + } + + async fn unselect(self) -> Result<(Response<'static>, flow::Transition)> { + Ok(( + Response::build() + .to_req(self.req) + .message("UNSELECT completed") + .ok()?, + flow::Transition::Unselect, + )) + } + + pub async fn fetch( + self, + sequence_set: &SequenceSet, + attributes: &'a MacroOrMessageDataItemNames<'static>, + modifiers: &[FetchModifier], + uid: &bool, + ) -> Result<(Response<'static>, flow::Transition)> { + let ap = AttributesProxy::new(attributes, modifiers, *uid); + let mut changed_since: Option = None; + modifiers.iter().for_each(|m| match m { + FetchModifier::ChangedSince(val) => { + changed_since = Some(*val); + } + }); + + match self + .mailbox + .fetch(sequence_set, &ap, changed_since, uid) + .await + { + Ok(resp) 
=> { + // Capabilities enabling logic only on successful command + // (according to my understanding of the spec) + self.client_capabilities.attributes_enable(&ap); + self.client_capabilities.fetch_modifiers_enable(modifiers); + + // Response to the client + Ok(( + Response::build() + .to_req(self.req) + .message("FETCH completed") + .set_body(resp) + .ok()?, + flow::Transition::None, + )) + } + Err(e) => Ok(( + Response::build() + .to_req(self.req) + .message(e.to_string()) + .no()?, + flow::Transition::None, + )), + } + } + + pub async fn search( + self, + charset: &Option>, + criteria: &SearchKey<'a>, + uid: &bool, + ) -> Result<(Response<'static>, flow::Transition)> { + let (found, enable_condstore) = self.mailbox.search(charset, criteria, *uid).await?; + if enable_condstore { + self.client_capabilities.enable_condstore(); + } + Ok(( + Response::build() + .to_req(self.req) + .set_body(found) + .message("SEARCH completed") + .ok()?, + flow::Transition::None, + )) + } + + pub async fn noop(self) -> Result<(Response<'static>, flow::Transition)> { + self.mailbox.internal.mailbox.force_sync().await?; + + let updates = self.mailbox.update(UpdateParameters::default()).await?; + Ok(( + Response::build() + .to_req(self.req) + .message("NOOP completed.") + .set_body(updates) + .ok()?, + flow::Transition::None, + )) + } + + async fn expunge( + self, + uid_sequence_set: &Option, + ) -> Result<(Response<'static>, flow::Transition)> { + if let Some(failed) = self.fail_read_only() { + return Ok((failed, flow::Transition::None)); + } + + let tag = self.req.tag.clone(); + let data = self.mailbox.expunge(uid_sequence_set).await?; + + Ok(( + Response::build() + .tag(tag) + .message("EXPUNGE completed") + .set_body(data) + .ok()?, + flow::Transition::None, + )) + } + + async fn store( + self, + sequence_set: &SequenceSet, + kind: &StoreType, + response: &StoreResponse, + flags: &[Flag<'a>], + modifiers: &[StoreModifier], + uid: &bool, + ) -> Result<(Response<'static>, flow::Transition)> { + if let Some(failed) = self.fail_read_only() { + return Ok((failed, flow::Transition::None)); + } + + let mut unchanged_since: Option = None; + modifiers.iter().for_each(|m| match m { + StoreModifier::UnchangedSince(val) => { + unchanged_since = Some(*val); + } + }); + + let (data, modified) = self + .mailbox + .store(sequence_set, kind, response, flags, unchanged_since, uid) + .await?; + + let mut ok_resp = Response::build() + .to_req(self.req) + .message("STORE completed") + .set_body(data); + + match modified[..] { + [] => (), + [_head, ..] => { + let modified_str = format!( + "MODIFIED {}", + modified + .into_iter() + .map(|x| x.to_string()) + .collect::>() + .join(",") + ); + ok_resp = ok_resp.code(Code::Other(CodeOther::unvalidated( + modified_str.into_bytes(), + ))); + } + }; + + self.client_capabilities.store_modifiers_enable(modifiers); + + Ok((ok_resp.ok()?, flow::Transition::None)) + } + + async fn copy( + self, + sequence_set: &SequenceSet, + mailbox: &MailboxCodec<'a>, + uid: &bool, + ) -> Result<(Response<'static>, flow::Transition)> { + //@FIXME Could copy be valid in EXAMINE mode? 
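The MODIFIED response code assembled in store() above is simply the keyword followed by a comma-separated list of the messages that were rejected by UNCHANGEDSINCE. Pulled out as a standalone sketch (the function name and the plain u64 are ours; the real code works on the identifiers kept by the mailbox view):

    // Format the RFC 7162 MODIFIED response code, or None when nothing was rejected.
    fn modified_code(modified: &[u64]) -> Option<String> {
        if modified.is_empty() {
            return None;
        }
        let list = modified
            .iter()
            .map(|id| id.to_string())
            .collect::<Vec<_>>()
            .join(",");
        Some(format!("MODIFIED {}", list))
    }

    // modified_code(&[7, 9, 42]) == Some("MODIFIED 7,9,42".to_string())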
+        if let Some(failed) = self.fail_read_only() {
+            return Ok((failed, flow::Transition::None));
+        }
+
+        let name: &str = MailboxName(mailbox).try_into()?;
+
+        let mb_opt = self.user.open_mailbox(&name).await?;
+        let mb = match mb_opt {
+            Some(mb) => mb,
+            None => {
+                return Ok((
+                    Response::build()
+                        .to_req(self.req)
+                        .message("Destination mailbox does not exist")
+                        .code(Code::TryCreate)
+                        .no()?,
+                    flow::Transition::None,
+                ))
+            }
+        };
+
+        let (uidval, uid_map) = self.mailbox.copy(sequence_set, mb, uid).await?;
+
+        let copyuid_str = format!(
+            "{} {} {}",
+            uidval,
+            uid_map
+                .iter()
+                .map(|(sid, _)| format!("{}", sid))
+                .collect::<Vec<_>>()
+                .join(","),
+            uid_map
+                .iter()
+                .map(|(_, tuid)| format!("{}", tuid))
+                .collect::<Vec<_>>()
+                .join(",")
+        );
+
+        Ok((
+            Response::build()
+                .to_req(self.req)
+                .message("COPY completed")
+                .code(Code::Other(CodeOther::unvalidated(
+                    format!("COPYUID {}", copyuid_str).into_bytes(),
+                )))
+                .ok()?,
+            flow::Transition::None,
+        ))
+    }
+
+    async fn r#move(
+        self,
+        sequence_set: &SequenceSet,
+        mailbox: &MailboxCodec<'a>,
+        uid: &bool,
+    ) -> Result<(Response<'static>, flow::Transition)> {
+        if let Some(failed) = self.fail_read_only() {
+            return Ok((failed, flow::Transition::None));
+        }
+
+        let name: &str = MailboxName(mailbox).try_into()?;
+
+        let mb_opt = self.user.open_mailbox(&name).await?;
+        let mb = match mb_opt {
+            Some(mb) => mb,
+            None => {
+                return Ok((
+                    Response::build()
+                        .to_req(self.req)
+                        .message("Destination mailbox does not exist")
+                        .code(Code::TryCreate)
+                        .no()?,
+                    flow::Transition::None,
+                ))
+            }
+        };
+
+        let (uidval, uid_map, data) = self.mailbox.r#move(sequence_set, mb, uid).await?;
+
+        // compute code
+        let copyuid_str = format!(
+            "{} {} {}",
+            uidval,
+            uid_map
+                .iter()
+                .map(|(sid, _)| format!("{}", sid))
+                .collect::<Vec<_>>()
+                .join(","),
+            uid_map
+                .iter()
+                .map(|(_, tuid)| format!("{}", tuid))
+                .collect::<Vec<_>>()
+                .join(",")
+        );
+
+        Ok((
+            Response::build()
+                .to_req(self.req)
+                .message("MOVE completed")
+                .code(Code::Other(CodeOther::unvalidated(
+                    format!("COPYUID {}", copyuid_str).into_bytes(),
+                )))
+                .set_body(data)
+                .ok()?,
+            flow::Transition::None,
+        ))
+    }
+
+    fn fail_read_only(&self) -> Option<Response<'static>> {
+        match self.perm {
+            flow::MailboxPerm::ReadWrite => None,
+            flow::MailboxPerm::ReadOnly => Some(
+                Response::build()
+                    .to_req(self.req)
+                    .message("Write commands are forbidden while examining a mailbox")
+                    .no()
+                    .unwrap(),
+            ),
+        }
+    }
+}
diff --git a/aero-proto/imap/flags.rs b/aero-proto/imap/flags.rs
new file mode 100644
index 0000000..0f6ec64
--- /dev/null
+++ b/aero-proto/imap/flags.rs
@@ -0,0 +1,30 @@
+use imap_codec::imap_types::core::Atom;
+use imap_codec::imap_types::flag::{Flag, FlagFetch};
+
+pub fn from_str(f: &str) -> Option<FlagFetch<'static>> {
+    match f.chars().next() {
+        Some('\\') => match f {
+            "\\Seen" => Some(FlagFetch::Flag(Flag::Seen)),
+            "\\Answered" => Some(FlagFetch::Flag(Flag::Answered)),
+            "\\Flagged" => Some(FlagFetch::Flag(Flag::Flagged)),
+            "\\Deleted" => Some(FlagFetch::Flag(Flag::Deleted)),
+            "\\Draft" => Some(FlagFetch::Flag(Flag::Draft)),
+            "\\Recent" => Some(FlagFetch::Recent),
+            _ => match Atom::try_from(f.strip_prefix('\\').unwrap().to_string()) {
+                Err(_) => {
+                    tracing::error!(flag=%f, "Unable to encode flag as IMAP atom");
+                    None
+                }
+                Ok(a) => Some(FlagFetch::Flag(Flag::system(a))),
+            },
+        },
+        Some(_) => match Atom::try_from(f.to_string()) {
+            Err(_) => {
+                tracing::error!(flag=%f, "Unable to encode flag as IMAP atom");
+                None
+            }
+            Ok(a) => Some(FlagFetch::Flag(Flag::keyword(a))),
+        },
+        None => None,
+ } +} diff --git a/aero-proto/imap/flow.rs b/aero-proto/imap/flow.rs new file mode 100644 index 0000000..86eb12e --- /dev/null +++ b/aero-proto/imap/flow.rs @@ -0,0 +1,114 @@ +use std::error::Error as StdError; +use std::fmt; +use std::sync::Arc; + +use imap_codec::imap_types::core::Tag; +use tokio::sync::Notify; + +use crate::imap::mailbox_view::MailboxView; +use crate::user::User; + +#[derive(Debug)] +pub enum Error { + ForbiddenTransition, +} +impl fmt::Display for Error { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "Forbidden Transition") + } +} +impl StdError for Error {} + +pub enum State { + NotAuthenticated, + Authenticated(Arc), + Selected(Arc, MailboxView, MailboxPerm), + Idle( + Arc, + MailboxView, + MailboxPerm, + Tag<'static>, + Arc, + ), + Logout, +} +impl State { + pub fn notify(&self) -> Option> { + match self { + Self::Idle(_, _, _, _, anotif) => Some(anotif.clone()), + _ => None, + } + } +} +impl fmt::Display for State { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + use State::*; + match self { + NotAuthenticated => write!(f, "NotAuthenticated"), + Authenticated(..) => write!(f, "Authenticated"), + Selected(..) => write!(f, "Selected"), + Idle(..) => write!(f, "Idle"), + Logout => write!(f, "Logout"), + } + } +} + +#[derive(Clone)] +pub enum MailboxPerm { + ReadOnly, + ReadWrite, +} + +pub enum Transition { + None, + Authenticate(Arc), + Select(MailboxView, MailboxPerm), + Idle(Tag<'static>, Notify), + UnIdle, + Unselect, + Logout, +} +impl fmt::Display for Transition { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + use Transition::*; + match self { + None => write!(f, "None"), + Authenticate(..) => write!(f, "Authenticated"), + Select(..) => write!(f, "Selected"), + Idle(..) => write!(f, "Idle"), + UnIdle => write!(f, "UnIdle"), + Unselect => write!(f, "Unselect"), + Logout => write!(f, "Logout"), + } + } +} + +// See RFC3501 section 3. 
+// https://datatracker.ietf.org/doc/html/rfc3501#page-13 +impl State { + pub fn apply(&mut self, tr: Transition) -> Result<(), Error> { + tracing::debug!(state=%self, transition=%tr, "try change state"); + + let new_state = match (std::mem::replace(self, State::Logout), tr) { + (s, Transition::None) => s, + (State::NotAuthenticated, Transition::Authenticate(u)) => State::Authenticated(u), + (State::Authenticated(u) | State::Selected(u, _, _), Transition::Select(m, p)) => { + State::Selected(u, m, p) + } + (State::Selected(u, _, _), Transition::Unselect) => State::Authenticated(u.clone()), + (State::Selected(u, m, p), Transition::Idle(t, s)) => { + State::Idle(u, m, p, t, Arc::new(s)) + } + (State::Idle(u, m, p, _, _), Transition::UnIdle) => State::Selected(u, m, p), + (_, Transition::Logout) => State::Logout, + (s, t) => { + tracing::error!(state=%s, transition=%t, "forbidden transition"); + return Err(Error::ForbiddenTransition); + } + }; + *self = new_state; + tracing::debug!(state=%self, "transition succeeded"); + + Ok(()) + } +} diff --git a/aero-proto/imap/imf_view.rs b/aero-proto/imap/imf_view.rs new file mode 100644 index 0000000..a4ca2e8 --- /dev/null +++ b/aero-proto/imap/imf_view.rs @@ -0,0 +1,109 @@ +use anyhow::{anyhow, Result}; +use chrono::naive::NaiveDate; + +use imap_codec::imap_types::core::{IString, NString}; +use imap_codec::imap_types::envelope::{Address, Envelope}; + +use eml_codec::imf; + +pub struct ImfView<'a>(pub &'a imf::Imf<'a>); + +impl<'a> ImfView<'a> { + pub fn naive_date(&self) -> Result { + Ok(self.0.date.ok_or(anyhow!("date is not set"))?.date_naive()) + } + + /// Envelope rules are defined in RFC 3501, section 7.4.2 + /// https://datatracker.ietf.org/doc/html/rfc3501#section-7.4.2 + /// + /// Some important notes: + /// + /// If the Sender or Reply-To lines are absent in the [RFC-2822] + /// header, or are present but empty, the server sets the + /// corresponding member of the envelope to be the same value as + /// the from member (the client is not expected to know to do + /// this). Note: [RFC-2822] requires that all messages have a valid + /// From header. Therefore, the from, sender, and reply-to + /// members in the envelope can not be NIL. + /// + /// If the Date, Subject, In-Reply-To, and Message-ID header lines + /// are absent in the [RFC-2822] header, the corresponding member + /// of the envelope is NIL; if these header lines are present but + /// empty the corresponding member of the envelope is the empty + /// string. + + //@FIXME return an error if the envelope is invalid instead of panicking + //@FIXME some fields must be defaulted if there are not set. 
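+    //
+    // Illustrative sketch of the fallback rule (hypothetical message, values shown
+    // for clarity only): a header carrying nothing but `From: alice@example.com`
+    // is expected to produce an envelope where `sender` and `reply_to` both equal
+    // `from`, while `date`, `subject`, `in_reply_to` and `message_id` stay NIL:
+    //
+    //   let env = ImfView(&parsed.imf).message_envelope();
+    //   assert_eq!(env.sender, env.from);
+    //   assert_eq!(env.reply_to, env.from);
+    //   assert_eq!(env.subject, NString(None));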
+ pub fn message_envelope(&self) -> Envelope<'static> { + let msg = self.0; + let from = msg.from.iter().map(convert_mbx).collect::>(); + + Envelope { + date: NString( + msg.date + .as_ref() + .map(|d| IString::try_from(d.to_rfc3339()).unwrap()), + ), + subject: NString( + msg.subject + .as_ref() + .map(|d| IString::try_from(d.to_string()).unwrap()), + ), + sender: msg + .sender + .as_ref() + .map(|v| vec![convert_mbx(v)]) + .unwrap_or(from.clone()), + reply_to: if msg.reply_to.is_empty() { + from.clone() + } else { + convert_addresses(&msg.reply_to) + }, + from, + to: convert_addresses(&msg.to), + cc: convert_addresses(&msg.cc), + bcc: convert_addresses(&msg.bcc), + in_reply_to: NString( + msg.in_reply_to + .iter() + .next() + .map(|d| IString::try_from(d.to_string()).unwrap()), + ), + message_id: NString( + msg.msg_id + .as_ref() + .map(|d| IString::try_from(d.to_string()).unwrap()), + ), + } + } +} + +pub fn convert_addresses(addrlist: &Vec) -> Vec> { + let mut acc = vec![]; + for item in addrlist { + match item { + imf::address::AddressRef::Single(a) => acc.push(convert_mbx(a)), + imf::address::AddressRef::Many(l) => acc.extend(l.participants.iter().map(convert_mbx)), + } + } + return acc; +} + +pub fn convert_mbx(addr: &imf::mailbox::MailboxRef) -> Address<'static> { + Address { + name: NString( + addr.name + .as_ref() + .map(|x| IString::try_from(x.to_string()).unwrap()), + ), + // SMTP at-domain-list (source route) seems obsolete since at least 1991 + // https://www.mhonarc.org/archive/html/ietf-822/1991-06/msg00060.html + adl: NString(None), + mailbox: NString(Some( + IString::try_from(addr.addrspec.local_part.to_string()).unwrap(), + )), + host: NString(Some( + IString::try_from(addr.addrspec.domain.to_string()).unwrap(), + )), + } +} diff --git a/aero-proto/imap/index.rs b/aero-proto/imap/index.rs new file mode 100644 index 0000000..9b794b8 --- /dev/null +++ b/aero-proto/imap/index.rs @@ -0,0 +1,211 @@ +use std::num::{NonZeroU32, NonZeroU64}; + +use anyhow::{anyhow, Result}; +use imap_codec::imap_types::sequence::{SeqOrUid, Sequence, SequenceSet}; + +use crate::mail::uidindex::{ImapUid, ModSeq, UidIndex}; +use crate::mail::unique_ident::UniqueIdent; + +pub struct Index<'a> { + pub imap_index: Vec>, + pub internal: &'a UidIndex, +} +impl<'a> Index<'a> { + pub fn new(internal: &'a UidIndex) -> Result { + let imap_index = internal + .idx_by_uid + .iter() + .enumerate() + .map(|(i_enum, (&uid, &uuid))| { + let (_, modseq, flags) = internal + .table + .get(&uuid) + .ok_or(anyhow!("mail is missing from index"))?; + let i_int: u32 = (i_enum + 1).try_into()?; + let i: NonZeroU32 = i_int.try_into()?; + + Ok(MailIndex { + i, + uid, + uuid, + modseq: *modseq, + flags, + }) + }) + .collect::>>()?; + + Ok(Self { + imap_index, + internal, + }) + } + + pub fn last(&'a self) -> Option<&'a MailIndex<'a>> { + self.imap_index.last() + } + + /// Fetch mail descriptors based on a sequence of UID + /// + /// Complexity analysis: + /// - Sort is O(n * log n) where n is the number of uid generated by the sequence + /// - Finding the starting point in the index O(log m) where m is the size of the mailbox + /// While n =< m, it's not clear if the difference is big or not. + /// + /// For now, the algorithm tries to be fast for small values of n, + /// as it is what is expected by clients. + /// + /// So we assume for our implementation that : n << m. + /// It's not true for full mailbox searches for example... 
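+    ///
+    /// Illustrative sketch (not compiled; it mirrors the sequence-set construction
+    /// used by the EXPUNGE handler, and names such as `snapshot` are assumed from
+    /// the surrounding code):
+    ///
+    /// ```ignore
+    /// // "1:*", i.e. every mail currently known by the snapshot
+    /// let all = SequenceSet(
+    ///     vec![Sequence::Range(SeqOrUid::Value(NonZeroU32::MIN), SeqOrUid::Asterisk)]
+    ///         .try_into()
+    ///         .unwrap(),
+    /// );
+    /// let picked = Index::new(&snapshot)?.fetch_on_uid(&all);
+    /// // `picked` holds one `&MailIndex` per known mail, sorted by ascending UID.
+    /// ```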
+ pub fn fetch_on_uid(&'a self, sequence_set: &SequenceSet) -> Vec<&'a MailIndex<'a>> { + if self.imap_index.is_empty() { + return vec![]; + } + let largest = self.last().expect("The mailbox is not empty").uid; + let mut unroll_seq = sequence_set.iter(largest).collect::>(); + unroll_seq.sort(); + + let start_seq = match unroll_seq.iter().next() { + Some(elem) => elem, + None => return vec![], + }; + + // Quickly jump to the right point in the mailbox vector O(log m) instead + // of iterating one by one O(m). Works only because both unroll_seq & imap_index are sorted per uid. + let mut imap_idx = { + let start_idx = self + .imap_index + .partition_point(|mail_idx| &mail_idx.uid < start_seq); + &self.imap_index[start_idx..] + }; + + let mut acc = vec![]; + for wanted_uid in unroll_seq.iter() { + // Slide the window forward as long as its first element is lower than our wanted uid. + let start_idx = match imap_idx.iter().position(|midx| &midx.uid >= wanted_uid) { + Some(v) => v, + None => break, + }; + imap_idx = &imap_idx[start_idx..]; + + // If the beginning of our new window is the uid we want, we collect it + if &imap_idx[0].uid == wanted_uid { + acc.push(&imap_idx[0]); + } + } + + acc + } + + pub fn fetch_on_id(&'a self, sequence_set: &SequenceSet) -> Result>> { + if self.imap_index.is_empty() { + return Ok(vec![]); + } + let largest = NonZeroU32::try_from(self.imap_index.len() as u32)?; + let mut acc = sequence_set + .iter(largest) + .map(|wanted_id| { + self.imap_index + .get((wanted_id.get() as usize) - 1) + .ok_or(anyhow!("Mail not found")) + }) + .collect::>>()?; + + // Sort the result to be consistent with UID + acc.sort_by(|a, b| a.i.cmp(&b.i)); + + Ok(acc) + } + + pub fn fetch( + self: &'a Index<'a>, + sequence_set: &SequenceSet, + by_uid: bool, + ) -> Result>> { + match by_uid { + true => Ok(self.fetch_on_uid(sequence_set)), + _ => self.fetch_on_id(sequence_set), + } + } + + pub fn fetch_changed_since( + self: &'a Index<'a>, + sequence_set: &SequenceSet, + maybe_modseq: Option, + by_uid: bool, + ) -> Result>> { + let raw = self.fetch(sequence_set, by_uid)?; + let res = match maybe_modseq { + Some(pit) => raw.into_iter().filter(|midx| midx.modseq > pit).collect(), + None => raw, + }; + + Ok(res) + } + + pub fn fetch_unchanged_since( + self: &'a Index<'a>, + sequence_set: &SequenceSet, + maybe_modseq: Option, + by_uid: bool, + ) -> Result<(Vec<&'a MailIndex<'a>>, Vec<&'a MailIndex<'a>>)> { + let raw = self.fetch(sequence_set, by_uid)?; + let res = match maybe_modseq { + Some(pit) => raw.into_iter().partition(|midx| midx.modseq <= pit), + None => (raw, vec![]), + }; + + Ok(res) + } +} + +#[derive(Clone, Debug)] +pub struct MailIndex<'a> { + pub i: NonZeroU32, + pub uid: ImapUid, + pub uuid: UniqueIdent, + pub modseq: ModSeq, + pub flags: &'a Vec, +} + +impl<'a> MailIndex<'a> { + // The following functions are used to implement the SEARCH command + pub fn is_in_sequence_i(&self, seq: &Sequence) -> bool { + match seq { + Sequence::Single(SeqOrUid::Asterisk) => true, + Sequence::Single(SeqOrUid::Value(target)) => target == &self.i, + Sequence::Range(SeqOrUid::Asterisk, SeqOrUid::Value(x)) + | Sequence::Range(SeqOrUid::Value(x), SeqOrUid::Asterisk) => x <= &self.i, + Sequence::Range(SeqOrUid::Value(x1), SeqOrUid::Value(x2)) => { + if x1 < x2 { + x1 <= &self.i && &self.i <= x2 + } else { + x1 >= &self.i && &self.i >= x2 + } + } + Sequence::Range(SeqOrUid::Asterisk, SeqOrUid::Asterisk) => true, + } + } + + pub fn is_in_sequence_uid(&self, seq: &Sequence) -> bool { + match seq { + 
Sequence::Single(SeqOrUid::Asterisk) => true, + Sequence::Single(SeqOrUid::Value(target)) => target == &self.uid, + Sequence::Range(SeqOrUid::Asterisk, SeqOrUid::Value(x)) + | Sequence::Range(SeqOrUid::Value(x), SeqOrUid::Asterisk) => x <= &self.uid, + Sequence::Range(SeqOrUid::Value(x1), SeqOrUid::Value(x2)) => { + if x1 < x2 { + x1 <= &self.uid && &self.uid <= x2 + } else { + x1 >= &self.uid && &self.uid >= x2 + } + } + Sequence::Range(SeqOrUid::Asterisk, SeqOrUid::Asterisk) => true, + } + } + + pub fn is_flag_set(&self, flag: &str) -> bool { + self.flags + .iter() + .any(|candidate| candidate.as_str() == flag) + } +} diff --git a/aero-proto/imap/mail_view.rs b/aero-proto/imap/mail_view.rs new file mode 100644 index 0000000..a8db733 --- /dev/null +++ b/aero-proto/imap/mail_view.rs @@ -0,0 +1,306 @@ +use std::num::NonZeroU32; + +use anyhow::{anyhow, bail, Result}; +use chrono::{naive::NaiveDate, DateTime as ChronoDateTime, Local, Offset, TimeZone, Utc}; + +use imap_codec::imap_types::core::NString; +use imap_codec::imap_types::datetime::DateTime; +use imap_codec::imap_types::fetch::{ + MessageDataItem, MessageDataItemName, Section as FetchSection, +}; +use imap_codec::imap_types::flag::Flag; +use imap_codec::imap_types::response::Data; + +use eml_codec::{ + imf, + part::{composite::Message, AnyPart}, +}; + +use crate::mail::query::QueryResult; + +use crate::imap::attributes::AttributesProxy; +use crate::imap::flags; +use crate::imap::imf_view::ImfView; +use crate::imap::index::MailIndex; +use crate::imap::mime_view; +use crate::imap::response::Body; + +pub struct MailView<'a> { + pub in_idx: &'a MailIndex<'a>, + pub query_result: &'a QueryResult, + pub content: FetchedMail<'a>, +} + +impl<'a> MailView<'a> { + pub fn new(query_result: &'a QueryResult, in_idx: &'a MailIndex<'a>) -> Result> { + Ok(Self { + in_idx, + query_result, + content: match query_result { + QueryResult::FullResult { content, .. } => { + let (_, parsed) = + eml_codec::parse_message(&content).or(Err(anyhow!("Invalid mail body")))?; + FetchedMail::full_from_message(parsed) + } + QueryResult::PartialResult { metadata, .. } => { + let (_, parsed) = eml_codec::parse_message(&metadata.headers) + .or(Err(anyhow!("unable to parse email headers")))?; + FetchedMail::partial_from_message(parsed) + } + QueryResult::IndexResult { .. 
} => FetchedMail::IndexOnly, + }, + }) + } + + pub fn imf(&self) -> Option { + self.content.as_imf().map(ImfView) + } + + pub fn selected_mime(&'a self) -> Option> { + self.content.as_anypart().ok().map(mime_view::SelectedMime) + } + + pub fn filter(&self, ap: &AttributesProxy) -> Result<(Body<'static>, SeenFlag)> { + let mut seen = SeenFlag::DoNothing; + let res_attrs = ap + .attrs + .iter() + .map(|attr| match attr { + MessageDataItemName::Uid => Ok(self.uid()), + MessageDataItemName::Flags => Ok(self.flags()), + MessageDataItemName::Rfc822Size => self.rfc_822_size(), + MessageDataItemName::Rfc822Header => self.rfc_822_header(), + MessageDataItemName::Rfc822Text => self.rfc_822_text(), + MessageDataItemName::Rfc822 => { + if self.is_not_yet_seen() { + seen = SeenFlag::MustAdd; + } + self.rfc822() + } + MessageDataItemName::Envelope => Ok(self.envelope()), + MessageDataItemName::Body => self.body(), + MessageDataItemName::BodyStructure => self.body_structure(), + MessageDataItemName::BodyExt { + section, + partial, + peek, + } => { + let (body, has_seen) = self.body_ext(section, partial, peek)?; + seen = has_seen; + Ok(body) + } + MessageDataItemName::InternalDate => self.internal_date(), + MessageDataItemName::ModSeq => Ok(self.modseq()), + }) + .collect::, _>>()?; + + Ok(( + Body::Data(Data::Fetch { + seq: self.in_idx.i, + items: res_attrs.try_into()?, + }), + seen, + )) + } + + pub fn stored_naive_date(&self) -> Result { + let mail_meta = self.query_result.metadata().expect("metadata were fetched"); + let mail_ts: i64 = mail_meta.internaldate.try_into()?; + let msg_date: ChronoDateTime = ChronoDateTime::from_timestamp(mail_ts, 0) + .ok_or(anyhow!("unable to parse timestamp"))? + .with_timezone(&Local); + + Ok(msg_date.date_naive()) + } + + pub fn is_header_contains_pattern(&self, hdr: &[u8], pattern: &[u8]) -> bool { + let mime = match self.selected_mime() { + None => return false, + Some(x) => x, + }; + + let val = match mime.header_value(hdr) { + None => return false, + Some(x) => x, + }; + + val.windows(pattern.len()).any(|win| win == pattern) + } + + // Private function, mainly for filter! + fn uid(&self) -> MessageDataItem<'static> { + MessageDataItem::Uid(self.in_idx.uid.clone()) + } + + fn flags(&self) -> MessageDataItem<'static> { + MessageDataItem::Flags( + self.in_idx + .flags + .iter() + .filter_map(|f| flags::from_str(f)) + .collect(), + ) + } + + fn rfc_822_size(&self) -> Result> { + let sz = self + .query_result + .metadata() + .ok_or(anyhow!("mail metadata are required"))? + .rfc822_size; + Ok(MessageDataItem::Rfc822Size(sz as u32)) + } + + fn rfc_822_header(&self) -> Result> { + let hdrs: NString = self + .query_result + .metadata() + .ok_or(anyhow!("mail metadata are required"))? 
+ .headers + .to_vec() + .try_into()?; + Ok(MessageDataItem::Rfc822Header(hdrs)) + } + + fn rfc_822_text(&self) -> Result> { + let txt: NString = self.content.as_msg()?.raw_body.to_vec().try_into()?; + Ok(MessageDataItem::Rfc822Text(txt)) + } + + fn rfc822(&self) -> Result> { + let full: NString = self.content.as_msg()?.raw_part.to_vec().try_into()?; + Ok(MessageDataItem::Rfc822(full)) + } + + fn envelope(&self) -> MessageDataItem<'static> { + MessageDataItem::Envelope( + self.imf() + .expect("an imf object is derivable from fetchedmail") + .message_envelope(), + ) + } + + fn body(&self) -> Result> { + Ok(MessageDataItem::Body(mime_view::bodystructure( + self.content.as_msg()?.child.as_ref(), + false, + )?)) + } + + fn body_structure(&self) -> Result> { + Ok(MessageDataItem::BodyStructure(mime_view::bodystructure( + self.content.as_msg()?.child.as_ref(), + true, + )?)) + } + + fn is_not_yet_seen(&self) -> bool { + let seen_flag = Flag::Seen.to_string(); + !self.in_idx.flags.iter().any(|x| *x == seen_flag) + } + + /// maps to BODY[
]<> and BODY.PEEK[
]<> + /// peek does not implicitly set the \Seen flag + /// eg. BODY[HEADER.FIELDS (DATE FROM)] + /// eg. BODY[]<0.2048> + fn body_ext( + &self, + section: &Option>, + partial: &Option<(u32, NonZeroU32)>, + peek: &bool, + ) -> Result<(MessageDataItem<'static>, SeenFlag)> { + // Manage Seen flag + let mut seen = SeenFlag::DoNothing; + if !peek && self.is_not_yet_seen() { + // Add \Seen flag + //self.mailbox.add_flags(uuid, &[seen_flag]).await?; + seen = SeenFlag::MustAdd; + } + + // Process message + let (text, origin) = + match mime_view::body_ext(self.content.as_anypart()?, section, partial)? { + mime_view::BodySection::Full(body) => (body, None), + mime_view::BodySection::Slice { body, origin_octet } => (body, Some(origin_octet)), + }; + + let data: NString = text.to_vec().try_into()?; + + return Ok(( + MessageDataItem::BodyExt { + section: section.as_ref().map(|fs| fs.clone()), + origin, + data, + }, + seen, + )); + } + + fn internal_date(&self) -> Result> { + let dt = Utc + .fix() + .timestamp_opt( + i64::try_from( + self.query_result + .metadata() + .ok_or(anyhow!("mail metadata were not fetched"))? + .internaldate + / 1000, + )?, + 0, + ) + .earliest() + .ok_or(anyhow!("Unable to parse internal date"))?; + Ok(MessageDataItem::InternalDate(DateTime::unvalidated(dt))) + } + + fn modseq(&self) -> MessageDataItem<'static> { + MessageDataItem::ModSeq(self.in_idx.modseq) + } +} + +pub enum SeenFlag { + DoNothing, + MustAdd, +} + +// ------------------- + +pub enum FetchedMail<'a> { + IndexOnly, + Partial(AnyPart<'a>), + Full(AnyPart<'a>), +} +impl<'a> FetchedMail<'a> { + pub fn full_from_message(msg: Message<'a>) -> Self { + Self::Full(AnyPart::Msg(msg)) + } + + pub fn partial_from_message(msg: Message<'a>) -> Self { + Self::Partial(AnyPart::Msg(msg)) + } + + pub fn as_anypart(&self) -> Result<&AnyPart<'a>> { + match self { + FetchedMail::Full(x) => Ok(&x), + FetchedMail::Partial(x) => Ok(&x), + _ => bail!("The full message must be fetched, not only its headers"), + } + } + + pub fn as_msg(&self) -> Result<&Message<'a>> { + match self { + FetchedMail::Full(AnyPart::Msg(x)) => Ok(&x), + FetchedMail::Partial(AnyPart::Msg(x)) => Ok(&x), + _ => bail!("The full message must be fetched, not only its headers AND it must be an AnyPart::Msg."), + } + } + + pub fn as_imf(&self) -> Option<&imf::Imf<'a>> { + match self { + FetchedMail::Full(AnyPart::Msg(x)) => Some(&x.imf), + FetchedMail::Partial(AnyPart::Msg(x)) => Some(&x.imf), + _ => None, + } + } +} diff --git a/aero-proto/imap/mailbox_view.rs b/aero-proto/imap/mailbox_view.rs new file mode 100644 index 0000000..1c53b93 --- /dev/null +++ b/aero-proto/imap/mailbox_view.rs @@ -0,0 +1,772 @@ +use std::collections::HashSet; +use std::num::{NonZeroU32, NonZeroU64}; +use std::sync::Arc; + +use anyhow::{anyhow, Error, Result}; + +use futures::stream::{StreamExt, TryStreamExt}; + +use imap_codec::imap_types::core::{Charset, Vec1}; +use imap_codec::imap_types::fetch::MessageDataItem; +use imap_codec::imap_types::flag::{Flag, FlagFetch, FlagPerm, StoreResponse, StoreType}; +use imap_codec::imap_types::response::{Code, CodeOther, Data, Status}; +use imap_codec::imap_types::search::SearchKey; +use imap_codec::imap_types::sequence::SequenceSet; + +use crate::mail::mailbox::Mailbox; +use crate::mail::query::QueryScope; +use crate::mail::snapshot::FrozenMailbox; +use crate::mail::uidindex::{ImapUid, ImapUidvalidity, ModSeq}; +use crate::mail::unique_ident::UniqueIdent; + +use crate::imap::attributes::AttributesProxy; +use crate::imap::flags; +use 
crate::imap::index::Index; +use crate::imap::mail_view::{MailView, SeenFlag}; +use crate::imap::response::Body; +use crate::imap::search; + +const DEFAULT_FLAGS: [Flag; 5] = [ + Flag::Seen, + Flag::Answered, + Flag::Flagged, + Flag::Deleted, + Flag::Draft, +]; + +pub struct UpdateParameters { + pub silence: HashSet, + pub with_modseq: bool, + pub with_uid: bool, +} +impl Default for UpdateParameters { + fn default() -> Self { + Self { + silence: HashSet::new(), + with_modseq: false, + with_uid: false, + } + } +} + +/// A MailboxView is responsible for giving the client the information +/// it needs about a mailbox, such as an initial summary of the mailbox's +/// content and continuous updates indicating when the content +/// of the mailbox has been changed. +/// To do this, it keeps a variable `known_state` that corresponds to +/// what the client knows, and produces IMAP messages to be sent to the +/// client that go along updates to `known_state`. +pub struct MailboxView { + pub internal: FrozenMailbox, + pub is_condstore: bool, +} + +impl MailboxView { + /// Creates a new IMAP view into a mailbox. + pub async fn new(mailbox: Arc, is_cond: bool) -> Self { + Self { + internal: mailbox.frozen().await, + is_condstore: is_cond, + } + } + + /// Create an updated view, useful to make a diff + /// between what the client knows and new stuff + /// Produces a set of IMAP responses describing the change between + /// what the client knows and what is actually in the mailbox. + /// This does NOT trigger a sync, it bases itself on what is currently + /// loaded in RAM by Bayou. + pub async fn update(&mut self, params: UpdateParameters) -> Result>> { + let old_snapshot = self.internal.update().await; + let new_snapshot = &self.internal.snapshot; + + let mut data = Vec::::new(); + + // Calculate diff between two mailbox states + // See example in IMAP RFC in section on NOOP command: + // we want to produce something like this: + // C: a047 NOOP + // S: * 22 EXPUNGE + // S: * 23 EXISTS + // S: * 14 FETCH (UID 1305 FLAGS (\Seen \Deleted)) + // S: a047 OK Noop completed + // In other words: + // - notify client of expunged mails + // - if new mails arrived, notify client of number of existing mails + // - if flags changed for existing mails, tell client + // (for this last step: if uidvalidity changed, do nothing, + // just notify of new uidvalidity and they will resync) + + // - notify client of expunged mails + let mut n_expunge = 0; + for (i, (_uid, uuid)) in old_snapshot.idx_by_uid.iter().enumerate() { + if !new_snapshot.table.contains_key(uuid) { + data.push(Body::Data(Data::Expunge( + NonZeroU32::try_from((i + 1 - n_expunge) as u32).unwrap(), + ))); + n_expunge += 1; + } + } + + // - if new mails arrived, notify client of number of existing mails + if new_snapshot.table.len() != old_snapshot.table.len() - n_expunge + || new_snapshot.uidvalidity != old_snapshot.uidvalidity + { + data.push(self.exists_status()?); + } + + if new_snapshot.uidvalidity != old_snapshot.uidvalidity { + // TODO: do we want to push less/more info than this? 
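+            //
+            // Illustrative wire view (hypothetical values): after a UIDVALIDITY
+            // change the client ends up receiving something like
+            //
+            //   * OK [UIDVALIDITY 1713190000] UIDs valid
+            //   * OK [UIDNEXT 42] Predict next UID
+            //
+            // and is expected to drop its cached UIDs and resynchronize.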
+ data.push(self.uidvalidity_status()?); + data.push(self.uidnext_status()?); + } else { + // - if flags changed for existing mails, tell client + for (i, (_uid, uuid)) in new_snapshot.idx_by_uid.iter().enumerate() { + if params.silence.contains(uuid) { + continue; + } + + let old_mail = old_snapshot.table.get(uuid); + let new_mail = new_snapshot.table.get(uuid); + if old_mail.is_some() && old_mail != new_mail { + if let Some((uid, modseq, flags)) = new_mail { + let mut items = vec![MessageDataItem::Flags( + flags.iter().filter_map(|f| flags::from_str(f)).collect(), + )]; + + if params.with_uid { + items.push(MessageDataItem::Uid(*uid)); + } + + if params.with_modseq { + items.push(MessageDataItem::ModSeq(*modseq)); + } + + data.push(Body::Data(Data::Fetch { + seq: NonZeroU32::try_from((i + 1) as u32).unwrap(), + items: items.try_into()?, + })); + } + } + } + } + Ok(data) + } + + /// Generates the necessary IMAP messages so that the client + /// has a satisfactory summary of the current mailbox's state. + /// These are the messages that are sent in response to a SELECT command. + pub fn summary(&self) -> Result>> { + let mut data = Vec::::new(); + data.push(self.exists_status()?); + data.push(self.recent_status()?); + data.extend(self.flags_status()?.into_iter()); + data.push(self.uidvalidity_status()?); + data.push(self.uidnext_status()?); + if self.is_condstore { + data.push(self.highestmodseq_status()?); + } + /*self.unseen_first_status()? + .map(|unseen_status| data.push(unseen_status));*/ + + Ok(data) + } + + pub async fn store<'a>( + &mut self, + sequence_set: &SequenceSet, + kind: &StoreType, + response: &StoreResponse, + flags: &[Flag<'a>], + unchanged_since: Option, + is_uid_store: &bool, + ) -> Result<(Vec>, Vec)> { + self.internal.sync().await?; + + let flags = flags.iter().map(|x| x.to_string()).collect::>(); + + let idx = self.index()?; + let (editable, in_conflict) = + idx.fetch_unchanged_since(sequence_set, unchanged_since, *is_uid_store)?; + + for mi in editable.iter() { + match kind { + StoreType::Add => { + self.internal.mailbox.add_flags(mi.uuid, &flags[..]).await?; + } + StoreType::Remove => { + self.internal.mailbox.del_flags(mi.uuid, &flags[..]).await?; + } + StoreType::Replace => { + self.internal.mailbox.set_flags(mi.uuid, &flags[..]).await?; + } + } + } + + let silence = match response { + StoreResponse::Answer => HashSet::new(), + StoreResponse::Silent => editable.iter().map(|midx| midx.uuid).collect(), + }; + + let conflict_id_or_uid = match is_uid_store { + true => in_conflict.into_iter().map(|midx| midx.uid).collect(), + _ => in_conflict.into_iter().map(|midx| midx.i).collect(), + }; + + let summary = self + .update(UpdateParameters { + with_uid: *is_uid_store, + with_modseq: unchanged_since.is_some(), + silence, + }) + .await?; + + Ok((summary, conflict_id_or_uid)) + } + + pub async fn idle_sync(&mut self) -> Result>> { + self.internal + .mailbox + .notify() + .await + .upgrade() + .ok_or(anyhow!("test"))? 
+ .notified() + .await; + self.internal.mailbox.opportunistic_sync().await?; + self.update(UpdateParameters::default()).await + } + + pub async fn expunge( + &mut self, + maybe_seq_set: &Option, + ) -> Result>> { + // Get a recent view to apply our change + self.internal.sync().await?; + let state = self.internal.peek().await; + let idx = Index::new(&state)?; + + // Build a default sequence set for the default case + use imap_codec::imap_types::sequence::{SeqOrUid, Sequence}; + let seq = match maybe_seq_set { + Some(s) => s.clone(), + None => SequenceSet( + vec![Sequence::Range( + SeqOrUid::Value(NonZeroU32::MIN), + SeqOrUid::Asterisk, + )] + .try_into() + .unwrap(), + ), + }; + + let deleted_flag = Flag::Deleted.to_string(); + let msgs = idx + .fetch_on_uid(&seq) + .into_iter() + .filter(|midx| midx.flags.iter().any(|x| *x == deleted_flag)) + .map(|midx| midx.uuid); + + for msg in msgs { + self.internal.mailbox.delete(msg).await?; + } + + self.update(UpdateParameters::default()).await + } + + pub async fn copy( + &self, + sequence_set: &SequenceSet, + to: Arc, + is_uid_copy: &bool, + ) -> Result<(ImapUidvalidity, Vec<(ImapUid, ImapUid)>)> { + let idx = self.index()?; + let mails = idx.fetch(sequence_set, *is_uid_copy)?; + + let mut new_uuids = vec![]; + for mi in mails.iter() { + new_uuids.push(to.copy_from(&self.internal.mailbox, mi.uuid).await?); + } + + let mut ret = vec![]; + let to_state = to.current_uid_index().await; + for (mi, new_uuid) in mails.iter().zip(new_uuids.iter()) { + let dest_uid = to_state + .table + .get(new_uuid) + .ok_or(anyhow!("copied mail not in destination mailbox"))? + .0; + ret.push((mi.uid, dest_uid)); + } + + Ok((to_state.uidvalidity, ret)) + } + + pub async fn r#move( + &mut self, + sequence_set: &SequenceSet, + to: Arc, + is_uid_copy: &bool, + ) -> Result<(ImapUidvalidity, Vec<(ImapUid, ImapUid)>, Vec>)> { + let idx = self.index()?; + let mails = idx.fetch(sequence_set, *is_uid_copy)?; + + for mi in mails.iter() { + to.move_from(&self.internal.mailbox, mi.uuid).await?; + } + + let mut ret = vec![]; + let to_state = to.current_uid_index().await; + for mi in mails.iter() { + let dest_uid = to_state + .table + .get(&mi.uuid) + .ok_or(anyhow!("moved mail not in destination mailbox"))? + .0; + ret.push((mi.uid, dest_uid)); + } + + let update = self + .update(UpdateParameters { + with_uid: *is_uid_copy, + ..UpdateParameters::default() + }) + .await?; + + Ok((to_state.uidvalidity, ret, update)) + } + + /// Looks up state changes in the mailbox and produces a set of IMAP + /// responses describing the new state. + pub async fn fetch<'b>( + &self, + sequence_set: &SequenceSet, + ap: &AttributesProxy, + changed_since: Option, + is_uid_fetch: &bool, + ) -> Result>> { + // [1/6] Pre-compute data + // a. what are the uuids of the emails we want? + // b. do we need to fetch the full body? 
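+        //
+        // Illustrative mapping (attribute lists are examples only):
+        //   FETCH 1:* (UID FLAGS)     -> ap.need_body() == false -> QueryScope::Partial
+        //   FETCH 1:* (BODY[] RFC822) -> ap.need_body() == true  -> QueryScope::Full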
+ //let ap = AttributesProxy::new(attributes, *is_uid_fetch); + let query_scope = match ap.need_body() { + true => QueryScope::Full, + _ => QueryScope::Partial, + }; + tracing::debug!("Query scope {:?}", query_scope); + let idx = self.index()?; + let mail_idx_list = idx.fetch_changed_since(sequence_set, changed_since, *is_uid_fetch)?; + + // [2/6] Fetch the emails + let uuids = mail_idx_list + .iter() + .map(|midx| midx.uuid) + .collect::>(); + + let query = self.internal.query(&uuids, query_scope); + //let query_result = self.internal.query(&uuids, query_scope).fetch().await?; + + let query_stream = query + .fetch() + .zip(futures::stream::iter(mail_idx_list)) + // [3/6] Derive an IMAP-specific view from the results, apply the filters + .map(|(maybe_qr, midx)| match maybe_qr { + Ok(qr) => Ok((MailView::new(&qr, midx)?.filter(&ap)?, midx)), + Err(e) => Err(e), + }) + // [4/6] Apply the IMAP transformation + .then(|maybe_ret| async move { + let ((body, seen), midx) = maybe_ret?; + + // [5/6] Register the \Seen flags + if matches!(seen, SeenFlag::MustAdd) { + let seen_flag = Flag::Seen.to_string(); + self.internal + .mailbox + .add_flags(midx.uuid, &[seen_flag]) + .await?; + } + + Ok::<_, anyhow::Error>(body) + }); + + // [6/6] Build the final result that will be sent to the client. + query_stream.try_collect().await + } + + /// A naive search implementation... + pub async fn search<'a>( + &self, + _charset: &Option>, + search_key: &SearchKey<'a>, + uid: bool, + ) -> Result<(Vec>, bool)> { + // 1. Compute the subset of sequence identifiers we need to fetch + // based on the search query + let crit = search::Criteria(search_key); + let (seq_set, seq_type) = crit.to_sequence_set(); + + // 2. Get the selection + let idx = self.index()?; + let selection = idx.fetch(&seq_set, seq_type.is_uid())?; + + // 3. Filter the selection based on the ID / UID / Flags + let (kept_idx, to_fetch) = crit.filter_on_idx(&selection); + + // 4.a Fetch additional info about the emails + let query_scope = crit.query_scope(); + let uuids = to_fetch.iter().map(|midx| midx.uuid).collect::>(); + let query = self.internal.query(&uuids, query_scope); + + // 4.b We don't want to keep all data in memory, so we do the computing in a stream + let query_stream = query + .fetch() + .zip(futures::stream::iter(&to_fetch)) + // 5.a Build a mailview with the body, might fail with an error + // 5.b If needed, filter the selection based on the body, but keep the errors + // 6. Drop the query+mailbox, keep only the mail index + // Here we release a lot of memory, this is the most important part ^^ + .filter_map(|(maybe_qr, midx)| { + let r = match maybe_qr { + Ok(qr) => match MailView::new(&qr, midx).map(|mv| crit.is_keep_on_query(&mv)) { + Ok(true) => Some(Ok(*midx)), + Ok(_) => None, + Err(e) => Some(Err(e)), + }, + Err(e) => Some(Err(e)), + }; + futures::future::ready(r) + }); + + // 7. Chain both streams (part resolved from index, part resolved from metadata+body) + let main_stream = futures::stream::iter(kept_idx) + .map(Ok) + .chain(query_stream) + .map_ok(|idx| match uid { + true => (idx.uid, idx.modseq), + _ => (idx.i, idx.modseq), + }); + + // 8. Do the actual computation + let internal_result: Vec<_> = main_stream.try_collect().await?; + let (selection, modseqs): (Vec<_>, Vec<_>) = internal_result.into_iter().unzip(); + + // 9. Aggregate the maximum modseq value + let maybe_modseq = match crit.is_modseq() { + true => modseqs.into_iter().max(), + _ => None, + }; + + // 10. 
Return the final result + Ok(( + vec![Body::Data(Data::Search(selection, maybe_modseq))], + maybe_modseq.is_some(), + )) + } + + // ---- + /// @FIXME index should be stored for longer than a single request + /// Instead they should be tied to the FrozenMailbox refresh + /// It's not trivial to refactor the code to do that, so we are doing + /// some useless computation for now... + fn index<'a>(&'a self) -> Result> { + Index::new(&self.internal.snapshot) + } + + /// Produce an OK [UIDVALIDITY _] message corresponding to `known_state` + fn uidvalidity_status(&self) -> Result> { + let uid_validity = Status::ok( + None, + Some(Code::UidValidity(self.uidvalidity())), + "UIDs valid", + ) + .map_err(Error::msg)?; + Ok(Body::Status(uid_validity)) + } + + pub(crate) fn uidvalidity(&self) -> ImapUidvalidity { + self.internal.snapshot.uidvalidity + } + + /// Produce an OK [UIDNEXT _] message corresponding to `known_state` + fn uidnext_status(&self) -> Result> { + let next_uid = Status::ok( + None, + Some(Code::UidNext(self.uidnext())), + "Predict next UID", + ) + .map_err(Error::msg)?; + Ok(Body::Status(next_uid)) + } + + pub(crate) fn uidnext(&self) -> ImapUid { + self.internal.snapshot.uidnext + } + + pub(crate) fn highestmodseq_status(&self) -> Result> { + Ok(Body::Status(Status::ok( + None, + Some(Code::Other(CodeOther::unvalidated( + format!("HIGHESTMODSEQ {}", self.highestmodseq()).into_bytes(), + ))), + "Highest", + )?)) + } + + pub(crate) fn highestmodseq(&self) -> ModSeq { + self.internal.snapshot.highestmodseq + } + + /// Produce an EXISTS message corresponding to the number of mails + /// in `known_state` + fn exists_status(&self) -> Result> { + Ok(Body::Data(Data::Exists(self.exists()?))) + } + + pub(crate) fn exists(&self) -> Result { + Ok(u32::try_from(self.internal.snapshot.idx_by_uid.len())?) + } + + /// Produce a RECENT message corresponding to the number of + /// recent mails in `known_state` + fn recent_status(&self) -> Result> { + Ok(Body::Data(Data::Recent(self.recent()?))) + } + + #[allow(dead_code)] + fn unseen_first_status(&self) -> Result>> { + Ok(self + .unseen_first()? + .map(|unseen_id| { + Status::ok(None, Some(Code::Unseen(unseen_id)), "First unseen.").map(Body::Status) + }) + .transpose()?) + } + + #[allow(dead_code)] + fn unseen_first(&self) -> Result> { + Ok(self + .internal + .snapshot + .table + .values() + .enumerate() + .find(|(_i, (_imap_uid, _modseq, flags))| !flags.contains(&"\\Seen".to_string())) + .map(|(i, _)| NonZeroU32::try_from(i as u32 + 1)) + .transpose()?) + } + + pub(crate) fn recent(&self) -> Result { + let recent = self + .internal + .snapshot + .idx_by_flag + .get(&"\\Recent".to_string()) + .map(|os| os.len()) + .unwrap_or(0); + Ok(u32::try_from(recent)?) + } + + /// Produce a FLAGS and a PERMANENTFLAGS message that indicates + /// the flags that are in `known_state` + default flags + fn flags_status(&self) -> Result>> { + let mut body = vec![]; + + // 1. Collecting all the possible flags in the mailbox + // 1.a Fetch them from our index + let mut known_flags: Vec = self + .internal + .snapshot + .idx_by_flag + .flags() + .filter_map(|f| match flags::from_str(f) { + Some(FlagFetch::Flag(fl)) => Some(fl), + _ => None, + }) + .collect(); + // 1.b Merge it with our default flags list + for f in DEFAULT_FLAGS.iter() { + if !known_flags.contains(f) { + known_flags.push(f.clone()); + } + } + // 1.c Create the IMAP message + body.push(Body::Data(Data::Flags(known_flags.clone()))); + + // 2. 
Returning flags that are persisted + // 2.a Always advertise our default flags + let mut permanent = DEFAULT_FLAGS + .iter() + .map(|f| FlagPerm::Flag(f.clone())) + .collect::>(); + // 2.b Say that we support any keyword flag + permanent.push(FlagPerm::Asterisk); + // 2.c Create the IMAP message + let permanent_flags = Status::ok( + None, + Some(Code::PermanentFlags(permanent)), + "Flags permitted", + ) + .map_err(Error::msg)?; + body.push(Body::Status(permanent_flags)); + + // Done! + Ok(body) + } + + pub(crate) fn unseen_count(&self) -> usize { + let total = self.internal.snapshot.table.len(); + let seen = self + .internal + .snapshot + .idx_by_flag + .get(&Flag::Seen.to_string()) + .map(|x| x.len()) + .unwrap_or(0); + total - seen + } +} + +#[cfg(test)] +mod tests { + use super::*; + use imap_codec::encode::Encoder; + use imap_codec::imap_types::core::Vec1; + use imap_codec::imap_types::fetch::Section; + use imap_codec::imap_types::fetch::{MacroOrMessageDataItemNames, MessageDataItemName}; + use imap_codec::imap_types::response::Response; + use imap_codec::ResponseCodec; + use std::fs; + + use crate::cryptoblob; + use crate::imap::index::MailIndex; + use crate::imap::mail_view::MailView; + use crate::imap::mime_view; + use crate::mail::mailbox::MailMeta; + use crate::mail::query::QueryResult; + use crate::mail::unique_ident; + + #[test] + fn mailview_body_ext() -> Result<()> { + let ap = AttributesProxy::new( + &MacroOrMessageDataItemNames::MessageDataItemNames(vec![ + MessageDataItemName::BodyExt { + section: Some(Section::Header(None)), + partial: None, + peek: false, + }, + ]), + &[], + false, + ); + + let key = cryptoblob::gen_key(); + let meta = MailMeta { + internaldate: 0u64, + headers: vec![], + message_key: key, + rfc822_size: 8usize, + }; + + let index_entry = (NonZeroU32::MIN, NonZeroU64::MIN, vec![]); + let mail_in_idx = MailIndex { + i: NonZeroU32::MIN, + uid: index_entry.0, + modseq: index_entry.1, + uuid: unique_ident::gen_ident(), + flags: &index_entry.2, + }; + let rfc822 = b"Subject: hello\r\nFrom: a@a.a\r\nTo: b@b.b\r\nDate: Thu, 12 Oct 2023 08:45:28 +0000\r\n\r\nhello world"; + let qr = QueryResult::FullResult { + uuid: mail_in_idx.uuid.clone(), + metadata: meta, + content: rfc822.to_vec(), + }; + + let mv = MailView::new(&qr, &mail_in_idx)?; + let (res_body, _seen) = mv.filter(&ap)?; + + let fattr = match res_body { + Body::Data(Data::Fetch { + seq: _seq, + items: attr, + }) => Ok(attr), + _ => Err(anyhow!("Not a fetch body")), + }?; + + assert_eq!(fattr.as_ref().len(), 1); + + let (sec, _orig, _data) = match &fattr.as_ref()[0] { + MessageDataItem::BodyExt { + section, + origin, + data, + } => Ok((section, origin, data)), + _ => Err(anyhow!("not a body ext message attribute")), + }?; + + assert_eq!(sec.as_ref().unwrap(), &Section::Header(None)); + + Ok(()) + } + + /// Future automated test. We use lossy utf8 conversion + lowercase everything, + /// so this test might allow invalid results. But at least it allows us to quickly test a + /// large variety of emails. + /// Keep in mind that special cases must still be tested manually! 
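+    ///
+    /// Illustrative sketch (file names are hypothetical): adding a case means
+    /// dropping a `NNNN_name.eml` / `NNNN_name.dovecot.body` pair under
+    /// `tests/emails/...` and listing its prefix in the `prefixes` array below.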
+ #[test] + fn fetch_body() -> Result<()> { + let prefixes = [ + /* *** MY OWN DATASET *** */ + "tests/emails/dxflrs/0001_simple", + "tests/emails/dxflrs/0002_mime", + "tests/emails/dxflrs/0003_mime-in-mime", + "tests/emails/dxflrs/0004_msg-in-msg", + // eml_codec do not support continuation for the moment + //"tests/emails/dxflrs/0005_mail-parser-readme", + "tests/emails/dxflrs/0006_single-mime", + "tests/emails/dxflrs/0007_raw_msg_in_rfc822", + /* *** (STRANGE) RFC *** */ + //"tests/emails/rfc/000", // must return text/enriched, we return text/plain + //"tests/emails/rfc/001", // does not recognize the multipart/external-body, breaks the + // whole parsing + //"tests/emails/rfc/002", // wrong date in email + + //"tests/emails/rfc/003", // dovecot fixes \r\r: the bytes number is wrong + text/enriched + + /* *** THIRD PARTY *** */ + //"tests/emails/thirdparty/000", // dovecot fixes \r\r: the bytes number is wrong + //"tests/emails/thirdparty/001", // same + "tests/emails/thirdparty/002", // same + + /* *** LEGACY *** */ + //"tests/emails/legacy/000", // same issue with \r\r + ]; + + for pref in prefixes.iter() { + println!("{}", pref); + let txt = fs::read(format!("{}.eml", pref))?; + let oracle = fs::read(format!("{}.dovecot.body", pref))?; + let message = eml_codec::parse_message(&txt).unwrap().1; + + let test_repr = Response::Data(Data::Fetch { + seq: NonZeroU32::new(1).unwrap(), + items: Vec1::from(MessageDataItem::Body(mime_view::bodystructure( + &message.child, + false, + )?)), + }); + let test_bytes = ResponseCodec::new().encode(&test_repr).dump(); + let test_str = String::from_utf8_lossy(&test_bytes).to_lowercase(); + + let oracle_str = + format!("* 1 FETCH {}\r\n", String::from_utf8_lossy(&oracle)).to_lowercase(); + + println!("aerogramme: {}\n\ndovecot: {}\n\n", test_str, oracle_str); + //println!("\n\n {} \n\n", String::from_utf8_lossy(&resp)); + assert_eq!(test_str, oracle_str); + } + + Ok(()) + } +} diff --git a/aero-proto/imap/mime_view.rs b/aero-proto/imap/mime_view.rs new file mode 100644 index 0000000..8bbbd2d --- /dev/null +++ b/aero-proto/imap/mime_view.rs @@ -0,0 +1,580 @@ +use std::borrow::Cow; +use std::collections::HashSet; +use std::num::NonZeroU32; + +use anyhow::{anyhow, bail, Result}; + +use imap_codec::imap_types::body::{ + BasicFields, Body as FetchBody, BodyStructure, MultiPartExtensionData, SinglePartExtensionData, + SpecificFields, +}; +use imap_codec::imap_types::core::{AString, IString, NString, Vec1}; +use imap_codec::imap_types::fetch::{Part as FetchPart, Section as FetchSection}; + +use eml_codec::{ + header, mime, mime::r#type::Deductible, part::composite, part::discrete, part::AnyPart, +}; + +use crate::imap::imf_view::ImfView; + +pub enum BodySection<'a> { + Full(Cow<'a, [u8]>), + Slice { + body: Cow<'a, [u8]>, + origin_octet: u32, + }, +} + +/// Logic for BODY[
]<> +/// Works in 3 times: +/// 1. Find the section (RootMime::subset) +/// 2. Apply the extraction logic (SelectedMime::extract), like TEXT, HEADERS, etc. +/// 3. Keep only the given subset provided by partial +/// +/// Example of message sections: +/// +/// ``` +/// HEADER ([RFC-2822] header of the message) +/// TEXT ([RFC-2822] text body of the message) MULTIPART/MIXED +/// 1 TEXT/PLAIN +/// 2 APPLICATION/OCTET-STREAM +/// 3 MESSAGE/RFC822 +/// 3.HEADER ([RFC-2822] header of the message) +/// 3.TEXT ([RFC-2822] text body of the message) MULTIPART/MIXED +/// 3.1 TEXT/PLAIN +/// 3.2 APPLICATION/OCTET-STREAM +/// 4 MULTIPART/MIXED +/// 4.1 IMAGE/GIF +/// 4.1.MIME ([MIME-IMB] header for the IMAGE/GIF) +/// 4.2 MESSAGE/RFC822 +/// 4.2.HEADER ([RFC-2822] header of the message) +/// 4.2.TEXT ([RFC-2822] text body of the message) MULTIPART/MIXED +/// 4.2.1 TEXT/PLAIN +/// 4.2.2 MULTIPART/ALTERNATIVE +/// 4.2.2.1 TEXT/PLAIN +/// 4.2.2.2 TEXT/RICHTEXT +/// ``` +pub fn body_ext<'a>( + part: &'a AnyPart<'a>, + section: &'a Option>, + partial: &'a Option<(u32, NonZeroU32)>, +) -> Result> { + let root_mime = NodeMime(part); + let (extractor, path) = SubsettedSection::from(section); + let selected_mime = root_mime.subset(path)?; + let extracted_full = selected_mime.extract(&extractor)?; + Ok(extracted_full.to_body_section(partial)) +} + +/// Logic for BODY and BODYSTRUCTURE +/// +/// ```raw +/// b fetch 29878:29879 (BODY) +/// * 29878 FETCH (BODY (("text" "plain" ("charset" "utf-8") NIL NIL "quoted-printable" 3264 82)("text" "html" ("charset" "utf-8") NIL NIL "quoted-printable" 31834 643) "alternative")) +/// * 29879 FETCH (BODY ("text" "html" ("charset" "us-ascii") NIL NIL "7bit" 4107 131)) +/// ^^^^^^^^^^^^^^^^^^^^^^ ^^^ ^^^ ^^^^^^ ^^^^ ^^^ +/// | | | | | | number of lines +/// | | | | | size +/// | | | | content transfer encoding +/// | | | description +/// | | id +/// | parameter list +/// b OK Fetch completed (0.001 + 0.000 secs). +/// ``` +pub fn bodystructure(part: &AnyPart, is_ext: bool) -> Result> { + NodeMime(part).structure(is_ext) +} + +/// NodeMime +/// +/// Used for recursive logic on MIME. +/// See SelectedMime for inspection. +struct NodeMime<'a>(&'a AnyPart<'a>); +impl<'a> NodeMime<'a> { + /// A MIME object is a tree of elements. + /// The path indicates which element must be picked. + /// This function returns the picked element as the new view + fn subset(self, path: Option<&'a FetchPart>) -> Result> { + match path { + None => Ok(SelectedMime(self.0)), + Some(v) => self.rec_subset(v.0.as_ref()), + } + } + + fn rec_subset(self, path: &'a [NonZeroU32]) -> Result { + if path.is_empty() { + Ok(SelectedMime(self.0)) + } else { + match self.0 { + AnyPart::Mult(x) => { + let next = Self(x.children + .get(path[0].get() as usize - 1) + .ok_or(anyhow!("Unable to resolve subpath {:?}, current multipart has only {} elements", path, x.children.len()))?); + next.rec_subset(&path[1..]) + }, + AnyPart::Msg(x) => { + let next = Self(x.child.as_ref()); + next.rec_subset(path) + }, + _ => bail!("You tried to access a subpart on an atomic part (text or binary). 
Unresolved subpath {:?}", path), + } + } + } + + fn structure(&self, is_ext: bool) -> Result> { + match self.0 { + AnyPart::Txt(x) => NodeTxt(self, x).structure(is_ext), + AnyPart::Bin(x) => NodeBin(self, x).structure(is_ext), + AnyPart::Mult(x) => NodeMult(self, x).structure(is_ext), + AnyPart::Msg(x) => NodeMsg(self, x).structure(is_ext), + } + } +} + +//---------------------------------------------------------- + +/// A FetchSection must be handled in 2 times: +/// - First we must extract the MIME part +/// - Then we must process it as desired +/// The given struct mixes both work, so +/// we separate this work here. +enum SubsettedSection<'a> { + Part, + Header, + HeaderFields(&'a Vec1>), + HeaderFieldsNot(&'a Vec1>), + Text, + Mime, +} +impl<'a> SubsettedSection<'a> { + fn from(section: &'a Option) -> (Self, Option<&'a FetchPart>) { + match section { + Some(FetchSection::Text(maybe_part)) => (Self::Text, maybe_part.as_ref()), + Some(FetchSection::Header(maybe_part)) => (Self::Header, maybe_part.as_ref()), + Some(FetchSection::HeaderFields(maybe_part, fields)) => { + (Self::HeaderFields(fields), maybe_part.as_ref()) + } + Some(FetchSection::HeaderFieldsNot(maybe_part, fields)) => { + (Self::HeaderFieldsNot(fields), maybe_part.as_ref()) + } + Some(FetchSection::Mime(part)) => (Self::Mime, Some(part)), + Some(FetchSection::Part(part)) => (Self::Part, Some(part)), + None => (Self::Part, None), + } + } +} + +/// Used for current MIME inspection +/// +/// See NodeMime for recursive logic +pub struct SelectedMime<'a>(pub &'a AnyPart<'a>); +impl<'a> SelectedMime<'a> { + pub fn header_value(&'a self, to_match_ext: &[u8]) -> Option<&'a [u8]> { + let to_match = to_match_ext.to_ascii_lowercase(); + + self.eml_mime() + .kv + .iter() + .filter_map(|field| match field { + header::Field::Good(header::Kv2(k, v)) => Some((k, v)), + _ => None, + }) + .find(|(k, _)| k.to_ascii_lowercase() == to_match) + .map(|(_, v)| v) + .copied() + } + + /// The subsetted fetch section basically tells us the + /// extraction logic to apply on our selected MIME. + /// This function acts as a router for these logic. + fn extract(&self, extractor: &SubsettedSection<'a>) -> Result> { + match extractor { + SubsettedSection::Text => self.text(), + SubsettedSection::Header => self.header(), + SubsettedSection::HeaderFields(fields) => self.header_fields(fields, false), + SubsettedSection::HeaderFieldsNot(fields) => self.header_fields(fields, true), + SubsettedSection::Part => self.part(), + SubsettedSection::Mime => self.mime(), + } + } + + fn mime(&self) -> Result> { + let bytes = match &self.0 { + AnyPart::Txt(p) => p.mime.fields.raw, + AnyPart::Bin(p) => p.mime.fields.raw, + AnyPart::Msg(p) => p.child.mime().raw, + AnyPart::Mult(p) => p.mime.fields.raw, + }; + Ok(ExtractedFull(bytes.into())) + } + + fn part(&self) -> Result> { + let bytes = match &self.0 { + AnyPart::Txt(p) => p.body, + AnyPart::Bin(p) => p.body, + AnyPart::Msg(p) => p.raw_part, + AnyPart::Mult(_) => bail!("Multipart part has no body"), + }; + Ok(ExtractedFull(bytes.to_vec().into())) + } + + fn eml_mime(&self) -> &eml_codec::mime::NaiveMIME<'_> { + match &self.0 { + AnyPart::Msg(msg) => msg.child.mime(), + other => other.mime(), + } + } + + /// The [...] HEADER.FIELDS, and HEADER.FIELDS.NOT part + /// specifiers refer to the [RFC-2822] header of the message or of + /// an encapsulated [MIME-IMT] MESSAGE/RFC822 message. 
+ /// HEADER.FIELDS and HEADER.FIELDS.NOT are followed by a list of + /// field-name (as defined in [RFC-2822]) names, and return a + /// subset of the header. The subset returned by HEADER.FIELDS + /// contains only those header fields with a field-name that + /// matches one of the names in the list; similarly, the subset + /// returned by HEADER.FIELDS.NOT contains only the header fields + /// with a non-matching field-name. The field-matching is + /// case-insensitive but otherwise exact. + fn header_fields( + &self, + fields: &'a Vec1>, + invert: bool, + ) -> Result> { + // Build a lowercase ascii hashset with the fields to fetch + let index = fields + .as_ref() + .iter() + .map(|x| { + match x { + AString::Atom(a) => a.inner().as_bytes(), + AString::String(IString::Literal(l)) => l.as_ref(), + AString::String(IString::Quoted(q)) => q.inner().as_bytes(), + } + .to_ascii_lowercase() + }) + .collect::>(); + + // Extract MIME headers + let mime = self.eml_mime(); + + // Filter our MIME headers based on the field index + // 1. Keep only the correctly formatted headers + // 2. Keep only based on the index presence or absence + // 3. Reduce as a byte vector + let buffer = mime + .kv + .iter() + .filter_map(|field| match field { + header::Field::Good(header::Kv2(k, v)) => Some((k, v)), + _ => None, + }) + .filter(|(k, _)| index.contains(&k.to_ascii_lowercase()) ^ invert) + .fold(vec![], |mut acc, (k, v)| { + acc.extend(*k); + acc.extend(b": "); + acc.extend(*v); + acc.extend(b"\r\n"); + acc + }); + + Ok(ExtractedFull(buffer.into())) + } + + /// The HEADER [...] part specifiers refer to the [RFC-2822] header of the message or of + /// an encapsulated [MIME-IMT] MESSAGE/RFC822 message. + /// ```raw + /// HEADER ([RFC-2822] header of the message) + /// ``` + fn header(&self) -> Result> { + let msg = self + .0 + .as_message() + .ok_or(anyhow!("Selected part must be a message/rfc822"))?; + Ok(ExtractedFull(msg.raw_headers.into())) + } + + /// The TEXT part specifier refers to the text body of the message, omitting the [RFC-2822] header. 
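+    ///
+    /// Illustrative sketch (hypothetical message): for the raw message
+    /// `b"Subject: hello\r\n\r\nhello world"`, the TEXT specifier resolves to the
+    /// bytes after the blank line, i.e. `b"hello world"`.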
+ fn text(&self) -> Result> { + let msg = self + .0 + .as_message() + .ok_or(anyhow!("Selected part must be a message/rfc822"))?; + Ok(ExtractedFull(msg.raw_body.into())) + } + + // ------------ + + /// Basic field of a MIME part that is + /// common to all parts + fn basic_fields(&self) -> Result> { + let sz = match self.0 { + AnyPart::Txt(x) => x.body.len(), + AnyPart::Bin(x) => x.body.len(), + AnyPart::Msg(x) => x.raw_part.len(), + AnyPart::Mult(_) => 0, + }; + let m = self.0.mime(); + let parameter_list = m + .ctype + .as_ref() + .map(|x| { + x.params + .iter() + .map(|p| { + ( + IString::try_from(String::from_utf8_lossy(p.name).to_string()), + IString::try_from(p.value.to_string()), + ) + }) + .filter(|(k, v)| k.is_ok() && v.is_ok()) + .map(|(k, v)| (k.unwrap(), v.unwrap())) + .collect() + }) + .unwrap_or(vec![]); + + Ok(BasicFields { + parameter_list, + id: NString( + m.id.as_ref() + .and_then(|ci| IString::try_from(ci.to_string()).ok()), + ), + description: NString( + m.description + .as_ref() + .and_then(|cd| IString::try_from(cd.to_string()).ok()), + ), + content_transfer_encoding: match m.transfer_encoding { + mime::mechanism::Mechanism::_8Bit => unchecked_istring("8bit"), + mime::mechanism::Mechanism::Binary => unchecked_istring("binary"), + mime::mechanism::Mechanism::QuotedPrintable => { + unchecked_istring("quoted-printable") + } + mime::mechanism::Mechanism::Base64 => unchecked_istring("base64"), + _ => unchecked_istring("7bit"), + }, + // @FIXME we can't compute the size of the message currently... + size: u32::try_from(sz)?, + }) + } +} + +// --------------------------- +struct NodeMsg<'a>(&'a NodeMime<'a>, &'a composite::Message<'a>); +impl<'a> NodeMsg<'a> { + fn structure(&self, is_ext: bool) -> Result> { + let basic = SelectedMime(self.0 .0).basic_fields()?; + + Ok(BodyStructure::Single { + body: FetchBody { + basic, + specific: SpecificFields::Message { + envelope: Box::new(ImfView(&self.1.imf).message_envelope()), + body_structure: Box::new(NodeMime(&self.1.child).structure(is_ext)?), + number_of_lines: nol(self.1.raw_part), + }, + }, + extension_data: match is_ext { + true => Some(SinglePartExtensionData { + md5: NString(None), + tail: None, + }), + _ => None, + }, + }) + } +} +struct NodeMult<'a>(&'a NodeMime<'a>, &'a composite::Multipart<'a>); +impl<'a> NodeMult<'a> { + fn structure(&self, is_ext: bool) -> Result> { + let itype = &self.1.mime.interpreted_type; + let subtype = IString::try_from(itype.subtype.to_string()) + .unwrap_or(unchecked_istring("alternative")); + + let inner_bodies = self + .1 + .children + .iter() + .filter_map(|inner| NodeMime(&inner).structure(is_ext).ok()) + .collect::>(); + + Vec1::validate(&inner_bodies)?; + let bodies = Vec1::unvalidated(inner_bodies); + + Ok(BodyStructure::Multi { + bodies, + subtype, + extension_data: match is_ext { + true => Some(MultiPartExtensionData { + parameter_list: vec![( + IString::try_from("boundary").unwrap(), + IString::try_from(self.1.mime.interpreted_type.boundary.to_string())?, + )], + tail: None, + }), + _ => None, + }, + }) + } +} +struct NodeTxt<'a>(&'a NodeMime<'a>, &'a discrete::Text<'a>); +impl<'a> NodeTxt<'a> { + fn structure(&self, is_ext: bool) -> Result> { + let mut basic = SelectedMime(self.0 .0).basic_fields()?; + + // Get the interpreted content type, set it + let itype = match &self.1.mime.interpreted_type { + Deductible::Inferred(v) | Deductible::Explicit(v) => v, + }; + let subtype = + IString::try_from(itype.subtype.to_string()).unwrap_or(unchecked_istring("plain")); + + // Add charset 
to the list of parameters if we know it has been inferred as it will be + // missing from the parsed content. + if let Deductible::Inferred(charset) = &itype.charset { + basic.parameter_list.push(( + unchecked_istring("charset"), + IString::try_from(charset.to_string()).unwrap_or(unchecked_istring("us-ascii")), + )); + } + + Ok(BodyStructure::Single { + body: FetchBody { + basic, + specific: SpecificFields::Text { + subtype, + number_of_lines: nol(self.1.body), + }, + }, + extension_data: match is_ext { + true => Some(SinglePartExtensionData { + md5: NString(None), + tail: None, + }), + _ => None, + }, + }) + } +} + +struct NodeBin<'a>(&'a NodeMime<'a>, &'a discrete::Binary<'a>); +impl<'a> NodeBin<'a> { + fn structure(&self, is_ext: bool) -> Result> { + let basic = SelectedMime(self.0 .0).basic_fields()?; + + let default = mime::r#type::NaiveType { + main: &b"application"[..], + sub: &b"octet-stream"[..], + params: vec![], + }; + let ct = self.1.mime.fields.ctype.as_ref().unwrap_or(&default); + + let r#type = IString::try_from(String::from_utf8_lossy(ct.main).to_string()).or(Err( + anyhow!("Unable to build IString from given Content-Type type given"), + ))?; + + let subtype = IString::try_from(String::from_utf8_lossy(ct.sub).to_string()).or(Err( + anyhow!("Unable to build IString from given Content-Type subtype given"), + ))?; + + Ok(BodyStructure::Single { + body: FetchBody { + basic, + specific: SpecificFields::Basic { r#type, subtype }, + }, + extension_data: match is_ext { + true => Some(SinglePartExtensionData { + md5: NString(None), + tail: None, + }), + _ => None, + }, + }) + } +} + +// --------------------------- + +struct ExtractedFull<'a>(Cow<'a, [u8]>); +impl<'a> ExtractedFull<'a> { + /// It is possible to fetch a substring of the designated text. + /// This is done by appending an open angle bracket ("<"), the + /// octet position of the first desired octet, a period, the + /// maximum number of octets desired, and a close angle bracket + /// (">") to the part specifier. If the starting octet is beyond + /// the end of the text, an empty string is returned. + /// + /// Any partial fetch that attempts to read beyond the end of the + /// text is truncated as appropriate. A partial fetch that starts + /// at octet 0 is returned as a partial fetch, even if this + /// truncation happened. + /// + /// Note: This means that BODY[]<0.2048> of a 1500-octet message + /// will return BODY[]<0> with a literal of size 1500, not + /// BODY[]. + /// + /// Note: A substring fetch of a HEADER.FIELDS or + /// HEADER.FIELDS.NOT part specifier is calculated after + /// subsetting the header. 
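+    ///
+    /// Illustrative sketch (hypothetical sizes, not compiled):
+    ///
+    /// ```ignore
+    /// // A 1500-octet content asked with the partial specifier <0.2048>:
+    /// // the slice is truncated at the end of the text but still reported
+    /// // with origin octet 0, as required by the note above.
+    /// let full = ExtractedFull(Cow::Owned(vec![0u8; 1500]));
+    /// match full.to_body_section(&Some((0, NonZeroU32::new(2048).unwrap()))) {
+    ///     BodySection::Slice { body, origin_octet } => {
+    ///         assert_eq!(body.len(), 1500);
+    ///         assert_eq!(origin_octet, 0);
+    ///     }
+    ///     BodySection::Full(_) => unreachable!(),
+    /// }
+    /// ```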
+ fn to_body_section(self, partial: &'_ Option<(u32, NonZeroU32)>) -> BodySection<'a> { + match partial { + Some((begin, len)) => self.partialize(*begin, *len), + None => BodySection::Full(self.0), + } + } + + fn partialize(self, begin: u32, len: NonZeroU32) -> BodySection<'a> { + // Asked range is starting after the end of the content, + // returning an empty buffer + if begin as usize > self.0.len() { + return BodySection::Slice { + body: Cow::Borrowed(&[][..]), + origin_octet: begin, + }; + } + + // Asked range is ending after the end of the content, + // slice only the beginning of the buffer + if (begin + len.get()) as usize >= self.0.len() { + return BodySection::Slice { + body: match self.0 { + Cow::Borrowed(body) => Cow::Borrowed(&body[begin as usize..]), + Cow::Owned(body) => Cow::Owned(body[begin as usize..].to_vec()), + }, + origin_octet: begin, + }; + } + + // Range is included inside the considered content, + // this is the "happy case" + BodySection::Slice { + body: match self.0 { + Cow::Borrowed(body) => { + Cow::Borrowed(&body[begin as usize..(begin + len.get()) as usize]) + } + Cow::Owned(body) => { + Cow::Owned(body[begin as usize..(begin + len.get()) as usize].to_vec()) + } + }, + origin_octet: begin, + } + } +} + +/// ---- LEGACY + +/// s is set to static to ensure that only compile time values +/// checked by developpers are passed. +fn unchecked_istring(s: &'static str) -> IString { + IString::try_from(s).expect("this value is expected to be a valid imap-codec::IString") +} + +// Number Of Lines +fn nol(input: &[u8]) -> u32 { + input + .iter() + .filter(|x| **x == b'\n') + .count() + .try_into() + .unwrap_or(0) +} diff --git a/aero-proto/imap/mod.rs b/aero-proto/imap/mod.rs new file mode 100644 index 0000000..02ab9ce --- /dev/null +++ b/aero-proto/imap/mod.rs @@ -0,0 +1,421 @@ +mod attributes; +mod capability; +mod command; +mod flags; +mod flow; +mod imf_view; +mod index; +mod mail_view; +mod mailbox_view; +mod mime_view; +mod request; +mod response; +mod search; +mod session; + +use std::net::SocketAddr; + +use anyhow::{anyhow, bail, Context, Result}; +use futures::stream::{FuturesUnordered, StreamExt}; + +use tokio::net::TcpListener; +use tokio::sync::mpsc; +use tokio::sync::watch; + +use imap_codec::imap_types::response::{Code, CommandContinuationRequest, Response, Status}; +use imap_codec::imap_types::{core::Text, response::Greeting}; +use imap_flow::server::{ServerFlow, ServerFlowEvent, ServerFlowOptions}; +use imap_flow::stream::AnyStream; +use rustls_pemfile::{certs, private_key}; +use tokio_rustls::TlsAcceptor; + +use crate::config::{ImapConfig, ImapUnsecureConfig}; +use crate::imap::capability::ServerCapability; +use crate::imap::request::Request; +use crate::imap::response::{Body, ResponseOrIdle}; +use crate::imap::session::Instance; +use crate::login::ArcLoginProvider; + +/// Server is a thin wrapper to register our Services in BàL +pub struct Server { + bind_addr: SocketAddr, + login_provider: ArcLoginProvider, + capabilities: ServerCapability, + tls: Option, +} + +#[derive(Clone)] +struct ClientContext { + addr: SocketAddr, + login_provider: ArcLoginProvider, + must_exit: watch::Receiver, + server_capabilities: ServerCapability, +} + +pub fn new(config: ImapConfig, login: ArcLoginProvider) -> Result { + let loaded_certs = certs(&mut std::io::BufReader::new(std::fs::File::open( + config.certs, + )?)) + .collect::, _>>()?; + let loaded_key = private_key(&mut std::io::BufReader::new(std::fs::File::open( + config.key, + )?))? 
+ .unwrap(); + + let tls_config = rustls::ServerConfig::builder() + .with_no_client_auth() + .with_single_cert(loaded_certs, loaded_key)?; + let acceptor = TlsAcceptor::from(Arc::new(tls_config)); + + Ok(Server { + bind_addr: config.bind_addr, + login_provider: login, + capabilities: ServerCapability::default(), + tls: Some(acceptor), + }) +} + +pub fn new_unsecure(config: ImapUnsecureConfig, login: ArcLoginProvider) -> Server { + Server { + bind_addr: config.bind_addr, + login_provider: login, + capabilities: ServerCapability::default(), + tls: None, + } +} + +impl Server { + pub async fn run(self: Self, mut must_exit: watch::Receiver) -> Result<()> { + let tcp = TcpListener::bind(self.bind_addr).await?; + tracing::info!("IMAP server listening on {:#}", self.bind_addr); + + let mut connections = FuturesUnordered::new(); + + while !*must_exit.borrow() { + let wait_conn_finished = async { + if connections.is_empty() { + futures::future::pending().await + } else { + connections.next().await + } + }; + let (socket, remote_addr) = tokio::select! { + a = tcp.accept() => a?, + _ = wait_conn_finished => continue, + _ = must_exit.changed() => continue, + }; + tracing::info!("IMAP: accepted connection from {}", remote_addr); + let stream = match self.tls.clone() { + Some(acceptor) => { + let stream = match acceptor.accept(socket).await { + Ok(v) => v, + Err(e) => { + tracing::error!(err=?e, "TLS negociation failed"); + continue; + } + }; + AnyStream::new(stream) + } + None => AnyStream::new(socket), + }; + + let client = ClientContext { + addr: remote_addr.clone(), + login_provider: self.login_provider.clone(), + must_exit: must_exit.clone(), + server_capabilities: self.capabilities.clone(), + }; + let conn = tokio::spawn(NetLoop::handler(client, stream)); + connections.push(conn); + } + drop(tcp); + + tracing::info!("IMAP server shutting down, draining remaining connections..."); + while connections.next().await.is_some() {} + + Ok(()) + } +} + +use std::sync::Arc; +use tokio::sync::mpsc::*; +use tokio::sync::Notify; +use tokio_util::bytes::BytesMut; + +const PIPELINABLE_COMMANDS: usize = 64; + +// @FIXME a full refactor of this part of the code will be needed sooner or later +struct NetLoop { + ctx: ClientContext, + server: ServerFlow, + cmd_tx: Sender, + resp_rx: UnboundedReceiver, +} + +impl NetLoop { + async fn handler(ctx: ClientContext, sock: AnyStream) { + let addr = ctx.addr.clone(); + + let mut nl = match Self::new(ctx, sock).await { + Ok(nl) => { + tracing::debug!(addr=?addr, "netloop successfully initialized"); + nl + } + Err(e) => { + tracing::error!(addr=?addr, err=?e, "netloop can not be initialized, closing session"); + return; + } + }; + + match nl.core().await { + Ok(()) => { + tracing::debug!("closing successful netloop core for {:?}", addr); + } + Err(e) => { + tracing::error!("closing errored netloop core for {:?}: {}", addr, e); + } + } + } + + async fn new(ctx: ClientContext, sock: AnyStream) -> Result { + let mut opts = ServerFlowOptions::default(); + opts.crlf_relaxed = false; + opts.literal_accept_text = Text::unvalidated("OK"); + opts.literal_reject_text = Text::unvalidated("Literal rejected"); + + // Send greeting + let (server, _) = ServerFlow::send_greeting( + sock, + opts, + Greeting::ok( + Some(Code::Capability(ctx.server_capabilities.to_vec())), + "Aerogramme", + ) + .unwrap(), + ) + .await?; + + // Start a mailbox session in background + let (cmd_tx, cmd_rx) = mpsc::channel::(PIPELINABLE_COMMANDS); + let (resp_tx, resp_rx) = mpsc::unbounded_channel::(); + 
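+        // Design note: commands are pushed through a bounded channel sized by
+        // PIPELINABLE_COMMANDS (64), so a client that pipelines faster than the session
+        // task can drain them is disconnected by core() with a "Too fast" BYE, while
+        // responses travel back on an unbounded channel.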
tokio::spawn(Self::session(ctx.clone(), cmd_rx, resp_tx)); + + // Return the object + Ok(NetLoop { + ctx, + server, + cmd_tx, + resp_rx, + }) + } + + /// Coms with the background session + async fn session( + ctx: ClientContext, + mut cmd_rx: Receiver, + resp_tx: UnboundedSender, + ) -> () { + let mut session = Instance::new(ctx.login_provider, ctx.server_capabilities); + loop { + let cmd = match cmd_rx.recv().await { + None => break, + Some(cmd_recv) => cmd_recv, + }; + + tracing::debug!(cmd=?cmd, sock=%ctx.addr, "command"); + let maybe_response = session.request(cmd).await; + tracing::debug!(cmd=?maybe_response, sock=%ctx.addr, "response"); + + match resp_tx.send(maybe_response) { + Err(_) => break, + Ok(_) => (), + }; + } + tracing::info!("runner is quitting"); + } + + async fn core(&mut self) -> Result<()> { + let mut maybe_idle: Option> = None; + loop { + tokio::select! { + // Managing imap_flow stuff + srv_evt = self.server.progress() => match srv_evt? { + ServerFlowEvent::ResponseSent { handle: _handle, response } => { + match response { + Response::Status(Status::Bye(_)) => return Ok(()), + _ => tracing::trace!("sent to {} content {:?}", self.ctx.addr, response), + } + }, + ServerFlowEvent::CommandReceived { command } => { + match self.cmd_tx.try_send(Request::ImapCommand(command)) { + Ok(_) => (), + Err(mpsc::error::TrySendError::Full(_)) => { + self.server.enqueue_status(Status::bye(None, "Too fast").unwrap()); + tracing::error!("client {:?} is sending commands too fast, closing.", self.ctx.addr); + } + _ => { + self.server.enqueue_status(Status::bye(None, "Internal session exited").unwrap()); + tracing::error!("session task exited for {:?}, quitting", self.ctx.addr); + } + } + }, + ServerFlowEvent::IdleCommandReceived { tag } => { + match self.cmd_tx.try_send(Request::IdleStart(tag)) { + Ok(_) => (), + Err(mpsc::error::TrySendError::Full(_)) => { + self.server.enqueue_status(Status::bye(None, "Too fast").unwrap()); + tracing::error!("client {:?} is sending commands too fast, closing.", self.ctx.addr); + } + _ => { + self.server.enqueue_status(Status::bye(None, "Internal session exited").unwrap()); + tracing::error!("session task exited for {:?}, quitting", self.ctx.addr); + } + } + } + ServerFlowEvent::IdleDoneReceived => { + tracing::trace!("client sent DONE and want to stop IDLE"); + maybe_idle.ok_or(anyhow!("Received IDLE done but not idling currently"))?.notify_one(); + maybe_idle = None; + } + flow => { + self.server.enqueue_status(Status::bye(None, "Unsupported server flow event").unwrap()); + tracing::error!("session task exited for {:?} due to unsupported flow {:?}", self.ctx.addr, flow); + } + }, + + // Managing response generated by Aerogramme + maybe_msg = self.resp_rx.recv() => match maybe_msg { + Some(ResponseOrIdle::Response(response)) => { + tracing::trace!("Interactive, server has a response for the client"); + for body_elem in response.body.into_iter() { + let _handle = match body_elem { + Body::Data(d) => self.server.enqueue_data(d), + Body::Status(s) => self.server.enqueue_status(s), + }; + } + self.server.enqueue_status(response.completion); + }, + Some(ResponseOrIdle::IdleAccept(stop)) => { + tracing::trace!("Interactive, server agreed to switch in idle mode"); + let cr = CommandContinuationRequest::basic(None, "Idling")?; + self.server.idle_accept(cr).or(Err(anyhow!("refused continuation for idle accept")))?; + self.cmd_tx.try_send(Request::IdlePoll)?; + if maybe_idle.is_some() { + bail!("Can't start IDLE if already idling"); + } + maybe_idle = Some(stop); 
+ }, + Some(ResponseOrIdle::IdleEvent(elems)) => { + tracing::trace!("server imap session has some change to communicate to the client"); + for body_elem in elems.into_iter() { + let _handle = match body_elem { + Body::Data(d) => self.server.enqueue_data(d), + Body::Status(s) => self.server.enqueue_status(s), + }; + } + self.cmd_tx.try_send(Request::IdlePoll)?; + }, + Some(ResponseOrIdle::IdleReject(response)) => { + tracing::trace!("inform client that session rejected idle"); + self.server + .idle_reject(response.completion) + .or(Err(anyhow!("wrong reject command")))?; + }, + None => { + self.server.enqueue_status(Status::bye(None, "Internal session exited").unwrap()); + tracing::error!("session task exited for {:?}, quitting", self.ctx.addr); + }, + Some(_) => unreachable!(), + + }, + + // When receiving a CTRL+C + _ = self.ctx.must_exit.changed() => { + tracing::trace!("Interactive, CTRL+C, exiting"); + self.server.enqueue_status(Status::bye(None, "Server is being shutdown").unwrap()); + }, + }; + } + } + + /* + async fn idle_mode(&mut self, mut buff: BytesMut, stop: Arc) -> Result { + // Flush send + loop { + tracing::trace!("flush server send"); + match self.server.progress_send().await? { + Some(..) => continue, + None => break, + } + } + + tokio::select! { + // Receiving IDLE event from background + maybe_msg = self.resp_rx.recv() => match maybe_msg { + // Session decided idle is terminated + Some(ResponseOrIdle::Response(response)) => { + tracing::trace!("server imap session said idle is done, sending response done, switching to interactive"); + for body_elem in response.body.into_iter() { + let _handle = match body_elem { + Body::Data(d) => self.server.enqueue_data(d), + Body::Status(s) => self.server.enqueue_status(s), + }; + } + self.server.enqueue_status(response.completion); + return Ok(LoopMode::Interactive) + }, + // Session has some information for user + Some(ResponseOrIdle::IdleEvent(elems)) => { + tracing::trace!("server imap session has some change to communicate to the client"); + for body_elem in elems.into_iter() { + let _handle = match body_elem { + Body::Data(d) => self.server.enqueue_data(d), + Body::Status(s) => self.server.enqueue_status(s), + }; + } + self.cmd_tx.try_send(Request::Idle)?; + return Ok(LoopMode::Idle(buff, stop)) + }, + + // Session crashed + None => { + self.server.enqueue_status(Status::bye(None, "Internal session exited").unwrap()); + tracing::error!("session task exited for {:?}, quitting", self.ctx.addr); + return Ok(LoopMode::Interactive) + }, + + // Session can't start idling while already idling, it's a logic error! + Some(ResponseOrIdle::StartIdle(..)) => bail!("can't start idling while already idling!"), + }, + + // User is trying to interact with us + read_client_result = self.server.stream.read(&mut buff) => { + let _bytes_read = read_client_result?; + use imap_codec::decode::Decoder; + let codec = imap_codec::IdleDoneCodec::new(); + tracing::trace!("client sent some data for the server IMAP session"); + match codec.decode(&buff) { + Ok(([], imap_codec::imap_types::extensions::idle::IdleDone)) => { + // Session will be informed that it must stop idle + // It will generate the "done" message and change the loop mode + tracing::trace!("client sent DONE and want to stop IDLE"); + stop.notify_one() + }, + Err(_) => { + tracing::trace!("Unable to decode DONE, maybe not enough data were sent?"); + }, + _ => bail!("Client sent data after terminating the continuation without waiting for the server. 
This is an unsupported behavior and bug in Aerogramme, quitting."), + }; + + return Ok(LoopMode::Idle(buff, stop)) + }, + + // When receiving a CTRL+C + _ = self.ctx.must_exit.changed() => { + tracing::trace!("CTRL+C sent, aborting IDLE for this session"); + self.server.enqueue_status(Status::bye(None, "Server is being shutdown").unwrap()); + return Ok(LoopMode::Interactive) + }, + }; + }*/ +} diff --git a/aero-proto/imap/request.rs b/aero-proto/imap/request.rs new file mode 100644 index 0000000..cff18a3 --- /dev/null +++ b/aero-proto/imap/request.rs @@ -0,0 +1,9 @@ +use imap_codec::imap_types::command::Command; +use imap_codec::imap_types::core::Tag; + +#[derive(Debug)] +pub enum Request { + ImapCommand(Command<'static>), + IdleStart(Tag<'static>), + IdlePoll, +} diff --git a/aero-proto/imap/response.rs b/aero-proto/imap/response.rs new file mode 100644 index 0000000..b6a0e98 --- /dev/null +++ b/aero-proto/imap/response.rs @@ -0,0 +1,124 @@ +use anyhow::Result; +use imap_codec::imap_types::command::Command; +use imap_codec::imap_types::core::Tag; +use imap_codec::imap_types::response::{Code, Data, Status}; +use std::sync::Arc; +use tokio::sync::Notify; + +#[derive(Debug)] +pub enum Body<'a> { + Data(Data<'a>), + Status(Status<'a>), +} + +pub struct ResponseBuilder<'a> { + tag: Option>, + code: Option>, + text: String, + body: Vec>, +} + +impl<'a> ResponseBuilder<'a> { + pub fn to_req(mut self, cmd: &Command<'a>) -> Self { + self.tag = Some(cmd.tag.clone()); + self + } + pub fn tag(mut self, tag: Tag<'a>) -> Self { + self.tag = Some(tag); + self + } + + pub fn message(mut self, txt: impl Into) -> Self { + self.text = txt.into(); + self + } + + pub fn code(mut self, code: Code<'a>) -> Self { + self.code = Some(code); + self + } + + pub fn data(mut self, data: Data<'a>) -> Self { + self.body.push(Body::Data(data)); + self + } + + pub fn many_data(mut self, data: Vec>) -> Self { + for d in data.into_iter() { + self = self.data(d); + } + self + } + + #[allow(dead_code)] + pub fn info(mut self, status: Status<'a>) -> Self { + self.body.push(Body::Status(status)); + self + } + + #[allow(dead_code)] + pub fn many_info(mut self, status: Vec>) -> Self { + for d in status.into_iter() { + self = self.info(d); + } + self + } + + pub fn set_body(mut self, body: Vec>) -> Self { + self.body = body; + self + } + + pub fn ok(self) -> Result> { + Ok(Response { + completion: Status::ok(self.tag, self.code, self.text)?, + body: self.body, + }) + } + + pub fn no(self) -> Result> { + Ok(Response { + completion: Status::no(self.tag, self.code, self.text)?, + body: self.body, + }) + } + + pub fn bad(self) -> Result> { + Ok(Response { + completion: Status::bad(self.tag, self.code, self.text)?, + body: self.body, + }) + } +} + +#[derive(Debug)] +pub struct Response<'a> { + pub body: Vec>, + pub completion: Status<'a>, +} + +impl<'a> Response<'a> { + pub fn build() -> ResponseBuilder<'a> { + ResponseBuilder { + tag: None, + code: None, + text: "".to_string(), + body: vec![], + } + } + + pub fn bye() -> Result> { + Ok(Response { + completion: Status::bye(None, "bye")?, + body: vec![], + }) + } +} + +#[derive(Debug)] +pub enum ResponseOrIdle { + Response(Response<'static>), + IdleAccept(Arc), + IdleReject(Response<'static>), + IdleEvent(Vec>), +} diff --git a/aero-proto/imap/search.rs b/aero-proto/imap/search.rs new file mode 100644 index 0000000..37a7e9e --- /dev/null +++ b/aero-proto/imap/search.rs @@ -0,0 +1,477 @@ +use std::num::{NonZeroU32, NonZeroU64}; + +use imap_codec::imap_types::core::Vec1; +use 
imap_codec::imap_types::search::{MetadataItemSearch, SearchKey}; +use imap_codec::imap_types::sequence::{SeqOrUid, Sequence, SequenceSet}; + +use crate::imap::index::MailIndex; +use crate::imap::mail_view::MailView; +use crate::mail::query::QueryScope; + +pub enum SeqType { + Undefined, + NonUid, + Uid, +} +impl SeqType { + pub fn is_uid(&self) -> bool { + matches!(self, Self::Uid) + } +} + +pub struct Criteria<'a>(pub &'a SearchKey<'a>); +impl<'a> Criteria<'a> { + /// Returns a set of email identifiers that is greater or equal + /// to the set of emails to return + pub fn to_sequence_set(&self) -> (SequenceSet, SeqType) { + match self.0 { + SearchKey::All => (sequence_set_all(), SeqType::Undefined), + SearchKey::SequenceSet(seq_set) => (seq_set.clone(), SeqType::NonUid), + SearchKey::Uid(seq_set) => (seq_set.clone(), SeqType::Uid), + SearchKey::Not(_inner) => { + tracing::debug!( + "using NOT in a search request is slow: it selects all identifiers" + ); + (sequence_set_all(), SeqType::Undefined) + } + SearchKey::Or(left, right) => { + tracing::debug!("using OR in a search request is slow: no deduplication is done"); + let (base, base_seqtype) = Self(&left).to_sequence_set(); + let (ext, ext_seqtype) = Self(&right).to_sequence_set(); + + // Check if we have a UID/ID conflict in fetching: now we don't know how to handle them + match (base_seqtype, ext_seqtype) { + (SeqType::Uid, SeqType::NonUid) | (SeqType::NonUid, SeqType::Uid) => { + (sequence_set_all(), SeqType::Undefined) + } + (SeqType::Undefined, x) | (x, _) => { + let mut new_vec = base.0.into_inner(); + new_vec.extend_from_slice(ext.0.as_ref()); + let seq = SequenceSet( + Vec1::try_from(new_vec) + .expect("merging non empty vec lead to non empty vec"), + ); + (seq, x) + } + } + } + SearchKey::And(search_list) => { + tracing::debug!( + "using AND in a search request is slow: no intersection is performed" + ); + // As we perform no intersection, we don't care if we mix uid or id. + // We only keep the smallest range, being it ID or UID, depending of + // which one has the less items. This is an approximation as UID ranges + // can have holes while ID ones can't. + search_list + .as_ref() + .iter() + .map(|crit| Self(&crit).to_sequence_set()) + .min_by(|(x, _), (y, _)| { + let x_size = approx_sequence_set_size(x); + let y_size = approx_sequence_set_size(y); + x_size.cmp(&y_size) + }) + .unwrap_or((sequence_set_all(), SeqType::Undefined)) + } + _ => (sequence_set_all(), SeqType::Undefined), + } + } + + /// Not really clever as we can have cases where we filter out + /// the email before needing to inspect its meta. + /// But for now we are seeking the most basic/stupid algorithm. + pub fn query_scope(&self) -> QueryScope { + use SearchKey::*; + match self.0 { + // Combinators + And(and_list) => and_list + .as_ref() + .iter() + .fold(QueryScope::Index, |prev, sk| { + prev.union(&Criteria(sk).query_scope()) + }), + Not(inner) => Criteria(inner).query_scope(), + Or(left, right) => Criteria(left) + .query_scope() + .union(&Criteria(right).query_scope()), + All => QueryScope::Index, + + // IMF Headers + Bcc(_) | Cc(_) | From(_) | Header(..) | SentBefore(_) | SentOn(_) | SentSince(_) + | Subject(_) | To(_) => QueryScope::Partial, + // Internal Date is also stored in MailMeta + Before(_) | On(_) | Since(_) => QueryScope::Partial, + // Message size is also stored in MailMeta + Larger(_) | Smaller(_) => QueryScope::Partial, + // Text and Body require that we fetch the full content! 
+            Text(_) | Body(_) => QueryScope::Full,
+
+            _ => QueryScope::Index,
+        }
+    }
+
+    pub fn is_modseq(&self) -> bool {
+        use SearchKey::*;
+        match self.0 {
+            And(and_list) => and_list
+                .as_ref()
+                .iter()
+                .any(|child| Criteria(child).is_modseq()),
+            Or(left, right) => Criteria(left).is_modseq() || Criteria(right).is_modseq(),
+            Not(child) => Criteria(child).is_modseq(),
+            ModSeq { .. } => true,
+            _ => false,
+        }
+    }
+
+    /// Returns emails that we know for sure we want to keep,
+    /// but also a second list of emails we need to investigate further by
+    /// fetching some remote data
+    pub fn filter_on_idx<'b>(
+        &self,
+        midx_list: &[&'b MailIndex<'b>],
+    ) -> (Vec<&'b MailIndex<'b>>, Vec<&'b MailIndex<'b>>) {
+        let (p1, p2): (Vec<_>, Vec<_>) = midx_list
+            .iter()
+            .map(|x| (x, self.is_keep_on_idx(x)))
+            .filter(|(_midx, decision)| decision.is_keep())
+            .map(|(midx, decision)| (*midx, decision))
+            .partition(|(_midx, decision)| matches!(decision, PartialDecision::Keep));
+
+        let to_keep = p1.into_iter().map(|(v, _)| v).collect();
+        let to_fetch = p2.into_iter().map(|(v, _)| v).collect();
+        (to_keep, to_fetch)
+    }
+
+    // ----
+
+    /// Here we are doing a partial filtering: we do not have access
+    /// to the headers or to the body, so every time we encounter a rule
+    /// based on them, we need to keep it.
+    ///
+    /// @TODO Could be optimized on a per-email basis by also returning the QueryScope
+    /// when more information is needed!
+    fn is_keep_on_idx(&self, midx: &MailIndex) -> PartialDecision {
+        use SearchKey::*;
+        match self.0 {
+            // Combinator logic
+            And(expr_list) => expr_list
+                .as_ref()
+                .iter()
+                .fold(PartialDecision::Keep, |acc, cur| {
+                    acc.and(&Criteria(cur).is_keep_on_idx(midx))
+                }),
+            Or(left, right) => {
+                let left_decision = Criteria(left).is_keep_on_idx(midx);
+                let right_decision = Criteria(right).is_keep_on_idx(midx);
+                left_decision.or(&right_decision)
+            }
+            Not(expr) => Criteria(expr).is_keep_on_idx(midx).not(),
+            All => PartialDecision::Keep,
+
+            // Sequence logic
+            maybe_seq if is_sk_seq(maybe_seq) => is_keep_seq(maybe_seq, midx).into(),
+            maybe_flag if is_sk_flag(maybe_flag) => is_keep_flag(maybe_flag, midx).into(),
+            ModSeq {
+                metadata_item,
+                modseq,
+            } => is_keep_modseq(metadata_item, modseq, midx).into(),
+
+            // All the stuff we can't evaluate yet
+            Bcc(_) | Cc(_) | From(_) | Header(..) | SentBefore(_) | SentOn(_) | SentSince(_)
+            | Subject(_) | To(_) | Before(_) | On(_) | Since(_) | Larger(_) | Smaller(_)
+            | Text(_) | Body(_) => PartialDecision::Postpone,
+
+            unknown => {
+                tracing::error!("Unknown filter {:?}", unknown);
+                PartialDecision::Discard
+            }
+        }
+    }
+
+    /// @TODO we re-evaluate the same logic twice. The correct way would be, on each pass,
+    /// to simplify the search query by removing the elements that were already checked.
+    /// For example, if we have AND(OR(seqid(X), body(Y)), body(X)), we can't keep the email
+    /// for sure, as body(X) might be false. So we need to check it. But as seqid(X) is true,
+    /// we could simplify the request to just body(X) and truncate the first OR. Today, we are
+    /// not doing that, and thus we re-evaluate everything.
+ pub fn is_keep_on_query(&self, mail_view: &MailView) -> bool { + use SearchKey::*; + match self.0 { + // Combinator logic + And(expr_list) => expr_list + .as_ref() + .iter() + .all(|cur| Criteria(cur).is_keep_on_query(mail_view)), + Or(left, right) => { + Criteria(left).is_keep_on_query(mail_view) + || Criteria(right).is_keep_on_query(mail_view) + } + Not(expr) => !Criteria(expr).is_keep_on_query(mail_view), + All => true, + + //@FIXME Reevaluating our previous logic... + maybe_seq if is_sk_seq(maybe_seq) => is_keep_seq(maybe_seq, &mail_view.in_idx), + maybe_flag if is_sk_flag(maybe_flag) => is_keep_flag(maybe_flag, &mail_view.in_idx), + ModSeq { + metadata_item, + modseq, + } => is_keep_modseq(metadata_item, modseq, &mail_view.in_idx).into(), + + // Filter on mail meta + Before(search_naive) => match mail_view.stored_naive_date() { + Ok(msg_naive) => &msg_naive < search_naive.as_ref(), + _ => false, + }, + On(search_naive) => match mail_view.stored_naive_date() { + Ok(msg_naive) => &msg_naive == search_naive.as_ref(), + _ => false, + }, + Since(search_naive) => match mail_view.stored_naive_date() { + Ok(msg_naive) => &msg_naive > search_naive.as_ref(), + _ => false, + }, + + // Message size is also stored in MailMeta + Larger(size_ref) => { + mail_view + .query_result + .metadata() + .expect("metadata were fetched") + .rfc822_size + > *size_ref as usize + } + Smaller(size_ref) => { + mail_view + .query_result + .metadata() + .expect("metadata were fetched") + .rfc822_size + < *size_ref as usize + } + + // Filter on well-known headers + Bcc(txt) => mail_view.is_header_contains_pattern(&b"bcc"[..], txt.as_ref()), + Cc(txt) => mail_view.is_header_contains_pattern(&b"cc"[..], txt.as_ref()), + From(txt) => mail_view.is_header_contains_pattern(&b"from"[..], txt.as_ref()), + Subject(txt) => mail_view.is_header_contains_pattern(&b"subject"[..], txt.as_ref()), + To(txt) => mail_view.is_header_contains_pattern(&b"to"[..], txt.as_ref()), + Header(hdr, txt) => mail_view.is_header_contains_pattern(hdr.as_ref(), txt.as_ref()), + + // Filter on Date header + SentBefore(search_naive) => mail_view + .imf() + .map(|imf| imf.naive_date().ok()) + .flatten() + .map(|msg_naive| &msg_naive < search_naive.as_ref()) + .unwrap_or(false), + SentOn(search_naive) => mail_view + .imf() + .map(|imf| imf.naive_date().ok()) + .flatten() + .map(|msg_naive| &msg_naive == search_naive.as_ref()) + .unwrap_or(false), + SentSince(search_naive) => mail_view + .imf() + .map(|imf| imf.naive_date().ok()) + .flatten() + .map(|msg_naive| &msg_naive > search_naive.as_ref()) + .unwrap_or(false), + + // Filter on the full content of the email + Text(txt) => mail_view + .content + .as_msg() + .map(|msg| { + msg.raw_part + .windows(txt.as_ref().len()) + .any(|win| win == txt.as_ref()) + }) + .unwrap_or(false), + Body(txt) => mail_view + .content + .as_msg() + .map(|msg| { + msg.raw_body + .windows(txt.as_ref().len()) + .any(|win| win == txt.as_ref()) + }) + .unwrap_or(false), + + unknown => { + tracing::error!("Unknown filter {:?}", unknown); + false + } + } + } +} + +// ---- Sequence things ---- +fn sequence_set_all() -> SequenceSet { + SequenceSet::from(Sequence::Range( + SeqOrUid::Value(NonZeroU32::MIN), + SeqOrUid::Asterisk, + )) +} + +// This is wrong as sequences can overlap +fn approx_sequence_set_size(seq_set: &SequenceSet) -> u64 { + seq_set.0.as_ref().iter().fold(0u64, |acc, seq| { + acc.saturating_add(approx_sequence_size(seq)) + }) +} + +// This is wrong as sequence UID can have holes, +// as we don't know the number of 
messages in the mailbox, so we have to guess
+fn approx_sequence_size(seq: &Sequence) -> u64 {
+    match seq {
+        Sequence::Single(_) => 1,
+        Sequence::Range(SeqOrUid::Asterisk, _) | Sequence::Range(_, SeqOrUid::Asterisk) => u64::MAX,
+        Sequence::Range(SeqOrUid::Value(x1), SeqOrUid::Value(x2)) => {
+            let x2 = x2.get() as i64;
+            let x1 = x1.get() as i64;
+            (x2 - x1).abs().try_into().unwrap_or(1)
+        }
+    }
+}
+
+// --- Partial decision things ----
+
+enum PartialDecision {
+    Keep,
+    Discard,
+    Postpone,
+}
+impl From<bool> for PartialDecision {
+    fn from(x: bool) -> Self {
+        match x {
+            true => PartialDecision::Keep,
+            _ => PartialDecision::Discard,
+        }
+    }
+}
+impl PartialDecision {
+    fn not(&self) -> Self {
+        match self {
+            Self::Keep => Self::Discard,
+            Self::Discard => Self::Keep,
+            Self::Postpone => Self::Postpone,
+        }
+    }
+
+    fn or(&self, other: &Self) -> Self {
+        match (self, other) {
+            (Self::Keep, _) | (_, Self::Keep) => Self::Keep,
+            (Self::Postpone, _) | (_, Self::Postpone) => Self::Postpone,
+            (Self::Discard, Self::Discard) => Self::Discard,
+        }
+    }
+
+    fn and(&self, other: &Self) -> Self {
+        match (self, other) {
+            (Self::Discard, _) | (_, Self::Discard) => Self::Discard,
+            (Self::Postpone, _) | (_, Self::Postpone) => Self::Postpone,
+            (Self::Keep, Self::Keep) => Self::Keep,
+        }
+    }
+
+    fn is_keep(&self) -> bool {
+        !matches!(self, Self::Discard)
+    }
+}
+
+// ----- Search Key things ---
+fn is_sk_flag(sk: &SearchKey) -> bool {
+    use SearchKey::*;
+    match sk {
+        Answered | Deleted | Draft | Flagged | Keyword(..) | New | Old | Recent | Seen
+        | Unanswered | Undeleted | Undraft | Unflagged | Unkeyword(..) | Unseen => true,
+        _ => false,
+    }
+}
+
+fn is_keep_flag(sk: &SearchKey, midx: &MailIndex) -> bool {
+    use SearchKey::*;
+    match sk {
+        Answered => midx.is_flag_set("\\Answered"),
+        Deleted => midx.is_flag_set("\\Deleted"),
+        Draft => midx.is_flag_set("\\Draft"),
+        Flagged => midx.is_flag_set("\\Flagged"),
+        Keyword(kw) => midx.is_flag_set(kw.inner()),
+        New => {
+            let is_recent = midx.is_flag_set("\\Recent");
+            let is_seen = midx.is_flag_set("\\Seen");
+            is_recent && !is_seen
+        }
+        Old => {
+            let is_recent = midx.is_flag_set("\\Recent");
+            !is_recent
+        }
+        Recent => midx.is_flag_set("\\Recent"),
+        Seen => midx.is_flag_set("\\Seen"),
+        Unanswered => {
+            let is_answered = midx.is_flag_set("\\Answered");
+            !is_answered
+        }
+        Undeleted => {
+            let is_deleted = midx.is_flag_set("\\Deleted");
+            !is_deleted
+        }
+        Undraft => {
+            let is_draft = midx.is_flag_set("\\Draft");
+            !is_draft
+        }
+        Unflagged => {
+            let is_flagged = midx.is_flag_set("\\Flagged");
+            !is_flagged
+        }
+        Unkeyword(kw) => {
+            let is_keyword_set = midx.is_flag_set(kw.inner());
+            !is_keyword_set
+        }
+        Unseen => {
+            let is_seen = midx.is_flag_set("\\Seen");
+            !is_seen
+        }
+
+        // Not flag logic
+        _ => unreachable!(),
+    }
+}
+
+fn is_sk_seq(sk: &SearchKey) -> bool {
+    use SearchKey::*;
+    match sk {
+        SequenceSet(..) | Uid(..)
=> true, + _ => false, + } +} +fn is_keep_seq(sk: &SearchKey, midx: &MailIndex) -> bool { + use SearchKey::*; + match sk { + SequenceSet(seq_set) => seq_set + .0 + .as_ref() + .iter() + .any(|seq| midx.is_in_sequence_i(seq)), + Uid(seq_set) => seq_set + .0 + .as_ref() + .iter() + .any(|seq| midx.is_in_sequence_uid(seq)), + _ => unreachable!(), + } +} + +fn is_keep_modseq( + filter: &Option, + modseq: &NonZeroU64, + midx: &MailIndex, +) -> bool { + if filter.is_some() { + tracing::warn!(filter=?filter, "Ignoring search metadata filter as it's not supported yet"); + } + modseq <= &midx.modseq +} diff --git a/aero-proto/imap/session.rs b/aero-proto/imap/session.rs new file mode 100644 index 0000000..fa3232a --- /dev/null +++ b/aero-proto/imap/session.rs @@ -0,0 +1,173 @@ +use crate::imap::capability::{ClientCapability, ServerCapability}; +use crate::imap::command::{anonymous, authenticated, selected}; +use crate::imap::flow; +use crate::imap::request::Request; +use crate::imap::response::{Response, ResponseOrIdle}; +use crate::login::ArcLoginProvider; +use anyhow::{anyhow, bail, Context, Result}; +use imap_codec::imap_types::{command::Command, core::Tag}; + +//----- +pub struct Instance { + pub login_provider: ArcLoginProvider, + pub server_capabilities: ServerCapability, + pub client_capabilities: ClientCapability, + pub state: flow::State, +} +impl Instance { + pub fn new(login_provider: ArcLoginProvider, cap: ServerCapability) -> Self { + let client_cap = ClientCapability::new(&cap); + Self { + login_provider, + state: flow::State::NotAuthenticated, + server_capabilities: cap, + client_capabilities: client_cap, + } + } + + pub async fn request(&mut self, req: Request) -> ResponseOrIdle { + match req { + Request::IdleStart(tag) => self.idle_init(tag), + Request::IdlePoll => self.idle_poll().await, + Request::ImapCommand(cmd) => self.command(cmd).await, + } + } + + pub fn idle_init(&mut self, tag: Tag<'static>) -> ResponseOrIdle { + // Build transition + //@FIXME the notifier should be hidden inside the state and thus not part of the transition! + let transition = flow::Transition::Idle(tag.clone(), tokio::sync::Notify::new()); + + // Try to apply the transition and get the stop notifier + let maybe_stop = self + .state + .apply(transition) + .context("IDLE transition failed") + .and_then(|_| { + self.state + .notify() + .ok_or(anyhow!("IDLE state has no Notify object")) + }); + + // Build an appropriate response + match maybe_stop { + Ok(stop) => ResponseOrIdle::IdleAccept(stop), + Err(e) => { + tracing::error!(err=?e, "unable to init idle due to a transition error"); + //ResponseOrIdle::IdleReject(tag) + let no = Response::build() + .tag(tag) + .message( + "Internal error, processing command triggered an illegal IMAP state transition", + ) + .no() + .unwrap(); + ResponseOrIdle::IdleReject(no) + } + } + } + + pub async fn idle_poll(&mut self) -> ResponseOrIdle { + match self.idle_poll_happy().await { + Ok(r) => r, + Err(e) => { + tracing::error!(err=?e, "something bad happened in idle"); + ResponseOrIdle::Response(Response::bye().unwrap()) + } + } + } + + pub async fn idle_poll_happy(&mut self) -> Result { + let (mbx, tag, stop) = match &mut self.state { + flow::State::Idle(_, ref mut mbx, _, tag, stop) => (mbx, tag.clone(), stop.clone()), + _ => bail!("Invalid session state, can't idle"), + }; + + tokio::select! 
{ + _ = stop.notified() => { + self.state.apply(flow::Transition::UnIdle)?; + return Ok(ResponseOrIdle::Response(Response::build() + .tag(tag.clone()) + .message("IDLE completed") + .ok()?)) + }, + change = mbx.idle_sync() => { + tracing::debug!("idle event"); + return Ok(ResponseOrIdle::IdleEvent(change?)); + } + } + } + + pub async fn command(&mut self, cmd: Command<'static>) -> ResponseOrIdle { + // Command behavior is modulated by the state. + // To prevent state error, we handle the same command in separate code paths. + let (resp, tr) = match &mut self.state { + flow::State::NotAuthenticated => { + let ctx = anonymous::AnonymousContext { + req: &cmd, + login_provider: &self.login_provider, + server_capabilities: &self.server_capabilities, + }; + anonymous::dispatch(ctx).await + } + flow::State::Authenticated(ref user) => { + let ctx = authenticated::AuthenticatedContext { + req: &cmd, + server_capabilities: &self.server_capabilities, + client_capabilities: &mut self.client_capabilities, + user, + }; + authenticated::dispatch(ctx).await + } + flow::State::Selected(ref user, ref mut mailbox, ref perm) => { + let ctx = selected::SelectedContext { + req: &cmd, + server_capabilities: &self.server_capabilities, + client_capabilities: &mut self.client_capabilities, + user, + mailbox, + perm, + }; + selected::dispatch(ctx).await + } + flow::State::Idle(..) => Err(anyhow!("can not receive command while idling")), + flow::State::Logout => Response::build() + .tag(cmd.tag.clone()) + .message("No commands are allowed in the LOGOUT state.") + .bad() + .map(|r| (r, flow::Transition::None)), + } + .unwrap_or_else(|err| { + tracing::error!("Command error {:?} occured while processing {:?}", err, cmd); + ( + Response::build() + .to_req(&cmd) + .message("Internal error while processing command") + .bad() + .unwrap(), + flow::Transition::None, + ) + }); + + if let Err(e) = self.state.apply(tr) { + tracing::error!( + "Transition error {:?} occured while processing on command {:?}", + e, + cmd + ); + return ResponseOrIdle::Response(Response::build() + .to_req(&cmd) + .message( + "Internal error, processing command triggered an illegal IMAP state transition", + ) + .bad() + .unwrap()); + } + ResponseOrIdle::Response(resp) + + /*match &self.state { + flow::State::Idle(_, _, _, _, n) => ResponseOrIdle::StartIdle(n.clone()), + _ => ResponseOrIdle::Response(resp), + }*/ + } +} diff --git a/aero-proto/lmtp.rs b/aero-proto/lmtp.rs new file mode 100644 index 0000000..dcd4bcc --- /dev/null +++ b/aero-proto/lmtp.rs @@ -0,0 +1,221 @@ +use std::net::SocketAddr; +use std::{pin::Pin, sync::Arc}; + +use anyhow::Result; +use async_trait::async_trait; +use duplexify::Duplex; +use futures::{io, AsyncRead, AsyncReadExt, AsyncWrite}; +use futures::{ + stream, + stream::{FuturesOrdered, FuturesUnordered}, + StreamExt, +}; +use log::*; +use tokio::net::TcpListener; +use tokio::select; +use tokio::sync::watch; +use tokio_util::compat::*; + +use smtp_message::{DataUnescaper, Email, EscapedDataReader, Reply, ReplyCode}; +use smtp_server::{reply, Config, ConnectionMetadata, Decision, MailMetadata}; + +use crate::config::*; +use crate::login::*; +use crate::mail::incoming::EncryptedMessage; + +pub struct LmtpServer { + bind_addr: SocketAddr, + hostname: String, + login_provider: Arc, +} + +impl LmtpServer { + pub fn new( + config: LmtpConfig, + login_provider: Arc, + ) -> Arc { + Arc::new(Self { + bind_addr: config.bind_addr, + hostname: config.hostname, + login_provider, + }) + } + + pub async fn run(self: &Arc, mut must_exit: 
watch::Receiver) -> Result<()> { + let tcp = TcpListener::bind(self.bind_addr).await?; + info!("LMTP server listening on {:#}", self.bind_addr); + + let mut connections = FuturesUnordered::new(); + + while !*must_exit.borrow() { + let wait_conn_finished = async { + if connections.is_empty() { + futures::future::pending().await + } else { + connections.next().await + } + }; + let (socket, remote_addr) = select! { + a = tcp.accept() => a?, + _ = wait_conn_finished => continue, + _ = must_exit.changed() => continue, + }; + info!("LMTP: accepted connection from {}", remote_addr); + + let conn = tokio::spawn(smtp_server::interact( + socket.compat(), + smtp_server::IsAlreadyTls::No, + (), + self.clone(), + )); + + connections.push(conn); + } + drop(tcp); + + info!("LMTP server shutting down, draining remaining connections..."); + while connections.next().await.is_some() {} + + Ok(()) + } +} + +// ---- + +pub struct Message { + to: Vec, +} + +#[async_trait] +impl Config for LmtpServer { + type Protocol = smtp_server::protocol::Lmtp; + + type ConnectionUserMeta = (); + type MailUserMeta = Message; + + fn hostname(&self, _conn_meta: &ConnectionMetadata<()>) -> &str { + &self.hostname + } + + async fn new_mail(&self, _conn_meta: &mut ConnectionMetadata<()>) -> Message { + Message { to: vec![] } + } + + async fn tls_accept( + &self, + _io: IO, + _conn_meta: &mut ConnectionMetadata<()>, + ) -> io::Result>, Pin>>> + where + IO: Send + AsyncRead + AsyncWrite, + { + Err(io::Error::new( + io::ErrorKind::InvalidInput, + "TLS not implemented for LMTP server", + )) + } + + async fn filter_from( + &self, + from: Option, + _meta: &mut MailMetadata, + _conn_meta: &mut ConnectionMetadata<()>, + ) -> Decision> { + Decision::Accept { + reply: reply::okay_from().convert(), + res: from, + } + } + + async fn filter_to( + &self, + to: Email, + meta: &mut MailMetadata, + _conn_meta: &mut ConnectionMetadata<()>, + ) -> Decision { + let to_str = match to.hostname.as_ref() { + Some(h) => format!("{}@{}", to.localpart, h), + None => to.localpart.to_string(), + }; + match self.login_provider.public_login(&to_str).await { + Ok(creds) => { + meta.user.to.push(creds); + Decision::Accept { + reply: reply::okay_to().convert(), + res: to, + } + } + Err(e) => Decision::Reject { + reply: Reply { + code: ReplyCode::POLICY_REASON, + ecode: None, + text: vec![smtp_message::MaybeUtf8::Utf8(e.to_string())], + }, + }, + } + } + + async fn handle_mail<'resp, R>( + &'resp self, + reader: &mut EscapedDataReader<'_, R>, + meta: MailMetadata, + _conn_meta: &'resp mut ConnectionMetadata<()>, + ) -> Pin> + Send + 'resp>> + where + R: Send + Unpin + AsyncRead, + { + let err_response_stream = |meta: MailMetadata, msg: String| { + Box::pin( + stream::iter(meta.user.to.into_iter()).map(move |_| Decision::Reject { + reply: Reply { + code: ReplyCode::POLICY_REASON, + ecode: None, + text: vec![smtp_message::MaybeUtf8::Utf8(msg.clone())], + }, + }), + ) + }; + + let mut text = Vec::new(); + if let Err(e) = reader.read_to_end(&mut text).await { + return err_response_stream(meta, format!("io error: {}", e)); + } + reader.complete(); + let raw_size = text.len(); + + // Unescape email, shrink it also to remove last dot + let unesc_res = DataUnescaper::new(true).unescape(&mut text); + text.truncate(unesc_res.written); + tracing::debug!(prev_sz = raw_size, new_sz = text.len(), "unescaped"); + + let encrypted_message = match EncryptedMessage::new(text) { + Ok(x) => Arc::new(x), + Err(e) => return err_response_stream(meta, e.to_string()), + }; + + Box::pin( + 
meta.user + .to + .into_iter() + .map(move |creds| { + let encrypted_message = encrypted_message.clone(); + async move { + match encrypted_message.deliver_to(creds).await { + Ok(()) => Decision::Accept { + reply: reply::okay_mail().convert(), + res: (), + }, + Err(e) => Decision::Reject { + reply: Reply { + code: ReplyCode::POLICY_REASON, + ecode: None, + text: vec![smtp_message::MaybeUtf8::Utf8(e.to_string())], + }, + }, + } + } + }) + .collect::>(), + ) + } +} diff --git a/aero-proto/sasl.rs b/aero-proto/sasl.rs new file mode 100644 index 0000000..fe292e1 --- /dev/null +++ b/aero-proto/sasl.rs @@ -0,0 +1,140 @@ +use std::net::SocketAddr; + +use anyhow::{anyhow, bail, Result}; +use futures::stream::{FuturesUnordered, StreamExt}; +use tokio::io::BufStream; +use tokio::io::{AsyncBufReadExt, AsyncWriteExt}; +use tokio::net::{TcpListener, TcpStream}; +use tokio::sync::watch; + +use aero_user::config::AuthConfig; +use aero_user::login::ArcLoginProvider; + + +pub struct AuthServer { + login_provider: ArcLoginProvider, + bind_addr: SocketAddr, +} + +impl AuthServer { + pub fn new(config: AuthConfig, login_provider: ArcLoginProvider) -> Self { + Self { + bind_addr: config.bind_addr, + login_provider, + } + } + + pub async fn run(self: Self, mut must_exit: watch::Receiver) -> Result<()> { + let tcp = TcpListener::bind(self.bind_addr).await?; + tracing::info!( + "SASL Authentication Protocol listening on {:#}", + self.bind_addr + ); + + let mut connections = FuturesUnordered::new(); + + while !*must_exit.borrow() { + let wait_conn_finished = async { + if connections.is_empty() { + futures::future::pending().await + } else { + connections.next().await + } + }; + + let (socket, remote_addr) = tokio::select! { + a = tcp.accept() => a?, + _ = wait_conn_finished => continue, + _ = must_exit.changed() => continue, + }; + + tracing::info!("AUTH: accepted connection from {}", remote_addr); + let conn = tokio::spawn( + NetLoop::new(socket, self.login_provider.clone(), must_exit.clone()).run_error(), + ); + + connections.push(conn); + } + drop(tcp); + + tracing::info!("AUTH server shutting down, draining remaining connections..."); + while connections.next().await.is_some() {} + + Ok(()) + } +} + +struct NetLoop { + login: ArcLoginProvider, + stream: BufStream, + stop: watch::Receiver, + state: State, + read_buf: Vec, + write_buf: BytesMut, +} + +impl NetLoop { + fn new(stream: TcpStream, login: ArcLoginProvider, stop: watch::Receiver) -> Self { + Self { + login, + stream: BufStream::new(stream), + state: State::Init, + stop, + read_buf: Vec::new(), + write_buf: BytesMut::new(), + } + } + + async fn run_error(self) { + match self.run().await { + Ok(()) => tracing::info!("Auth session succeeded"), + Err(e) => tracing::error!(err=?e, "Auth session failed"), + } + } + + async fn run(mut self) -> Result<()> { + loop { + tokio::select! { + read_res = self.stream.read_until(b'\n', &mut self.read_buf) => { + // Detect EOF / socket close + let bread = read_res?; + if bread == 0 { + tracing::info!("Reading buffer empty, connection has been closed. 
Exiting AUTH session."); + return Ok(()) + } + + // Parse command + let (_, cmd) = client_command(&self.read_buf).map_err(|_| anyhow!("Unable to parse command"))?; + tracing::trace!(cmd=?cmd, "Received command"); + + // Make some progress in our local state + self.state.progress(cmd, &self.login).await; + if matches!(self.state, State::Error) { + bail!("Internal state is in error, previous logs explain what went wrong"); + } + + // Build response + let srv_cmds = self.state.response(); + srv_cmds.iter().try_for_each(|r| { + tracing::trace!(cmd=?r, "Sent command"); + r.encode(&mut self.write_buf) + })?; + + // Send responses if at least one command response has been generated + if !srv_cmds.is_empty() { + self.stream.write_all(&self.write_buf).await?; + self.stream.flush().await?; + } + + // Reset buffers + self.read_buf.clear(); + self.write_buf.clear(); + }, + _ = self.stop.changed() => { + tracing::debug!("Server is stopping, quitting this runner"); + return Ok(()) + } + } + } + } +} diff --git a/aero-sasl/Cargo.toml b/aero-sasl/Cargo.toml new file mode 100644 index 0000000..3e66ff3 --- /dev/null +++ b/aero-sasl/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "aero-sasl" +version = "0.3.0" +authors = ["Alex Auvolat ", "Quentin Dufour "] +edition = "2021" +license = "EUPL-1.2" +description = "A partial and standalone implementation of the Dovecot SASL Auth Protocol" + +[dependencies] + +anyhow.workspace = true +base64.workspace = true +futures.workspace = true +nom.workspace = true +rand.workspace = true +tokio.workspace = true +tokio-util.workspace = true +tracing.workspace = true +hex.workspace = true + +#log.workspace = true +#serde.workspace = true diff --git a/aero-sasl/src/decode.rs b/aero-sasl/src/decode.rs new file mode 100644 index 0000000..f5d7b53 --- /dev/null +++ b/aero-sasl/src/decode.rs @@ -0,0 +1,243 @@ +use base64::Engine; +use nom::{ + branch::alt, + bytes::complete::{tag, tag_no_case, take, take_while, take_while1}, + character::complete::{tab, u16, u64}, + combinator::{map, opt, recognize, rest, value}, + error::{Error, ErrorKind}, + multi::{many1, separated_list0}, + sequence::{pair, preceded, tuple}, + IResult, +}; + +use super::types::*; + +pub fn client_command<'a>(input: &'a [u8]) -> IResult<&'a [u8], ClientCommand> { + alt((version_command, cpid_command, auth_command, cont_command))(input) +} + +/* +fn server_command(buf: &u8) -> IResult<&u8, ServerCommand> { + unimplemented!(); +} +*/ + +// --------------------- + +fn version_command<'a>(input: &'a [u8]) -> IResult<&'a [u8], ClientCommand> { + let mut parser = tuple((tag_no_case(b"VERSION"), tab, u64, tab, u64)); + + let (input, (_, _, major, _, minor)) = parser(input)?; + Ok((input, ClientCommand::Version(Version { major, minor }))) +} + +pub fn cpid_command<'a>(input: &'a [u8]) -> IResult<&'a [u8], ClientCommand> { + preceded( + pair(tag_no_case(b"CPID"), tab), + map(u64, |v| ClientCommand::Cpid(v)), + )(input) +} + +fn mechanism<'a>(input: &'a [u8]) -> IResult<&'a [u8], Mechanism> { + alt(( + value(Mechanism::Plain, tag_no_case(b"PLAIN")), + value(Mechanism::Login, tag_no_case(b"LOGIN")), + ))(input) +} + +fn is_not_tab_or_esc_or_lf(c: u8) -> bool { + c != 0x09 && c != 0x01 && c != 0x0a // TAB or 0x01 or LF +} + +fn is_esc<'a>(input: &'a [u8]) -> IResult<&'a [u8], &[u8]> { + preceded(tag(&[0x01]), take(1usize))(input) +} + +fn parameter<'a>(input: &'a [u8]) -> IResult<&'a [u8], &[u8]> { + recognize(many1(alt((take_while1(is_not_tab_or_esc_or_lf), is_esc))))(input) +} + +fn parameter_str(input: &[u8]) -> 
IResult<&[u8], String> { + let (input, buf) = parameter(input)?; + + std::str::from_utf8(buf) + .map(|v| (input, v.to_string())) + .map_err(|_| nom::Err::Failure(Error::new(input, ErrorKind::TakeWhile1))) +} + +fn is_param_name_char(c: u8) -> bool { + is_not_tab_or_esc_or_lf(c) && c != 0x3d // = +} + +fn parameter_name(input: &[u8]) -> IResult<&[u8], String> { + let (input, buf) = take_while1(is_param_name_char)(input)?; + + std::str::from_utf8(buf) + .map(|v| (input, v.to_string())) + .map_err(|_| nom::Err::Failure(Error::new(input, ErrorKind::TakeWhile1))) +} + +fn service<'a>(input: &'a [u8]) -> IResult<&'a [u8], String> { + preceded(tag_no_case("service="), parameter_str)(input) +} + +fn auth_option<'a>(input: &'a [u8]) -> IResult<&'a [u8], AuthOption> { + use AuthOption::*; + alt(( + alt(( + value(Debug, tag_no_case(b"debug")), + value(NoPenalty, tag_no_case(b"no-penalty")), + value(ClientId, tag_no_case(b"client_id")), + value(NoLogin, tag_no_case(b"nologin")), + map(preceded(tag_no_case(b"session="), u64), |id| Session(id)), + map(preceded(tag_no_case(b"lip="), parameter_str), |ip| { + LocalIp(ip) + }), + map(preceded(tag_no_case(b"rip="), parameter_str), |ip| { + RemoteIp(ip) + }), + map(preceded(tag_no_case(b"lport="), u16), |port| { + LocalPort(port) + }), + map(preceded(tag_no_case(b"rport="), u16), |port| { + RemotePort(port) + }), + map(preceded(tag_no_case(b"real_rip="), parameter_str), |ip| { + RealRemoteIp(ip) + }), + map(preceded(tag_no_case(b"real_lip="), parameter_str), |ip| { + RealLocalIp(ip) + }), + map(preceded(tag_no_case(b"real_lport="), u16), |port| { + RealLocalPort(port) + }), + map(preceded(tag_no_case(b"real_rport="), u16), |port| { + RealRemotePort(port) + }), + )), + alt(( + map( + preceded(tag_no_case(b"local_name="), parameter_str), + |name| LocalName(name), + ), + map( + preceded(tag_no_case(b"forward_views="), parameter), + |views| ForwardViews(views.into()), + ), + map(preceded(tag_no_case(b"secured="), parameter_str), |info| { + Secured(Some(info)) + }), + value(Secured(None), tag_no_case(b"secured")), + value(CertUsername, tag_no_case(b"cert_username")), + map(preceded(tag_no_case(b"transport="), parameter_str), |ts| { + Transport(ts) + }), + map( + preceded(tag_no_case(b"tls_cipher="), parameter_str), + |cipher| TlsCipher(cipher), + ), + map( + preceded(tag_no_case(b"tls_cipher_bits="), parameter_str), + |bits| TlsCipherBits(bits), + ), + map(preceded(tag_no_case(b"tls_pfs="), parameter_str), |pfs| { + TlsPfs(pfs) + }), + map( + preceded(tag_no_case(b"tls_protocol="), parameter_str), + |proto| TlsProtocol(proto), + ), + map( + preceded(tag_no_case(b"valid-client-cert="), parameter_str), + |cert| ValidClientCert(cert), + ), + )), + alt(( + map(preceded(tag_no_case(b"resp="), base64), |data| Resp(data)), + map( + tuple((parameter_name, tag(b"="), parameter)), + |(n, _, v)| UnknownPair(n, v.into()), + ), + map(parameter, |v| UnknownBool(v.into())), + )), + ))(input) +} + +fn auth_command<'a>(input: &'a [u8]) -> IResult<&'a [u8], ClientCommand> { + let mut parser = tuple(( + tag_no_case(b"AUTH"), + tab, + u64, + tab, + mechanism, + tab, + service, + map(opt(preceded(tab, separated_list0(tab, auth_option))), |o| { + o.unwrap_or(vec![]) + }), + )); + let (input, (_, _, id, _, mech, _, service, options)) = parser(input)?; + Ok(( + input, + ClientCommand::Auth { + id, + mech, + service, + options, + }, + )) +} + +fn is_base64_core(c: u8) -> bool { + c >= 0x30 && c <= 0x39 // 0-9 + || c >= 0x41 && c <= 0x5a // A-Z + || c >= 0x61 && c <= 0x7a // a-z + || c == 
0x2b // + + || c == 0x2f // / +} + +fn is_base64_pad(c: u8) -> bool { + c == 0x3d // = +} + +fn base64(input: &[u8]) -> IResult<&[u8], Vec> { + let (input, (b64, _)) = tuple((take_while1(is_base64_core), take_while(is_base64_pad)))(input)?; + + let data = base64::engine::general_purpose::STANDARD_NO_PAD + .decode(b64) + .map_err(|_| nom::Err::Failure(Error::new(input, ErrorKind::TakeWhile1)))?; + + Ok((input, data)) +} + +/// @FIXME Dovecot does not say if base64 content must be padded or not +fn cont_command<'a>(input: &'a [u8]) -> IResult<&'a [u8], ClientCommand> { + let mut parser = tuple((tag_no_case(b"CONT"), tab, u64, tab, base64)); + + let (input, (_, _, id, _, data)) = parser(input)?; + Ok((input, ClientCommand::Cont { id, data })) +} + +// ----------------------------------------------------------------- +// +// SASL DECODING +// +// ----------------------------------------------------------------- + +fn not_null(c: u8) -> bool { + c != 0x0 +} + +// impersonated user, login, password +pub fn auth_plain<'a>(input: &'a [u8]) -> IResult<&'a [u8], (&'a [u8], &'a [u8], &'a [u8])> { + map( + tuple(( + take_while(not_null), + take(1usize), + take_while(not_null), + take(1usize), + rest, + )), + |(imp, _, user, _, pass)| (imp, user, pass), + )(input) +} diff --git a/aero-sasl/src/encode.rs b/aero-sasl/src/encode.rs new file mode 100644 index 0000000..625d035 --- /dev/null +++ b/aero-sasl/src/encode.rs @@ -0,0 +1,157 @@ +use anyhow::Result; +use base64::Engine; +use tokio_util::bytes::{BufMut, BytesMut}; + +use super::types::*; + +pub trait Encode { + fn encode(&self, out: &mut BytesMut) -> Result<()>; +} + +fn tab_enc(out: &mut BytesMut) { + out.put(&[0x09][..]) +} + +fn lf_enc(out: &mut BytesMut) { + out.put(&[0x0A][..]) +} + +impl Encode for Mechanism { + fn encode(&self, out: &mut BytesMut) -> Result<()> { + match self { + Self::Plain => out.put(&b"PLAIN"[..]), + Self::Login => out.put(&b"LOGIN"[..]), + } + Ok(()) + } +} + +impl Encode for MechanismParameters { + fn encode(&self, out: &mut BytesMut) -> Result<()> { + match self { + Self::Anonymous => out.put(&b"anonymous"[..]), + Self::PlainText => out.put(&b"plaintext"[..]), + Self::Dictionary => out.put(&b"dictionary"[..]), + Self::Active => out.put(&b"active"[..]), + Self::ForwardSecrecy => out.put(&b"forward-secrecy"[..]), + Self::MutualAuth => out.put(&b"mutual-auth"[..]), + Self::Private => out.put(&b"private"[..]), + } + Ok(()) + } +} + +impl Encode for FailCode { + fn encode(&self, out: &mut BytesMut) -> Result<()> { + match self { + Self::TempFail => out.put(&b"temp_fail"[..]), + Self::AuthzFail => out.put(&b"authz_fail"[..]), + Self::UserDisabled => out.put(&b"user_disabled"[..]), + Self::PassExpired => out.put(&b"pass_expired"[..]), + }; + Ok(()) + } +} + +impl Encode for ServerCommand { + fn encode(&self, out: &mut BytesMut) -> Result<()> { + match self { + Self::Version(Version { major, minor }) => { + out.put(&b"VERSION"[..]); + tab_enc(out); + out.put(major.to_string().as_bytes()); + tab_enc(out); + out.put(minor.to_string().as_bytes()); + lf_enc(out); + } + Self::Spid(pid) => { + out.put(&b"SPID"[..]); + tab_enc(out); + out.put(pid.to_string().as_bytes()); + lf_enc(out); + } + Self::Cuid(pid) => { + out.put(&b"CUID"[..]); + tab_enc(out); + out.put(pid.to_string().as_bytes()); + lf_enc(out); + } + Self::Cookie(cval) => { + out.put(&b"COOKIE"[..]); + tab_enc(out); + out.put(hex::encode(cval).as_bytes()); + lf_enc(out); + } + Self::Mech { kind, parameters } => { + out.put(&b"MECH"[..]); + tab_enc(out); + kind.encode(out)?; 
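+                // Resulting wire format (cf. the protocol trace in lib.rs), e.g. for PLAIN
+                // with the plaintext parameter: MECH<TAB>PLAIN<TAB>plaintext<LF>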
+ for p in parameters.iter() { + tab_enc(out); + p.encode(out)?; + } + lf_enc(out); + } + Self::Done => { + out.put(&b"DONE"[..]); + lf_enc(out); + } + Self::Cont { id, data } => { + out.put(&b"CONT"[..]); + tab_enc(out); + out.put(id.to_string().as_bytes()); + tab_enc(out); + if let Some(rdata) = data { + let b64 = base64::engine::general_purpose::STANDARD.encode(rdata); + out.put(b64.as_bytes()); + } + lf_enc(out); + } + Self::Ok { + id, + user_id, + extra_parameters, + } => { + out.put(&b"OK"[..]); + tab_enc(out); + out.put(id.to_string().as_bytes()); + if let Some(user) = user_id { + tab_enc(out); + out.put(&b"user="[..]); + out.put(user.as_bytes()); + } + for p in extra_parameters.iter() { + tab_enc(out); + out.put(&p[..]); + } + lf_enc(out); + } + Self::Fail { + id, + user_id, + code, + extra_parameters, + } => { + out.put(&b"FAIL"[..]); + tab_enc(out); + out.put(id.to_string().as_bytes()); + if let Some(user) = user_id { + tab_enc(out); + out.put(&b"user="[..]); + out.put(user.as_bytes()); + } + if let Some(code_val) = code { + tab_enc(out); + out.put(&b"code="[..]); + code_val.encode(out)?; + } + for p in extra_parameters.iter() { + tab_enc(out); + out.put(&p[..]); + } + lf_enc(out); + } + } + Ok(()) + } +} diff --git a/aero-sasl/src/flow.rs b/aero-sasl/src/flow.rs new file mode 100644 index 0000000..6cc698a --- /dev/null +++ b/aero-sasl/src/flow.rs @@ -0,0 +1,201 @@ +use futures::Future; +use rand::prelude::*; + +use super::types::*; +use super::decode::auth_plain; + +#[derive(Debug)] +pub enum AuthRes { + Success(String), + Failed(Option, Option), +} + +#[derive(Debug)] +pub enum State { + Error, + Init, + HandshakePart(Version), + HandshakeDone, + AuthPlainProgress { id: u64 }, + AuthDone { id: u64, res: AuthRes }, +} + +const SERVER_MAJOR: u64 = 1; +const SERVER_MINOR: u64 = 2; +const EMPTY_AUTHZ: &[u8] = &[]; +impl State { + pub fn new() -> Self { + Self::Init + } + + async fn try_auth_plain<'a, X, F>(&self, data: &'a [u8], login: X) -> AuthRes + where + X: FnOnce(&'a str, &'a str) -> F, + F: Future, + { + // Check that we can extract user's login+pass + let (ubin, pbin) = match auth_plain(&data) { + Ok(([], (authz, user, pass))) if authz == user || authz == EMPTY_AUTHZ => (user, pass), + Ok(_) => { + tracing::error!("Impersonating user is not supported"); + return AuthRes::Failed(None, None); + } + Err(e) => { + tracing::error!(err=?e, "Could not parse the SASL PLAIN data chunk"); + return AuthRes::Failed(None, None); + } + }; + + // Try to convert it to UTF-8 + let (user, password) = match (std::str::from_utf8(ubin), std::str::from_utf8(pbin)) { + (Ok(u), Ok(p)) => (u, p), + _ => { + tracing::error!("Username or password contain invalid UTF-8 characters"); + return AuthRes::Failed(None, None); + } + }; + + // Try to connect user + match login(user, password).await { + true => AuthRes::Success(user.to_string()), + false => { + tracing::warn!("login failed"); + AuthRes::Failed(Some(user.to_string()), None) + } + } + } + + pub async fn progress(&mut self, cmd: ClientCommand, login: X) + where + X: FnOnce(&str, &str) -> F, + F: Future, + { + let new_state = 'state: { + match (std::mem::replace(self, State::Error), cmd) { + (Self::Init, ClientCommand::Version(v)) => Self::HandshakePart(v), + (Self::HandshakePart(version), ClientCommand::Cpid(_cpid)) => { + if version.major != SERVER_MAJOR { + tracing::error!( + client_major = version.major, + server_major = SERVER_MAJOR, + "Unsupported client major version" + ); + break 'state Self::Error; + } + + Self::HandshakeDone + } + ( + 
Self::HandshakeDone { .. }, + ClientCommand::Auth { + id, mech, options, .. + }, + ) + | ( + Self::AuthDone { .. }, + ClientCommand::Auth { + id, mech, options, .. + }, + ) => { + if mech != Mechanism::Plain { + tracing::error!(mechanism=?mech, "Unsupported Authentication Mechanism"); + break 'state Self::AuthDone { + id, + res: AuthRes::Failed(None, None), + }; + } + + match options.last() { + Some(AuthOption::Resp(data)) => Self::AuthDone { + id, + res: self.try_auth_plain(&data, login).await, + }, + _ => Self::AuthPlainProgress { id }, + } + } + (Self::AuthPlainProgress { id }, ClientCommand::Cont { id: cid, data }) => { + // Check that ID matches + if cid != id { + tracing::error!( + auth_id = id, + cont_id = cid, + "CONT id does not match AUTH id" + ); + break 'state Self::AuthDone { + id, + res: AuthRes::Failed(None, None), + }; + } + + Self::AuthDone { + id, + res: self.try_auth_plain(&data, login).await, + } + } + _ => { + tracing::error!("This command is not valid in this context"); + Self::Error + } + } + }; + tracing::debug!(state=?new_state, "Made progress"); + *self = new_state; + } + + pub fn response(&self) -> Vec { + let mut srv_cmd: Vec = Vec::new(); + + match self { + Self::HandshakeDone { .. } => { + srv_cmd.push(ServerCommand::Version(Version { + major: SERVER_MAJOR, + minor: SERVER_MINOR, + })); + + srv_cmd.push(ServerCommand::Mech { + kind: Mechanism::Plain, + parameters: vec![MechanismParameters::PlainText], + }); + + srv_cmd.push(ServerCommand::Spid(15u64)); + srv_cmd.push(ServerCommand::Cuid(19350u64)); + + let mut cookie = [0u8; 16]; + thread_rng().fill(&mut cookie); + srv_cmd.push(ServerCommand::Cookie(cookie)); + + srv_cmd.push(ServerCommand::Done); + } + Self::AuthPlainProgress { id } => { + srv_cmd.push(ServerCommand::Cont { + id: *id, + data: None, + }); + } + Self::AuthDone { + id, + res: AuthRes::Success(user), + } => { + srv_cmd.push(ServerCommand::Ok { + id: *id, + user_id: Some(user.to_string()), + extra_parameters: vec![], + }); + } + Self::AuthDone { + id, + res: AuthRes::Failed(maybe_user, maybe_failcode), + } => { + srv_cmd.push(ServerCommand::Fail { + id: *id, + user_id: maybe_user.clone(), + code: maybe_failcode.clone(), + extra_parameters: vec![], + }); + } + _ => (), + }; + + srv_cmd + } +} diff --git a/aero-sasl/src/lib.rs b/aero-sasl/src/lib.rs new file mode 100644 index 0000000..230862a --- /dev/null +++ b/aero-sasl/src/lib.rs @@ -0,0 +1,43 @@ +/// Seek compatibility with the Dovecot Authentication Protocol +/// +/// ## Trace +/// +/// ```text +/// S: VERSION 1 2 +/// S: MECH PLAIN plaintext +/// S: MECH LOGIN plaintext +/// S: SPID 15 +/// S: CUID 17654 +/// S: COOKIE f56692bee41f471ed01bd83520025305 +/// S: DONE +/// C: VERSION 1 2 +/// C: CPID 1 +/// +/// C: AUTH 2 PLAIN service=smtp +/// S: CONT 2 +/// C: CONT 2 base64stringFollowingRFC4616== +/// S: OK 2 user=alice@example.tld +/// +/// C: AUTH 42 LOGIN service=smtp +/// S: CONT 42 VXNlcm5hbWU6 +/// C: CONT 42 b64User +/// S: CONT 42 UGFzc3dvcmQ6 +/// C: CONT 42 b64Pass +/// S: FAIL 42 user=alice +/// ``` +/// +/// ## RFC References +/// +/// PLAIN SASL - https://datatracker.ietf.org/doc/html/rfc4616 +/// +/// +/// ## Dovecot References +/// +/// https://doc.dovecot.org/developer_manual/design/auth_protocol/ +/// https://doc.dovecot.org/configuration_manual/authentication/authentication_mechanisms/#authentication-authentication-mechanisms +/// https://doc.dovecot.org/configuration_manual/howto/simple_virtual_install/#simple-virtual-install-smtp-auth +/// 
https://doc.dovecot.org/configuration_manual/howto/postfix_and_dovecot_sasl/#howto-postfix-and-dovecot-sasl +pub mod types; +pub mod encode; +pub mod decode; +pub mod flow; diff --git a/aero-sasl/src/types.rs b/aero-sasl/src/types.rs new file mode 100644 index 0000000..d71405e --- /dev/null +++ b/aero-sasl/src/types.rs @@ -0,0 +1,163 @@ +#[derive(Debug, Clone, PartialEq)] +pub enum Mechanism { + Plain, + Login, +} + +#[derive(Clone, Debug)] +pub enum AuthOption { + /// Unique session ID. Mainly used for logging. + Session(u64), + /// Local IP connected to by the client. In standard string format, e.g. 127.0.0.1 or ::1. + LocalIp(String), + /// Remote client IP + RemoteIp(String), + /// Local port connected to by the client. + LocalPort(u16), + /// Remote client port + RemotePort(u16), + /// When Dovecot proxy is used, the real_rip/real_port are the proxy’s IP/port and real_lip/real_lport are the backend’s IP/port where the proxy was connected to. + RealRemoteIp(String), + RealLocalIp(String), + RealLocalPort(u16), + RealRemotePort(u16), + /// TLS SNI name + LocalName(String), + /// Enable debugging for this lookup. + Debug, + /// List of fields that will become available via %{forward_*} variables. The list is double-tab-escaped, like: tab_escaped[tab_escaped(key=value)[...] + /// Note: we do not unescape the tabulation, and thus we don't parse the data + ForwardViews(Vec), + /// Remote user has secured transport to auth client (e.g. localhost, SSL, TLS). + Secured(Option), + /// The value can be “insecure”, “trusted” or “TLS”. + Transport(String), + /// TLS cipher being used. + TlsCipher(String), + /// The number of bits in the TLS cipher. + /// @FIXME: I don't know how if it's a string or an integer + TlsCipherBits(String), + /// TLS perfect forward secrecy algorithm (e.g. DH, ECDH) + TlsPfs(String), + /// TLS protocol name (e.g. SSLv3, TLSv1.2) + TlsProtocol(String), + /// Remote user has presented a valid SSL certificate. + ValidClientCert(String), + /// Ignore auth penalty tracking for this request + NoPenalty, + /// Unknown option sent by Postfix + NoLogin, + /// Username taken from client’s SSL certificate. + CertUsername, + /// IMAP ID string + ClientId, + /// An unknown key + UnknownPair(String, Vec), + UnknownBool(Vec), + /// Initial response for authentication mechanism. + /// NOTE: This must be the last parameter. Everything after it is ignored. + /// This is to avoid accidental security holes if user-given data is directly put to base64 string without filtering out tabs. + /// **This field is used when the data to pass is small, it's a way to "inline a continuation". + Resp(Vec), +} + +#[derive(Debug, Clone)] +pub struct Version { + pub major: u64, + pub minor: u64, +} + +#[derive(Debug)] +pub enum ClientCommand { + /// Both client and server should check that they support the same major version number. If they don’t, the other side isn’t expected to be talking the same protocol and should be disconnected. Minor version can be ignored. This document specifies the version number 1.2. + Version(Version), + /// CPID finishes the handshake from client. + Cpid(u64), + Auth { + /// ID is a connection-specific unique request identifier. It must be a 32bit number, so typically you’d just increment it by one. + id: u64, + /// A SASL mechanism (eg. LOGIN, PLAIN, etc.) + /// See: https://doc.dovecot.org/configuration_manual/authentication/authentication_mechanisms/#authentication-authentication-mechanisms + mech: Mechanism, + /// Service is the service requesting authentication, eg. 
pop3, imap, smtp. + service: String, + /// All the optional parameters + options: Vec, + }, + Cont { + /// The must match the of the AUTH command. + id: u64, + /// Data that will be serialized to / deserialized from base64 + data: Vec, + }, +} + +#[derive(Debug)] +pub enum MechanismParameters { + /// Anonymous authentication + Anonymous, + /// Transfers plaintext passwords + PlainText, + /// Subject to passive (dictionary) attack + Dictionary, + /// Subject to active (non-dictionary) attack + Active, + /// Provides forward secrecy between sessions + ForwardSecrecy, + /// Provides mutual authentication + MutualAuth, + /// Don’t advertise this as available SASL mechanism (eg. APOP) + Private, +} + +#[derive(Debug, Clone)] +pub enum FailCode { + /// This is a temporary internal failure, e.g. connection was lost to SQL database. + TempFail, + /// Authentication succeeded, but authorization failed (master user’s password was ok, but destination user was not ok). + AuthzFail, + /// User is disabled (password may or may not have been correct) + UserDisabled, + /// User’s password has expired. + PassExpired, +} + +#[derive(Debug)] +pub enum ServerCommand { + /// Both client and server should check that they support the same major version number. If they don’t, the other side isn’t expected to be talking the same protocol and should be disconnected. Minor version can be ignored. This document specifies the version number 1.2. + Version(Version), + /// CPID and SPID specify client and server Process Identifiers (PIDs). They should be unique identifiers for the specific process. UNIX process IDs are good choices. + /// SPID can be used by authentication client to tell master which server process handled the authentication. + Spid(u64), + /// CUID is a server process-specific unique connection identifier. It’s different each time a connection is established for the server. + /// CUID is currently useful only for APOP authentication. + Cuid(u64), + Mech { + kind: Mechanism, + parameters: Vec, + }, + /// COOKIE returns connection-specific 128 bit cookie in hex. It must be given to REQUEST command. (Protocol v1.1+ / Dovecot v2.0+) + Cookie([u8; 16]), + /// DONE finishes the handshake from server. + Done, + + Fail { + id: u64, + user_id: Option, + code: Option, + extra_parameters: Vec>, + }, + Cont { + id: u64, + data: Option>, + }, + /// FAIL and OK may contain multiple unspecified parameters which authentication client may handle specially. + /// The only one specified here is user= parameter, which should always be sent if the userid is known. 
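
On the wire, each of these server commands is a single tab-separated, LF-terminated line, produced by the Encode impl added earlier in this patch. A minimal sketch of encoding an OK reply, assuming the aero-sasl crate layout introduced here (anyhow is only used for the error type):

    use tokio_util::bytes::BytesMut;
    use aero_sasl::encode::Encode;
    use aero_sasl::types::ServerCommand;

    fn ok_reply_bytes() -> anyhow::Result<BytesMut> {
        // Produces the tab-separated, LF-terminated line:
        //   OK <TAB> 2 <TAB> user=alice@example.tld <LF>
        // matching the trace shown in aero-sasl/src/lib.rs.
        let mut out = BytesMut::new();
        ServerCommand::Ok {
            id: 2,
            user_id: Some("alice@example.tld".into()),
            extra_parameters: vec![],
        }
        .encode(&mut out)?;
        Ok(out)
    }
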
+ Ok { + id: u64, + user_id: Option, + extra_parameters: Vec>, + }, +} + + diff --git a/aero-user/Cargo.toml b/aero-user/Cargo.toml new file mode 100644 index 0000000..fc851e2 --- /dev/null +++ b/aero-user/Cargo.toml @@ -0,0 +1,30 @@ +[package] +name = "aero-user" +version = "0.3.0" +authors = ["Alex Auvolat ", "Quentin Dufour "] +edition = "2021" +license = "EUPL-1.2" +description = "Represent an encrypted user profile" + +[dependencies] +anyhow.workspace = true +serde.workspace = true +zstd.workspace = true +sodiumoxide.workspace = true +log.workspace = true +async-trait.workspace = true +ldap3.workspace = true +base64.workspace = true +rand.workspace = true +tokio.workspace = true +aws-config.workspace = true +aws-sdk-s3.workspace = true +aws-smithy-runtime.workspace = true +aws-smithy-runtime-api.workspace = true +hyper-rustls.workspace = true +hyper-util.workspace = true +k2v-client.workspace = true +rmp-serde.workspace = true +toml.workspace = true +tracing.workspace = true +argon2.workspace = true diff --git a/aero-user/src/config.rs b/aero-user/src/config.rs new file mode 100644 index 0000000..7de2eac --- /dev/null +++ b/aero-user/src/config.rs @@ -0,0 +1,191 @@ +use std::collections::HashMap; +use std::io::{Read, Write}; +use std::net::SocketAddr; +use std::path::PathBuf; + +use anyhow::Result; +use serde::{Deserialize, Serialize}; + +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct CompanionConfig { + pub pid: Option, + pub imap: ImapUnsecureConfig, + // @FIXME Add DAV + + #[serde(flatten)] + pub users: LoginStaticConfig, +} + +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct ProviderConfig { + pub pid: Option, + pub imap: Option, + pub imap_unsecure: Option, + pub lmtp: Option, + pub auth: Option, + pub dav_unsecure: Option, + pub users: UserManagement, +} + +#[derive(Serialize, Deserialize, Debug, Clone)] +#[serde(tag = "user_driver")] +pub enum UserManagement { + Demo, + Static(LoginStaticConfig), + Ldap(LoginLdapConfig), +} + +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct AuthConfig { + pub bind_addr: SocketAddr, +} + +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct LmtpConfig { + pub bind_addr: SocketAddr, + pub hostname: String, +} + +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct ImapConfig { + pub bind_addr: SocketAddr, + pub certs: PathBuf, + pub key: PathBuf, +} + +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct DavUnsecureConfig { + pub bind_addr: SocketAddr, +} + +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct ImapUnsecureConfig { + pub bind_addr: SocketAddr, +} + +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct LoginStaticConfig { + pub user_list: PathBuf, +} + +#[derive(Serialize, Deserialize, Debug, Clone)] +#[serde(tag = "storage_driver")] +pub enum LdapStorage { + Garage(LdapGarageConfig), + InMemory, +} + +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct LdapGarageConfig { + pub s3_endpoint: String, + pub k2v_endpoint: String, + pub aws_region: String, + + pub aws_access_key_id_attr: String, + pub aws_secret_access_key_attr: String, + pub bucket_attr: Option, + pub default_bucket: Option, +} + +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct LoginLdapConfig { + // LDAP connection info + pub ldap_server: String, + #[serde(default)] + pub pre_bind_on_login: bool, + pub bind_dn: Option, + pub bind_password: Option, + pub search_base: String, + + // Schema-like info required for Aerogramme's logic + pub username_attr: String, + #[serde(default = 
"default_mail_attr")] + pub mail_attr: String, + + // The field that will contain the crypto root thingy + pub crypto_root_attr: String, + + // Storage related thing + #[serde(flatten)] + pub storage: LdapStorage, +} + +// ---- + +#[derive(Serialize, Deserialize, Debug, Clone)] +#[serde(tag = "storage_driver")] +pub enum StaticStorage { + Garage(StaticGarageConfig), + InMemory, +} + +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct StaticGarageConfig { + pub s3_endpoint: String, + pub k2v_endpoint: String, + pub aws_region: String, + + pub aws_access_key_id: String, + pub aws_secret_access_key: String, + pub bucket: String, +} + +pub type UserList = HashMap; + +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct UserEntry { + #[serde(default)] + pub email_addresses: Vec, + pub password: String, + pub crypto_root: String, + + #[serde(flatten)] + pub storage: StaticStorage, +} + +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct SetupEntry { + #[serde(default)] + pub email_addresses: Vec, + + #[serde(default)] + pub clear_password: Option, + + #[serde(flatten)] + pub storage: StaticStorage, +} + +#[derive(Serialize, Deserialize, Debug, Clone)] +#[serde(tag = "role")] +pub enum AnyConfig { + Companion(CompanionConfig), + Provider(ProviderConfig), +} + +// --- +pub fn read_config(config_file: PathBuf) -> Result { + let mut file = std::fs::OpenOptions::new() + .read(true) + .open(config_file.as_path())?; + + let mut config = String::new(); + file.read_to_string(&mut config)?; + + Ok(toml::from_str(&config)?) +} + +pub fn write_config(config_file: PathBuf, config: &T) -> Result<()> { + let mut file = std::fs::OpenOptions::new() + .write(true) + .create(true) + .truncate(true) + .open(config_file.as_path())?; + + file.write_all(toml::to_string(config)?.as_bytes())?; + + Ok(()) +} + +fn default_mail_attr() -> String { + "mail".into() +} diff --git a/aero-user/src/cryptoblob.rs b/aero-user/src/cryptoblob.rs new file mode 100644 index 0000000..327a642 --- /dev/null +++ b/aero-user/src/cryptoblob.rs @@ -0,0 +1,67 @@ +//! Helper functions for secret-key encrypted blobs +//! 
that contain Zstd encrypted data + +use anyhow::{anyhow, Result}; +use serde::{Deserialize, Serialize}; +use zstd::stream::{decode_all as zstd_decode, encode_all as zstd_encode}; + +//use sodiumoxide::crypto::box_ as publicbox; +use sodiumoxide::crypto::secretbox::xsalsa20poly1305 as secretbox; + +pub use sodiumoxide::crypto::box_::{ + gen_keypair, PublicKey, SecretKey, PUBLICKEYBYTES, SECRETKEYBYTES, +}; +pub use sodiumoxide::crypto::secretbox::xsalsa20poly1305::{gen_key, Key, KEYBYTES}; + +pub fn open(cryptoblob: &[u8], key: &Key) -> Result> { + use secretbox::{Nonce, NONCEBYTES}; + + if cryptoblob.len() < NONCEBYTES { + return Err(anyhow!("Cyphertext too short")); + } + + // Decrypt -> get Zstd data + let nonce = Nonce::from_slice(&cryptoblob[..NONCEBYTES]).unwrap(); + let zstdblob = secretbox::open(&cryptoblob[NONCEBYTES..], &nonce, key) + .map_err(|_| anyhow!("Could not decrypt blob"))?; + + // Decompress zstd data + let mut reader = &zstdblob[..]; + let data = zstd_decode(&mut reader)?; + + Ok(data) +} + +pub fn seal(plainblob: &[u8], key: &Key) -> Result> { + use secretbox::{gen_nonce, NONCEBYTES}; + + // Compress data using zstd + let mut reader = plainblob; + let zstdblob = zstd_encode(&mut reader, 0)?; + + // Encrypt + let nonce = gen_nonce(); + let cryptoblob = secretbox::seal(&zstdblob, &nonce, key); + + let mut res = Vec::with_capacity(NONCEBYTES + cryptoblob.len()); + res.extend(nonce.as_ref()); + res.extend(cryptoblob); + + Ok(res) +} + +pub fn open_deserialize Deserialize<'de>>(cryptoblob: &[u8], key: &Key) -> Result { + let blob = open(cryptoblob, key)?; + + Ok(rmp_serde::decode::from_read_ref::<_, T>(&blob)?) +} + +pub fn seal_serialize(obj: T, key: &Key) -> Result> { + let mut wr = Vec::with_capacity(128); + let mut se = rmp_serde::Serializer::new(&mut wr) + .with_struct_map() + .with_string_variants(); + obj.serialize(&mut se)?; + + seal(&wr, key) +} diff --git a/aero-user/src/lib.rs b/aero-user/src/lib.rs new file mode 100644 index 0000000..9b08fe2 --- /dev/null +++ b/aero-user/src/lib.rs @@ -0,0 +1,9 @@ +pub mod config; +pub mod cryptoblob; +pub mod login; +pub mod storage; + +// A user is composed of 3 things: +// - An identity (login) +// - A storage profile (storage) +// - Some cryptography data (cryptoblob) diff --git a/aero-user/src/login/demo_provider.rs b/aero-user/src/login/demo_provider.rs new file mode 100644 index 0000000..11c7d54 --- /dev/null +++ b/aero-user/src/login/demo_provider.rs @@ -0,0 +1,51 @@ +use crate::login::*; +use crate::storage::*; + +pub struct DemoLoginProvider { + keys: CryptoKeys, + in_memory_store: in_memory::MemDb, +} + +impl DemoLoginProvider { + pub fn new() -> Self { + Self { + keys: CryptoKeys::init(), + in_memory_store: in_memory::MemDb::new(), + } + } +} + +#[async_trait] +impl LoginProvider for DemoLoginProvider { + async fn login(&self, username: &str, password: &str) -> Result { + tracing::debug!(user=%username, "login"); + + if username != "alice" { + bail!("user does not exist"); + } + + if password != "hunter2" { + bail!("wrong password"); + } + + let storage = self.in_memory_store.builder("alice").await; + let keys = self.keys.clone(); + + Ok(Credentials { storage, keys }) + } + + async fn public_login(&self, email: &str) -> Result { + tracing::debug!(user=%email, "public_login"); + if email != "alice@example.tld" { + bail!("invalid email address"); + } + + let storage = self.in_memory_store.builder("alice").await; + let public_key = self.keys.public.clone(); + + Ok(PublicCredentials { + storage, + public_key, + }) + } +} 
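
A minimal usage sketch for this demo provider; the crate and module paths follow the layout introduced in this patch, and the hard-coded "alice" / "hunter2" credentials are the ones baked into the provider above:

    use aero_user::login::demo_provider::DemoLoginProvider;
    use aero_user::login::LoginProvider;

    async fn demo() -> anyhow::Result<()> {
        let provider = DemoLoginProvider::new();

        // Full credentials: a storage builder plus the user's crypto keys.
        let creds = provider.login("alice", "hunter2").await?;

        // Public credentials: a storage builder plus the public key only,
        // which is enough to deliver incoming mail to the inbox.
        let pubcreds = provider.public_login("alice@example.tld").await?;

        // Both come from the same in-memory key material.
        assert_eq!(creds.keys.public, pubcreds.public_key);
        Ok(())
    }
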
diff --git a/aero-user/src/login/ldap_provider.rs b/aero-user/src/login/ldap_provider.rs new file mode 100644 index 0000000..ca5a356 --- /dev/null +++ b/aero-user/src/login/ldap_provider.rs @@ -0,0 +1,264 @@ +use async_trait::async_trait; +use ldap3::{LdapConnAsync, Scope, SearchEntry}; +use log::debug; + +use crate::config::*; +use crate::storage; +use super::*; + +pub struct LdapLoginProvider { + ldap_server: String, + + pre_bind_on_login: bool, + bind_dn_and_pw: Option<(String, String)>, + + search_base: String, + attrs_to_retrieve: Vec, + username_attr: String, + mail_attr: String, + crypto_root_attr: String, + + storage_specific: StorageSpecific, + in_memory_store: storage::in_memory::MemDb, + garage_store: storage::garage::GarageRoot, +} + +enum BucketSource { + Constant(String), + Attr(String), +} + +enum StorageSpecific { + InMemory, + Garage { + from_config: LdapGarageConfig, + bucket_source: BucketSource, + }, +} + +impl LdapLoginProvider { + pub fn new(config: LoginLdapConfig) -> Result { + let bind_dn_and_pw = match (config.bind_dn, config.bind_password) { + (Some(dn), Some(pw)) => Some((dn, pw)), + (None, None) => None, + _ => bail!( + "If either of `bind_dn` or `bind_password` is set, the other must be set as well." + ), + }; + + if config.pre_bind_on_login && bind_dn_and_pw.is_none() { + bail!("Cannot use `pre_bind_on_login` without setting `bind_dn` and `bind_password`"); + } + + let mut attrs_to_retrieve = vec![ + config.username_attr.clone(), + config.mail_attr.clone(), + config.crypto_root_attr.clone(), + ]; + + // storage specific + let specific = match config.storage { + LdapStorage::InMemory => StorageSpecific::InMemory, + LdapStorage::Garage(grgconf) => { + attrs_to_retrieve.push(grgconf.aws_access_key_id_attr.clone()); + attrs_to_retrieve.push(grgconf.aws_secret_access_key_attr.clone()); + + let bucket_source = + match (grgconf.default_bucket.clone(), grgconf.bucket_attr.clone()) { + (Some(b), None) => BucketSource::Constant(b), + (None, Some(a)) => BucketSource::Attr(a), + _ => bail!("Must set `bucket` or `bucket_attr`, but not both"), + }; + + if let BucketSource::Attr(a) = &bucket_source { + attrs_to_retrieve.push(a.clone()); + } + + StorageSpecific::Garage { + from_config: grgconf, + bucket_source, + } + } + }; + + Ok(Self { + ldap_server: config.ldap_server, + pre_bind_on_login: config.pre_bind_on_login, + bind_dn_and_pw, + search_base: config.search_base, + attrs_to_retrieve, + username_attr: config.username_attr, + mail_attr: config.mail_attr, + crypto_root_attr: config.crypto_root_attr, + storage_specific: specific, + //@FIXME should be created outside of the login provider + //Login provider should return only a cryptoroot + a storage URI + //storage URI that should be resolved outside... + in_memory_store: storage::in_memory::MemDb::new(), + garage_store: storage::garage::GarageRoot::new()?, + }) + } + + async fn storage_creds_from_ldap_user(&self, user: &SearchEntry) -> Result { + let storage: Builder = match &self.storage_specific { + StorageSpecific::InMemory => { + self.in_memory_store + .builder(&get_attr(user, &self.username_attr)?) 
+ .await + } + StorageSpecific::Garage { + from_config, + bucket_source, + } => { + let aws_access_key_id = get_attr(user, &from_config.aws_access_key_id_attr)?; + let aws_secret_access_key = + get_attr(user, &from_config.aws_secret_access_key_attr)?; + let bucket = match bucket_source { + BucketSource::Constant(b) => b.clone(), + BucketSource::Attr(a) => get_attr(user, &a)?, + }; + + self.garage_store.user(storage::garage::GarageConf { + region: from_config.aws_region.clone(), + s3_endpoint: from_config.s3_endpoint.clone(), + k2v_endpoint: from_config.k2v_endpoint.clone(), + aws_access_key_id, + aws_secret_access_key, + bucket, + })? + } + }; + + Ok(storage) + } +} + +#[async_trait] +impl LoginProvider for LdapLoginProvider { + async fn login(&self, username: &str, password: &str) -> Result { + check_identifier(username)?; + + let (conn, mut ldap) = LdapConnAsync::new(&self.ldap_server).await?; + ldap3::drive!(conn); + + if self.pre_bind_on_login { + let (dn, pw) = self.bind_dn_and_pw.as_ref().unwrap(); + ldap.simple_bind(dn, pw).await?.success()?; + } + + let (matches, _res) = ldap + .search( + &self.search_base, + Scope::Subtree, + &format!( + "(&(objectClass=inetOrgPerson)({}={}))", + self.username_attr, username + ), + &self.attrs_to_retrieve, + ) + .await? + .success()?; + + if matches.is_empty() { + bail!("Invalid username"); + } + if matches.len() > 1 { + bail!("Invalid username (multiple matching accounts)"); + } + let user = SearchEntry::construct(matches.into_iter().next().unwrap()); + debug!( + "Found matching LDAP user for username {}: {}", + username, user.dn + ); + + // Try to login against LDAP server with provided password + // to check user's password + ldap.simple_bind(&user.dn, password) + .await? + .success() + .context("Invalid password")?; + debug!("Ldap login with user name {} successfull", username); + + // cryptography + let crstr = get_attr(&user, &self.crypto_root_attr)?; + let cr = CryptoRoot(crstr); + let keys = cr.crypto_keys(password)?; + + // storage + let storage = self.storage_creds_from_ldap_user(&user).await?; + + drop(ldap); + + Ok(Credentials { storage, keys }) + } + + async fn public_login(&self, email: &str) -> Result { + check_identifier(email)?; + + let (dn, pw) = match self.bind_dn_and_pw.as_ref() { + Some(x) => x, + None => bail!("Missing bind_dn and bind_password in LDAP login provider config"), + }; + + let (conn, mut ldap) = LdapConnAsync::new(&self.ldap_server).await?; + ldap3::drive!(conn); + ldap.simple_bind(dn, pw).await?.success()?; + + let (matches, _res) = ldap + .search( + &self.search_base, + Scope::Subtree, + &format!( + "(&(objectClass=inetOrgPerson)({}={}))", + self.mail_attr, email + ), + &self.attrs_to_retrieve, + ) + .await? + .success()?; + + if matches.is_empty() { + bail!("No such user account"); + } + if matches.len() > 1 { + bail!("Multiple matching user accounts"); + } + let user = SearchEntry::construct(matches.into_iter().next().unwrap()); + debug!("Found matching LDAP user for email {}: {}", email, user.dn); + + // cryptography + let crstr = get_attr(&user, &self.crypto_root_attr)?; + let cr = CryptoRoot(crstr); + let public_key = cr.public_key()?; + + // storage + let storage = self.storage_creds_from_ldap_user(&user).await?; + drop(ldap); + + Ok(PublicCredentials { + storage, + public_key, + }) + } +} + +fn get_attr(user: &SearchEntry, attr: &str) -> Result { + Ok(user + .attrs + .get(attr) + .ok_or(anyhow!("Missing attr: {}", attr))? + .iter() + .next() + .ok_or(anyhow!("No value for attr: {}", attr))? 
+ .clone()) +} + +fn check_identifier(id: &str) -> Result<()> { + let is_ok = id + .chars() + .all(|c| c.is_alphanumeric() || "-+_.@".contains(c)); + if !is_ok { + bail!("Invalid username/email address, must contain only a-z A-Z 0-9 - + _ . @"); + } + Ok(()) +} diff --git a/aero-user/src/login/mod.rs b/aero-user/src/login/mod.rs new file mode 100644 index 0000000..5e54b4a --- /dev/null +++ b/aero-user/src/login/mod.rs @@ -0,0 +1,245 @@ +pub mod demo_provider; +pub mod ldap_provider; +pub mod static_provider; + +use std::sync::Arc; + +use anyhow::{anyhow, bail, Context, Result}; +use async_trait::async_trait; +use base64::Engine; +use rand::prelude::*; + +use crate::cryptoblob::*; +use crate::storage::*; + +/// The trait LoginProvider defines the interface for a login provider that allows +/// to retrieve storage and cryptographic credentials for access to a user account +/// from their username and password. +#[async_trait] +pub trait LoginProvider { + /// The login method takes an account's password as an input to decypher + /// decryption keys and obtain full access to the user's account. + async fn login(&self, username: &str, password: &str) -> Result; + /// The public_login method takes an account's email address and returns + /// public credentials for adding mails to the user's inbox. + async fn public_login(&self, email: &str) -> Result; +} + +/// ArcLoginProvider is simply an alias on a structure that is used +/// in many places in the code +pub type ArcLoginProvider = Arc; + +/// The struct Credentials represent all of the necessary information to interact +/// with a user account's data after they are logged in. +#[derive(Clone, Debug)] +pub struct Credentials { + /// The storage credentials are used to authenticate access to the underlying storage (S3, K2V) + pub storage: Builder, + /// The cryptographic keys are used to encrypt and decrypt data stored in S3 and K2V + pub keys: CryptoKeys, +} + +#[derive(Clone, Debug)] +pub struct PublicCredentials { + /// The storage credentials are used to authenticate access to the underlying storage (S3, K2V) + pub storage: Builder, + pub public_key: PublicKey, +} + +use serde::{Deserialize, Serialize}; +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct CryptoRoot(pub String); + +impl CryptoRoot { + pub fn create_pass(password: &str, k: &CryptoKeys) -> Result { + let bytes = k.password_seal(password)?; + let b64 = base64::engine::general_purpose::STANDARD_NO_PAD.encode(bytes); + let cr = format!("aero:cryptoroot:pass:{}", b64); + Ok(Self(cr)) + } + + pub fn create_cleartext(k: &CryptoKeys) -> Self { + let bytes = k.serialize(); + let b64 = base64::engine::general_purpose::STANDARD_NO_PAD.encode(bytes); + let cr = format!("aero:cryptoroot:cleartext:{}", b64); + Self(cr) + } + + pub fn create_incoming(pk: &PublicKey) -> Self { + let bytes: &[u8] = &pk[..]; + let b64 = base64::engine::general_purpose::STANDARD_NO_PAD.encode(bytes); + let cr = format!("aero:cryptoroot:incoming:{}", b64); + Self(cr) + } + + pub fn public_key(&self) -> Result { + match self.0.splitn(4, ':').collect::>()[..] 
{ + ["aero", "cryptoroot", "pass", b64blob] => { + let blob = base64::engine::general_purpose::STANDARD_NO_PAD.decode(b64blob)?; + if blob.len() < 32 { + bail!( + "Decoded data is {} bytes long, expect at least 32 bytes", + blob.len() + ); + } + PublicKey::from_slice(&blob[..32]).context("must be a valid public key") + } + ["aero", "cryptoroot", "cleartext", b64blob] => { + let blob = base64::engine::general_purpose::STANDARD_NO_PAD.decode(b64blob)?; + Ok(CryptoKeys::deserialize(&blob)?.public) + } + ["aero", "cryptoroot", "incoming", b64blob] => { + let blob = base64::engine::general_purpose::STANDARD_NO_PAD.decode(b64blob)?; + if blob.len() < 32 { + bail!( + "Decoded data is {} bytes long, expect at least 32 bytes", + blob.len() + ); + } + PublicKey::from_slice(&blob[..32]).context("must be a valid public key") + } + ["aero", "cryptoroot", "keyring", _] => { + bail!("keyring is not yet implemented!") + } + _ => bail!(format!( + "passed string '{}' is not a valid cryptoroot", + self.0 + )), + } + } + pub fn crypto_keys(&self, password: &str) -> Result { + match self.0.splitn(4, ':').collect::>()[..] { + ["aero", "cryptoroot", "pass", b64blob] => { + let blob = base64::engine::general_purpose::STANDARD_NO_PAD.decode(b64blob)?; + CryptoKeys::password_open(password, &blob) + } + ["aero", "cryptoroot", "cleartext", b64blob] => { + let blob = base64::engine::general_purpose::STANDARD_NO_PAD.decode(b64blob)?; + CryptoKeys::deserialize(&blob) + } + ["aero", "cryptoroot", "incoming", _] => { + bail!("incoming cryptoroot does not contain a crypto key!") + } + ["aero", "cryptoroot", "keyring", _] => { + bail!("keyring is not yet implemented!") + } + _ => bail!(format!( + "passed string '{}' is not a valid cryptoroot", + self.0 + )), + } + } +} + +/// The struct CryptoKeys contains the cryptographic keys used to encrypt and decrypt +/// data in a user's mailbox. 
+#[derive(Clone, Debug)] +pub struct CryptoKeys { + /// Master key for symmetric encryption of mailbox data + pub master: Key, + /// Public/private keypair for encryption of incomming emails (secret part) + pub secret: SecretKey, + /// Public/private keypair for encryption of incomming emails (public part) + pub public: PublicKey, +} + +// ---- + +impl CryptoKeys { + /// Initialize a new cryptography root + pub fn init() -> Self { + let (public, secret) = gen_keypair(); + let master = gen_key(); + CryptoKeys { + master, + secret, + public, + } + } + + // Clear text serialize/deserialize + /// Serialize the root as bytes without encryption + fn serialize(&self) -> [u8; 64] { + let mut res = [0u8; 64]; + res[..32].copy_from_slice(self.master.as_ref()); + res[32..].copy_from_slice(self.secret.as_ref()); + res + } + + /// Deserialize a clear text crypto root without encryption + fn deserialize(bytes: &[u8]) -> Result { + if bytes.len() != 64 { + bail!("Invalid length: {}, expected 64", bytes.len()); + } + let master = Key::from_slice(&bytes[..32]).unwrap(); + let secret = SecretKey::from_slice(&bytes[32..]).unwrap(); + let public = secret.public_key(); + Ok(Self { + master, + secret, + public, + }) + } + + // Password sealed keys serialize/deserialize + pub fn password_open(password: &str, blob: &[u8]) -> Result { + let _pubkey = &blob[0..32]; + let kdf_salt = &blob[32..64]; + let password_openned = try_open_encrypted_keys(kdf_salt, password, &blob[64..])?; + + let keys = Self::deserialize(&password_openned)?; + Ok(keys) + } + + pub fn password_seal(&self, password: &str) -> Result> { + let mut kdf_salt = [0u8; 32]; + thread_rng().fill(&mut kdf_salt); + + // Calculate key for password secret box + let password_key = derive_password_key(&kdf_salt, password)?; + + // Seal a secret box that contains our crypto keys + let password_sealed = seal(&self.serialize(), &password_key)?; + + // Create blob + let password_blob = [&self.public[..], &kdf_salt[..], &password_sealed].concat(); + + Ok(password_blob) + } +} + +fn derive_password_key(kdf_salt: &[u8], password: &str) -> Result { + Ok(Key::from_slice(&argon2_kdf(kdf_salt, password.as_bytes(), 32)?).unwrap()) +} + +fn try_open_encrypted_keys( + kdf_salt: &[u8], + password: &str, + encrypted_keys: &[u8], +) -> Result> { + let password_key = derive_password_key(kdf_salt, password)?; + open(encrypted_keys, &password_key) +} + +// ---- UTIL ---- + +pub fn argon2_kdf(salt: &[u8], password: &[u8], output_len: usize) -> Result> { + use argon2::{password_hash, Algorithm, Argon2, ParamsBuilder, PasswordHasher, Version}; + + let params = ParamsBuilder::new() + .output_len(output_len) + .build() + .map_err(|e| anyhow!("Invalid argon2 params: {}", e))?; + let argon2 = Argon2::new(Algorithm::default(), Version::default(), params); + + let b64_salt = base64::engine::general_purpose::STANDARD_NO_PAD.encode(salt); + let valid_salt = password_hash::Salt::from_b64(&b64_salt) + .map_err(|e| anyhow!("Invalid salt, error {}", e))?; + let hash = argon2 + .hash_password(password, valid_salt) + .map_err(|e| anyhow!("Unable to hash: {}", e))?; + + let hash = hash.hash.ok_or(anyhow!("Missing output"))?; + assert!(hash.len() == output_len); + Ok(hash.as_bytes().to_vec()) +} diff --git a/aero-user/src/login/static_provider.rs b/aero-user/src/login/static_provider.rs new file mode 100644 index 0000000..ed39343 --- /dev/null +++ b/aero-user/src/login/static_provider.rs @@ -0,0 +1,188 @@ +use std::collections::HashMap; +use std::path::PathBuf; + +use anyhow::{anyhow, bail}; +use 
async_trait::async_trait; +use tokio::signal::unix::{signal, SignalKind}; +use tokio::sync::watch; + +use crate::config::*; +use crate::login::*; +use crate::storage; + +pub struct ContextualUserEntry { + pub username: String, + pub config: UserEntry, +} + +#[derive(Default)] +pub struct UserDatabase { + users: HashMap>, + users_by_email: HashMap>, +} + +pub struct StaticLoginProvider { + user_db: watch::Receiver, + in_memory_store: storage::in_memory::MemDb, + garage_store: storage::garage::GarageRoot, +} + +pub async fn update_user_list(config: PathBuf, up: watch::Sender) -> Result<()> { + let mut stream = signal(SignalKind::user_defined1()) + .expect("failed to install SIGUSR1 signal hander for reload"); + + loop { + let ulist: UserList = match read_config(config.clone()) { + Ok(x) => x, + Err(e) => { + tracing::warn!(path=%config.as_path().to_string_lossy(), error=%e, "Unable to load config"); + stream.recv().await; + continue; + } + }; + + let users = ulist + .into_iter() + .map(|(username, config)| { + ( + username.clone(), + Arc::new(ContextualUserEntry { username, config }), + ) + }) + .collect::>(); + + let mut users_by_email = HashMap::new(); + for (_, u) in users.iter() { + for m in u.config.email_addresses.iter() { + if users_by_email.contains_key(m) { + tracing::warn!("Several users have the same email address: {}", m); + stream.recv().await; + continue; + } + users_by_email.insert(m.clone(), u.clone()); + } + } + + tracing::info!("{} users loaded", users.len()); + up.send(UserDatabase { + users, + users_by_email, + }) + .context("update user db config")?; + stream.recv().await; + tracing::info!("Received SIGUSR1, reloading"); + } +} + +impl StaticLoginProvider { + pub async fn new(config: LoginStaticConfig) -> Result { + let (tx, mut rx) = watch::channel(UserDatabase::default()); + + tokio::spawn(update_user_list(config.user_list, tx)); + rx.changed().await?; + + Ok(Self { + user_db: rx, + in_memory_store: storage::in_memory::MemDb::new(), + garage_store: storage::garage::GarageRoot::new()?, + }) + } +} + +#[async_trait] +impl LoginProvider for StaticLoginProvider { + async fn login(&self, username: &str, password: &str) -> Result { + tracing::debug!(user=%username, "login"); + let user = { + let user_db = self.user_db.borrow(); + match user_db.users.get(username) { + None => bail!("User {} does not exist", username), + Some(u) => u.clone(), + } + }; + + tracing::debug!(user=%username, "verify password"); + if !verify_password(password, &user.config.password)? { + bail!("Wrong password"); + } + + tracing::debug!(user=%username, "fetch keys"); + let storage: storage::Builder = match &user.config.storage { + StaticStorage::InMemory => self.in_memory_store.builder(username).await, + StaticStorage::Garage(grgconf) => { + self.garage_store.user(storage::garage::GarageConf { + region: grgconf.aws_region.clone(), + k2v_endpoint: grgconf.k2v_endpoint.clone(), + s3_endpoint: grgconf.s3_endpoint.clone(), + aws_access_key_id: grgconf.aws_access_key_id.clone(), + aws_secret_access_key: grgconf.aws_secret_access_key.clone(), + bucket: grgconf.bucket.clone(), + })? 
+ } + }; + + let cr = CryptoRoot(user.config.crypto_root.clone()); + let keys = cr.crypto_keys(password)?; + + tracing::debug!(user=%username, "logged"); + Ok(Credentials { storage, keys }) + } + + async fn public_login(&self, email: &str) -> Result { + let user = { + let user_db = self.user_db.borrow(); + match user_db.users_by_email.get(email) { + None => bail!("Email {} does not exist", email), + Some(u) => u.clone(), + } + }; + tracing::debug!(user=%user.username, "public_login"); + + let storage: storage::Builder = match &user.config.storage { + StaticStorage::InMemory => self.in_memory_store.builder(&user.username).await, + StaticStorage::Garage(grgconf) => { + self.garage_store.user(storage::garage::GarageConf { + region: grgconf.aws_region.clone(), + k2v_endpoint: grgconf.k2v_endpoint.clone(), + s3_endpoint: grgconf.s3_endpoint.clone(), + aws_access_key_id: grgconf.aws_access_key_id.clone(), + aws_secret_access_key: grgconf.aws_secret_access_key.clone(), + bucket: grgconf.bucket.clone(), + })? + } + }; + + let cr = CryptoRoot(user.config.crypto_root.clone()); + let public_key = cr.public_key()?; + + Ok(PublicCredentials { + storage, + public_key, + }) + } +} + +pub fn hash_password(password: &str) -> Result { + use argon2::{ + password_hash::{rand_core::OsRng, PasswordHasher, SaltString}, + Argon2, + }; + let salt = SaltString::generate(&mut OsRng); + let argon2 = Argon2::default(); + Ok(argon2 + .hash_password(password.as_bytes(), &salt) + .map_err(|e| anyhow!("Argon2 error: {}", e))? + .to_string()) +} + +pub fn verify_password(password: &str, hash: &str) -> Result { + use argon2::{ + password_hash::{PasswordHash, PasswordVerifier}, + Argon2, + }; + let parsed_hash = + PasswordHash::new(hash).map_err(|e| anyhow!("Invalid hashed password: {}", e))?; + Ok(Argon2::default() + .verify_password(password.as_bytes(), &parsed_hash) + .is_ok()) +} diff --git a/aero-user/src/storage/garage.rs b/aero-user/src/storage/garage.rs new file mode 100644 index 0000000..7e930c3 --- /dev/null +++ b/aero-user/src/storage/garage.rs @@ -0,0 +1,538 @@ +use aws_sdk_s3::{self as s3, error::SdkError, operation::get_object::GetObjectError}; +use aws_smithy_runtime::client::http::hyper_014::HyperClientBuilder; +use aws_smithy_runtime_api::client::http::SharedHttpClient; +use hyper_rustls::HttpsConnector; +use hyper_util::client::legacy::{connect::HttpConnector, Client as HttpClient}; +use hyper_util::rt::TokioExecutor; +use serde::Serialize; + +use super::*; + +pub struct GarageRoot { + k2v_http: HttpClient, k2v_client::Body>, + aws_http: SharedHttpClient, +} + +impl GarageRoot { + pub fn new() -> anyhow::Result { + let connector = hyper_rustls::HttpsConnectorBuilder::new() + .with_native_roots()? 
+ .https_or_http() + .enable_http1() + .enable_http2() + .build(); + let k2v_http = HttpClient::builder(TokioExecutor::new()).build(connector); + let aws_http = HyperClientBuilder::new().build_https(); + Ok(Self { k2v_http, aws_http }) + } + + pub fn user(&self, conf: GarageConf) -> anyhow::Result> { + let mut unicity: Vec = vec![]; + unicity.extend_from_slice(file!().as_bytes()); + unicity.append(&mut rmp_serde::to_vec(&conf)?); + + Ok(Arc::new(GarageUser { + conf, + aws_http: self.aws_http.clone(), + k2v_http: self.k2v_http.clone(), + unicity, + })) + } +} + +#[derive(Clone, Debug, Serialize)] +pub struct GarageConf { + pub region: String, + pub s3_endpoint: String, + pub k2v_endpoint: String, + pub aws_access_key_id: String, + pub aws_secret_access_key: String, + pub bucket: String, +} + +//@FIXME we should get rid of this builder +//and allocate a S3 + K2V client only once per user +//(and using a shared HTTP client) +#[derive(Clone, Debug)] +pub struct GarageUser { + conf: GarageConf, + aws_http: SharedHttpClient, + k2v_http: HttpClient, k2v_client::Body>, + unicity: Vec, +} + +#[async_trait] +impl IBuilder for GarageUser { + async fn build(&self) -> Result { + let s3_creds = s3::config::Credentials::new( + self.conf.aws_access_key_id.clone(), + self.conf.aws_secret_access_key.clone(), + None, + None, + "aerogramme", + ); + + let sdk_config = aws_config::from_env() + .region(aws_config::Region::new(self.conf.region.clone())) + .credentials_provider(s3_creds) + .http_client(self.aws_http.clone()) + .endpoint_url(self.conf.s3_endpoint.clone()) + .load() + .await; + + let s3_config = aws_sdk_s3::config::Builder::from(&sdk_config) + .force_path_style(true) + .build(); + + let s3_client = aws_sdk_s3::Client::from_conf(s3_config); + + let k2v_config = k2v_client::K2vClientConfig { + endpoint: self.conf.k2v_endpoint.clone(), + region: self.conf.region.clone(), + aws_access_key_id: self.conf.aws_access_key_id.clone(), + aws_secret_access_key: self.conf.aws_secret_access_key.clone(), + bucket: self.conf.bucket.clone(), + user_agent: None, + }; + + let k2v_client = + match k2v_client::K2vClient::new_with_client(k2v_config, self.k2v_http.clone()) { + Err(e) => { + tracing::error!("unable to build k2v client: {}", e); + return Err(StorageError::Internal); + } + Ok(v) => v, + }; + + Ok(Box::new(GarageStore { + bucket: self.conf.bucket.clone(), + s3: s3_client, + k2v: k2v_client, + })) + } + fn unique(&self) -> UnicityBuffer { + UnicityBuffer(self.unicity.clone()) + } +} + +pub struct GarageStore { + bucket: String, + s3: s3::Client, + k2v: k2v_client::K2vClient, +} + +fn causal_to_row_val(row_ref: RowRef, causal_value: k2v_client::CausalValue) -> RowVal { + let new_row_ref = row_ref.with_causality(causal_value.causality.into()); + let row_values = causal_value + .value + .into_iter() + .map(|k2v_value| match k2v_value { + k2v_client::K2vValue::Tombstone => Alternative::Tombstone, + k2v_client::K2vValue::Value(v) => Alternative::Value(v), + }) + .collect::>(); + + RowVal { + row_ref: new_row_ref, + value: row_values, + } +} + +#[async_trait] +impl IStore for GarageStore { + async fn row_fetch<'a>(&self, select: &Selector<'a>) -> Result, StorageError> { + tracing::trace!(select=%select, command="row_fetch"); + let (pk_list, batch_op) = match select { + Selector::Range { + shard, + sort_begin, + sort_end, + } => ( + vec![shard.to_string()], + vec![k2v_client::BatchReadOp { + partition_key: shard, + filter: k2v_client::Filter { + start: Some(sort_begin), + end: Some(sort_end), + 
..k2v_client::Filter::default() + }, + ..k2v_client::BatchReadOp::default() + }], + ), + Selector::List(row_ref_list) => ( + row_ref_list + .iter() + .map(|row_ref| row_ref.uid.shard.to_string()) + .collect::>(), + row_ref_list + .iter() + .map(|row_ref| k2v_client::BatchReadOp { + partition_key: &row_ref.uid.shard, + filter: k2v_client::Filter { + start: Some(&row_ref.uid.sort), + ..k2v_client::Filter::default() + }, + single_item: true, + ..k2v_client::BatchReadOp::default() + }) + .collect::>(), + ), + Selector::Prefix { shard, sort_prefix } => ( + vec![shard.to_string()], + vec![k2v_client::BatchReadOp { + partition_key: shard, + filter: k2v_client::Filter { + prefix: Some(sort_prefix), + ..k2v_client::Filter::default() + }, + ..k2v_client::BatchReadOp::default() + }], + ), + Selector::Single(row_ref) => { + let causal_value = match self + .k2v + .read_item(&row_ref.uid.shard, &row_ref.uid.sort) + .await + { + Err(k2v_client::Error::NotFound) => { + tracing::debug!( + "K2V item not found shard={}, sort={}, bucket={}", + row_ref.uid.shard, + row_ref.uid.sort, + self.bucket, + ); + return Err(StorageError::NotFound); + } + Err(e) => { + tracing::error!( + "K2V read item shard={}, sort={}, bucket={} failed: {}", + row_ref.uid.shard, + row_ref.uid.sort, + self.bucket, + e + ); + return Err(StorageError::Internal); + } + Ok(v) => v, + }; + + let row_val = causal_to_row_val((*row_ref).clone(), causal_value); + return Ok(vec![row_val]); + } + }; + + let all_raw_res = match self.k2v.read_batch(&batch_op).await { + Err(e) => { + tracing::error!( + "k2v read batch failed for {:?}, bucket {} with err: {}", + select, + self.bucket, + e + ); + return Err(StorageError::Internal); + } + Ok(v) => v, + }; + //println!("fetch res -> {:?}", all_raw_res); + + let row_vals = + all_raw_res + .into_iter() + .zip(pk_list.into_iter()) + .fold(vec![], |mut acc, (page, pk)| { + page.items + .into_iter() + .map(|(sk, cv)| causal_to_row_val(RowRef::new(&pk, &sk), cv)) + .for_each(|rr| acc.push(rr)); + + acc + }); + tracing::debug!(fetch_count = row_vals.len(), command = "row_fetch"); + + Ok(row_vals) + } + async fn row_rm<'a>(&self, select: &Selector<'a>) -> Result<(), StorageError> { + tracing::trace!(select=%select, command="row_rm"); + let del_op = match select { + Selector::Range { + shard, + sort_begin, + sort_end, + } => vec![k2v_client::BatchDeleteOp { + partition_key: shard, + prefix: None, + start: Some(sort_begin), + end: Some(sort_end), + single_item: false, + }], + Selector::List(row_ref_list) => { + // Insert null values with causality token = delete + let batch_op = row_ref_list + .iter() + .map(|v| k2v_client::BatchInsertOp { + partition_key: &v.uid.shard, + sort_key: &v.uid.sort, + causality: v.causality.clone().map(|ct| ct.into()), + value: k2v_client::K2vValue::Tombstone, + }) + .collect::>(); + + return match self.k2v.insert_batch(&batch_op).await { + Err(e) => { + tracing::error!("Unable to delete the list of values: {}", e); + Err(StorageError::Internal) + } + Ok(_) => Ok(()), + }; + } + Selector::Prefix { shard, sort_prefix } => vec![k2v_client::BatchDeleteOp { + partition_key: shard, + prefix: Some(sort_prefix), + start: None, + end: None, + single_item: false, + }], + Selector::Single(row_ref) => { + // Insert null values with causality token = delete + let batch_op = vec![k2v_client::BatchInsertOp { + partition_key: &row_ref.uid.shard, + sort_key: &row_ref.uid.sort, + causality: row_ref.causality.clone().map(|ct| ct.into()), + value: k2v_client::K2vValue::Tombstone, + }]; + + return match 
self.k2v.insert_batch(&batch_op).await { + Err(e) => { + tracing::error!("Unable to delete the list of values: {}", e); + Err(StorageError::Internal) + } + Ok(_) => Ok(()), + }; + } + }; + + // Finally here we only have prefix & range + match self.k2v.delete_batch(&del_op).await { + Err(e) => { + tracing::error!("delete batch error: {}", e); + Err(StorageError::Internal) + } + Ok(_) => Ok(()), + } + } + + async fn row_insert(&self, values: Vec) -> Result<(), StorageError> { + tracing::trace!(entries=%values.iter().map(|v| v.row_ref.to_string()).collect::>().join(","), command="row_insert"); + let batch_ops = values + .iter() + .map(|v| k2v_client::BatchInsertOp { + partition_key: &v.row_ref.uid.shard, + sort_key: &v.row_ref.uid.sort, + causality: v.row_ref.causality.clone().map(|ct| ct.into()), + value: v + .value + .iter() + .next() + .map(|cv| match cv { + Alternative::Value(buff) => k2v_client::K2vValue::Value(buff.clone()), + Alternative::Tombstone => k2v_client::K2vValue::Tombstone, + }) + .unwrap_or(k2v_client::K2vValue::Tombstone), + }) + .collect::>(); + + match self.k2v.insert_batch(&batch_ops).await { + Err(e) => { + tracing::error!("k2v can't insert some value: {}", e); + Err(StorageError::Internal) + } + Ok(v) => Ok(v), + } + } + async fn row_poll(&self, value: &RowRef) -> Result { + tracing::trace!(entry=%value, command="row_poll"); + loop { + if let Some(ct) = &value.causality { + match self + .k2v + .poll_item(&value.uid.shard, &value.uid.sort, ct.clone().into(), None) + .await + { + Err(e) => { + tracing::error!("Unable to poll item: {}", e); + return Err(StorageError::Internal); + } + Ok(None) => continue, + Ok(Some(cv)) => return Ok(causal_to_row_val(value.clone(), cv)), + } + } else { + match self.k2v.read_item(&value.uid.shard, &value.uid.sort).await { + Err(k2v_client::Error::NotFound) => { + self.k2v + .insert_item(&value.uid.shard, &value.uid.sort, vec![0u8], None) + .await + .map_err(|e| { + tracing::error!("Unable to insert item in polling logic: {}", e); + StorageError::Internal + })?; + } + Err(e) => { + tracing::error!("Unable to read item in polling logic: {}", e); + return Err(StorageError::Internal); + } + Ok(cv) => return Ok(causal_to_row_val(value.clone(), cv)), + } + } + } + } + + async fn blob_fetch(&self, blob_ref: &BlobRef) -> Result { + tracing::trace!(entry=%blob_ref, command="blob_fetch"); + let maybe_out = self + .s3 + .get_object() + .bucket(self.bucket.to_string()) + .key(blob_ref.0.to_string()) + .send() + .await; + + let object_output = match maybe_out { + Ok(output) => output, + Err(SdkError::ServiceError(x)) => match x.err() { + GetObjectError::NoSuchKey(_) => return Err(StorageError::NotFound), + e => { + tracing::warn!("Blob Fetch Error, Service Error: {}", e); + return Err(StorageError::Internal); + } + }, + Err(e) => { + tracing::warn!("Blob Fetch Error, {}", e); + return Err(StorageError::Internal); + } + }; + + let buffer = match object_output.body.collect().await { + Ok(aggreg) => aggreg.to_vec(), + Err(e) => { + tracing::warn!("Fetching body failed with {}", e); + return Err(StorageError::Internal); + } + }; + + let mut bv = BlobVal::new(blob_ref.clone(), buffer); + if let Some(meta) = object_output.metadata { + bv.meta = meta; + } + tracing::debug!("Fetched {}/{}", self.bucket, blob_ref.0); + Ok(bv) + } + async fn blob_insert(&self, blob_val: BlobVal) -> Result<(), StorageError> { + tracing::trace!(entry=%blob_val.blob_ref, command="blob_insert"); + let streamable_value = s3::primitives::ByteStream::from(blob_val.value); + + let 
maybe_send = self + .s3 + .put_object() + .bucket(self.bucket.to_string()) + .key(blob_val.blob_ref.0.to_string()) + .set_metadata(Some(blob_val.meta)) + .body(streamable_value) + .send() + .await; + + match maybe_send { + Err(e) => { + tracing::error!("unable to send object: {}", e); + Err(StorageError::Internal) + } + Ok(_) => { + tracing::debug!("Inserted {}/{}", self.bucket, blob_val.blob_ref.0); + Ok(()) + } + } + } + async fn blob_copy(&self, src: &BlobRef, dst: &BlobRef) -> Result<(), StorageError> { + tracing::trace!(src=%src, dst=%dst, command="blob_copy"); + let maybe_copy = self + .s3 + .copy_object() + .bucket(self.bucket.to_string()) + .key(dst.0.clone()) + .copy_source(format!("/{}/{}", self.bucket.to_string(), src.0.clone())) + .send() + .await; + + match maybe_copy { + Err(e) => { + tracing::error!( + "unable to copy object {} to {} (bucket: {}), error: {}", + src.0, + dst.0, + self.bucket, + e + ); + Err(StorageError::Internal) + } + Ok(_) => { + tracing::debug!("copied {} to {} (bucket: {})", src.0, dst.0, self.bucket); + Ok(()) + } + } + } + async fn blob_list(&self, prefix: &str) -> Result, StorageError> { + tracing::trace!(prefix = prefix, command = "blob_list"); + let maybe_list = self + .s3 + .list_objects_v2() + .bucket(self.bucket.to_string()) + .prefix(prefix) + .into_paginator() + .send() + .try_collect() + .await; + + match maybe_list { + Err(e) => { + tracing::error!( + "listing prefix {} on bucket {} failed: {}", + prefix, + self.bucket, + e + ); + Err(StorageError::Internal) + } + Ok(pagin_list_out) => Ok(pagin_list_out + .into_iter() + .map(|list_out| list_out.contents.unwrap_or(vec![])) + .flatten() + .map(|obj| BlobRef(obj.key.unwrap_or(String::new()))) + .collect::>()), + } + } + async fn blob_rm(&self, blob_ref: &BlobRef) -> Result<(), StorageError> { + tracing::trace!(entry=%blob_ref, command="blob_rm"); + let maybe_delete = self + .s3 + .delete_object() + .bucket(self.bucket.to_string()) + .key(blob_ref.0.clone()) + .send() + .await; + + match maybe_delete { + Err(e) => { + tracing::error!( + "unable to delete {} (bucket: {}), error {}", + blob_ref.0, + self.bucket, + e + ); + Err(StorageError::Internal) + } + Ok(_) => { + tracing::debug!("deleted {} (bucket: {})", blob_ref.0, self.bucket); + Ok(()) + } + } + } +} diff --git a/aero-user/src/storage/in_memory.rs b/aero-user/src/storage/in_memory.rs new file mode 100644 index 0000000..a676797 --- /dev/null +++ b/aero-user/src/storage/in_memory.rs @@ -0,0 +1,334 @@ +use crate::storage::*; +use std::collections::BTreeMap; +use std::ops::Bound::{self, Excluded, Included, Unbounded}; +use std::sync::RwLock; +use tokio::sync::Notify; + +/// This implementation is very inneficient, and not completely correct +/// Indeed, when the connector is dropped, the memory is freed. +/// It means that when a user disconnects, its data are lost. +/// It's intended only for basic debugging, do not use it for advanced tests... 
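
Before the implementation, a short usage sketch of this backend through the generic storage API; it assumes the IBuilder/IStore traits and the Selector type exported by storage/mod.rs later in this patch, and the shard/sort values are purely illustrative:

    use aero_user::storage::in_memory::MemDb;
    use aero_user::storage::{IBuilder, IStore, RowRef, RowVal, Selector};

    async fn demo() -> anyhow::Result<()> {
        let db = MemDb::new();

        // One builder per user; building it yields a boxed store.
        let store = db.builder("alice").await.build().await?;

        // Rows are addressed by a (shard, sort) pair plus a causality token.
        let row = RowVal::new(RowRef::new("mailbox:inbox", "uid:1"), b"flags".to_vec());
        store.row_insert(vec![row]).await?;

        // Prefix selection returns every row of the shard whose sort key
        // starts with the given prefix.
        let found = store
            .row_fetch(&Selector::Prefix {
                shard: "mailbox:inbox",
                sort_prefix: "uid:",
            })
            .await?;
        assert_eq!(found.len(), 1);
        Ok(())
    }
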
+ +#[derive(Debug, Default)] +pub struct MemDb(tokio::sync::Mutex>>); +impl MemDb { + pub fn new() -> Self { + Self(tokio::sync::Mutex::new(HashMap::new())) + } + + pub async fn builder(&self, username: &str) -> Arc { + let mut global_storage = self.0.lock().await; + global_storage + .entry(username.to_string()) + .or_insert(MemBuilder::new(username)) + .clone() + } +} + +#[derive(Debug, Clone)] +enum InternalData { + Tombstone, + Value(Vec), +} +impl InternalData { + fn to_alternative(&self) -> Alternative { + match self { + Self::Tombstone => Alternative::Tombstone, + Self::Value(x) => Alternative::Value(x.clone()), + } + } +} + +#[derive(Debug)] +struct InternalRowVal { + data: Vec, + version: u64, + change: Arc, +} +impl std::default::Default for InternalRowVal { + fn default() -> Self { + Self { + data: vec![], + version: 1, + change: Arc::new(Notify::new()), + } + } +} +impl InternalRowVal { + fn concurrent_values(&self) -> Vec { + self.data.iter().map(InternalData::to_alternative).collect() + } + + fn to_row_val(&self, row_ref: RowRef) -> RowVal { + RowVal { + row_ref: row_ref.with_causality(self.version.to_string()), + value: self.concurrent_values(), + } + } +} + +#[derive(Debug, Default, Clone)] +struct InternalBlobVal { + data: Vec, + metadata: HashMap, +} +impl InternalBlobVal { + fn to_blob_val(&self, bref: &BlobRef) -> BlobVal { + BlobVal { + blob_ref: bref.clone(), + meta: self.metadata.clone(), + value: self.data.clone(), + } + } +} + +type ArcRow = Arc>>>; +type ArcBlob = Arc>>; + +#[derive(Clone, Debug)] +pub struct MemBuilder { + unicity: Vec, + row: ArcRow, + blob: ArcBlob, +} + +impl MemBuilder { + pub fn new(user: &str) -> Arc { + tracing::debug!("initialize membuilder for {}", user); + let mut unicity: Vec = vec![]; + unicity.extend_from_slice(file!().as_bytes()); + unicity.extend_from_slice(user.as_bytes()); + Arc::new(Self { + unicity, + row: Arc::new(RwLock::new(HashMap::new())), + blob: Arc::new(RwLock::new(BTreeMap::new())), + }) + } +} + +#[async_trait] +impl IBuilder for MemBuilder { + async fn build(&self) -> Result { + Ok(Box::new(MemStore { + row: self.row.clone(), + blob: self.blob.clone(), + })) + } + + fn unique(&self) -> UnicityBuffer { + UnicityBuffer(self.unicity.clone()) + } +} + +pub struct MemStore { + row: ArcRow, + blob: ArcBlob, +} + +fn prefix_last_bound(prefix: &str) -> Bound { + let mut sort_end = prefix.to_string(); + match sort_end.pop() { + None => Unbounded, + Some(ch) => { + let nc = char::from_u32(ch as u32 + 1).unwrap(); + sort_end.push(nc); + Excluded(sort_end) + } + } +} + +impl MemStore { + fn row_rm_single(&self, entry: &RowRef) -> Result<(), StorageError> { + tracing::trace!(entry=%entry, command="row_rm_single"); + let mut store = self.row.write().or(Err(StorageError::Internal))?; + let shard = &entry.uid.shard; + let sort = &entry.uid.sort; + + let cauz = match entry.causality.as_ref().map(|v| v.parse::()) { + Some(Ok(v)) => v, + _ => 0, + }; + + let bt = store.entry(shard.to_string()).or_default(); + let intval = bt.entry(sort.to_string()).or_default(); + + if cauz == intval.version { + intval.data.clear(); + } + intval.data.push(InternalData::Tombstone); + intval.version += 1; + intval.change.notify_waiters(); + + Ok(()) + } +} + +#[async_trait] +impl IStore for MemStore { + async fn row_fetch<'a>(&self, select: &Selector<'a>) -> Result, StorageError> { + tracing::trace!(select=%select, command="row_fetch"); + let store = self.row.read().or(Err(StorageError::Internal))?; + + match select { + Selector::Range { + shard, + 
sort_begin, + sort_end, + } => Ok(store + .get(*shard) + .unwrap_or(&BTreeMap::new()) + .range(( + Included(sort_begin.to_string()), + Excluded(sort_end.to_string()), + )) + .map(|(k, v)| v.to_row_val(RowRef::new(shard, k))) + .collect::>()), + Selector::List(rlist) => { + let mut acc = vec![]; + for row_ref in rlist { + let maybe_intval = store + .get(&row_ref.uid.shard) + .map(|v| v.get(&row_ref.uid.sort)) + .flatten(); + if let Some(intval) = maybe_intval { + acc.push(intval.to_row_val(row_ref.clone())); + } + } + Ok(acc) + } + Selector::Prefix { shard, sort_prefix } => { + let last_bound = prefix_last_bound(sort_prefix); + + Ok(store + .get(*shard) + .unwrap_or(&BTreeMap::new()) + .range((Included(sort_prefix.to_string()), last_bound)) + .map(|(k, v)| v.to_row_val(RowRef::new(shard, k))) + .collect::>()) + } + Selector::Single(row_ref) => { + let intval = store + .get(&row_ref.uid.shard) + .ok_or(StorageError::NotFound)? + .get(&row_ref.uid.sort) + .ok_or(StorageError::NotFound)?; + Ok(vec![intval.to_row_val((*row_ref).clone())]) + } + } + } + + async fn row_rm<'a>(&self, select: &Selector<'a>) -> Result<(), StorageError> { + tracing::trace!(select=%select, command="row_rm"); + + let values = match select { + Selector::Range { .. } | Selector::Prefix { .. } => self + .row_fetch(select) + .await? + .into_iter() + .map(|rv| rv.row_ref) + .collect::>(), + Selector::List(rlist) => rlist.clone(), + Selector::Single(row_ref) => vec![(*row_ref).clone()], + }; + + for v in values.into_iter() { + self.row_rm_single(&v)?; + } + Ok(()) + } + + async fn row_insert(&self, values: Vec) -> Result<(), StorageError> { + tracing::trace!(entries=%values.iter().map(|v| v.row_ref.to_string()).collect::>().join(","), command="row_insert"); + let mut store = self.row.write().or(Err(StorageError::Internal))?; + for v in values.into_iter() { + let shard = v.row_ref.uid.shard; + let sort = v.row_ref.uid.sort; + + let val = match v.value.into_iter().next() { + Some(Alternative::Value(x)) => x, + _ => vec![], + }; + + let cauz = match v.row_ref.causality.map(|v| v.parse::()) { + Some(Ok(v)) => v, + _ => 0, + }; + + let bt = store.entry(shard).or_default(); + let intval = bt.entry(sort).or_default(); + + if cauz == intval.version { + intval.data.clear(); + } + intval.data.push(InternalData::Value(val)); + intval.version += 1; + intval.change.notify_waiters(); + } + Ok(()) + } + async fn row_poll(&self, value: &RowRef) -> Result { + tracing::trace!(entry=%value, command="row_poll"); + let shard = &value.uid.shard; + let sort = &value.uid.sort; + let cauz = match value.causality.as_ref().map(|v| v.parse::()) { + Some(Ok(v)) => v, + _ => 0, + }; + + let notify_me = { + let mut store = self.row.write().or(Err(StorageError::Internal))?; + let bt = store.entry(shard.to_string()).or_default(); + let intval = bt.entry(sort.to_string()).or_default(); + + if intval.version != cauz { + return Ok(intval.to_row_val(value.clone())); + } + intval.change.clone() + }; + + notify_me.notified().await; + + let res = self.row_fetch(&Selector::Single(value)).await?; + res.into_iter().next().ok_or(StorageError::NotFound) + } + + async fn blob_fetch(&self, blob_ref: &BlobRef) -> Result { + tracing::trace!(entry=%blob_ref, command="blob_fetch"); + let store = self.blob.read().or(Err(StorageError::Internal))?; + store + .get(&blob_ref.0) + .ok_or(StorageError::NotFound) + .map(|v| v.to_blob_val(blob_ref)) + } + async fn blob_insert(&self, blob_val: BlobVal) -> Result<(), StorageError> { + tracing::trace!(entry=%blob_val.blob_ref, 
command="blob_insert"); + let mut store = self.blob.write().or(Err(StorageError::Internal))?; + let entry = store.entry(blob_val.blob_ref.0.clone()).or_default(); + entry.data = blob_val.value.clone(); + entry.metadata = blob_val.meta.clone(); + Ok(()) + } + async fn blob_copy(&self, src: &BlobRef, dst: &BlobRef) -> Result<(), StorageError> { + tracing::trace!(src=%src, dst=%dst, command="blob_copy"); + let mut store = self.blob.write().or(Err(StorageError::Internal))?; + let blob_src = store.entry(src.0.clone()).or_default().clone(); + store.insert(dst.0.clone(), blob_src); + Ok(()) + } + async fn blob_list(&self, prefix: &str) -> Result, StorageError> { + tracing::trace!(prefix = prefix, command = "blob_list"); + let store = self.blob.read().or(Err(StorageError::Internal))?; + let last_bound = prefix_last_bound(prefix); + let blist = store + .range((Included(prefix.to_string()), last_bound)) + .map(|(k, _)| BlobRef(k.to_string())) + .collect::>(); + Ok(blist) + } + async fn blob_rm(&self, blob_ref: &BlobRef) -> Result<(), StorageError> { + tracing::trace!(entry=%blob_ref, command="blob_rm"); + let mut store = self.blob.write().or(Err(StorageError::Internal))?; + store.remove(&blob_ref.0); + Ok(()) + } +} diff --git a/aero-user/src/storage/mod.rs b/aero-user/src/storage/mod.rs new file mode 100644 index 0000000..f5eb8d3 --- /dev/null +++ b/aero-user/src/storage/mod.rs @@ -0,0 +1,180 @@ +/* + * + * This abstraction goal is to leverage all the semantic of Garage K2V+S3, + * to be as tailored as possible to it ; it aims to be a zero-cost abstraction + * compared to when we where directly using the K2V+S3 client. + * + * My idea: we can encapsulate the causality token + * into the object system so it is not exposed. + */ + +pub mod garage; +pub mod in_memory; + +use std::collections::HashMap; +use std::hash::Hash; +use std::sync::Arc; + +use async_trait::async_trait; + +#[derive(Debug, Clone)] +pub enum Alternative { + Tombstone, + Value(Vec), +} +type ConcurrentValues = Vec; + +#[derive(Debug, Clone)] +pub enum StorageError { + NotFound, + Internal, +} +impl std::fmt::Display for StorageError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str("Storage Error: ")?; + match self { + Self::NotFound => f.write_str("Item not found"), + Self::Internal => f.write_str("An internal error occured"), + } + } +} +impl std::error::Error for StorageError {} + +#[derive(Debug, Clone, PartialEq)] +pub struct RowUid { + pub shard: String, + pub sort: String, +} + +#[derive(Debug, Clone, PartialEq)] +pub struct RowRef { + pub uid: RowUid, + pub causality: Option, +} +impl std::fmt::Display for RowRef { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!( + f, + "RowRef({}, {}, {:?})", + self.uid.shard, self.uid.sort, self.causality + ) + } +} + +impl RowRef { + pub fn new(shard: &str, sort: &str) -> Self { + Self { + uid: RowUid { + shard: shard.to_string(), + sort: sort.to_string(), + }, + causality: None, + } + } + pub fn with_causality(mut self, causality: String) -> Self { + self.causality = Some(causality); + self + } +} + +#[derive(Debug, Clone)] +pub struct RowVal { + pub row_ref: RowRef, + pub value: ConcurrentValues, +} + +impl RowVal { + pub fn new(row_ref: RowRef, value: Vec) -> Self { + Self { + row_ref, + value: vec![Alternative::Value(value)], + } + } +} + +#[derive(Debug, Clone)] +pub struct BlobRef(pub String); +impl std::fmt::Display for BlobRef { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, 
"BlobRef({})", self.0) + } +} + +#[derive(Debug, Clone)] +pub struct BlobVal { + pub blob_ref: BlobRef, + pub meta: HashMap, + pub value: Vec, +} +impl BlobVal { + pub fn new(blob_ref: BlobRef, value: Vec) -> Self { + Self { + blob_ref, + value, + meta: HashMap::new(), + } + } + + pub fn with_meta(mut self, k: String, v: String) -> Self { + self.meta.insert(k, v); + self + } +} + +#[derive(Debug)] +pub enum Selector<'a> { + Range { + shard: &'a str, + sort_begin: &'a str, + sort_end: &'a str, + }, + List(Vec), // list of (shard_key, sort_key) + #[allow(dead_code)] + Prefix { + shard: &'a str, + sort_prefix: &'a str, + }, + Single(&'a RowRef), +} +impl<'a> std::fmt::Display for Selector<'a> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Range { + shard, + sort_begin, + sort_end, + } => write!(f, "Range({}, [{}, {}[)", shard, sort_begin, sort_end), + Self::List(list) => write!(f, "List({:?})", list), + Self::Prefix { shard, sort_prefix } => write!(f, "Prefix({}, {})", shard, sort_prefix), + Self::Single(row_ref) => write!(f, "Single({})", row_ref), + } + } +} + +#[async_trait] +pub trait IStore { + async fn row_fetch<'a>(&self, select: &Selector<'a>) -> Result, StorageError>; + async fn row_rm<'a>(&self, select: &Selector<'a>) -> Result<(), StorageError>; + async fn row_insert(&self, values: Vec) -> Result<(), StorageError>; + async fn row_poll(&self, value: &RowRef) -> Result; + + async fn blob_fetch(&self, blob_ref: &BlobRef) -> Result; + async fn blob_insert(&self, blob_val: BlobVal) -> Result<(), StorageError>; + async fn blob_copy(&self, src: &BlobRef, dst: &BlobRef) -> Result<(), StorageError>; + async fn blob_list(&self, prefix: &str) -> Result, StorageError>; + async fn blob_rm(&self, blob_ref: &BlobRef) -> Result<(), StorageError>; +} + +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub struct UnicityBuffer(Vec); + +#[async_trait] +pub trait IBuilder: std::fmt::Debug { + async fn build(&self) -> Result; + + /// Returns an opaque buffer that uniquely identifies this builder + fn unique(&self) -> UnicityBuffer; +} + +pub type Builder = Arc; +pub type Store = Box; diff --git a/aerogramme/Cargo.toml b/aerogramme/Cargo.toml new file mode 100644 index 0000000..e408aec --- /dev/null +++ b/aerogramme/Cargo.toml @@ -0,0 +1,12 @@ +[package] +name = "aerogramme" +version = "0.3.0" +authors = ["Alex Auvolat ", "Quentin Dufour "] +edition = "2021" +license = "EUPL-1.2" +description = "A robust email server" + +[[test]] +name = "behavior" +path = "tests/behavior.rs" +harness = false diff --git a/aerogramme/src/k2v_util.rs b/aerogramme/src/k2v_util.rs new file mode 100644 index 0000000..3cd969b --- /dev/null +++ b/aerogramme/src/k2v_util.rs @@ -0,0 +1,26 @@ +/* +use anyhow::Result; +// ---- UTIL: function to wait for a value to have changed in K2V ---- + +pub async fn k2v_wait_value_changed( + k2v: &storage::RowStore, + key: &storage::RowRef, +) -> Result { + loop { + if let Some(ct) = prev_ct { + match k2v.poll_item(pk, sk, ct.clone(), None).await? 
{ + None => continue, + Some(cv) => return Ok(cv), + } + } else { + match k2v.read_item(pk, sk).await { + Err(k2v_client::Error::NotFound) => { + k2v.insert_item(pk, sk, vec![0u8], None).await?; + } + Err(e) => return Err(e.into()), + Ok(cv) => return Ok(cv), + } + } + } +} +*/ diff --git a/aerogramme/src/lib.rs b/aerogramme/src/lib.rs new file mode 100644 index 0000000..f065478 --- /dev/null +++ b/aerogramme/src/lib.rs @@ -0,0 +1,19 @@ +#![feature(type_alias_impl_trait)] +#![feature(async_fn_in_trait)] +#![feature(async_closure)] +#![feature(trait_alias)] + +pub mod auth; +pub mod bayou; +pub mod config; +pub mod cryptoblob; +pub mod dav; +pub mod imap; +pub mod k2v_util; +pub mod lmtp; +pub mod login; +pub mod mail; +pub mod server; +pub mod storage; +pub mod timestamp; +pub mod user; diff --git a/aerogramme/src/main.rs b/aerogramme/src/main.rs new file mode 100644 index 0000000..43b4dca --- /dev/null +++ b/aerogramme/src/main.rs @@ -0,0 +1,407 @@ +use std::io::Read; +use std::path::PathBuf; + +use anyhow::{bail, Context, Result}; +use clap::{Parser, Subcommand}; +use nix::{sys::signal, unistd::Pid}; + +use aerogramme::config::*; +use aerogramme::login::{static_provider::*, *}; +use aerogramme::server::Server; + +#[derive(Parser, Debug)] +#[clap(author, version, about, long_about = None)] +struct Args { + #[clap(subcommand)] + command: Command, + + /// A special mode dedicated to developers, NOT INTENDED FOR PRODUCTION + #[clap(long)] + dev: bool, + + #[clap( + short, + long, + env = "AEROGRAMME_CONFIG", + default_value = "aerogramme.toml" + )] + /// Path to the main Aerogramme configuration file + config_file: PathBuf, +} + +#[derive(Subcommand, Debug)] +enum Command { + #[clap(subcommand)] + /// A daemon to be run by the end user, on a personal device + Companion(CompanionCommand), + + #[clap(subcommand)] + /// A daemon to be run by the service provider, on a server + Provider(ProviderCommand), + + #[clap(subcommand)] + /// Specific tooling, should not be part of a normal workflow, for debug & experimentation only + Tools(ToolsCommand), + //Test, +} + +#[derive(Subcommand, Debug)] +enum ToolsCommand { + /// Manage crypto roots + #[clap(subcommand)] + CryptoRoot(CryptoRootCommand), + + PasswordHash { + #[clap(env = "AEROGRAMME_PASSWORD")] + maybe_password: Option, + }, +} + +#[derive(Subcommand, Debug)] +enum CryptoRootCommand { + /// Generate a new crypto-root protected with a password + New { + #[clap(env = "AEROGRAMME_PASSWORD")] + maybe_password: Option, + }, + /// Generate a new clear text crypto-root, store it securely! 
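+    /// (the generated crypto-root is printed on stdout)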
+ NewClearText, + /// Change the password of a crypto key + ChangePassword { + #[clap(env = "AEROGRAMME_OLD_PASSWORD")] + maybe_old_password: Option, + + #[clap(env = "AEROGRAMME_NEW_PASSWORD")] + maybe_new_password: Option, + + #[clap(short, long, env = "AEROGRAMME_CRYPTO_ROOT")] + crypto_root: String, + }, + /// From a given crypto-key, derive one containing only the public key + DeriveIncoming { + #[clap(short, long, env = "AEROGRAMME_CRYPTO_ROOT")] + crypto_root: String, + }, +} + +#[derive(Subcommand, Debug)] +enum CompanionCommand { + /// Runs the IMAP proxy + Daemon, + Reload { + #[clap(short, long, env = "AEROGRAMME_PID")] + pid: Option, + }, + Wizard, + #[clap(subcommand)] + Account(AccountManagement), +} + +#[derive(Subcommand, Debug)] +enum ProviderCommand { + /// Runs the IMAP+LMTP server daemon + Daemon, + /// Reload the daemon + Reload { + #[clap(short, long, env = "AEROGRAMME_PID")] + pid: Option, + }, + /// Manage static accounts + #[clap(subcommand)] + Account(AccountManagement), +} + +#[derive(Subcommand, Debug)] +enum AccountManagement { + /// Add an account + Add { + #[clap(short, long)] + login: String, + #[clap(short, long)] + setup: PathBuf, + }, + /// Delete an account + Delete { + #[clap(short, long)] + login: String, + }, + /// Change password for a given account + ChangePassword { + #[clap(env = "AEROGRAMME_OLD_PASSWORD")] + maybe_old_password: Option, + + #[clap(env = "AEROGRAMME_NEW_PASSWORD")] + maybe_new_password: Option, + + #[clap(short, long)] + login: String, + }, +} + +#[cfg(tokio_unstable)] +fn tracer() { + console_subscriber::init(); +} + +#[cfg(not(tokio_unstable))] +fn tracer() { + tracing_subscriber::fmt::init(); +} + +#[tokio::main] +async fn main() -> Result<()> { + if std::env::var("RUST_LOG").is_err() { + std::env::set_var("RUST_LOG", "main=info,aerogramme=info,k2v_client=info") + } + + // Abort on panic (same behavior as in Go) + std::panic::set_hook(Box::new(|panic_info| { + eprintln!("{}", panic_info); + eprintln!("{:?}", backtrace::Backtrace::new()); + std::process::abort(); + })); + + tracer(); + + let args = Args::parse(); + let any_config = if args.dev { + use std::net::*; + AnyConfig::Provider(ProviderConfig { + pid: None, + imap: None, + imap_unsecure: Some(ImapUnsecureConfig { + bind_addr: SocketAddr::new(IpAddr::V6(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1)), 1143), + }), + dav_unsecure: Some(DavUnsecureConfig { + bind_addr: SocketAddr::new(IpAddr::V6(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1)), 8087), + }), + lmtp: Some(LmtpConfig { + bind_addr: SocketAddr::new(IpAddr::V6(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1)), 1025), + hostname: "example.tld".to_string(), + }), + auth: Some(AuthConfig { + bind_addr: SocketAddr::new( + IpAddr::V6(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1)), + 12345, + ), + }), + users: UserManagement::Demo, + }) + } else { + read_config(args.config_file)? 
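+        // Without --dev, the role (Companion or Provider) is read from the TOML
+        // file given by --config-file / AEROGRAMME_CONFIG; with --dev, the
+        // hard-coded Provider config above listens on ::1 only (IMAP 1143,
+        // DAV 8087, LMTP 1025, auth 12345) and uses the demo user provider.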
+    };
+
+    match (&args.command, any_config) {
+        (Command::Companion(subcommand), AnyConfig::Companion(config)) => match subcommand {
+            CompanionCommand::Daemon => {
+                let server = Server::from_companion_config(config).await?;
+                server.run().await?;
+            }
+            CompanionCommand::Reload { pid } => reload(*pid, config.pid)?,
+            CompanionCommand::Wizard => {
+                unimplemented!();
+            }
+            CompanionCommand::Account(cmd) => {
+                let user_file = config.users.user_list;
+                account_management(&args.command, cmd, user_file)?;
+            }
+        },
+        (Command::Provider(subcommand), AnyConfig::Provider(config)) => match subcommand {
+            ProviderCommand::Daemon => {
+                let server = Server::from_provider_config(config).await?;
+                server.run().await?;
+            }
+            ProviderCommand::Reload { pid } => reload(*pid, config.pid)?,
+            ProviderCommand::Account(cmd) => {
+                let user_file = match config.users {
+                    UserManagement::Static(conf) => conf.user_list,
+                    _ => {
+                        panic!("Only static account management is supported from Aerogramme.")
+                    }
+                };
+                account_management(&args.command, cmd, user_file)?;
+            }
+        },
+        (Command::Provider(_), AnyConfig::Companion(_)) => {
+            bail!("You want to run a 'Provider' command but your configuration file has role 'Companion'.");
+        }
+        (Command::Companion(_), AnyConfig::Provider(_)) => {
+            bail!("You want to run a 'Companion' command but your configuration file has role 'Provider'.");
+        }
+        (Command::Tools(subcommand), _) => match subcommand {
+            ToolsCommand::PasswordHash { maybe_password } => {
+                let password = match maybe_password {
+                    Some(pwd) => pwd.clone(),
+                    None => rpassword::prompt_password("Enter password: ")?,
+                };
+                println!("{}", hash_password(&password)?);
+            }
+            ToolsCommand::CryptoRoot(crcommand) => match crcommand {
+                CryptoRootCommand::New { maybe_password } => {
+                    let password = match maybe_password {
+                        Some(pwd) => pwd.clone(),
+                        None => {
+                            let password = rpassword::prompt_password("Enter password: ")?;
+                            let password_confirm =
+                                rpassword::prompt_password("Confirm password: ")?;
+                            if password != password_confirm {
+                                bail!("Passwords don't match.");
+                            }
+                            password
+                        }
+                    };
+                    let crypto_keys = CryptoKeys::init();
+                    let cr = CryptoRoot::create_pass(&password, &crypto_keys)?;
+                    println!("{}", cr.0);
+                }
+                CryptoRootCommand::NewClearText => {
+                    let crypto_keys = CryptoKeys::init();
+                    let cr = CryptoRoot::create_cleartext(&crypto_keys);
+                    println!("{}", cr.0);
+                }
+                CryptoRootCommand::ChangePassword {
+                    maybe_old_password,
+                    maybe_new_password,
+                    crypto_root,
+                } => {
+                    let old_password = match maybe_old_password {
+                        Some(pwd) => pwd.to_string(),
+                        None => rpassword::prompt_password("Enter old password: ")?,
+                    };
+
+                    let new_password = match maybe_new_password {
+                        Some(pwd) => pwd.to_string(),
+                        None => {
+                            let password = rpassword::prompt_password("Enter new password: ")?;
+                            let password_confirm =
+                                rpassword::prompt_password("Confirm new password: ")?;
+                            if password != password_confirm {
+                                bail!("Passwords don't match.");
+                            }
+                            password
+                        }
+                    };
+
+                    let keys = CryptoRoot(crypto_root.to_string()).crypto_keys(&old_password)?;
+                    let cr = CryptoRoot::create_pass(&new_password, &keys)?;
+                    println!("{}", cr.0);
+                }
+                CryptoRootCommand::DeriveIncoming { crypto_root } => {
+                    let pubkey = CryptoRoot(crypto_root.to_string()).public_key()?;
+                    let cr = CryptoRoot::create_incoming(&pubkey);
+                    println!("{}", cr.0);
+                }
+            },
+        },
+    }
+
+    Ok(())
+}
+
+fn reload(pid: Option<i32>, pid_path: Option<PathBuf>) -> Result<()> {
+    let final_pid = match (pid, pid_path) {
+        (Some(pid), _) => pid,
+        (_, Some(path)) => {
+            let mut f = 
std::fs::OpenOptions::new().read(true).open(path)?; + let mut pidstr = String::new(); + f.read_to_string(&mut pidstr)?; + pidstr.parse::()? + } + _ => bail!("Unable to infer your daemon's PID"), + }; + let pid = Pid::from_raw(final_pid); + signal::kill(pid, signal::Signal::SIGUSR1)?; + Ok(()) +} + +fn account_management(root: &Command, cmd: &AccountManagement, users: PathBuf) -> Result<()> { + let mut ulist: UserList = + read_config(users.clone()).context(format!("'{:?}' must be a user database", users))?; + + match cmd { + AccountManagement::Add { login, setup } => { + tracing::debug!(user = login, "will-create"); + let stp: SetupEntry = read_config(setup.clone()) + .context(format!("'{:?}' must be a setup file", setup))?; + tracing::debug!(user = login, "loaded setup entry"); + + let password = match stp.clear_password { + Some(pwd) => pwd, + None => { + let password = rpassword::prompt_password("Enter password: ")?; + let password_confirm = rpassword::prompt_password("Confirm password: ")?; + if password != password_confirm { + bail!("Passwords don't match."); + } + password + } + }; + + let crypto_keys = CryptoKeys::init(); + let crypto_root = match root { + Command::Provider(_) => CryptoRoot::create_pass(&password, &crypto_keys)?, + Command::Companion(_) => CryptoRoot::create_cleartext(&crypto_keys), + _ => unreachable!(), + }; + + let hash = hash_password(password.as_str()).context("unable to hash password")?; + + ulist.insert( + login.clone(), + UserEntry { + email_addresses: stp.email_addresses, + password: hash, + crypto_root: crypto_root.0, + storage: stp.storage, + }, + ); + + write_config(users.clone(), &ulist)?; + } + AccountManagement::Delete { login } => { + tracing::debug!(user = login, "will-delete"); + ulist.remove(login); + write_config(users.clone(), &ulist)?; + } + AccountManagement::ChangePassword { + maybe_old_password, + maybe_new_password, + login, + } => { + let mut user = ulist.remove(login).context("user must exist first")?; + + let old_password = match maybe_old_password { + Some(pwd) => pwd.to_string(), + None => rpassword::prompt_password("Enter old password: ")?, + }; + + if !verify_password(&old_password, &user.password)? 
{ + bail!(format!("invalid password for login {}", login)); + } + + let crypto_keys = CryptoRoot(user.crypto_root).crypto_keys(&old_password)?; + + let new_password = match maybe_new_password { + Some(pwd) => pwd.to_string(), + None => { + let password = rpassword::prompt_password("Enter new password: ")?; + let password_confirm = rpassword::prompt_password("Confirm new password: ")?; + if password != password_confirm { + bail!("Passwords don't match."); + } + password + } + }; + let new_hash = hash_password(&new_password)?; + let new_crypto_root = CryptoRoot::create_pass(&new_password, &crypto_keys)?; + + user.password = new_hash; + user.crypto_root = new_crypto_root.0; + + ulist.insert(login.clone(), user); + write_config(users.clone(), &ulist)?; + } + }; + + Ok(()) +} diff --git a/aerogramme/src/server.rs b/aerogramme/src/server.rs new file mode 100644 index 0000000..09e91ad --- /dev/null +++ b/aerogramme/src/server.rs @@ -0,0 +1,147 @@ +use std::io::Write; +use std::path::PathBuf; +use std::sync::Arc; + +use anyhow::Result; +use futures::try_join; +use log::*; +use tokio::sync::watch; + +use crate::auth; +use crate::config::*; +use crate::dav; +use crate::imap; +use crate::lmtp::*; +use crate::login::ArcLoginProvider; +use crate::login::{demo_provider::*, ldap_provider::*, static_provider::*}; + +pub struct Server { + lmtp_server: Option>, + imap_unsecure_server: Option, + imap_server: Option, + auth_server: Option, + dav_unsecure_server: Option, + pid_file: Option, +} + +impl Server { + pub async fn from_companion_config(config: CompanionConfig) -> Result { + tracing::info!("Init as companion"); + let login = Arc::new(StaticLoginProvider::new(config.users).await?); + + let lmtp_server = None; + let imap_unsecure_server = Some(imap::new_unsecure(config.imap, login.clone())); + Ok(Self { + lmtp_server, + imap_unsecure_server, + imap_server: None, + auth_server: None, + dav_unsecure_server: None, + pid_file: config.pid, + }) + } + + pub async fn from_provider_config(config: ProviderConfig) -> Result { + tracing::info!("Init as provider"); + let login: ArcLoginProvider = match config.users { + UserManagement::Demo => Arc::new(DemoLoginProvider::new()), + UserManagement::Static(x) => Arc::new(StaticLoginProvider::new(x).await?), + UserManagement::Ldap(x) => Arc::new(LdapLoginProvider::new(x)?), + }; + + let lmtp_server = config.lmtp.map(|lmtp| LmtpServer::new(lmtp, login.clone())); + let imap_unsecure_server = config + .imap_unsecure + .map(|imap| imap::new_unsecure(imap, login.clone())); + let imap_server = config + .imap + .map(|imap| imap::new(imap, login.clone())) + .transpose()?; + let auth_server = config + .auth + .map(|auth| auth::AuthServer::new(auth, login.clone())); + let dav_unsecure_server = config + .dav_unsecure + .map(|dav_config| dav::new_unsecure(dav_config, login.clone())); + + Ok(Self { + lmtp_server, + imap_unsecure_server, + imap_server, + dav_unsecure_server, + auth_server, + pid_file: config.pid, + }) + } + + pub async fn run(self) -> Result<()> { + let pid = std::process::id(); + tracing::info!(pid = pid, "Starting main loops"); + + // write the pid file + if let Some(pid_file) = self.pid_file { + let mut file = std::fs::OpenOptions::new() + .write(true) + .create(true) + .truncate(true) + .open(pid_file)?; + file.write_all(pid.to_string().as_bytes())?; + drop(file); + } + + let (exit_signal, provoke_exit) = watch_ctrl_c(); + let _exit_on_err = move |err: anyhow::Error| { + error!("Error: {}", err); + let _ = provoke_exit.send(true); + }; + + try_join!( + async { + 
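+                // Each configured server runs concurrently inside this try_join!;
+                // every branch watches the same `exit_signal` and the whole group
+                // stops at the first error.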
match self.lmtp_server.as_ref() { + None => Ok(()), + Some(s) => s.run(exit_signal.clone()).await, + } + }, + async { + match self.imap_unsecure_server { + None => Ok(()), + Some(s) => s.run(exit_signal.clone()).await, + } + }, + async { + match self.imap_server { + None => Ok(()), + Some(s) => s.run(exit_signal.clone()).await, + } + }, + async { + match self.auth_server { + None => Ok(()), + Some(a) => a.run(exit_signal.clone()).await, + } + }, + async { + match self.dav_unsecure_server { + None => Ok(()), + Some(s) => s.run(exit_signal.clone()).await, + } + } + )?; + + Ok(()) + } +} + +pub fn watch_ctrl_c() -> (watch::Receiver, Arc>) { + let (send_cancel, watch_cancel) = watch::channel(false); + let send_cancel = Arc::new(send_cancel); + let send_cancel_2 = send_cancel.clone(); + tokio::spawn(async move { + tokio::signal::ctrl_c() + .await + .expect("failed to install CTRL+C signal handler"); + info!("Received CTRL+C, shutting down."); + send_cancel.send(true).unwrap(); + }); + (watch_cancel, send_cancel_2) +} diff --git a/flake.nix b/flake.nix index 01dfda1..c6ae4ce 100644 --- a/flake.nix +++ b/flake.nix @@ -186,12 +186,12 @@ shell = gpkgs.mkShell { buildInputs = [ cargo2nix.packages.x86_64-linux.default - fenix.packages.x86_64-linux.minimal.toolchain - fenix.packages.x86_64-linux.rust-analyzer + fenix.packages.x86_64-linux.complete.toolchain + #fenix.packages.x86_64-linux.rust-analyzer ]; shellHook = '' - echo "AEROGRAME DEVELOPMENT SHELL ${fenix.packages.x86_64-linux.minimal.rustc}" - export RUST_SRC_PATH="${fenix.packages.x86_64-linux.latest.rust-src}/lib/rustlib/src/rust/library" + echo "AEROGRAME DEVELOPMENT SHELL ${fenix.packages.x86_64-linux.complete.toolchain}" + export RUST_SRC_PATH="${fenix.packages.x86_64-linux.complete.toolchain}/lib/rustlib/src/rust/library" export RUST_ANALYZER_INTERNALS_DO_NOT_USE='this is unstable' ''; }; diff --git a/fuzz/.gitignore b/fuzz/.gitignore deleted file mode 100644 index 1a45eee..0000000 --- a/fuzz/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -target -corpus -artifacts -coverage diff --git a/fuzz/Cargo.lock b/fuzz/Cargo.lock deleted file mode 100644 index 08fa951..0000000 --- a/fuzz/Cargo.lock +++ /dev/null @@ -1,4249 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. 
-version = 3 - -[[package]] -name = "abnf-core" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec182d1f071b906a9f59269c89af101515a5cbe58f723eb6717e7fe7445c0dea" -dependencies = [ - "nom 7.1.3", -] - -[[package]] -name = "addr2line" -version = "0.21.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" -dependencies = [ - "gimli", -] - -[[package]] -name = "adler" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" - -[[package]] -name = "aerogramme" -version = "0.3.0" -dependencies = [ - "anyhow", - "argon2", - "async-trait", - "aws-config", - "aws-sdk-s3", - "aws-smithy-runtime", - "aws-smithy-runtime-api", - "backtrace", - "base64 0.21.7", - "chrono", - "clap", - "console-subscriber", - "duplexify", - "eml-codec", - "futures", - "hex", - "http 1.1.0", - "http-body-util", - "hyper 1.2.0", - "hyper-rustls 0.26.0", - "hyper-util", - "im", - "imap-codec", - "imap-flow", - "itertools 0.10.5", - "k2v-client", - "lazy_static", - "ldap3", - "log", - "nix", - "nom 7.1.3", - "quick-xml", - "rand", - "rmp-serde", - "rpassword", - "rustls 0.22.2", - "rustls-pemfile 2.1.1", - "serde", - "smtp-message", - "smtp-server", - "sodiumoxide", - "thiserror", - "tokio", - "tokio-rustls 0.25.0", - "tokio-util", - "toml", - "tracing", - "tracing-subscriber", - "zstd", -] - -[[package]] -name = "aerogramme-fuzz" -version = "0.0.0" -dependencies = [ - "aerogramme", - "libfuzzer-sys", - "quick-xml", - "tokio", -] - -[[package]] -name = "aho-corasick" -version = "1.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0" -dependencies = [ - "memchr", -] - -[[package]] -name = "android-tzdata" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" - -[[package]] -name = "android_system_properties" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" -dependencies = [ - "libc", -] - -[[package]] -name = "anyhow" -version = "1.0.80" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ad32ce52e4161730f7098c077cd2ed6229b5804ccf99e5366be1ab72a98b4e1" - -[[package]] -name = "arbitrary" -version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d5a26814d8dcb93b0e5a0ff3c6d80a8843bafb21b39e8e18a6f05471870e110" - -[[package]] -name = "argon2" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c3610892ee6e0cbce8ae2700349fcf8f98adb0dbfbee85aec3c9179d29cc072" -dependencies = [ - "base64ct", - "blake2", - "cpufeatures", - "password-hash", -] - -[[package]] -name = "arrayvec" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b" - -[[package]] -name = "asn1-rs" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30ff05a702273012438132f449575dbc804e27b2f3cbe3069aa237d26c98fa33" -dependencies = [ - "asn1-rs-derive", - "asn1-rs-impl", - "displaydoc", - "nom 7.1.3", - "num-traits", - "rusticata-macros", - 
"thiserror", - "time", -] - -[[package]] -name = "asn1-rs-derive" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db8b7511298d5b7784b40b092d9e9dcd3a627a5707e4b5e507931ab0d44eeebf" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", - "synstructure", -] - -[[package]] -name = "asn1-rs-impl" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2777730b2039ac0f95f093556e61b6d26cebed5393ca6f152717777cec3a42ed" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "async-channel" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35" -dependencies = [ - "concurrent-queue", - "event-listener 2.5.3", - "futures-core", -] - -[[package]] -name = "async-channel" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f28243a43d821d11341ab73c80bed182dc015c514b951616cf79bd4af39af0c3" -dependencies = [ - "concurrent-queue", - "event-listener 5.2.0", - "event-listener-strategy 0.5.0", - "futures-core", - "pin-project-lite 0.2.13", -] - -[[package]] -name = "async-executor" -version = "1.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17ae5ebefcc48e7452b4987947920dac9450be1110cadf34d1b8c116bdbaf97c" -dependencies = [ - "async-lock 3.3.0", - "async-task", - "concurrent-queue", - "fastrand 2.0.1", - "futures-lite 2.2.0", - "slab", -] - -[[package]] -name = "async-fs" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "279cf904654eeebfa37ac9bb1598880884924aab82e290aa65c9e77a0e142e06" -dependencies = [ - "async-lock 2.8.0", - "autocfg", - "blocking", - "futures-lite 1.13.0", -] - -[[package]] -name = "async-global-executor" -version = "2.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05b1b633a2115cd122d73b955eadd9916c18c8f510ec9cd1686404c60ad1c29c" -dependencies = [ - "async-channel 2.2.0", - "async-executor", - "async-io 2.3.1", - "async-lock 3.3.0", - "blocking", - "futures-lite 2.2.0", - "once_cell", -] - -[[package]] -name = "async-io" -version = "1.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fc5b45d93ef0529756f812ca52e44c221b35341892d3dcc34132ac02f3dd2af" -dependencies = [ - "async-lock 2.8.0", - "autocfg", - "cfg-if", - "concurrent-queue", - "futures-lite 1.13.0", - "log", - "parking", - "polling 2.8.0", - "rustix 0.37.27", - "slab", - "socket2 0.4.10", - "waker-fn", -] - -[[package]] -name = "async-io" -version = "2.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f97ab0c5b00a7cdbe5a371b9a782ee7be1316095885c8a4ea1daf490eb0ef65" -dependencies = [ - "async-lock 3.3.0", - "cfg-if", - "concurrent-queue", - "futures-io", - "futures-lite 2.2.0", - "parking", - "polling 3.5.0", - "rustix 0.38.31", - "slab", - "tracing", - "windows-sys 0.52.0", -] - -[[package]] -name = "async-lock" -version = "2.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "287272293e9d8c41773cec55e365490fe034813a2f172f502d6ddcf75b2f582b" -dependencies = [ - "event-listener 2.5.3", -] - -[[package]] -name = "async-lock" -version = "3.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d034b430882f8381900d3fe6f0aaa3ad94f2cb4ac519b429692a1bc2dda4ae7b" -dependencies = [ - "event-listener 4.0.3", 
- "event-listener-strategy 0.4.0", - "pin-project-lite 0.2.13", -] - -[[package]] -name = "async-net" -version = "1.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0434b1ed18ce1cf5769b8ac540e33f01fa9471058b5e89da9e06f3c882a8c12f" -dependencies = [ - "async-io 1.13.0", - "blocking", - "futures-lite 1.13.0", -] - -[[package]] -name = "async-process" -version = "1.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea6438ba0a08d81529c69b36700fa2f95837bfe3e776ab39cde9c14d9149da88" -dependencies = [ - "async-io 1.13.0", - "async-lock 2.8.0", - "async-signal", - "blocking", - "cfg-if", - "event-listener 3.1.0", - "futures-lite 1.13.0", - "rustix 0.38.31", - "windows-sys 0.48.0", -] - -[[package]] -name = "async-signal" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e47d90f65a225c4527103a8d747001fc56e375203592b25ad103e1ca13124c5" -dependencies = [ - "async-io 2.3.1", - "async-lock 2.8.0", - "atomic-waker", - "cfg-if", - "futures-core", - "futures-io", - "rustix 0.38.31", - "signal-hook-registry", - "slab", - "windows-sys 0.48.0", -] - -[[package]] -name = "async-std" -version = "1.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62565bb4402e926b29953c785397c6dc0391b7b446e45008b0049eb43cec6f5d" -dependencies = [ - "async-channel 1.9.0", - "async-global-executor", - "async-io 1.13.0", - "async-lock 2.8.0", - "crossbeam-utils", - "futures-channel", - "futures-core", - "futures-io", - "futures-lite 1.13.0", - "gloo-timers", - "kv-log-macro", - "log", - "memchr", - "once_cell", - "pin-project-lite 0.2.13", - "pin-utils", - "slab", - "wasm-bindgen-futures", -] - -[[package]] -name = "async-stream" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd56dd203fef61ac097dd65721a419ddccb106b2d2b70ba60a6b529f03961a51" -dependencies = [ - "async-stream-impl", - "futures-core", - "pin-project-lite 0.2.13", -] - -[[package]] -name = "async-stream-impl" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.52", -] - -[[package]] -name = "async-task" -version = "4.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbb36e985947064623dbd357f727af08ffd077f93d696782f3c56365fa2e2799" - -[[package]] -name = "async-trait" -version = "0.1.77" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c980ee35e870bd1a4d2c8294d4c04d0499e67bca1e4b5cefcc693c2fa00caea9" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.52", -] - -[[package]] -name = "atomic-waker" -version = "1.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" - -[[package]] -name = "atty" -version = "0.2.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" -dependencies = [ - "hermit-abi 0.1.19", - "libc", - "winapi", -] - -[[package]] -name = "auto_enums" -version = "0.7.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe0dfe45d75158751e195799f47ea02e81f570aa24bc5ef999cdd9e888c4b5c3" -dependencies = [ - "auto_enums_core", - "auto_enums_derive", -] - -[[package]] -name = "auto_enums_core" -version = "0.7.12" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da47c46001293a2c4b744d731958be22cff408a2ab76e2279328f9713b1267b4" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "auto_enums_derive" -version = "0.7.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41aed1da83ecdc799503b7cb94da1b45a34d72b49caf40a61d9cf5b88ec07cfd" -dependencies = [ - "autocfg", - "derive_utils", - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "autocfg" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" - -[[package]] -name = "aws-config" -version = "1.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b96342ea8948ab9bef3e6234ea97fc32e2d8a88d8fb6a084e52267317f94b6b" -dependencies = [ - "aws-credential-types", - "aws-runtime", - "aws-sdk-sso", - "aws-sdk-ssooidc", - "aws-sdk-sts", - "aws-smithy-async", - "aws-smithy-http", - "aws-smithy-json", - "aws-smithy-runtime", - "aws-smithy-runtime-api", - "aws-smithy-types", - "aws-types", - "bytes", - "fastrand 2.0.1", - "hex", - "http 0.2.12", - "hyper 0.14.28", - "ring 0.17.8", - "time", - "tokio", - "tracing", - "zeroize", -] - -[[package]] -name = "aws-credential-types" -version = "1.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "273fa47dafc9ef14c2c074ddddbea4561ff01b7f68d5091c0e9737ced605c01d" -dependencies = [ - "aws-smithy-async", - "aws-smithy-runtime-api", - "aws-smithy-types", - "zeroize", -] - -[[package]] -name = "aws-runtime" -version = "1.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e38bab716c8bf07da24be07ecc02e0f5656ce8f30a891322ecdcb202f943b85" -dependencies = [ - "aws-credential-types", - "aws-sigv4", - "aws-smithy-async", - "aws-smithy-eventstream", - "aws-smithy-http", - "aws-smithy-runtime-api", - "aws-smithy-types", - "aws-types", - "bytes", - "fastrand 2.0.1", - "http 0.2.12", - "http-body 0.4.6", - "percent-encoding", - "pin-project-lite 0.2.13", - "tracing", - "uuid", -] - -[[package]] -name = "aws-sdk-config" -version = "1.16.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07979fd68679736ba306d6ea2a4dc2fd835ac4d454942c5d8920ef83ed2f979f" -dependencies = [ - "aws-credential-types", - "aws-runtime", - "aws-smithy-async", - "aws-smithy-http", - "aws-smithy-json", - "aws-smithy-runtime", - "aws-smithy-runtime-api", - "aws-smithy-types", - "aws-types", - "bytes", - "http 0.2.12", - "once_cell", - "regex-lite", - "tracing", -] - -[[package]] -name = "aws-sdk-s3" -version = "1.17.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93d35d39379445970fc3e4ddf7559fff2c32935ce0b279f9cb27080d6b7c6d94" -dependencies = [ - "aws-credential-types", - "aws-runtime", - "aws-sigv4", - "aws-smithy-async", - "aws-smithy-checksums", - "aws-smithy-eventstream", - "aws-smithy-http", - "aws-smithy-json", - "aws-smithy-runtime", - "aws-smithy-runtime-api", - "aws-smithy-types", - "aws-smithy-xml", - "aws-types", - "bytes", - "http 0.2.12", - "http-body 0.4.6", - "once_cell", - "percent-encoding", - "regex-lite", - "tracing", - "url", -] - -[[package]] -name = "aws-sdk-sso" -version = "1.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d84bd3925a17c9adbf6ec65d52104a44a09629d8f70290542beeee69a95aee7f" -dependencies = [ - "aws-credential-types", 
- "aws-runtime", - "aws-smithy-async", - "aws-smithy-http", - "aws-smithy-json", - "aws-smithy-runtime", - "aws-smithy-runtime-api", - "aws-smithy-types", - "aws-types", - "bytes", - "http 0.2.12", - "once_cell", - "regex-lite", - "tracing", -] - -[[package]] -name = "aws-sdk-ssooidc" -version = "1.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c2dae39e997f58bc4d6292e6244b26ba630c01ab671b6f9f44309de3eb80ab8" -dependencies = [ - "aws-credential-types", - "aws-runtime", - "aws-smithy-async", - "aws-smithy-http", - "aws-smithy-json", - "aws-smithy-runtime", - "aws-smithy-runtime-api", - "aws-smithy-types", - "aws-types", - "bytes", - "http 0.2.12", - "once_cell", - "regex-lite", - "tracing", -] - -[[package]] -name = "aws-sdk-sts" -version = "1.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17fd9a53869fee17cea77e352084e1aa71e2c5e323d974c13a9c2bcfd9544c7f" -dependencies = [ - "aws-credential-types", - "aws-runtime", - "aws-smithy-async", - "aws-smithy-http", - "aws-smithy-json", - "aws-smithy-query", - "aws-smithy-runtime", - "aws-smithy-runtime-api", - "aws-smithy-types", - "aws-smithy-xml", - "aws-types", - "http 0.2.12", - "once_cell", - "regex-lite", - "tracing", -] - -[[package]] -name = "aws-sigv4" -version = "1.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ada00a4645d7d89f296fe0ddbc3fe3554f03035937c849a05d37ddffc1f29a1" -dependencies = [ - "aws-credential-types", - "aws-smithy-eventstream", - "aws-smithy-http", - "aws-smithy-runtime-api", - "aws-smithy-types", - "bytes", - "crypto-bigint 0.5.5", - "form_urlencoded", - "hex", - "hmac", - "http 0.2.12", - "http 1.1.0", - "once_cell", - "p256", - "percent-encoding", - "ring 0.17.8", - "sha2", - "subtle", - "time", - "tracing", - "zeroize", -] - -[[package]] -name = "aws-smithy-async" -version = "1.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcf7f09a27286d84315dfb9346208abb3b0973a692454ae6d0bc8d803fcce3b4" -dependencies = [ - "futures-util", - "pin-project-lite 0.2.13", - "tokio", -] - -[[package]] -name = "aws-smithy-checksums" -version = "0.60.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fd4b66f2a8e7c84d7e97bda2666273d41d2a2e25302605bcf906b7b2661ae5e" -dependencies = [ - "aws-smithy-http", - "aws-smithy-types", - "bytes", - "crc32c", - "crc32fast", - "hex", - "http 0.2.12", - "http-body 0.4.6", - "md-5", - "pin-project-lite 0.2.13", - "sha1", - "sha2", - "tracing", -] - -[[package]] -name = "aws-smithy-eventstream" -version = "0.60.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6363078f927f612b970edf9d1903ef5cef9a64d1e8423525ebb1f0a1633c858" -dependencies = [ - "aws-smithy-types", - "bytes", - "crc32fast", -] - -[[package]] -name = "aws-smithy-http" -version = "0.60.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6ca214a6a26f1b7ebd63aa8d4f5e2194095643023f9608edf99a58247b9d80d" -dependencies = [ - "aws-smithy-eventstream", - "aws-smithy-runtime-api", - "aws-smithy-types", - "bytes", - "bytes-utils", - "futures-core", - "http 0.2.12", - "http-body 0.4.6", - "once_cell", - "percent-encoding", - "pin-project-lite 0.2.13", - "pin-utils", - "tracing", -] - -[[package]] -name = "aws-smithy-json" -version = "0.60.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1af80ecf3057fb25fe38d1687e94c4601a7817c6a1e87c1b0635f7ecb644ace5" -dependencies = [ - 
"aws-smithy-types", -] - -[[package]] -name = "aws-smithy-query" -version = "0.60.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb27084f72ea5fc20033efe180618677ff4a2f474b53d84695cfe310a6526cbc" -dependencies = [ - "aws-smithy-types", - "urlencoding", -] - -[[package]] -name = "aws-smithy-runtime" -version = "1.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbb5fca54a532a36ff927fbd7407a7c8eb9c3b4faf72792ba2965ea2cad8ed55" -dependencies = [ - "aws-smithy-async", - "aws-smithy-http", - "aws-smithy-runtime-api", - "aws-smithy-types", - "bytes", - "fastrand 2.0.1", - "h2 0.3.24", - "http 0.2.12", - "http-body 0.4.6", - "hyper 0.14.28", - "hyper-rustls 0.24.2", - "once_cell", - "pin-project-lite 0.2.13", - "pin-utils", - "rustls 0.21.10", - "tokio", - "tracing", -] - -[[package]] -name = "aws-smithy-runtime-api" -version = "1.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22389cb6f7cac64f266fb9f137745a9349ced7b47e0d2ba503e9e40ede4f7060" -dependencies = [ - "aws-smithy-async", - "aws-smithy-types", - "bytes", - "http 0.2.12", - "http 1.1.0", - "pin-project-lite 0.2.13", - "tokio", - "tracing", - "zeroize", -] - -[[package]] -name = "aws-smithy-types" -version = "1.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f081da5481210523d44ffd83d9f0740320050054006c719eae0232d411f024d3" -dependencies = [ - "base64-simd", - "bytes", - "bytes-utils", - "futures-core", - "http 0.2.12", - "http-body 0.4.6", - "itoa", - "num-integer", - "pin-project-lite 0.2.13", - "pin-utils", - "ryu", - "serde", - "time", - "tokio", - "tokio-util", -] - -[[package]] -name = "aws-smithy-xml" -version = "0.60.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fccd8f595d0ca839f9f2548e66b99514a85f92feb4c01cf2868d93eb4888a42" -dependencies = [ - "xmlparser", -] - -[[package]] -name = "aws-types" -version = "1.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d07c63521aa1ea9a9f92a701f1a08ce3fd20b46c6efc0d5c8947c1fd879e3df1" -dependencies = [ - "aws-credential-types", - "aws-smithy-async", - "aws-smithy-runtime-api", - "aws-smithy-types", - "http 0.2.12", - "rustc_version", - "tracing", -] - -[[package]] -name = "axum" -version = "0.6.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b829e4e32b91e643de6eafe82b1d90675f5874230191a4ffbc1b336dec4d6bf" -dependencies = [ - "async-trait", - "axum-core", - "bitflags 1.3.2", - "bytes", - "futures-util", - "http 0.2.12", - "http-body 0.4.6", - "hyper 0.14.28", - "itoa", - "matchit", - "memchr", - "mime", - "percent-encoding", - "pin-project-lite 0.2.13", - "rustversion", - "serde", - "sync_wrapper", - "tower", - "tower-layer", - "tower-service", -] - -[[package]] -name = "axum-core" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c" -dependencies = [ - "async-trait", - "bytes", - "futures-util", - "http 0.2.12", - "http-body 0.4.6", - "mime", - "rustversion", - "tower-layer", - "tower-service", -] - -[[package]] -name = "backtrace" -version = "0.3.69" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2089b7e3f35b9dd2d0ed921ead4f6d318c27680d4a5bd167b3ee120edb105837" -dependencies = [ - "addr2line", - "cc", - "cfg-if", - "libc", - "miniz_oxide", - "object", - "rustc-demangle", -] - -[[package]] -name = "base16ct" 
-version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "349a06037c7bf932dd7e7d1f653678b2038b9ad46a74102f1fc7bd7872678cce" - -[[package]] -name = "base64" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" - -[[package]] -name = "base64" -version = "0.21.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" - -[[package]] -name = "base64-simd" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "339abbe78e73178762e23bea9dfd08e697eb3f3301cd4be981c0f78ba5859195" -dependencies = [ - "outref", - "vsimd", -] - -[[package]] -name = "base64ct" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" - -[[package]] -name = "bitflags" -version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" - -[[package]] -name = "bitflags" -version = "2.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed570934406eb16438a4e976b1b4500774099c13b8cb96eec99f620f05090ddf" - -[[package]] -name = "bitmaps" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "031043d04099746d8db04daf1fa424b2bc8bd69d92b25962dcde24da39ab64a2" -dependencies = [ - "typenum", -] - -[[package]] -name = "bitvec" -version = "0.19.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55f93d0ef3363c364d5976646a38f04cf67cfe1d4c8d160cdea02cab2c116b33" -dependencies = [ - "funty", - "radium", - "tap", - "wyz", -] - -[[package]] -name = "blake2" -version = "0.10.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe" -dependencies = [ - "digest", -] - -[[package]] -name = "block-buffer" -version = "0.10.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" -dependencies = [ - "generic-array", -] - -[[package]] -name = "blocking" -version = "1.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a37913e8dc4ddcc604f0c6d3bf2887c995153af3611de9e23c352b44c1b9118" -dependencies = [ - "async-channel 2.2.0", - "async-lock 3.3.0", - "async-task", - "fastrand 2.0.1", - "futures-io", - "futures-lite 2.2.0", - "piper", - "tracing", -] - -[[package]] -name = "bounded-static" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2325bd33fa7e3018e7e37f5b0591ba009124963b5a3f8b7cae6d0a8c1028ed4" -dependencies = [ - "bounded-static-derive", -] - -[[package]] -name = "bounded-static-derive" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f10dd247355bf631d98d2753d87ae62c84c8dcb996ad9b24a4168e0aec29bd6b" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.52", -] - -[[package]] -name = "bumpalo" -version = "3.15.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ea184aa71bb362a1157c896979544cc23974e08fd265f29ea96b59f0b4a555b" - -[[package]] -name = "byteorder" -version = "1.5.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" - -[[package]] -name = "bytes" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223" - -[[package]] -name = "bytes-utils" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7dafe3a8757b027e2be6e4e5601ed563c55989fcf1546e933c66c8eb3a058d35" -dependencies = [ - "bytes", - "either", -] - -[[package]] -name = "cc" -version = "1.0.90" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8cd6604a82acf3039f1144f54b8eb34e91ffba622051189e71b781822d5ee1f5" -dependencies = [ - "jobserver", - "libc", -] - -[[package]] -name = "cfg-if" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" - -[[package]] -name = "chrono" -version = "0.4.35" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8eaf5903dcbc0a39312feb77df2ff4c76387d591b9fc7b04a238dcf8bb62639a" -dependencies = [ - "android-tzdata", - "iana-time-zone", - "js-sys", - "num-traits", - "wasm-bindgen", - "windows-targets 0.52.4", -] - -[[package]] -name = "clap" -version = "3.2.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ea181bf566f71cb9a5d17a59e1871af638180a18fb0035c92ae62b705207123" -dependencies = [ - "atty", - "bitflags 1.3.2", - "clap_derive", - "clap_lex", - "indexmap 1.9.3", - "once_cell", - "strsim", - "termcolor", - "textwrap", -] - -[[package]] -name = "clap_derive" -version = "3.2.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae6371b8bdc8b7d3959e9cf7b22d4435ef3e79e138688421ec654acf8c81b008" -dependencies = [ - "heck", - "proc-macro-error", - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "clap_lex" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2850f2f5a82cbf437dd5af4d49848fbdfc27c157c3d010345776f952765261c5" -dependencies = [ - "os_str_bytes", -] - -[[package]] -name = "concurrent-queue" -version = "2.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d16048cd947b08fa32c24458a22f5dc5e835264f689f4f5653210c69fd107363" -dependencies = [ - "crossbeam-utils", -] - -[[package]] -name = "console-api" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd326812b3fd01da5bb1af7d340d0d555fd3d4b641e7f1dfcf5962a902952787" -dependencies = [ - "futures-core", - "prost", - "prost-types", - "tonic", - "tracing-core", -] - -[[package]] -name = "console-subscriber" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7481d4c57092cd1c19dd541b92bdce883de840df30aa5d03fd48a3935c01842e" -dependencies = [ - "console-api", - "crossbeam-channel", - "crossbeam-utils", - "futures-task", - "hdrhistogram", - "humantime", - "prost-types", - "serde", - "serde_json", - "thread_local", - "tokio", - "tokio-stream", - "tonic", - "tracing", - "tracing-core", - "tracing-subscriber", -] - -[[package]] -name = "const-oid" -version = "0.9.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" - -[[package]] -name = "core-foundation" -version = "0.9.4" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" -dependencies = [ - "core-foundation-sys", - "libc", -] - -[[package]] -name = "core-foundation-sys" -version = "0.8.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" - -[[package]] -name = "cpufeatures" -version = "0.2.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504" -dependencies = [ - "libc", -] - -[[package]] -name = "crc32c" -version = "0.6.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89254598aa9b9fa608de44b3ae54c810f0f06d755e24c50177f1f8f31ff50ce2" -dependencies = [ - "rustc_version", -] - -[[package]] -name = "crc32fast" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3855a8a784b474f333699ef2bbca9db2c4a1f6d9088a90a2d25b1eb53111eaa" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "crossbeam-channel" -version = "0.5.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab3db02a9c5b5121e1e42fbdb1aeb65f5e02624cc58c43f2884c6ccac0b82f95" -dependencies = [ - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-utils" -version = "0.8.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345" - -[[package]] -name = "crypto-bigint" -version = "0.4.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef2b4b23cddf68b89b8f8069890e8c270d54e2d5fe1b143820234805e4cb17ef" -dependencies = [ - "generic-array", - "rand_core", - "subtle", - "zeroize", -] - -[[package]] -name = "crypto-bigint" -version = "0.5.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" -dependencies = [ - "rand_core", - "subtle", -] - -[[package]] -name = "crypto-common" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" -dependencies = [ - "generic-array", - "typenum", -] - -[[package]] -name = "data-encoding" -version = "2.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e962a19be5cfc3f3bf6dd8f61eb50107f356ad6270fbb3ed41476571db78be5" - -[[package]] -name = "der" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1a467a65c5e759bce6e65eaf91cc29f466cdc57cb65777bd646872a8a1fd4de" -dependencies = [ - "const-oid", - "zeroize", -] - -[[package]] -name = "der-parser" -version = "7.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe398ac75057914d7d07307bf67dc7f3f574a26783b4fc7805a20ffa9f506e82" -dependencies = [ - "asn1-rs", - "displaydoc", - "nom 7.1.3", - "num-bigint", - "num-traits", - "rusticata-macros", -] - -[[package]] -name = "deranged" -version = "0.3.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" -dependencies = [ - "powerfmt", -] - -[[package]] -name = "derive_utils" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "532b4c15dccee12c7044f1fcad956e98410860b22231e44a3b827464797ca7bf" 
-dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "digest" -version = "0.10.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" -dependencies = [ - "block-buffer", - "crypto-common", - "subtle", -] - -[[package]] -name = "displaydoc" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "487585f4d0c6655fe74905e2504d8ad6908e4db67f744eb140876906c2f3175d" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.52", -] - -[[package]] -name = "duplexify" -version = "1.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1cc346cd6db38ceab2d33f59b26024c3ddb8e75f047c6cafbcbc016ea8065d5" -dependencies = [ - "async-std", - "pin-project-lite 0.1.12", -] - -[[package]] -name = "ecdsa" -version = "0.14.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "413301934810f597c1d19ca71c8710e99a3f1ba28a0d2ebc01551a2daeea3c5c" -dependencies = [ - "der", - "elliptic-curve", - "rfc6979", - "signature", -] - -[[package]] -name = "ed25519" -version = "1.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91cff35c70bba8a626e3185d8cd48cc11b5437e1a5bcd15b9b5fa3c64b6dfee7" -dependencies = [ - "signature", -] - -[[package]] -name = "either" -version = "1.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11157ac094ffbdde99aa67b23417ebdd801842852b500e395a45a9c0aac03e4a" - -[[package]] -name = "elliptic-curve" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7bb888ab5300a19b8e5bceef25ac745ad065f3c9f7efc6de1b91958110891d3" -dependencies = [ - "base16ct", - "crypto-bigint 0.4.9", - "der", - "digest", - "ff", - "generic-array", - "group", - "pkcs8", - "rand_core", - "sec1", - "subtle", - "zeroize", -] - -[[package]] -name = "eml-codec" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4499124d87abce26a57ef96ece800fa8babc38fbedd81c607c340ae83d46d2e" -dependencies = [ - "base64 0.21.7", - "chrono", - "encoding_rs", - "nom 7.1.3", -] - -[[package]] -name = "encoding_rs" -version = "0.8.33" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7268b386296a025e474d5140678f75d6de9493ae55a5d709eeb9dd08149945e1" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "equivalent" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" - -[[package]] -name = "errno" -version = "0.3.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" -dependencies = [ - "libc", - "windows-sys 0.52.0", -] - -[[package]] -name = "event-listener" -version = "2.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" - -[[package]] -name = "event-listener" -version = "3.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d93877bcde0eb80ca09131a08d23f0a5c18a620b01db137dba666d18cd9b30c2" -dependencies = [ - "concurrent-queue", - "parking", - "pin-project-lite 0.2.13", -] - -[[package]] -name = "event-listener" -version = "4.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"67b215c49b2b248c855fb73579eb1f4f26c38ffdc12973e20e07b91d78d5646e" -dependencies = [ - "concurrent-queue", - "parking", - "pin-project-lite 0.2.13", -] - -[[package]] -name = "event-listener" -version = "5.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b5fb89194fa3cad959b833185b3063ba881dbfc7030680b314250779fb4cc91" -dependencies = [ - "concurrent-queue", - "parking", - "pin-project-lite 0.2.13", -] - -[[package]] -name = "event-listener-strategy" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "958e4d70b6d5e81971bebec42271ec641e7ff4e170a6fa605f2b8a8b65cb97d3" -dependencies = [ - "event-listener 4.0.3", - "pin-project-lite 0.2.13", -] - -[[package]] -name = "event-listener-strategy" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "feedafcaa9b749175d5ac357452a9d41ea2911da598fde46ce1fe02c37751291" -dependencies = [ - "event-listener 5.2.0", - "pin-project-lite 0.2.13", -] - -[[package]] -name = "fastrand" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be" -dependencies = [ - "instant", -] - -[[package]] -name = "fastrand" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" - -[[package]] -name = "ff" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d013fc25338cc558c5c2cfbad646908fb23591e2404481826742b651c9af7160" -dependencies = [ - "rand_core", - "subtle", -] - -[[package]] -name = "flate2" -version = "1.0.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46303f565772937ffe1d394a4fac6f411c6013172fadde9dcdb1e147a086940e" -dependencies = [ - "crc32fast", - "miniz_oxide", -] - -[[package]] -name = "fnv" -version = "1.0.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" - -[[package]] -name = "form_urlencoded" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" -dependencies = [ - "percent-encoding", -] - -[[package]] -name = "funty" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fed34cd105917e91daa4da6b3728c47b068749d6a62c59811f06ed2ac71d9da7" - -[[package]] -name = "futures" -version = "0.3.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0" -dependencies = [ - "futures-channel", - "futures-core", - "futures-executor", - "futures-io", - "futures-sink", - "futures-task", - "futures-util", -] - -[[package]] -name = "futures-channel" -version = "0.3.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" -dependencies = [ - "futures-core", - "futures-sink", -] - -[[package]] -name = "futures-core" -version = "0.3.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" - -[[package]] -name = "futures-executor" -version = "0.3.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" -dependencies = [ - "futures-core", - "futures-task", - "futures-util", -] - -[[package]] -name = "futures-io" -version = "0.3.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" - -[[package]] -name = "futures-lite" -version = "1.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49a9d51ce47660b1e808d3c990b4709f2f415d928835a17dfd16991515c46bce" -dependencies = [ - "fastrand 1.9.0", - "futures-core", - "futures-io", - "memchr", - "parking", - "pin-project-lite 0.2.13", - "waker-fn", -] - -[[package]] -name = "futures-lite" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "445ba825b27408685aaecefd65178908c36c6e96aaf6d8599419d46e624192ba" -dependencies = [ - "fastrand 2.0.1", - "futures-core", - "futures-io", - "parking", - "pin-project-lite 0.2.13", -] - -[[package]] -name = "futures-macro" -version = "0.3.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.52", -] - -[[package]] -name = "futures-sink" -version = "0.3.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" - -[[package]] -name = "futures-task" -version = "0.3.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" - -[[package]] -name = "futures-util" -version = "0.3.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" -dependencies = [ - "futures-channel", - "futures-core", - "futures-io", - "futures-macro", - "futures-sink", - "futures-task", - "memchr", - "pin-project-lite 0.2.13", - "pin-utils", - "slab", -] - -[[package]] -name = "generic-array" -version = "0.14.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" -dependencies = [ - "typenum", - "version_check", -] - -[[package]] -name = "getrandom" -version = "0.2.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "190092ea657667030ac6a35e305e62fc4dd69fd98ac98631e5d3a2b1575a12b5" -dependencies = [ - "cfg-if", - "libc", - "wasi", -] - -[[package]] -name = "gimli" -version = "0.28.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" - -[[package]] -name = "gloo-timers" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b995a66bb87bebce9a0f4a95aed01daca4872c050bfcb21653361c03bc35e5c" -dependencies = [ - "futures-channel", - "futures-core", - "js-sys", - "wasm-bindgen", -] - -[[package]] -name = "group" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dfbfb3a6cfbd390d5c9564ab283a0349b9b9fcd46a706c1eb10e0db70bfbac7" -dependencies = [ - "ff", - "rand_core", - "subtle", -] - -[[package]] -name = "h2" -version = "0.3.24" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb2c4422095b67ee78da96fbb51a4cc413b3b25883c7717ff7ca1ab31022c9c9" -dependencies = [ - "bytes", 
- "fnv", - "futures-core", - "futures-sink", - "futures-util", - "http 0.2.12", - "indexmap 2.2.5", - "slab", - "tokio", - "tokio-util", - "tracing", -] - -[[package]] -name = "h2" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31d030e59af851932b72ceebadf4a2b5986dba4c3b99dd2493f8273a0f151943" -dependencies = [ - "bytes", - "fnv", - "futures-core", - "futures-sink", - "futures-util", - "http 1.1.0", - "indexmap 2.2.5", - "slab", - "tokio", - "tokio-util", - "tracing", -] - -[[package]] -name = "hashbrown" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" - -[[package]] -name = "hashbrown" -version = "0.14.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" - -[[package]] -name = "hdrhistogram" -version = "7.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "765c9198f173dd59ce26ff9f95ef0aafd0a0fe01fb9d72841bc5066a4c06511d" -dependencies = [ - "base64 0.21.7", - "byteorder", - "flate2", - "nom 7.1.3", - "num-traits", -] - -[[package]] -name = "heck" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" - -[[package]] -name = "hermit-abi" -version = "0.1.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" -dependencies = [ - "libc", -] - -[[package]] -name = "hermit-abi" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" - -[[package]] -name = "hex" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" - -[[package]] -name = "hmac" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" -dependencies = [ - "digest", -] - -[[package]] -name = "http" -version = "0.2.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" -dependencies = [ - "bytes", - "fnv", - "itoa", -] - -[[package]] -name = "http" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" -dependencies = [ - "bytes", - "fnv", - "itoa", -] - -[[package]] -name = "http-body" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" -dependencies = [ - "bytes", - "http 0.2.12", - "pin-project-lite 0.2.13", -] - -[[package]] -name = "http-body" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643" -dependencies = [ - "bytes", - "http 1.1.0", -] - -[[package]] -name = "http-body-util" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41cb79eb393015dadd30fc252023adb0b2400a0caee0fa2a077e6e21a551e840" -dependencies = [ - "bytes", - "futures-util", - "http 1.1.0", 
- "http-body 1.0.0", - "pin-project-lite 0.2.13", -] - -[[package]] -name = "httparse" -version = "1.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" - -[[package]] -name = "httpdate" -version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" - -[[package]] -name = "humantime" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" - -[[package]] -name = "hyper" -version = "0.14.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf96e135eb83a2a8ddf766e426a841d8ddd7449d5f00d34ea02b41d2f19eef80" -dependencies = [ - "bytes", - "futures-channel", - "futures-core", - "futures-util", - "h2 0.3.24", - "http 0.2.12", - "http-body 0.4.6", - "httparse", - "httpdate", - "itoa", - "pin-project-lite 0.2.13", - "socket2 0.5.6", - "tokio", - "tower-service", - "tracing", - "want", -] - -[[package]] -name = "hyper" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "186548d73ac615b32a73aafe38fb4f56c0d340e110e5a200bcadbaf2e199263a" -dependencies = [ - "bytes", - "futures-channel", - "futures-util", - "h2 0.4.2", - "http 1.1.0", - "http-body 1.0.0", - "httparse", - "httpdate", - "itoa", - "pin-project-lite 0.2.13", - "smallvec", - "tokio", - "want", -] - -[[package]] -name = "hyper-rustls" -version = "0.24.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" -dependencies = [ - "futures-util", - "http 0.2.12", - "hyper 0.14.28", - "log", - "rustls 0.21.10", - "rustls-native-certs 0.6.3", - "tokio", - "tokio-rustls 0.24.1", -] - -[[package]] -name = "hyper-rustls" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0bea761b46ae2b24eb4aef630d8d1c398157b6fc29e6350ecf090a0b70c952c" -dependencies = [ - "futures-util", - "http 1.1.0", - "hyper 1.2.0", - "hyper-util", - "log", - "rustls 0.22.2", - "rustls-native-certs 0.7.0", - "rustls-pki-types", - "tokio", - "tokio-rustls 0.25.0", - "tower-service", -] - -[[package]] -name = "hyper-timeout" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" -dependencies = [ - "hyper 0.14.28", - "pin-project-lite 0.2.13", - "tokio", - "tokio-io-timeout", -] - -[[package]] -name = "hyper-util" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca38ef113da30126bbff9cd1705f9273e15d45498615d138b0c20279ac7a76aa" -dependencies = [ - "bytes", - "futures-channel", - "futures-util", - "http 1.1.0", - "http-body 1.0.0", - "hyper 1.2.0", - "pin-project-lite 0.2.13", - "socket2 0.5.6", - "tokio", - "tower", - "tower-service", - "tracing", -] - -[[package]] -name = "iana-time-zone" -version = "0.1.60" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7ffbb5a1b541ea2561f8c41c087286cc091e21e556a4f09a8f6cbf17b69b141" -dependencies = [ - "android_system_properties", - "core-foundation-sys", - "iana-time-zone-haiku", - "js-sys", - "wasm-bindgen", - "windows-core", -] - -[[package]] -name = "iana-time-zone-haiku" -version = "0.1.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" -dependencies = [ - "cc", -] - -[[package]] -name = "idna" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "418a0a6fab821475f634efe3ccc45c013f742efe03d853e8d3355d5cb850ecf8" -dependencies = [ - "matches", - "unicode-bidi", - "unicode-normalization", -] - -[[package]] -name = "idna" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" -dependencies = [ - "unicode-bidi", - "unicode-normalization", -] - -[[package]] -name = "im" -version = "15.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0acd33ff0285af998aaf9b57342af478078f53492322fafc47450e09397e0e9" -dependencies = [ - "bitmaps", - "rand_core", - "rand_xoshiro", - "sized-chunks", - "typenum", - "version_check", -] - -[[package]] -name = "imap-codec" -version = "2.0.0" -source = "git+https://github.com/superboum/imap-codec?branch=custom/aerogramme#d8a5afc03fb771232e94c73af6a05e79dc80bbed" -dependencies = [ - "abnf-core", - "base64 0.21.7", - "bounded-static", - "chrono", - "imap-types", - "log", - "nom 7.1.3", - "thiserror", -] - -[[package]] -name = "imap-flow" -version = "0.1.0" -source = "git+https://github.com/duesee/imap-flow.git?branch=main#dce759a8531f317e8d7311fb032b366db6698e38" -dependencies = [ - "bounded-static", - "bytes", - "imap-codec", - "imap-types", - "thiserror", - "tokio", - "tracing", -] - -[[package]] -name = "imap-types" -version = "2.0.0" -source = "git+https://github.com/superboum/imap-codec?branch=custom/aerogramme#d8a5afc03fb771232e94c73af6a05e79dc80bbed" -dependencies = [ - "base64 0.21.7", - "bounded-static", - "chrono", - "thiserror", -] - -[[package]] -name = "indexmap" -version = "1.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" -dependencies = [ - "autocfg", - "hashbrown 0.12.3", -] - -[[package]] -name = "indexmap" -version = "2.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b0b929d511467233429c45a44ac1dcaa21ba0f5ba11e4879e6ed28ddb4f9df4" -dependencies = [ - "equivalent", - "hashbrown 0.14.3", -] - -[[package]] -name = "instant" -version = "0.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "io-lifetimes" -version = "1.0.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" -dependencies = [ - "hermit-abi 0.3.9", - "libc", - "windows-sys 0.48.0", -] - -[[package]] -name = "itertools" -version = "0.10.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" -dependencies = [ - "either", -] - -[[package]] -name = "itertools" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" -dependencies = [ - "either", -] - -[[package]] -name = "itoa" -version = "1.0.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c" - -[[package]] -name = "jobserver" -version = "0.1.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab46a6e9526ddef3ae7f787c06f0f2600639ba80ea3eade3d8e670a2230f51d6" -dependencies = [ - "libc", -] - -[[package]] -name = "js-sys" -version = "0.3.69" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d" -dependencies = [ - "wasm-bindgen", -] - -[[package]] -name = "k2v-client" -version = "0.0.4" -source = "git+https://git.deuxfleurs.fr/Deuxfleurs/garage.git?branch=k2v/shared_http_client#8b35a946d9f6b31b26b9783acbfab984316051f4" -dependencies = [ - "aws-sdk-config", - "aws-sigv4", - "base64 0.21.7", - "hex", - "http 1.1.0", - "http-body-util", - "hyper 1.2.0", - "hyper-rustls 0.26.0", - "hyper-util", - "log", - "percent-encoding", - "serde", - "serde_json", - "sha2", - "thiserror", - "tokio", -] - -[[package]] -name = "kv-log-macro" -version = "1.0.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f" -dependencies = [ - "log", -] - -[[package]] -name = "lazy_static" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" - -[[package]] -name = "lber" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a99b520993b21a6faab32643cf4726573dc18ca4cf2d48cbeb24d248c86c930" -dependencies = [ - "byteorder", - "bytes", - "nom 2.2.1", -] - -[[package]] -name = "ldap3" -version = "0.10.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce38dafca0608c64cc0146fb782b06abb8d946dae7a3af23c89a95da24f6b84d" -dependencies = [ - "async-trait", - "bytes", - "futures", - "futures-util", - "lazy_static", - "lber", - "log", - "nom 2.2.1", - "percent-encoding", - "ring 0.16.20", - "rustls 0.20.9", - "rustls-native-certs 0.6.3", - "thiserror", - "tokio", - "tokio-rustls 0.23.4", - "tokio-stream", - "tokio-util", - "url", - "x509-parser", -] - -[[package]] -name = "lexical-core" -version = "0.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6607c62aa161d23d17a9072cc5da0be67cdfc89d3afb1e8d9c842bebc2525ffe" -dependencies = [ - "arrayvec", - "bitflags 1.3.2", - "cfg-if", - "ryu", - "static_assertions", -] - -[[package]] -name = "libc" -version = "0.2.153" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd" - -[[package]] -name = "libfuzzer-sys" -version = "0.4.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a96cfd5557eb82f2b83fed4955246c988d331975a002961b07c81584d107e7f7" -dependencies = [ - "arbitrary", - "cc", - "once_cell", -] - -[[package]] -name = "libsodium-sys" -version = "0.2.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b779387cd56adfbc02ea4a668e704f729be8d6a6abd2c27ca5ee537849a92fd" -dependencies = [ - "cc", - "libc", - "pkg-config", - "walkdir", -] - -[[package]] -name = "linux-raw-sys" -version = "0.3.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" - -[[package]] -name = "linux-raw-sys" -version = "0.4.13" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c" - -[[package]] -name = "log" -version = "0.4.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c" -dependencies = [ - "value-bag", -] - -[[package]] -name = "matchers" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" -dependencies = [ - "regex-automata 0.1.10", -] - -[[package]] -name = "matches" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" - -[[package]] -name = "matchit" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" - -[[package]] -name = "md-5" -version = "0.10.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" -dependencies = [ - "cfg-if", - "digest", -] - -[[package]] -name = "memchr" -version = "2.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149" - -[[package]] -name = "mime" -version = "0.3.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" - -[[package]] -name = "minimal-lexical" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" - -[[package]] -name = "miniz_oxide" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d811f3e15f28568be3407c8e7fdb6514c1cda3cb30683f15b6a1a1dc4ea14a7" -dependencies = [ - "adler", -] - -[[package]] -name = "mio" -version = "0.8.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" -dependencies = [ - "libc", - "wasi", - "windows-sys 0.48.0", -] - -[[package]] -name = "nix" -version = "0.27.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2eb04e9c688eff1c89d72b407f168cf79bb9e867a9d3323ed6c01519eb9cc053" -dependencies = [ - "bitflags 2.4.2", - "cfg-if", - "libc", -] - -[[package]] -name = "nom" -version = "2.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf51a729ecf40266a2368ad335a5fdde43471f545a967109cd62146ecf8b66ff" - -[[package]] -name = "nom" -version = "6.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7413f999671bd4745a7b624bd370a569fb6bc574b23c83a3c5ed2e453f3d5e2" -dependencies = [ - "bitvec", - "funty", - "lexical-core", - "memchr", - "version_check", -] - -[[package]] -name = "nom" -version = "7.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" -dependencies = [ - "memchr", - "minimal-lexical", -] - -[[package]] -name = "nu-ansi-term" -version = "0.46.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" -dependencies = [ - "overload", - 
"winapi", -] - -[[package]] -name = "num-bigint" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "608e7659b5c3d7cba262d894801b9ec9d00de989e8a82bd4bef91d08da45cdc0" -dependencies = [ - "autocfg", - "num-integer", - "num-traits", -] - -[[package]] -name = "num-conv" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" - -[[package]] -name = "num-integer" -version = "0.1.46" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" -dependencies = [ - "num-traits", -] - -[[package]] -name = "num-traits" -version = "0.2.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da0df0e5185db44f69b44f26786fe401b6c293d1907744beaa7fa62b2e5a517a" -dependencies = [ - "autocfg", -] - -[[package]] -name = "num_cpus" -version = "1.16.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" -dependencies = [ - "hermit-abi 0.3.9", - "libc", -] - -[[package]] -name = "object" -version = "0.32.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441" -dependencies = [ - "memchr", -] - -[[package]] -name = "oid-registry" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38e20717fa0541f39bd146692035c37bedfa532b3e5071b35761082407546b2a" -dependencies = [ - "asn1-rs", -] - -[[package]] -name = "once_cell" -version = "1.19.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" - -[[package]] -name = "openssl-probe" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" - -[[package]] -name = "os_str_bytes" -version = "6.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2355d85b9a3786f481747ced0e0ff2ba35213a1f9bd406ed906554d7af805a1" - -[[package]] -name = "outref" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4030760ffd992bef45b0ae3f10ce1aba99e33464c90d14dd7c039884963ddc7a" - -[[package]] -name = "overload" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" - -[[package]] -name = "p256" -version = "0.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51f44edd08f51e2ade572f141051021c5af22677e42b7dd28a88155151c33594" -dependencies = [ - "ecdsa", - "elliptic-curve", - "sha2", -] - -[[package]] -name = "parking" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb813b8af86854136c6922af0598d719255ecb2179515e6e7730d468f05c9cae" - -[[package]] -name = "password-hash" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "346f04948ba92c43e8469c1ee6736c7563d71012b17d40745260fe106aac2166" -dependencies = [ - "base64ct", - "rand_core", - "subtle", -] - -[[package]] -name = "paste" -version = "1.0.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" - -[[package]] -name = "percent-encoding" -version = "2.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" - -[[package]] -name = "pin-project" -version = "1.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3" -dependencies = [ - "pin-project-internal", -] - -[[package]] -name = "pin-project-internal" -version = "1.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.52", -] - -[[package]] -name = "pin-project-lite" -version = "0.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "257b64915a082f7811703966789728173279bdebb956b143dbcd23f6f970a777" - -[[package]] -name = "pin-project-lite" -version = "0.2.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58" - -[[package]] -name = "pin-utils" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" - -[[package]] -name = "piper" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "668d31b1c4eba19242f2088b2bf3316b82ca31082a8335764db4e083db7485d4" -dependencies = [ - "atomic-waker", - "fastrand 2.0.1", - "futures-io", -] - -[[package]] -name = "pkcs8" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9eca2c590a5f85da82668fa685c09ce2888b9430e83299debf1f34b65fd4a4ba" -dependencies = [ - "der", - "spki", -] - -[[package]] -name = "pkg-config" -version = "0.3.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" - -[[package]] -name = "polling" -version = "2.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b2d323e8ca7996b3e23126511a523f7e62924d93ecd5ae73b333815b0eb3dce" -dependencies = [ - "autocfg", - "bitflags 1.3.2", - "cfg-if", - "concurrent-queue", - "libc", - "log", - "pin-project-lite 0.2.13", - "windows-sys 0.48.0", -] - -[[package]] -name = "polling" -version = "3.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24f040dee2588b4963afb4e420540439d126f73fdacf4a9c486a96d840bac3c9" -dependencies = [ - "cfg-if", - "concurrent-queue", - "pin-project-lite 0.2.13", - "rustix 0.38.31", - "tracing", - "windows-sys 0.52.0", -] - -[[package]] -name = "powerfmt" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" - -[[package]] -name = "ppv-lite86" -version = "0.2.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" - -[[package]] -name = "proc-macro-error" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" -dependencies = [ - "proc-macro-error-attr", - "proc-macro2", - "quote", - "syn 1.0.109", - "version_check", -] - -[[package]] -name = 
"proc-macro-error-attr" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" -dependencies = [ - "proc-macro2", - "quote", - "version_check", -] - -[[package]] -name = "proc-macro2" -version = "1.0.78" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2422ad645d89c99f8f3e6b88a9fdeca7fabeac836b1002371c4367c8f984aae" -dependencies = [ - "unicode-ident", -] - -[[package]] -name = "prost" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "146c289cda302b98a28d40c8b3b90498d6e526dd24ac2ecea73e4e491685b94a" -dependencies = [ - "bytes", - "prost-derive", -] - -[[package]] -name = "prost-derive" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "efb6c9a1dd1def8e2124d17e83a20af56f1570d6c2d2bd9e266ccb768df3840e" -dependencies = [ - "anyhow", - "itertools 0.11.0", - "proc-macro2", - "quote", - "syn 2.0.52", -] - -[[package]] -name = "prost-types" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "193898f59edcf43c26227dcd4c8427f00d99d61e95dcde58dabd49fa291d470e" -dependencies = [ - "prost", -] - -[[package]] -name = "quick-xml" -version = "0.31.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1004a344b30a54e2ee58d66a71b32d2db2feb0a31f9a2d302bf0536f15de2a33" -dependencies = [ - "memchr", - "tokio", -] - -[[package]] -name = "quote" -version = "1.0.35" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" -dependencies = [ - "proc-macro2", -] - -[[package]] -name = "radium" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "941ba9d78d8e2f7ce474c015eea4d9c6d25b6a3327f9832ee29a4de27f91bbb8" - -[[package]] -name = "rand" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" -dependencies = [ - "libc", - "rand_chacha", - "rand_core", -] - -[[package]] -name = "rand_chacha" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" -dependencies = [ - "ppv-lite86", - "rand_core", -] - -[[package]] -name = "rand_core" -version = "0.6.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" -dependencies = [ - "getrandom", -] - -[[package]] -name = "rand_xoshiro" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f97cdb2a36ed4183de61b2f824cc45c9f1037f28afe0a322e9fff4c108b5aaa" -dependencies = [ - "rand_core", -] - -[[package]] -name = "regex" -version = "1.10.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b62dbe01f0b06f9d8dc7d49e05a0785f153b00b2c227856282f671e0318c9b15" -dependencies = [ - "aho-corasick", - "memchr", - "regex-automata 0.4.6", - "regex-syntax 0.8.2", -] - -[[package]] -name = "regex-automata" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" -dependencies = [ - "regex-syntax 0.6.29", -] - -[[package]] -name = "regex-automata" -version = "0.4.6" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "86b83b8b9847f9bf95ef68afb0b8e6cdb80f498442f5179a29fad448fcc1eaea" -dependencies = [ - "aho-corasick", - "memchr", - "regex-syntax 0.8.2", -] - -[[package]] -name = "regex-lite" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30b661b2f27137bdbc16f00eda72866a92bb28af1753ffbd56744fb6e2e9cd8e" - -[[package]] -name = "regex-syntax" -version = "0.6.29" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" - -[[package]] -name = "regex-syntax" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" - -[[package]] -name = "rfc6979" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7743f17af12fa0b03b803ba12cd6a8d9483a587e89c69445e3909655c0b9fabb" -dependencies = [ - "crypto-bigint 0.4.9", - "hmac", - "zeroize", -] - -[[package]] -name = "ring" -version = "0.16.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" -dependencies = [ - "cc", - "libc", - "once_cell", - "spin 0.5.2", - "untrusted 0.7.1", - "web-sys", - "winapi", -] - -[[package]] -name = "ring" -version = "0.17.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" -dependencies = [ - "cc", - "cfg-if", - "getrandom", - "libc", - "spin 0.9.8", - "untrusted 0.9.0", - "windows-sys 0.52.0", -] - -[[package]] -name = "rmp" -version = "0.8.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f9860a6cc38ed1da53456442089b4dfa35e7cedaa326df63017af88385e6b20" -dependencies = [ - "byteorder", - "num-traits", - "paste", -] - -[[package]] -name = "rmp-serde" -version = "0.15.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "723ecff9ad04f4ad92fe1c8ca6c20d2196d9286e9c60727c4cb5511629260e9d" -dependencies = [ - "byteorder", - "rmp", - "serde", -] - -[[package]] -name = "rpassword" -version = "7.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80472be3c897911d0137b2d2b9055faf6eeac5b14e324073d83bc17b191d7e3f" -dependencies = [ - "libc", - "rtoolbox", - "windows-sys 0.48.0", -] - -[[package]] -name = "rtoolbox" -version = "0.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c247d24e63230cdb56463ae328478bd5eac8b8faa8c69461a77e8e323afac90e" -dependencies = [ - "libc", - "windows-sys 0.48.0", -] - -[[package]] -name = "rustc-demangle" -version = "0.1.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" - -[[package]] -name = "rustc_version" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" -dependencies = [ - "semver", -] - -[[package]] -name = "rusticata-macros" -version = "4.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "faf0c4a6ece9950b9abdb62b1cfcf2a68b3b67a10ba445b3bb85be2a293d0632" -dependencies = [ - "nom 7.1.3", -] - -[[package]] -name = "rustix" -version = "0.37.27" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "fea8ca367a3a01fe35e6943c400addf443c0f57670e6ec51196f71a4b8762dd2" -dependencies = [ - "bitflags 1.3.2", - "errno", - "io-lifetimes", - "libc", - "linux-raw-sys 0.3.8", - "windows-sys 0.48.0", -] - -[[package]] -name = "rustix" -version = "0.38.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ea3e1a662af26cd7a3ba09c0297a31af215563ecf42817c98df621387f4e949" -dependencies = [ - "bitflags 2.4.2", - "errno", - "libc", - "linux-raw-sys 0.4.13", - "windows-sys 0.52.0", -] - -[[package]] -name = "rustls" -version = "0.20.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b80e3dec595989ea8510028f30c408a4630db12c9cbb8de34203b89d6577e99" -dependencies = [ - "log", - "ring 0.16.20", - "sct", - "webpki", -] - -[[package]] -name = "rustls" -version = "0.21.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9d5a6813c0759e4609cd494e8e725babae6a2ca7b62a5536a13daaec6fcb7ba" -dependencies = [ - "log", - "ring 0.17.8", - "rustls-webpki 0.101.7", - "sct", -] - -[[package]] -name = "rustls" -version = "0.22.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e87c9956bd9807afa1f77e0f7594af32566e830e088a5576d27c5b6f30f49d41" -dependencies = [ - "log", - "ring 0.17.8", - "rustls-pki-types", - "rustls-webpki 0.102.2", - "subtle", - "zeroize", -] - -[[package]] -name = "rustls-native-certs" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9aace74cb666635c918e9c12bc0d348266037aa8eb599b5cba565709a8dff00" -dependencies = [ - "openssl-probe", - "rustls-pemfile 1.0.4", - "schannel", - "security-framework", -] - -[[package]] -name = "rustls-native-certs" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f1fb85efa936c42c6d5fc28d2629bb51e4b2f4b8a5211e297d599cc5a093792" -dependencies = [ - "openssl-probe", - "rustls-pemfile 2.1.1", - "rustls-pki-types", - "schannel", - "security-framework", -] - -[[package]] -name = "rustls-pemfile" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" -dependencies = [ - "base64 0.21.7", -] - -[[package]] -name = "rustls-pemfile" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f48172685e6ff52a556baa527774f61fcaa884f59daf3375c62a3f1cd2549dab" -dependencies = [ - "base64 0.21.7", - "rustls-pki-types", -] - -[[package]] -name = "rustls-pki-types" -version = "1.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ede67b28608b4c60685c7d54122d4400d90f62b40caee7700e700380a390fa8" - -[[package]] -name = "rustls-webpki" -version = "0.101.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" -dependencies = [ - "ring 0.17.8", - "untrusted 0.9.0", -] - -[[package]] -name = "rustls-webpki" -version = "0.102.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "faaa0a62740bedb9b2ef5afa303da42764c012f743917351dc9a237ea1663610" -dependencies = [ - "ring 0.17.8", - "rustls-pki-types", - "untrusted 0.9.0", -] - -[[package]] -name = "rustversion" -version = "1.0.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4" - -[[package]] -name = "ryu" -version = "1.0.17" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1" - -[[package]] -name = "same-file" -version = "1.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" -dependencies = [ - "winapi-util", -] - -[[package]] -name = "schannel" -version = "0.1.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbc91545643bcf3a0bbb6569265615222618bdf33ce4ffbbd13c4bbd4c093534" -dependencies = [ - "windows-sys 0.52.0", -] - -[[package]] -name = "sct" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" -dependencies = [ - "ring 0.17.8", - "untrusted 0.9.0", -] - -[[package]] -name = "sec1" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3be24c1842290c45df0a7bf069e0c268a747ad05a192f2fd7dcfdbc1cba40928" -dependencies = [ - "base16ct", - "der", - "generic-array", - "pkcs8", - "subtle", - "zeroize", -] - -[[package]] -name = "security-framework" -version = "2.9.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05b64fb303737d99b81884b2c63433e9ae28abebe5eb5045dcdd175dc2ecf4de" -dependencies = [ - "bitflags 1.3.2", - "core-foundation", - "core-foundation-sys", - "libc", - "security-framework-sys", -] - -[[package]] -name = "security-framework-sys" -version = "2.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e932934257d3b408ed8f30db49d85ea163bfe74961f017f405b025af298f0c7a" -dependencies = [ - "core-foundation-sys", - "libc", -] - -[[package]] -name = "semver" -version = "1.0.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92d43fe69e652f3df9bdc2b85b2854a0825b86e4fb76bc44d945137d053639ca" - -[[package]] -name = "serde" -version = "1.0.197" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2" -dependencies = [ - "serde_derive", -] - -[[package]] -name = "serde_derive" -version = "1.0.197" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.52", -] - -[[package]] -name = "serde_json" -version = "1.0.114" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5f09b1bd632ef549eaa9f60a1f8de742bdbc698e6cee2095fc84dde5f549ae0" -dependencies = [ - "itoa", - "ryu", - "serde", -] - -[[package]] -name = "sha1" -version = "0.10.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" -dependencies = [ - "cfg-if", - "cpufeatures", - "digest", -] - -[[package]] -name = "sha2" -version = "0.10.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" -dependencies = [ - "cfg-if", - "cpufeatures", - "digest", -] - -[[package]] -name = "sharded-slab" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" -dependencies = [ - "lazy_static", -] - -[[package]] -name = "signal-hook-registry" -version = "1.4.1" -source 
= "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8229b473baa5980ac72ef434c4415e70c4b5e71b423043adb4ba059f89c99a1" -dependencies = [ - "libc", -] - -[[package]] -name = "signature" -version = "1.6.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74233d3b3b2f6d4b006dc19dee745e73e2a6bfb6f93607cd3b02bd5b00797d7c" -dependencies = [ - "digest", - "rand_core", -] - -[[package]] -name = "sized-chunks" -version = "0.6.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16d69225bde7a69b235da73377861095455d298f2b970996eec25ddbb42b3d1e" -dependencies = [ - "bitmaps", - "typenum", -] - -[[package]] -name = "slab" -version = "0.4.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" -dependencies = [ - "autocfg", -] - -[[package]] -name = "smallvec" -version = "1.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6ecd384b10a64542d77071bd64bd7b231f4ed5940fba55e98c3de13824cf3d7" - -[[package]] -name = "smol" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13f2b548cd8447f8de0fdf1c592929f70f4fc7039a05e47404b0d096ec6987a1" -dependencies = [ - "async-channel 1.9.0", - "async-executor", - "async-fs", - "async-io 1.13.0", - "async-lock 2.8.0", - "async-net", - "async-process", - "blocking", - "futures-lite 1.13.0", -] - -[[package]] -name = "smtp-message" -version = "0.1.0" -source = "git+http://github.com/Alexis211/kannader?branch=feature/lmtp#0560e7c46af752344a3095add5f84b02400b1111" -dependencies = [ - "auto_enums", - "futures", - "idna 0.2.3", - "lazy_static", - "nom 6.1.2", - "pin-project", - "regex-automata 0.1.10", - "serde", -] - -[[package]] -name = "smtp-server" -version = "0.1.0" -source = "git+http://github.com/Alexis211/kannader?branch=feature/lmtp#0560e7c46af752344a3095add5f84b02400b1111" -dependencies = [ - "async-trait", - "chrono", - "duplexify", - "futures", - "smol", - "smtp-message", - "smtp-server-types", -] - -[[package]] -name = "smtp-server-types" -version = "0.1.0" -source = "git+http://github.com/Alexis211/kannader?branch=feature/lmtp#0560e7c46af752344a3095add5f84b02400b1111" -dependencies = [ - "serde", - "smtp-message", -] - -[[package]] -name = "socket2" -version = "0.4.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d" -dependencies = [ - "libc", - "winapi", -] - -[[package]] -name = "socket2" -version = "0.5.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05ffd9c0a93b7543e062e759284fcf5f5e3b098501104bfbdde4d404db792871" -dependencies = [ - "libc", - "windows-sys 0.52.0", -] - -[[package]] -name = "sodiumoxide" -version = "0.2.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e26be3acb6c2d9a7aac28482586a7856436af4cfe7100031d219de2d2ecb0028" -dependencies = [ - "ed25519", - "libc", - "libsodium-sys", - "serde", -] - -[[package]] -name = "spin" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" - -[[package]] -name = "spin" -version = "0.9.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" - -[[package]] -name = "spki" -version = "0.6.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "67cf02bbac7a337dc36e4f5a693db6c21e7863f45070f7064577eb4367a3212b" -dependencies = [ - "base64ct", - "der", -] - -[[package]] -name = "static_assertions" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" - -[[package]] -name = "strsim" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" - -[[package]] -name = "subtle" -version = "2.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" - -[[package]] -name = "syn" -version = "1.0.109" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "syn" -version = "2.0.52" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b699d15b36d1f02c3e7c69f8ffef53de37aefae075d8488d4ba1a7788d574a07" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "sync_wrapper" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" - -[[package]] -name = "synstructure" -version = "0.12.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", - "unicode-xid", -] - -[[package]] -name = "tap" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" - -[[package]] -name = "termcolor" -version = "1.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" -dependencies = [ - "winapi-util", -] - -[[package]] -name = "textwrap" -version = "0.16.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23d434d3f8967a09480fb04132ebe0a3e088c173e6d0ee7897abbdf4eab0f8b9" - -[[package]] -name = "thiserror" -version = "1.0.57" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e45bcbe8ed29775f228095caf2cd67af7a4ccf756ebff23a306bf3e8b47b24b" -dependencies = [ - "thiserror-impl", -] - -[[package]] -name = "thiserror-impl" -version = "1.0.57" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a953cb265bef375dae3de6663da4d3804eee9682ea80d8e2542529b73c531c81" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.52", -] - -[[package]] -name = "thread_local" -version = "1.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" -dependencies = [ - "cfg-if", - "once_cell", -] - -[[package]] -name = "time" -version = "0.3.34" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8248b6521bb14bc45b4067159b9b6ad792e2d6d754d6c41fb50e29fefe38749" -dependencies = [ - "deranged", - "itoa", - "num-conv", - "powerfmt", - "serde", - "time-core", - "time-macros", -] - -[[package]] -name = "time-core" -version = "0.1.2" -source 
= "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" - -[[package]] -name = "time-macros" -version = "0.2.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ba3a3ef41e6672a2f0f001392bb5dcd3ff0a9992d618ca761a11c3121547774" -dependencies = [ - "num-conv", - "time-core", -] - -[[package]] -name = "tinyvec" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" -dependencies = [ - "tinyvec_macros", -] - -[[package]] -name = "tinyvec_macros" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" - -[[package]] -name = "tokio" -version = "1.36.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61285f6515fa018fb2d1e46eb21223fff441ee8db5d0f1435e8ab4f5cdb80931" -dependencies = [ - "backtrace", - "bytes", - "libc", - "mio", - "num_cpus", - "pin-project-lite 0.2.13", - "signal-hook-registry", - "socket2 0.5.6", - "tokio-macros", - "tracing", - "windows-sys 0.48.0", -] - -[[package]] -name = "tokio-io-timeout" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30b74022ada614a1b4834de765f9bb43877f910cc8ce4be40e89042c9223a8bf" -dependencies = [ - "pin-project-lite 0.2.13", - "tokio", -] - -[[package]] -name = "tokio-macros" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.52", -] - -[[package]] -name = "tokio-rustls" -version = "0.23.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59" -dependencies = [ - "rustls 0.20.9", - "tokio", - "webpki", -] - -[[package]] -name = "tokio-rustls" -version = "0.24.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" -dependencies = [ - "rustls 0.21.10", - "tokio", -] - -[[package]] -name = "tokio-rustls" -version = "0.25.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "775e0c0f0adb3a2f22a00c4745d728b479985fc15ee7ca6a2608388c5569860f" -dependencies = [ - "rustls 0.22.2", - "rustls-pki-types", - "tokio", -] - -[[package]] -name = "tokio-stream" -version = "0.1.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "397c988d37662c7dda6d2208364a706264bf3d6138b11d436cbac0ad38832842" -dependencies = [ - "futures-core", - "pin-project-lite 0.2.13", - "tokio", -] - -[[package]] -name = "tokio-util" -version = "0.7.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15" -dependencies = [ - "bytes", - "futures-core", - "futures-io", - "futures-sink", - "pin-project-lite 0.2.13", - "tokio", - "tracing", -] - -[[package]] -name = "toml" -version = "0.5.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234" -dependencies = [ - "serde", -] - -[[package]] -name = "tonic" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"d560933a0de61cf715926b9cac824d4c883c2c43142f787595e48280c40a1d0e" -dependencies = [ - "async-stream", - "async-trait", - "axum", - "base64 0.21.7", - "bytes", - "h2 0.3.24", - "http 0.2.12", - "http-body 0.4.6", - "hyper 0.14.28", - "hyper-timeout", - "percent-encoding", - "pin-project", - "prost", - "tokio", - "tokio-stream", - "tower", - "tower-layer", - "tower-service", - "tracing", -] - -[[package]] -name = "tower" -version = "0.4.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" -dependencies = [ - "futures-core", - "futures-util", - "indexmap 1.9.3", - "pin-project", - "pin-project-lite 0.2.13", - "rand", - "slab", - "tokio", - "tokio-util", - "tower-layer", - "tower-service", - "tracing", -] - -[[package]] -name = "tower-layer" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" - -[[package]] -name = "tower-service" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" - -[[package]] -name = "tracing" -version = "0.1.40" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" -dependencies = [ - "log", - "pin-project-lite 0.2.13", - "tracing-attributes", - "tracing-core", -] - -[[package]] -name = "tracing-attributes" -version = "0.1.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.52", -] - -[[package]] -name = "tracing-core" -version = "0.1.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" -dependencies = [ - "once_cell", - "valuable", -] - -[[package]] -name = "tracing-log" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" -dependencies = [ - "log", - "once_cell", - "tracing-core", -] - -[[package]] -name = "tracing-subscriber" -version = "0.3.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" -dependencies = [ - "matchers", - "nu-ansi-term", - "once_cell", - "regex", - "sharded-slab", - "smallvec", - "thread_local", - "tracing", - "tracing-core", - "tracing-log", -] - -[[package]] -name = "try-lock" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" - -[[package]] -name = "typenum" -version = "1.17.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" - -[[package]] -name = "unicode-bidi" -version = "0.3.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" - -[[package]] -name = "unicode-ident" -version = "1.0.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" - -[[package]] -name = "unicode-normalization" -version = 
"0.1.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5" -dependencies = [ - "tinyvec", -] - -[[package]] -name = "unicode-xid" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" - -[[package]] -name = "untrusted" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" - -[[package]] -name = "untrusted" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" - -[[package]] -name = "url" -version = "2.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633" -dependencies = [ - "form_urlencoded", - "idna 0.5.0", - "percent-encoding", -] - -[[package]] -name = "urlencoding" -version = "2.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" - -[[package]] -name = "uuid" -version = "1.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f00cc9702ca12d3c81455259621e676d0f7251cec66a21e98fe2e9a37db93b2a" - -[[package]] -name = "valuable" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" - -[[package]] -name = "value-bag" -version = "1.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "126e423afe2dd9ac52142e7e9d5ce4135d7e13776c529d27fd6bc49f19e3280b" - -[[package]] -name = "version_check" -version = "0.9.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" - -[[package]] -name = "vsimd" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c3082ca00d5a5ef149bb8b555a72ae84c9c59f7250f013ac822ac2e49b19c64" - -[[package]] -name = "waker-fn" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3c4517f54858c779bbcbf228f4fca63d121bf85fbecb2dc578cdf4a39395690" - -[[package]] -name = "walkdir" -version = "2.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" -dependencies = [ - "same-file", - "winapi-util", -] - -[[package]] -name = "want" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" -dependencies = [ - "try-lock", -] - -[[package]] -name = "wasi" -version = "0.11.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" - -[[package]] -name = "wasm-bindgen" -version = "0.2.92" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" -dependencies = [ - "cfg-if", - "wasm-bindgen-macro", -] - -[[package]] -name = "wasm-bindgen-backend" -version = "0.2.92" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da" -dependencies = [ - "bumpalo", - "log", - "once_cell", - "proc-macro2", - "quote", - "syn 2.0.52", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-futures" -version = "0.4.42" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76bc14366121efc8dbb487ab05bcc9d346b3b5ec0eaa76e46594cabbe51762c0" -dependencies = [ - "cfg-if", - "js-sys", - "wasm-bindgen", - "web-sys", -] - -[[package]] -name = "wasm-bindgen-macro" -version = "0.2.92" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726" -dependencies = [ - "quote", - "wasm-bindgen-macro-support", -] - -[[package]] -name = "wasm-bindgen-macro-support" -version = "0.2.92" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.52", - "wasm-bindgen-backend", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-shared" -version = "0.2.92" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96" - -[[package]] -name = "web-sys" -version = "0.3.69" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77afa9a11836342370f4817622a2f0f418b134426d91a82dfb48f532d2ec13ef" -dependencies = [ - "js-sys", - "wasm-bindgen", -] - -[[package]] -name = "webpki" -version = "0.22.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed63aea5ce73d0ff405984102c42de94fc55a6b75765d621c65262469b3c9b53" -dependencies = [ - "ring 0.17.8", - "untrusted 0.9.0", -] - -[[package]] -name = "winapi" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" -dependencies = [ - "winapi-i686-pc-windows-gnu", - "winapi-x86_64-pc-windows-gnu", -] - -[[package]] -name = "winapi-i686-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" - -[[package]] -name = "winapi-util" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596" -dependencies = [ - "winapi", -] - -[[package]] -name = "winapi-x86_64-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" - -[[package]] -name = "windows-core" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" -dependencies = [ - "windows-targets 0.52.4", -] - -[[package]] -name = "windows-sys" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" -dependencies = [ - "windows-targets 0.48.5", -] - -[[package]] -name = "windows-sys" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" -dependencies = [ - "windows-targets 0.52.4", -] - -[[package]] -name = "windows-targets" 
-version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" -dependencies = [ - "windows_aarch64_gnullvm 0.48.5", - "windows_aarch64_msvc 0.48.5", - "windows_i686_gnu 0.48.5", - "windows_i686_msvc 0.48.5", - "windows_x86_64_gnu 0.48.5", - "windows_x86_64_gnullvm 0.48.5", - "windows_x86_64_msvc 0.48.5", -] - -[[package]] -name = "windows-targets" -version = "0.52.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7dd37b7e5ab9018759f893a1952c9420d060016fc19a472b4bb20d1bdd694d1b" -dependencies = [ - "windows_aarch64_gnullvm 0.52.4", - "windows_aarch64_msvc 0.52.4", - "windows_i686_gnu 0.52.4", - "windows_i686_msvc 0.52.4", - "windows_x86_64_gnu 0.52.4", - "windows_x86_64_gnullvm 0.52.4", - "windows_x86_64_msvc 0.52.4", -] - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.52.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcf46cf4c365c6f2d1cc93ce535f2c8b244591df96ceee75d8e83deb70a9cac9" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.52.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da9f259dd3bcf6990b55bffd094c4f7235817ba4ceebde8e6d11cd0c5633b675" - -[[package]] -name = "windows_i686_gnu" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" - -[[package]] -name = "windows_i686_gnu" -version = "0.52.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b474d8268f99e0995f25b9f095bc7434632601028cf86590aea5c8a5cb7801d3" - -[[package]] -name = "windows_i686_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" - -[[package]] -name = "windows_i686_msvc" -version = "0.52.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1515e9a29e5bed743cb4415a9ecf5dfca648ce85ee42e15873c3cd8610ff8e02" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.52.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5eee091590e89cc02ad514ffe3ead9eb6b660aedca2183455434b93546371a03" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.52.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77ca79f2451b49fa9e2af39f0747fe999fcda4f5e241b2898624dca97a1f2177" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.52.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8" - -[[package]] -name = "wyz" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85e60b0d1b5f99db2556934e21937020776a5d31520bf169e851ac44e6420214" - -[[package]] -name = "x509-parser" -version = "0.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fb9bace5b5589ffead1afb76e43e34cff39cd0f3ce7e170ae0c29e53b88eb1c" -dependencies = [ - "asn1-rs", - "base64 0.13.1", - "data-encoding", - "der-parser", - "lazy_static", - "nom 7.1.3", - "oid-registry", - "rusticata-macros", - "thiserror", - "time", -] - -[[package]] -name = "xmlparser" -version = "0.13.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66fee0b777b0f5ac1c69bb06d361268faafa61cd4682ae064a171c16c433e9e4" - -[[package]] -name = "zeroize" -version = "1.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d" - -[[package]] -name = "zstd" -version = "0.9.2+zstd.1.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2390ea1bf6c038c39674f22d95f0564725fc06034a47129179810b2fc58caa54" -dependencies = [ - "zstd-safe", -] - -[[package]] -name = "zstd-safe" -version = "4.1.3+zstd.1.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e99d81b99fb3c2c2c794e3fe56c305c63d5173a16a46b5850b07c935ffc7db79" -dependencies = [ - "libc", - "zstd-sys", -] - -[[package]] -name = "zstd-sys" -version = "1.6.2+zstd.1.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2daf2f248d9ea44454bfcb2516534e8b8ad2fc91bf818a1885495fc42bc8ac9f" -dependencies = [ - "cc", - "libc", -] diff --git a/fuzz/Cargo.toml b/fuzz/Cargo.toml deleted file mode 100644 index 25c1f15..0000000 --- a/fuzz/Cargo.toml +++ /dev/null @@ -1,27 +0,0 @@ -[package] -name = "aerogramme-fuzz" -version = "0.0.0" -publish = false -edition = "2021" - -[package.metadata] -cargo-fuzz = true - -[dependencies] -libfuzzer-sys = "0.4" -tokio = { version = "1.18", default-features = false, features = ["rt", "rt-multi-thread", "io-util", "net", "time", "macros", "sync", "signal", "fs"] } -quick-xml = { version = "0.31", features = ["async-tokio"] } - -[dependencies.aerogramme] -path = ".." 
- -[patch.crates-io] -imap-types = { git = "https://github.com/superboum/imap-codec", branch = "custom/aerogramme" } -imap-codec = { git = "https://github.com/superboum/imap-codec", branch = "custom/aerogramme" } - -[[bin]] -name = "dav" -path = "fuzz_targets/dav.rs" -test = false -doc = false -bench = false diff --git a/fuzz/fuzz_targets/dav.rs b/fuzz/fuzz_targets/dav.rs deleted file mode 100644 index 7549a03..0000000 --- a/fuzz/fuzz_targets/dav.rs +++ /dev/null @@ -1,48 +0,0 @@ -#![no_main] - -use libfuzzer_sys::fuzz_target; -use aerogramme::dav::{types, realization, xml}; -use quick_xml::reader::NsReader; -use tokio::runtime::Runtime; -use tokio::io::AsyncWriteExt; - -async fn serialize(elem: &impl xml::QWrite) -> Vec { - let mut buffer = Vec::new(); - let mut tokio_buffer = tokio::io::BufWriter::new(&mut buffer); - let q = quick_xml::writer::Writer::new_with_indent(&mut tokio_buffer, b' ', 4); - let ns_to_apply = vec![ ("xmlns:D".into(), "DAV:".into()) ]; - let mut writer = xml::Writer { q, ns_to_apply }; - - elem.qwrite(&mut writer).await.expect("xml serialization"); - tokio_buffer.flush().await.expect("tokio buffer flush"); - - return buffer -} - -type Object = types::Multistatus>; - -fuzz_target!(|data: &[u8]| { - let rt = Runtime::new().expect("tokio runtime initialization"); - - rt.block_on(async { - // 1. Setup fuzzing by finding an input that seems correct, do not crash yet then. - let mut rdr = match xml::Reader::new(NsReader::from_reader(data)).await { - Err(_) => return, - Ok(r) => r, - }; - let reference = match rdr.find::().await { - Err(_) => return, - Ok(m) => m, - }; - - // 2. Re-serialize the input - let my_serialization = serialize(&reference).await; - - // 3. De-serialize my serialization - let mut rdr2 = xml::Reader::new(NsReader::from_reader(my_serialization.as_slice())).await.expect("XML Reader init"); - let comparison = rdr2.find::().await.expect("Deserialize again"); - - // 4. 
Both the first decoding and last decoding must be identical - assert_eq!(reference, comparison); - }) -}); diff --git a/src/auth.rs b/src/auth.rs deleted file mode 100644 index 064c90c..0000000 --- a/src/auth.rs +++ /dev/null @@ -1,941 +0,0 @@ -use std::net::SocketAddr; - -use anyhow::{anyhow, bail, Result}; -use futures::stream::{FuturesUnordered, StreamExt}; -use tokio::io::BufStream; -use tokio::io::{AsyncBufReadExt, AsyncWriteExt}; -use tokio::net::{TcpListener, TcpStream}; -use tokio::sync::watch; - -use crate::config::AuthConfig; -use crate::login::ArcLoginProvider; - -/// Seek compatibility with the Dovecot Authentication Protocol -/// -/// ## Trace -/// -/// ```text -/// S: VERSION 1 2 -/// S: MECH PLAIN plaintext -/// S: MECH LOGIN plaintext -/// S: SPID 15 -/// S: CUID 17654 -/// S: COOKIE f56692bee41f471ed01bd83520025305 -/// S: DONE -/// C: VERSION 1 2 -/// C: CPID 1 -/// -/// C: AUTH 2 PLAIN service=smtp -/// S: CONT 2 -/// C: CONT 2 base64stringFollowingRFC4616== -/// S: OK 2 user=alice@example.tld -/// -/// C: AUTH 42 LOGIN service=smtp -/// S: CONT 42 VXNlcm5hbWU6 -/// C: CONT 42 b64User -/// S: CONT 42 UGFzc3dvcmQ6 -/// C: CONT 42 b64Pass -/// S: FAIL 42 user=alice -/// ``` -/// -/// ## RFC References -/// -/// PLAIN SASL - https://datatracker.ietf.org/doc/html/rfc4616 -/// -/// -/// ## Dovecot References -/// -/// https://doc.dovecot.org/developer_manual/design/auth_protocol/ -/// https://doc.dovecot.org/configuration_manual/authentication/authentication_mechanisms/#authentication-authentication-mechanisms -/// https://doc.dovecot.org/configuration_manual/howto/simple_virtual_install/#simple-virtual-install-smtp-auth -/// https://doc.dovecot.org/configuration_manual/howto/postfix_and_dovecot_sasl/#howto-postfix-and-dovecot-sasl -pub struct AuthServer { - login_provider: ArcLoginProvider, - bind_addr: SocketAddr, -} - -impl AuthServer { - pub fn new(config: AuthConfig, login_provider: ArcLoginProvider) -> Self { - Self { - bind_addr: config.bind_addr, - login_provider, - } - } - - pub async fn run(self: Self, mut must_exit: watch::Receiver) -> Result<()> { - let tcp = TcpListener::bind(self.bind_addr).await?; - tracing::info!( - "SASL Authentication Protocol listening on {:#}", - self.bind_addr - ); - - let mut connections = FuturesUnordered::new(); - - while !*must_exit.borrow() { - let wait_conn_finished = async { - if connections.is_empty() { - futures::future::pending().await - } else { - connections.next().await - } - }; - - let (socket, remote_addr) = tokio::select! 
{ - a = tcp.accept() => a?, - _ = wait_conn_finished => continue, - _ = must_exit.changed() => continue, - }; - - tracing::info!("AUTH: accepted connection from {}", remote_addr); - let conn = tokio::spawn( - NetLoop::new(socket, self.login_provider.clone(), must_exit.clone()).run_error(), - ); - - connections.push(conn); - } - drop(tcp); - - tracing::info!("AUTH server shutting down, draining remaining connections..."); - while connections.next().await.is_some() {} - - Ok(()) - } -} - -struct NetLoop { - login: ArcLoginProvider, - stream: BufStream, - stop: watch::Receiver, - state: State, - read_buf: Vec, - write_buf: BytesMut, -} - -impl NetLoop { - fn new(stream: TcpStream, login: ArcLoginProvider, stop: watch::Receiver) -> Self { - Self { - login, - stream: BufStream::new(stream), - state: State::Init, - stop, - read_buf: Vec::new(), - write_buf: BytesMut::new(), - } - } - - async fn run_error(self) { - match self.run().await { - Ok(()) => tracing::info!("Auth session succeeded"), - Err(e) => tracing::error!(err=?e, "Auth session failed"), - } - } - - async fn run(mut self) -> Result<()> { - loop { - tokio::select! { - read_res = self.stream.read_until(b'\n', &mut self.read_buf) => { - // Detect EOF / socket close - let bread = read_res?; - if bread == 0 { - tracing::info!("Reading buffer empty, connection has been closed. Exiting AUTH session."); - return Ok(()) - } - - // Parse command - let (_, cmd) = client_command(&self.read_buf).map_err(|_| anyhow!("Unable to parse command"))?; - tracing::trace!(cmd=?cmd, "Received command"); - - // Make some progress in our local state - self.state.progress(cmd, &self.login).await; - if matches!(self.state, State::Error) { - bail!("Internal state is in error, previous logs explain what went wrong"); - } - - // Build response - let srv_cmds = self.state.response(); - srv_cmds.iter().try_for_each(|r| { - tracing::trace!(cmd=?r, "Sent command"); - r.encode(&mut self.write_buf) - })?; - - // Send responses if at least one command response has been generated - if !srv_cmds.is_empty() { - self.stream.write_all(&self.write_buf).await?; - self.stream.flush().await?; - } - - // Reset buffers - self.read_buf.clear(); - self.write_buf.clear(); - }, - _ = self.stop.changed() => { - tracing::debug!("Server is stopping, quitting this runner"); - return Ok(()) - } - } - } - } -} - -// ----------------------------------------------------------------- -// -// BUSINESS LOGIC -// -// ----------------------------------------------------------------- -use rand::prelude::*; - -#[derive(Debug)] -enum AuthRes { - Success(String), - Failed(Option, Option), -} - -#[derive(Debug)] -enum State { - Error, - Init, - HandshakePart(Version), - HandshakeDone, - AuthPlainProgress { id: u64 }, - AuthDone { id: u64, res: AuthRes }, -} - -const SERVER_MAJOR: u64 = 1; -const SERVER_MINOR: u64 = 2; -const EMPTY_AUTHZ: &[u8] = &[]; -impl State { - async fn try_auth_plain<'a>(&self, data: &'a [u8], login: &ArcLoginProvider) -> AuthRes { - // Check that we can extract user's login+pass - let (ubin, pbin) = match auth_plain(&data) { - Ok(([], (authz, user, pass))) if authz == user || authz == EMPTY_AUTHZ => (user, pass), - Ok(_) => { - tracing::error!("Impersonating user is not supported"); - return AuthRes::Failed(None, None); - } - Err(e) => { - tracing::error!(err=?e, "Could not parse the SASL PLAIN data chunk"); - return AuthRes::Failed(None, None); - } - }; - - // Try to convert it to UTF-8 - let (user, password) = match (std::str::from_utf8(ubin), std::str::from_utf8(pbin)) { - 
(Ok(u), Ok(p)) => (u, p), - _ => { - tracing::error!("Username or password contain invalid UTF-8 characters"); - return AuthRes::Failed(None, None); - } - }; - - // Try to connect user - match login.login(user, password).await { - Ok(_) => AuthRes::Success(user.to_string()), - Err(e) => { - tracing::warn!(err=?e, "login failed"); - AuthRes::Failed(Some(user.to_string()), None) - } - } - } - - async fn progress(&mut self, cmd: ClientCommand, login: &ArcLoginProvider) { - let new_state = 'state: { - match (std::mem::replace(self, State::Error), cmd) { - (Self::Init, ClientCommand::Version(v)) => Self::HandshakePart(v), - (Self::HandshakePart(version), ClientCommand::Cpid(_cpid)) => { - if version.major != SERVER_MAJOR { - tracing::error!( - client_major = version.major, - server_major = SERVER_MAJOR, - "Unsupported client major version" - ); - break 'state Self::Error; - } - - Self::HandshakeDone - } - ( - Self::HandshakeDone { .. }, - ClientCommand::Auth { - id, mech, options, .. - }, - ) - | ( - Self::AuthDone { .. }, - ClientCommand::Auth { - id, mech, options, .. - }, - ) => { - if mech != Mechanism::Plain { - tracing::error!(mechanism=?mech, "Unsupported Authentication Mechanism"); - break 'state Self::AuthDone { - id, - res: AuthRes::Failed(None, None), - }; - } - - match options.last() { - Some(AuthOption::Resp(data)) => Self::AuthDone { - id, - res: self.try_auth_plain(&data, login).await, - }, - _ => Self::AuthPlainProgress { id }, - } - } - (Self::AuthPlainProgress { id }, ClientCommand::Cont { id: cid, data }) => { - // Check that ID matches - if cid != id { - tracing::error!( - auth_id = id, - cont_id = cid, - "CONT id does not match AUTH id" - ); - break 'state Self::AuthDone { - id, - res: AuthRes::Failed(None, None), - }; - } - - Self::AuthDone { - id, - res: self.try_auth_plain(&data, login).await, - } - } - _ => { - tracing::error!("This command is not valid in this context"); - Self::Error - } - } - }; - tracing::debug!(state=?new_state, "Made progress"); - *self = new_state; - } - - fn response(&self) -> Vec { - let mut srv_cmd: Vec = Vec::new(); - - match self { - Self::HandshakeDone { .. } => { - srv_cmd.push(ServerCommand::Version(Version { - major: SERVER_MAJOR, - minor: SERVER_MINOR, - })); - - srv_cmd.push(ServerCommand::Mech { - kind: Mechanism::Plain, - parameters: vec![MechanismParameters::PlainText], - }); - - srv_cmd.push(ServerCommand::Spid(15u64)); - srv_cmd.push(ServerCommand::Cuid(19350u64)); - - let mut cookie = [0u8; 16]; - thread_rng().fill(&mut cookie); - srv_cmd.push(ServerCommand::Cookie(cookie)); - - srv_cmd.push(ServerCommand::Done); - } - Self::AuthPlainProgress { id } => { - srv_cmd.push(ServerCommand::Cont { - id: *id, - data: None, - }); - } - Self::AuthDone { - id, - res: AuthRes::Success(user), - } => { - srv_cmd.push(ServerCommand::Ok { - id: *id, - user_id: Some(user.to_string()), - extra_parameters: vec![], - }); - } - Self::AuthDone { - id, - res: AuthRes::Failed(maybe_user, maybe_failcode), - } => { - srv_cmd.push(ServerCommand::Fail { - id: *id, - user_id: maybe_user.clone(), - code: maybe_failcode.clone(), - extra_parameters: vec![], - }); - } - _ => (), - }; - - srv_cmd - } -} - -// ----------------------------------------------------------------- -// -// DOVECOT AUTH TYPES -// -// ----------------------------------------------------------------- - -#[derive(Debug, Clone, PartialEq)] -enum Mechanism { - Plain, - Login, -} - -#[derive(Clone, Debug)] -enum AuthOption { - /// Unique session ID. Mainly used for logging. 
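As a concrete illustration of the SASL PLAIN payload that try_auth_plain() and the auth_plain() parser further down operate on: per RFC 4616 the base64 data decodes to `authzid NUL authcid NUL password`, and an empty authzid means "act as the authenticated user". A minimal, self-contained sketch (the user name and password are invented for the example) using the same base64 crate as the code above:

    use base64::engine::general_purpose::STANDARD;
    use base64::Engine;

    fn main() {
        // Invented credentials, for illustration only.
        let (user, pass) = ("alice@example.tld", "hunter2");

        // RFC 4616 message: [authzid] NUL authcid NUL passwd.
        // Leaving authzid empty means "authorize as the authenticated user".
        let plain = format!("\0{}\0{}", user, pass);
        let b64 = STANDARD.encode(plain.as_bytes());
        // This is the kind of string a client sends in a CONT line.
        println!("C: CONT 2 {}", b64);

        // Decoding mirrors auth_plain(): split on the two NUL bytes.
        let raw = STANDARD.decode(&b64).unwrap();
        let mut parts = raw.split(|b| *b == 0);
        let (authz, authc, passwd) = (
            parts.next().unwrap(),
            parts.next().unwrap(),
            parts.next().unwrap(),
        );
        assert!(authz.is_empty());
        assert_eq!(std::str::from_utf8(authc).unwrap(), user);
        assert_eq!(std::str::from_utf8(passwd).unwrap(), pass);
    }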
- Session(u64), - /// Local IP connected to by the client. In standard string format, e.g. 127.0.0.1 or ::1. - LocalIp(String), - /// Remote client IP - RemoteIp(String), - /// Local port connected to by the client. - LocalPort(u16), - /// Remote client port - RemotePort(u16), - /// When Dovecot proxy is used, the real_rip/real_port are the proxy’s IP/port and real_lip/real_lport are the backend’s IP/port where the proxy was connected to. - RealRemoteIp(String), - RealLocalIp(String), - RealLocalPort(u16), - RealRemotePort(u16), - /// TLS SNI name - LocalName(String), - /// Enable debugging for this lookup. - Debug, - /// List of fields that will become available via %{forward_*} variables. The list is double-tab-escaped, like: tab_escaped[tab_escaped(key=value)[...] - /// Note: we do not unescape the tabulation, and thus we don't parse the data - ForwardViews(Vec), - /// Remote user has secured transport to auth client (e.g. localhost, SSL, TLS). - Secured(Option), - /// The value can be “insecure”, “trusted” or “TLS”. - Transport(String), - /// TLS cipher being used. - TlsCipher(String), - /// The number of bits in the TLS cipher. - /// @FIXME: I don't know how if it's a string or an integer - TlsCipherBits(String), - /// TLS perfect forward secrecy algorithm (e.g. DH, ECDH) - TlsPfs(String), - /// TLS protocol name (e.g. SSLv3, TLSv1.2) - TlsProtocol(String), - /// Remote user has presented a valid SSL certificate. - ValidClientCert(String), - /// Ignore auth penalty tracking for this request - NoPenalty, - /// Unknown option sent by Postfix - NoLogin, - /// Username taken from client’s SSL certificate. - CertUsername, - /// IMAP ID string - ClientId, - /// An unknown key - UnknownPair(String, Vec), - UnknownBool(Vec), - /// Initial response for authentication mechanism. - /// NOTE: This must be the last parameter. Everything after it is ignored. - /// This is to avoid accidental security holes if user-given data is directly put to base64 string without filtering out tabs. - /// @FIXME: I don't understand this parameter - Resp(Vec), -} - -#[derive(Debug, Clone)] -struct Version { - major: u64, - minor: u64, -} - -#[derive(Debug)] -enum ClientCommand { - /// Both client and server should check that they support the same major version number. If they don’t, the other side isn’t expected to be talking the same protocol and should be disconnected. Minor version can be ignored. This document specifies the version number 1.2. - Version(Version), - /// CPID finishes the handshake from client. - Cpid(u64), - Auth { - /// ID is a connection-specific unique request identifier. It must be a 32bit number, so typically you’d just increment it by one. - id: u64, - /// A SASL mechanism (eg. LOGIN, PLAIN, etc.) - /// See: https://doc.dovecot.org/configuration_manual/authentication/authentication_mechanisms/#authentication-authentication-mechanisms - mech: Mechanism, - /// Service is the service requesting authentication, eg. pop3, imap, smtp. - service: String, - /// All the optional parameters - options: Vec, - }, - Cont { - /// The must match the of the AUTH command. 
- id: u64, - /// Data that will be serialized to / deserialized from base64 - data: Vec, - }, -} - -#[derive(Debug)] -enum MechanismParameters { - /// Anonymous authentication - Anonymous, - /// Transfers plaintext passwords - PlainText, - /// Subject to passive (dictionary) attack - Dictionary, - /// Subject to active (non-dictionary) attack - Active, - /// Provides forward secrecy between sessions - ForwardSecrecy, - /// Provides mutual authentication - MutualAuth, - /// Don’t advertise this as available SASL mechanism (eg. APOP) - Private, -} - -#[derive(Debug, Clone)] -enum FailCode { - /// This is a temporary internal failure, e.g. connection was lost to SQL database. - TempFail, - /// Authentication succeeded, but authorization failed (master user’s password was ok, but destination user was not ok). - AuthzFail, - /// User is disabled (password may or may not have been correct) - UserDisabled, - /// User’s password has expired. - PassExpired, -} - -#[derive(Debug)] -enum ServerCommand { - /// Both client and server should check that they support the same major version number. If they don’t, the other side isn’t expected to be talking the same protocol and should be disconnected. Minor version can be ignored. This document specifies the version number 1.2. - Version(Version), - /// CPID and SPID specify client and server Process Identifiers (PIDs). They should be unique identifiers for the specific process. UNIX process IDs are good choices. - /// SPID can be used by authentication client to tell master which server process handled the authentication. - Spid(u64), - /// CUID is a server process-specific unique connection identifier. It’s different each time a connection is established for the server. - /// CUID is currently useful only for APOP authentication. - Cuid(u64), - Mech { - kind: Mechanism, - parameters: Vec, - }, - /// COOKIE returns connection-specific 128 bit cookie in hex. It must be given to REQUEST command. (Protocol v1.1+ / Dovecot v2.0+) - Cookie([u8; 16]), - /// DONE finishes the handshake from server. - Done, - - Fail { - id: u64, - user_id: Option, - code: Option, - extra_parameters: Vec>, - }, - Cont { - id: u64, - data: Option>, - }, - /// FAIL and OK may contain multiple unspecified parameters which authentication client may handle specially. - /// The only one specified here is user= parameter, which should always be sent if the userid is known. 
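On the wire, the fields of each of these commands are TAB-separated and each command is LF-terminated (that is what the tab_enc()/lf_enc() helpers in the encoder below produce). A small framing sketch for the server-side handshake, reusing the example values from the protocol trace above (a real server generates its own SPID, CUID and COOKIE):

    // Illustrative framing only; the real code path goes through
    // ServerCommand::encode() with a freshly generated 128-bit cookie.
    fn handshake() -> String {
        let lines = [
            vec!["VERSION", "1", "2"],
            vec!["MECH", "PLAIN", "plaintext"],
            vec!["SPID", "15"],
            vec!["CUID", "17654"],
            vec!["COOKIE", "f56692bee41f471ed01bd83520025305"],
            vec!["DONE"],
        ];
        lines
            .iter()
            .map(|fields| fields.join("\t") + "\n") // TAB-separated, LF-terminated
            .collect()
    }

    fn main() {
        // Prints the handshake lines in the order shown in the trace above.
        print!("{}", handshake());
    }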
- Ok { - id: u64, - user_id: Option, - extra_parameters: Vec>, - }, -} - -// ----------------------------------------------------------------- -// -// DOVECOT AUTH DECODING -// -// ------------------------------------------------------------------ - -use base64::Engine; -use nom::{ - branch::alt, - bytes::complete::{is_not, tag, tag_no_case, take, take_while, take_while1}, - character::complete::{tab, u16, u64}, - combinator::{map, opt, recognize, rest, value}, - error::{Error, ErrorKind}, - multi::{many1, separated_list0}, - sequence::{pair, preceded, tuple}, - IResult, -}; - -fn version_command<'a>(input: &'a [u8]) -> IResult<&'a [u8], ClientCommand> { - let mut parser = tuple((tag_no_case(b"VERSION"), tab, u64, tab, u64)); - - let (input, (_, _, major, _, minor)) = parser(input)?; - Ok((input, ClientCommand::Version(Version { major, minor }))) -} - -fn cpid_command<'a>(input: &'a [u8]) -> IResult<&'a [u8], ClientCommand> { - preceded( - pair(tag_no_case(b"CPID"), tab), - map(u64, |v| ClientCommand::Cpid(v)), - )(input) -} - -fn mechanism<'a>(input: &'a [u8]) -> IResult<&'a [u8], Mechanism> { - alt(( - value(Mechanism::Plain, tag_no_case(b"PLAIN")), - value(Mechanism::Login, tag_no_case(b"LOGIN")), - ))(input) -} - -fn is_not_tab_or_esc_or_lf(c: u8) -> bool { - c != 0x09 && c != 0x01 && c != 0x0a // TAB or 0x01 or LF -} - -fn is_esc<'a>(input: &'a [u8]) -> IResult<&'a [u8], &[u8]> { - preceded(tag(&[0x01]), take(1usize))(input) -} - -fn parameter<'a>(input: &'a [u8]) -> IResult<&'a [u8], &[u8]> { - recognize(many1(alt((take_while1(is_not_tab_or_esc_or_lf), is_esc))))(input) -} - -fn parameter_str(input: &[u8]) -> IResult<&[u8], String> { - let (input, buf) = parameter(input)?; - - std::str::from_utf8(buf) - .map(|v| (input, v.to_string())) - .map_err(|_| nom::Err::Failure(Error::new(input, ErrorKind::TakeWhile1))) -} - -fn is_param_name_char(c: u8) -> bool { - is_not_tab_or_esc_or_lf(c) && c != 0x3d // = -} - -fn parameter_name(input: &[u8]) -> IResult<&[u8], String> { - let (input, buf) = take_while1(is_param_name_char)(input)?; - - std::str::from_utf8(buf) - .map(|v| (input, v.to_string())) - .map_err(|_| nom::Err::Failure(Error::new(input, ErrorKind::TakeWhile1))) -} - -fn service<'a>(input: &'a [u8]) -> IResult<&'a [u8], String> { - preceded(tag_no_case("service="), parameter_str)(input) -} - -fn auth_option<'a>(input: &'a [u8]) -> IResult<&'a [u8], AuthOption> { - use AuthOption::*; - alt(( - alt(( - value(Debug, tag_no_case(b"debug")), - value(NoPenalty, tag_no_case(b"no-penalty")), - value(ClientId, tag_no_case(b"client_id")), - value(NoLogin, tag_no_case(b"nologin")), - map(preceded(tag_no_case(b"session="), u64), |id| Session(id)), - map(preceded(tag_no_case(b"lip="), parameter_str), |ip| { - LocalIp(ip) - }), - map(preceded(tag_no_case(b"rip="), parameter_str), |ip| { - RemoteIp(ip) - }), - map(preceded(tag_no_case(b"lport="), u16), |port| { - LocalPort(port) - }), - map(preceded(tag_no_case(b"rport="), u16), |port| { - RemotePort(port) - }), - map(preceded(tag_no_case(b"real_rip="), parameter_str), |ip| { - RealRemoteIp(ip) - }), - map(preceded(tag_no_case(b"real_lip="), parameter_str), |ip| { - RealLocalIp(ip) - }), - map(preceded(tag_no_case(b"real_lport="), u16), |port| { - RealLocalPort(port) - }), - map(preceded(tag_no_case(b"real_rport="), u16), |port| { - RealRemotePort(port) - }), - )), - alt(( - map( - preceded(tag_no_case(b"local_name="), parameter_str), - |name| LocalName(name), - ), - map( - preceded(tag_no_case(b"forward_views="), parameter), - |views| 
ForwardViews(views.into()), - ), - map(preceded(tag_no_case(b"secured="), parameter_str), |info| { - Secured(Some(info)) - }), - value(Secured(None), tag_no_case(b"secured")), - value(CertUsername, tag_no_case(b"cert_username")), - map(preceded(tag_no_case(b"transport="), parameter_str), |ts| { - Transport(ts) - }), - map( - preceded(tag_no_case(b"tls_cipher="), parameter_str), - |cipher| TlsCipher(cipher), - ), - map( - preceded(tag_no_case(b"tls_cipher_bits="), parameter_str), - |bits| TlsCipherBits(bits), - ), - map(preceded(tag_no_case(b"tls_pfs="), parameter_str), |pfs| { - TlsPfs(pfs) - }), - map( - preceded(tag_no_case(b"tls_protocol="), parameter_str), - |proto| TlsProtocol(proto), - ), - map( - preceded(tag_no_case(b"valid-client-cert="), parameter_str), - |cert| ValidClientCert(cert), - ), - )), - alt(( - map(preceded(tag_no_case(b"resp="), base64), |data| Resp(data)), - map( - tuple((parameter_name, tag(b"="), parameter)), - |(n, _, v)| UnknownPair(n, v.into()), - ), - map(parameter, |v| UnknownBool(v.into())), - )), - ))(input) -} - -fn auth_command<'a>(input: &'a [u8]) -> IResult<&'a [u8], ClientCommand> { - let mut parser = tuple(( - tag_no_case(b"AUTH"), - tab, - u64, - tab, - mechanism, - tab, - service, - map(opt(preceded(tab, separated_list0(tab, auth_option))), |o| { - o.unwrap_or(vec![]) - }), - )); - let (input, (_, _, id, _, mech, _, service, options)) = parser(input)?; - Ok(( - input, - ClientCommand::Auth { - id, - mech, - service, - options, - }, - )) -} - -fn is_base64_core(c: u8) -> bool { - c >= 0x30 && c <= 0x39 // 0-9 - || c >= 0x41 && c <= 0x5a // A-Z - || c >= 0x61 && c <= 0x7a // a-z - || c == 0x2b // + - || c == 0x2f // / -} - -fn is_base64_pad(c: u8) -> bool { - c == 0x3d // = -} - -fn base64(input: &[u8]) -> IResult<&[u8], Vec> { - let (input, (b64, _)) = tuple((take_while1(is_base64_core), take_while(is_base64_pad)))(input)?; - - let data = base64::engine::general_purpose::STANDARD_NO_PAD - .decode(b64) - .map_err(|_| nom::Err::Failure(Error::new(input, ErrorKind::TakeWhile1)))?; - - Ok((input, data)) -} - -/// @FIXME Dovecot does not say if base64 content must be padded or not -fn cont_command<'a>(input: &'a [u8]) -> IResult<&'a [u8], ClientCommand> { - let mut parser = tuple((tag_no_case(b"CONT"), tab, u64, tab, base64)); - - let (input, (_, _, id, _, data)) = parser(input)?; - Ok((input, ClientCommand::Cont { id, data })) -} - -fn client_command<'a>(input: &'a [u8]) -> IResult<&'a [u8], ClientCommand> { - alt((version_command, cpid_command, auth_command, cont_command))(input) -} - -/* -fn server_command(buf: &u8) -> IResult<&u8, ServerCommand> { - unimplemented!(); -} -*/ - -// ----------------------------------------------------------------- -// -// SASL DECODING -// -// ----------------------------------------------------------------- - -fn not_null(c: u8) -> bool { - c != 0x0 -} - -// impersonated user, login, password -fn auth_plain<'a>(input: &'a [u8]) -> IResult<&'a [u8], (&'a [u8], &'a [u8], &'a [u8])> { - map( - tuple(( - take_while(not_null), - take(1usize), - take_while(not_null), - take(1usize), - rest, - )), - |(imp, _, user, _, pass)| (imp, user, pass), - )(input) -} - -// ----------------------------------------------------------------- -// -// DOVECOT AUTH ENCODING -// -// ------------------------------------------------------------------ -use tokio_util::bytes::{BufMut, BytesMut}; -trait Encode { - fn encode(&self, out: &mut BytesMut) -> Result<()>; -} - -fn tab_enc(out: &mut BytesMut) { - out.put(&[0x09][..]) -} - -fn lf_enc(out: 
&mut BytesMut) { - out.put(&[0x0A][..]) -} - -impl Encode for Mechanism { - fn encode(&self, out: &mut BytesMut) -> Result<()> { - match self { - Self::Plain => out.put(&b"PLAIN"[..]), - Self::Login => out.put(&b"LOGIN"[..]), - } - Ok(()) - } -} - -impl Encode for MechanismParameters { - fn encode(&self, out: &mut BytesMut) -> Result<()> { - match self { - Self::Anonymous => out.put(&b"anonymous"[..]), - Self::PlainText => out.put(&b"plaintext"[..]), - Self::Dictionary => out.put(&b"dictionary"[..]), - Self::Active => out.put(&b"active"[..]), - Self::ForwardSecrecy => out.put(&b"forward-secrecy"[..]), - Self::MutualAuth => out.put(&b"mutual-auth"[..]), - Self::Private => out.put(&b"private"[..]), - } - Ok(()) - } -} - -impl Encode for FailCode { - fn encode(&self, out: &mut BytesMut) -> Result<()> { - match self { - Self::TempFail => out.put(&b"temp_fail"[..]), - Self::AuthzFail => out.put(&b"authz_fail"[..]), - Self::UserDisabled => out.put(&b"user_disabled"[..]), - Self::PassExpired => out.put(&b"pass_expired"[..]), - }; - Ok(()) - } -} - -impl Encode for ServerCommand { - fn encode(&self, out: &mut BytesMut) -> Result<()> { - match self { - Self::Version(Version { major, minor }) => { - out.put(&b"VERSION"[..]); - tab_enc(out); - out.put(major.to_string().as_bytes()); - tab_enc(out); - out.put(minor.to_string().as_bytes()); - lf_enc(out); - } - Self::Spid(pid) => { - out.put(&b"SPID"[..]); - tab_enc(out); - out.put(pid.to_string().as_bytes()); - lf_enc(out); - } - Self::Cuid(pid) => { - out.put(&b"CUID"[..]); - tab_enc(out); - out.put(pid.to_string().as_bytes()); - lf_enc(out); - } - Self::Cookie(cval) => { - out.put(&b"COOKIE"[..]); - tab_enc(out); - out.put(hex::encode(cval).as_bytes()); - lf_enc(out); - } - Self::Mech { kind, parameters } => { - out.put(&b"MECH"[..]); - tab_enc(out); - kind.encode(out)?; - for p in parameters.iter() { - tab_enc(out); - p.encode(out)?; - } - lf_enc(out); - } - Self::Done => { - out.put(&b"DONE"[..]); - lf_enc(out); - } - Self::Cont { id, data } => { - out.put(&b"CONT"[..]); - tab_enc(out); - out.put(id.to_string().as_bytes()); - tab_enc(out); - if let Some(rdata) = data { - let b64 = base64::engine::general_purpose::STANDARD.encode(rdata); - out.put(b64.as_bytes()); - } - lf_enc(out); - } - Self::Ok { - id, - user_id, - extra_parameters, - } => { - out.put(&b"OK"[..]); - tab_enc(out); - out.put(id.to_string().as_bytes()); - if let Some(user) = user_id { - tab_enc(out); - out.put(&b"user="[..]); - out.put(user.as_bytes()); - } - for p in extra_parameters.iter() { - tab_enc(out); - out.put(&p[..]); - } - lf_enc(out); - } - Self::Fail { - id, - user_id, - code, - extra_parameters, - } => { - out.put(&b"FAIL"[..]); - tab_enc(out); - out.put(id.to_string().as_bytes()); - if let Some(user) = user_id { - tab_enc(out); - out.put(&b"user="[..]); - out.put(user.as_bytes()); - } - if let Some(code_val) = code { - tab_enc(out); - out.put(&b"code="[..]); - code_val.encode(out)?; - } - for p in extra_parameters.iter() { - tab_enc(out); - out.put(&p[..]); - } - lf_enc(out); - } - } - Ok(()) - } -} diff --git a/src/bayou.rs b/src/bayou.rs deleted file mode 100644 index 9faff5a..0000000 --- a/src/bayou.rs +++ /dev/null @@ -1,514 +0,0 @@ -use std::sync::{Arc, Weak}; -use std::time::{Duration, Instant}; - -use anyhow::{anyhow, bail, Result}; -use log::error; -use rand::prelude::*; -use serde::{Deserialize, Serialize}; -use tokio::sync::{watch, Notify}; - -use crate::cryptoblob::*; -use crate::login::Credentials; -use crate::storage; -use crate::timestamp::*; - -const 
KEEP_STATE_EVERY: usize = 64; - -// Checkpointing interval constants: a checkpoint is not made earlier -// than CHECKPOINT_INTERVAL time after the last one, and is not made -// if there are less than CHECKPOINT_MIN_OPS new operations since last one. -const CHECKPOINT_INTERVAL: Duration = Duration::from_secs(6 * 3600); -const CHECKPOINT_MIN_OPS: usize = 16; -// HYPOTHESIS: processes are able to communicate in a synchronous -// fashion in times that are small compared to CHECKPOINT_INTERVAL. -// More precisely, if a process tried to save an operation within the last -// CHECKPOINT_INTERVAL, we are sure to read it from storage if it was -// successfully saved (and if we don't read it, it means it has been -// definitely discarded due to an error). - -// Keep at least two checkpoints, here three, to avoid race conditions -// between processes doing .checkpoint() and those doing .sync() -const CHECKPOINTS_TO_KEEP: usize = 3; - -const WATCH_SK: &str = "watch"; - -pub trait BayouState: - Default + Clone + Serialize + for<'de> Deserialize<'de> + Send + Sync + 'static -{ - type Op: Clone + Serialize + for<'de> Deserialize<'de> + std::fmt::Debug + Send + Sync + 'static; - - fn apply(&self, op: &Self::Op) -> Self; -} - -pub struct Bayou { - path: String, - key: Key, - - storage: storage::Store, - - checkpoint: (Timestamp, S), - history: Vec<(Timestamp, S::Op, Option)>, - - last_sync: Option, - last_try_checkpoint: Option, - - watch: Arc, - last_sync_watch_ct: storage::RowRef, -} - -impl Bayou { - pub async fn new(creds: &Credentials, path: String) -> Result { - let storage = creds.storage.build().await?; - - //let target = k2v_client.row(&path, WATCH_SK); - let target = storage::RowRef::new(&path, WATCH_SK); - let watch = K2vWatch::new(creds, target.clone()).await?; - - Ok(Self { - path, - storage, - key: creds.keys.master.clone(), - checkpoint: (Timestamp::zero(), S::default()), - history: vec![], - last_sync: None, - last_try_checkpoint: None, - watch, - last_sync_watch_ct: target, - }) - } - - /// Re-reads the state from persistent storage backend - pub async fn sync(&mut self) -> Result<()> { - let new_last_sync = Some(Instant::now()); - let new_last_sync_watch_ct = self.watch.rx.borrow().clone(); - - // 1. List checkpoints - let checkpoints = self.list_checkpoints().await?; - tracing::debug!("(sync) listed checkpoints: {:?}", checkpoints); - - // 2. Load last checkpoint if different from currently used one - let checkpoint = if let Some((ts, key)) = checkpoints.last() { - if *ts == self.checkpoint.0 { - (*ts, None) - } else { - tracing::debug!("(sync) loading checkpoint: {}", key); - - let buf = self - .storage - .blob_fetch(&storage::BlobRef(key.to_string())) - .await? - .value; - tracing::debug!("(sync) checkpoint body length: {}", buf.len()); - - let ck = open_deserialize::(&buf, &self.key)?; - (*ts, Some(ck)) - } - } else { - (Timestamp::zero(), None) - }; - - if self.checkpoint.0 > checkpoint.0 { - bail!("Loaded checkpoint is more recent than stored one"); - } - - if let Some(ck) = checkpoint.1 { - tracing::debug!( - "(sync) updating checkpoint to loaded state at {:?}", - checkpoint.0 - ); - self.checkpoint = (checkpoint.0, ck); - }; - - // remove from history events before checkpoint - self.history = std::mem::take(&mut self.history) - .into_iter() - .skip_while(|(ts, _, _)| *ts < self.checkpoint.0) - .collect(); - - // 3. 
List all operations starting from checkpoint - let ts_ser = self.checkpoint.0.to_string(); - tracing::debug!("(sync) looking up operations starting at {}", ts_ser); - let ops_map = self - .storage - .row_fetch(&storage::Selector::Range { - shard: &self.path, - sort_begin: &ts_ser, - sort_end: WATCH_SK, - }) - .await?; - - let mut ops = vec![]; - for row_value in ops_map { - let row = row_value.row_ref; - let sort_key = row.uid.sort; - let ts = sort_key - .parse::() - .map_err(|_| anyhow!("Invalid operation timestamp: {}", sort_key))?; - - let val = row_value.value; - if val.len() != 1 { - bail!("Invalid operation, has {} values", val.len()); - } - match &val[0] { - storage::Alternative::Value(v) => { - let op = open_deserialize::(v, &self.key)?; - tracing::trace!("(sync) operation {}: {:?}", sort_key, op); - ops.push((ts, op)); - } - storage::Alternative::Tombstone => { - continue; - } - } - } - ops.sort_by_key(|(ts, _)| *ts); - tracing::debug!("(sync) {} operations", ops.len()); - - if ops.len() < self.history.len() { - bail!("Some operations have disappeared from storage!"); - } - - // 4. Check that first operation has same timestamp as checkpoint (if not zero) - if self.checkpoint.0 != Timestamp::zero() && ops[0].0 != self.checkpoint.0 { - bail!( - "First operation in listing doesn't have timestamp that corresponds to checkpoint" - ); - } - - // 5. Apply all operations in order - // Hypothesis: before the loaded checkpoint, operations haven't changed - // between what's on storage and what we used to calculate the state in RAM here. - let i0 = self - .history - .iter() - .zip(ops.iter()) - .take_while(|((ts1, _, _), (ts2, _))| ts1 == ts2) - .count(); - - if ops.len() > i0 { - // Remove operations from first position where histories differ - self.history.truncate(i0); - - // Look up last calculated state which we have saved and start from there. - let mut last_state = (0, &self.checkpoint.1); - for (i, (_, _, state_opt)) in self.history.iter().enumerate().rev() { - if let Some(state) = state_opt { - last_state = (i + 1, state); - break; - } - } - - // Calculate state at the end of this common part of the history - let mut state = last_state.1.clone(); - for (_, op, _) in self.history[last_state.0..].iter() { - state = state.apply(op); - } - - // Now, apply all operations retrieved from storage after the common part - for (ts, op) in ops.drain(i0..) { - state = state.apply(&op); - if (self.history.len() + 1) % KEEP_STATE_EVERY == 0 { - self.history.push((ts, op, Some(state.clone()))); - } else { - self.history.push((ts, op, None)); - } - } - - // Always save final state as result of last operation - self.history.last_mut().unwrap().2 = Some(state); - } - - // Save info that sync has been done - self.last_sync = new_last_sync; - self.last_sync_watch_ct = new_last_sync_watch_ct; - Ok(()) - } - - /// Does a sync() if either of the two conditions is met: - /// - last sync was more than CHECKPOINT_INTERVAL/5 ago - /// - a change was detected - pub async fn opportunistic_sync(&mut self) -> Result<()> { - let too_old = match self.last_sync { - Some(t) => Instant::now() > t + (CHECKPOINT_INTERVAL / 5), - _ => true, - }; - let changed = self.last_sync_watch_ct != *self.watch.rx.borrow(); - if too_old || changed { - self.sync().await?; - } - Ok(()) - } - - pub fn notifier(&self) -> std::sync::Weak { - Arc::downgrade(&self.watch.learnt_remote_update) - } - - /// Applies a new operation on the state. Once this function returns, - /// the operation has been safely persisted to storage backend. 
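To make the BayouState contract above concrete, here is a minimal toy implementation (a made-up tag set, not a type used by Aerogramme), assuming the BayouState trait shown above is in scope. The only real requirement is that apply() is a pure function of the previous state and the operation, so replaying the same operation log from a checkpoint always reproduces the same state:

    use std::collections::BTreeSet;

    use serde::{Deserialize, Serialize};

    // Assumed to be in scope from the module above.
    use crate::bayou::BayouState;

    // Toy state for illustration: a set of string tags.
    #[derive(Default, Clone, Serialize, Deserialize)]
    pub struct TagSet {
        tags: BTreeSet<String>,
    }

    // Operations are serialized too (sealed with the user key, then
    // stored as individual rows of the operation log).
    #[derive(Clone, Debug, Serialize, Deserialize)]
    pub enum TagOp {
        Add(String),
        Remove(String),
    }

    impl BayouState for TagSet {
        type Op = TagOp;

        fn apply(&self, op: &Self::Op) -> Self {
            let mut next = self.clone();
            match op {
                TagOp::Add(t) => {
                    next.tags.insert(t.clone());
                }
                TagOp::Remove(t) => {
                    next.tags.remove(t);
                }
            }
            next
        }
    }

A caller would typically sync() (or opportunistic_sync()) before computing an operation, then push() it so that it is persisted to storage before being applied locally, as the doc comments above describe.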
- /// Make sure to call `.opportunistic_sync()` before doing this, - /// and even before calculating the `op` argument given here. - pub async fn push(&mut self, op: S::Op) -> Result<()> { - tracing::debug!("(push) add operation: {:?}", op); - - let ts = Timestamp::after( - self.history - .last() - .map(|(ts, _, _)| ts) - .unwrap_or(&self.checkpoint.0), - ); - - let row_val = storage::RowVal::new( - storage::RowRef::new(&self.path, &ts.to_string()), - seal_serialize(&op, &self.key)?, - ); - self.storage.row_insert(vec![row_val]).await?; - self.watch.propagate_local_update.notify_one(); - - let new_state = self.state().apply(&op); - self.history.push((ts, op, Some(new_state))); - - // Clear previously saved state in history if not required - let hlen = self.history.len(); - if hlen >= 2 && (hlen - 1) % KEEP_STATE_EVERY != 0 { - self.history[hlen - 2].2 = None; - } - - self.checkpoint().await?; - - Ok(()) - } - - /// Save a new checkpoint if previous checkpoint is too old - pub async fn checkpoint(&mut self) -> Result<()> { - match self.last_try_checkpoint { - Some(ts) if Instant::now() - ts < CHECKPOINT_INTERVAL / 5 => Ok(()), - _ => { - let res = self.checkpoint_internal().await; - if res.is_ok() { - self.last_try_checkpoint = Some(Instant::now()); - } - res - } - } - } - - async fn checkpoint_internal(&mut self) -> Result<()> { - self.sync().await?; - - // Check what would be the possible time for a checkpoint in the history we have - let now = now_msec() as i128; - let i_cp = match self - .history - .iter() - .enumerate() - .rev() - .skip_while(|(_, (ts, _, _))| { - (now - ts.msec as i128) < CHECKPOINT_INTERVAL.as_millis() as i128 - }) - .map(|(i, _)| i) - .next() - { - Some(i) => i, - None => { - tracing::debug!("(cp) Oldest operation is too recent to trigger checkpoint"); - return Ok(()); - } - }; - - if i_cp < CHECKPOINT_MIN_OPS { - tracing::debug!("(cp) Not enough old operations to trigger checkpoint"); - return Ok(()); - } - - let ts_cp = self.history[i_cp].0; - tracing::debug!( - "(cp) we could checkpoint at time {} (index {} in history)", - ts_cp.to_string(), - i_cp - ); - - // Check existing checkpoints: if last one is too recent, don't checkpoint again. 
- let existing_checkpoints = self.list_checkpoints().await?; - tracing::debug!("(cp) listed checkpoints: {:?}", existing_checkpoints); - - if let Some(last_cp) = existing_checkpoints.last() { - if (ts_cp.msec as i128 - last_cp.0.msec as i128) - < CHECKPOINT_INTERVAL.as_millis() as i128 - { - tracing::debug!( - "(cp) last checkpoint is too recent: {}, not checkpointing", - last_cp.0.to_string() - ); - return Ok(()); - } - } - - tracing::debug!("(cp) saving checkpoint at {}", ts_cp.to_string()); - - // Calculate state at time of checkpoint - let mut last_known_state = (0, &self.checkpoint.1); - for (i, (_, _, st)) in self.history[..i_cp].iter().enumerate() { - if let Some(s) = st { - last_known_state = (i + 1, s); - } - } - let mut state_cp = last_known_state.1.clone(); - for (_, op, _) in self.history[last_known_state.0..i_cp].iter() { - state_cp = state_cp.apply(op); - } - - // Serialize and save checkpoint - let cryptoblob = seal_serialize(&state_cp, &self.key)?; - tracing::debug!("(cp) checkpoint body length: {}", cryptoblob.len()); - - let blob_val = storage::BlobVal::new( - storage::BlobRef(format!("{}/checkpoint/{}", self.path, ts_cp.to_string())), - cryptoblob.into(), - ); - self.storage.blob_insert(blob_val).await?; - - // Drop old checkpoints (but keep at least CHECKPOINTS_TO_KEEP of them) - let ecp_len = existing_checkpoints.len(); - if ecp_len + 1 > CHECKPOINTS_TO_KEEP { - let last_to_keep = ecp_len + 1 - CHECKPOINTS_TO_KEEP; - - // Delete blobs - for (_ts, key) in existing_checkpoints[..last_to_keep].iter() { - tracing::debug!("(cp) drop old checkpoint {}", key); - self.storage - .blob_rm(&storage::BlobRef(key.to_string())) - .await?; - } - - // Delete corresponding range of operations - let ts_ser = existing_checkpoints[last_to_keep].0.to_string(); - self.storage - .row_rm(&storage::Selector::Range { - shard: &self.path, - sort_begin: "", - sort_end: &ts_ser, - }) - .await? - } - - Ok(()) - } - - pub fn state(&self) -> &S { - if let Some(last) = self.history.last() { - last.2.as_ref().unwrap() - } else { - &self.checkpoint.1 - } - } - - // ---- INTERNAL ---- - - async fn list_checkpoints(&self) -> Result> { - let prefix = format!("{}/checkpoint/", self.path); - - let checkpoints_res = self.storage.blob_list(&prefix).await?; - - let mut checkpoints = vec![]; - for object in checkpoints_res { - let key = object.0; - if let Some(ckid) = key.strip_prefix(&prefix) { - if let Ok(ts) = ckid.parse::() { - checkpoints.push((ts, key.into())); - } - } - } - checkpoints.sort_by_key(|(ts, _)| *ts); - Ok(checkpoints) - } -} - -// ---- Bayou watch in K2V ---- - -struct K2vWatch { - target: storage::RowRef, - rx: watch::Receiver, - propagate_local_update: Notify, - learnt_remote_update: Arc, -} - -impl K2vWatch { - /// Creates a new watch and launches subordinate threads. - /// These threads hold Weak pointers to the struct; - /// they exit when the Arc is dropped. 
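The watch mechanism described above surfaces to callers through Bayou::notifier(), which hands out a std::sync::Weak<Notify>. A hypothetical consumer loop (not taken from the codebase) that re-syncs whenever another instance signals an update could look like this:

    use anyhow::Result;

    // Assumed to be in scope from the module above; `S` is any
    // BayouState implementation, e.g. the toy TagSet sketched earlier.
    use crate::bayou::{Bayou, BayouState};

    async fn watch_and_sync<S: BayouState>(bayou: &mut Bayou<S>) -> Result<()> {
        // Weak pointer: holding it does not keep the background watch task alive.
        let notifier = bayou.notifier();
        loop {
            let notify = match notifier.upgrade() {
                Some(n) => n,
                None => return Ok(()), // watch dropped, nothing more to wait for
            };
            // Wait until the watch row changes (another process pushed operations)...
            notify.notified().await;
            // ...then pull the new operations and update the in-memory state.
            bayou.opportunistic_sync().await?;
        }
    }

Handing out a Weak rather than an Arc matches the comment above: the background task must be able to exit once the Bayou and its watch are dropped, even if a consumer still holds the notifier.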
- async fn new(creds: &Credentials, target: storage::RowRef) -> Result> { - let storage = creds.storage.build().await?; - - let (tx, rx) = watch::channel::(target.clone()); - let propagate_local_update = Notify::new(); - let learnt_remote_update = Arc::new(Notify::new()); - - let watch = Arc::new(K2vWatch { - target, - rx, - propagate_local_update, - learnt_remote_update, - }); - - tokio::spawn(Self::background_task(Arc::downgrade(&watch), storage, tx)); - - Ok(watch) - } - - async fn background_task( - self_weak: Weak, - storage: storage::Store, - tx: watch::Sender, - ) { - let (mut row, remote_update) = match Weak::upgrade(&self_weak) { - Some(this) => (this.target.clone(), this.learnt_remote_update.clone()), - None => return, - }; - - while let Some(this) = Weak::upgrade(&self_weak) { - tracing::debug!( - "bayou k2v watch bg loop iter ({}, {})", - this.target.uid.shard, - this.target.uid.sort - ); - tokio::select!( - // Needed to exit: will force a loop iteration every minutes, - // that will stop the loop if other Arc references have been dropped - // and free resources. Otherwise we would be blocked waiting forever... - _ = tokio::time::sleep(Duration::from_secs(60)) => continue, - - // Watch if another instance has modified the log - update = storage.row_poll(&row) => { - match update { - Err(e) => { - error!("Error in bayou k2v wait value changed: {}", e); - tokio::time::sleep(Duration::from_secs(30)).await; - } - Ok(new_value) => { - row = new_value.row_ref; - if let Err(e) = tx.send(row.clone()) { - tracing::warn!(err=?e, "(watch) can't record the new log ref"); - break; - } - tracing::debug!(row=?row, "(watch) learnt remote update"); - this.learnt_remote_update.notify_waiters(); - } - } - } - - // It appears we have modified the log, informing other people - _ = this.propagate_local_update.notified() => { - let rand = u128::to_be_bytes(thread_rng().gen()).to_vec(); - let row_val = storage::RowVal::new(row.clone(), rand); - if let Err(e) = storage.row_insert(vec![row_val]).await - { - tracing::error!("Error in bayou k2v watch updater loop: {}", e); - tokio::time::sleep(Duration::from_secs(30)).await; - } - } - ); - } - // unblock listeners - remote_update.notify_waiters(); - tracing::info!("bayou k2v watch bg loop exiting"); - } -} diff --git a/src/config.rs b/src/config.rs deleted file mode 100644 index 7de2eac..0000000 --- a/src/config.rs +++ /dev/null @@ -1,191 +0,0 @@ -use std::collections::HashMap; -use std::io::{Read, Write}; -use std::net::SocketAddr; -use std::path::PathBuf; - -use anyhow::Result; -use serde::{Deserialize, Serialize}; - -#[derive(Serialize, Deserialize, Debug, Clone)] -pub struct CompanionConfig { - pub pid: Option, - pub imap: ImapUnsecureConfig, - // @FIXME Add DAV - - #[serde(flatten)] - pub users: LoginStaticConfig, -} - -#[derive(Serialize, Deserialize, Debug, Clone)] -pub struct ProviderConfig { - pub pid: Option, - pub imap: Option, - pub imap_unsecure: Option, - pub lmtp: Option, - pub auth: Option, - pub dav_unsecure: Option, - pub users: UserManagement, -} - -#[derive(Serialize, Deserialize, Debug, Clone)] -#[serde(tag = "user_driver")] -pub enum UserManagement { - Demo, - Static(LoginStaticConfig), - Ldap(LoginLdapConfig), -} - -#[derive(Serialize, Deserialize, Debug, Clone)] -pub struct AuthConfig { - pub bind_addr: SocketAddr, -} - -#[derive(Serialize, Deserialize, Debug, Clone)] -pub struct LmtpConfig { - pub bind_addr: SocketAddr, - pub hostname: String, -} - -#[derive(Serialize, Deserialize, Debug, Clone)] -pub struct ImapConfig { - pub 
bind_addr: SocketAddr, - pub certs: PathBuf, - pub key: PathBuf, -} - -#[derive(Serialize, Deserialize, Debug, Clone)] -pub struct DavUnsecureConfig { - pub bind_addr: SocketAddr, -} - -#[derive(Serialize, Deserialize, Debug, Clone)] -pub struct ImapUnsecureConfig { - pub bind_addr: SocketAddr, -} - -#[derive(Serialize, Deserialize, Debug, Clone)] -pub struct LoginStaticConfig { - pub user_list: PathBuf, -} - -#[derive(Serialize, Deserialize, Debug, Clone)] -#[serde(tag = "storage_driver")] -pub enum LdapStorage { - Garage(LdapGarageConfig), - InMemory, -} - -#[derive(Serialize, Deserialize, Debug, Clone)] -pub struct LdapGarageConfig { - pub s3_endpoint: String, - pub k2v_endpoint: String, - pub aws_region: String, - - pub aws_access_key_id_attr: String, - pub aws_secret_access_key_attr: String, - pub bucket_attr: Option, - pub default_bucket: Option, -} - -#[derive(Serialize, Deserialize, Debug, Clone)] -pub struct LoginLdapConfig { - // LDAP connection info - pub ldap_server: String, - #[serde(default)] - pub pre_bind_on_login: bool, - pub bind_dn: Option, - pub bind_password: Option, - pub search_base: String, - - // Schema-like info required for Aerogramme's logic - pub username_attr: String, - #[serde(default = "default_mail_attr")] - pub mail_attr: String, - - // The field that will contain the crypto root thingy - pub crypto_root_attr: String, - - // Storage related thing - #[serde(flatten)] - pub storage: LdapStorage, -} - -// ---- - -#[derive(Serialize, Deserialize, Debug, Clone)] -#[serde(tag = "storage_driver")] -pub enum StaticStorage { - Garage(StaticGarageConfig), - InMemory, -} - -#[derive(Serialize, Deserialize, Debug, Clone)] -pub struct StaticGarageConfig { - pub s3_endpoint: String, - pub k2v_endpoint: String, - pub aws_region: String, - - pub aws_access_key_id: String, - pub aws_secret_access_key: String, - pub bucket: String, -} - -pub type UserList = HashMap; - -#[derive(Serialize, Deserialize, Debug, Clone)] -pub struct UserEntry { - #[serde(default)] - pub email_addresses: Vec, - pub password: String, - pub crypto_root: String, - - #[serde(flatten)] - pub storage: StaticStorage, -} - -#[derive(Serialize, Deserialize, Debug, Clone)] -pub struct SetupEntry { - #[serde(default)] - pub email_addresses: Vec, - - #[serde(default)] - pub clear_password: Option, - - #[serde(flatten)] - pub storage: StaticStorage, -} - -#[derive(Serialize, Deserialize, Debug, Clone)] -#[serde(tag = "role")] -pub enum AnyConfig { - Companion(CompanionConfig), - Provider(ProviderConfig), -} - -// --- -pub fn read_config(config_file: PathBuf) -> Result { - let mut file = std::fs::OpenOptions::new() - .read(true) - .open(config_file.as_path())?; - - let mut config = String::new(); - file.read_to_string(&mut config)?; - - Ok(toml::from_str(&config)?) -} - -pub fn write_config(config_file: PathBuf, config: &T) -> Result<()> { - let mut file = std::fs::OpenOptions::new() - .write(true) - .create(true) - .truncate(true) - .open(config_file.as_path())?; - - file.write_all(toml::to_string(config)?.as_bytes())?; - - Ok(()) -} - -fn default_mail_attr() -> String { - "mail".into() -} diff --git a/src/cryptoblob.rs b/src/cryptoblob.rs deleted file mode 100644 index 327a642..0000000 --- a/src/cryptoblob.rs +++ /dev/null @@ -1,67 +0,0 @@ -//! Helper functions for secret-key encrypted blobs -//! 
that contain Zstd encrypted data - -use anyhow::{anyhow, Result}; -use serde::{Deserialize, Serialize}; -use zstd::stream::{decode_all as zstd_decode, encode_all as zstd_encode}; - -//use sodiumoxide::crypto::box_ as publicbox; -use sodiumoxide::crypto::secretbox::xsalsa20poly1305 as secretbox; - -pub use sodiumoxide::crypto::box_::{ - gen_keypair, PublicKey, SecretKey, PUBLICKEYBYTES, SECRETKEYBYTES, -}; -pub use sodiumoxide::crypto::secretbox::xsalsa20poly1305::{gen_key, Key, KEYBYTES}; - -pub fn open(cryptoblob: &[u8], key: &Key) -> Result> { - use secretbox::{Nonce, NONCEBYTES}; - - if cryptoblob.len() < NONCEBYTES { - return Err(anyhow!("Cyphertext too short")); - } - - // Decrypt -> get Zstd data - let nonce = Nonce::from_slice(&cryptoblob[..NONCEBYTES]).unwrap(); - let zstdblob = secretbox::open(&cryptoblob[NONCEBYTES..], &nonce, key) - .map_err(|_| anyhow!("Could not decrypt blob"))?; - - // Decompress zstd data - let mut reader = &zstdblob[..]; - let data = zstd_decode(&mut reader)?; - - Ok(data) -} - -pub fn seal(plainblob: &[u8], key: &Key) -> Result> { - use secretbox::{gen_nonce, NONCEBYTES}; - - // Compress data using zstd - let mut reader = plainblob; - let zstdblob = zstd_encode(&mut reader, 0)?; - - // Encrypt - let nonce = gen_nonce(); - let cryptoblob = secretbox::seal(&zstdblob, &nonce, key); - - let mut res = Vec::with_capacity(NONCEBYTES + cryptoblob.len()); - res.extend(nonce.as_ref()); - res.extend(cryptoblob); - - Ok(res) -} - -pub fn open_deserialize Deserialize<'de>>(cryptoblob: &[u8], key: &Key) -> Result { - let blob = open(cryptoblob, key)?; - - Ok(rmp_serde::decode::from_read_ref::<_, T>(&blob)?) -} - -pub fn seal_serialize(obj: T, key: &Key) -> Result> { - let mut wr = Vec::with_capacity(128); - let mut se = rmp_serde::Serializer::new(&mut wr) - .with_struct_map() - .with_string_variants(); - obj.serialize(&mut se)?; - - seal(&wr, key) -} diff --git a/src/dav/acltypes.rs b/src/dav/acltypes.rs deleted file mode 100644 index f356813..0000000 --- a/src/dav/acltypes.rs +++ /dev/null @@ -1,4 +0,0 @@ -//@FIXME required for a full DAV implementation -// See section 6. of the CalDAV RFC -// It seems mainly required for free-busy that I will not implement now. -// It can also be used for discovering main calendar, not sure it is used. 
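// ---------------------------------------------------------------------------
// Editor's note (illustrative sketch, not part of the patch): a round-trip use
// of the `cryptoblob` helpers whose removal is recorded above. `seal`
// compresses the payload with zstd, encrypts it under an XSalsa20-Poly1305
// secret key and prepends the random nonce; `open` reverses both steps. The
// signatures are the ones visible in the deleted code; the import path
// `crate::cryptoblob` matches the file removed here and may not exist after
// this change.
use crate::cryptoblob::{gen_key, open, seal};

fn cryptoblob_roundtrip() -> anyhow::Result<()> {
    let key = gen_key();
    let plaintext = b"hello aerogramme";

    // Layout of the sealed blob: 24-byte nonce, then secretbox(zstd(plaintext)).
    let blob = seal(plaintext, &key)?;

    let recovered = open(&blob, &key)?;
    assert_eq!(&recovered[..], &plaintext[..]);
    Ok(())
}
// ---------------------------------------------------------------------------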
diff --git a/src/dav/caldecoder.rs b/src/dav/caldecoder.rs deleted file mode 100644 index 5f40c4b..0000000 --- a/src/dav/caldecoder.rs +++ /dev/null @@ -1,33 +0,0 @@ -use super::types as dav; -use super::caltypes::*; -use super::xml; -use super::error; - -// ---- ROOT ELEMENTS --- - -// ---- EXTENSIONS --- -impl xml::QRead for Violation { - async fn qread(xml: &mut xml::Reader) -> Result { - unreachable!(); - } -} - -impl xml::QRead for Property { - async fn qread(xml: &mut xml::Reader) -> Result { - unreachable!(); - } -} - -impl xml::QRead for PropertyRequest { - async fn qread(xml: &mut xml::Reader) -> Result { - unreachable!(); - } -} - -impl xml::QRead for ResourceType { - async fn qread(xml: &mut xml::Reader) -> Result { - unreachable!(); - } -} - -// ---- INNER XML ---- diff --git a/src/dav/calencoder.rs b/src/dav/calencoder.rs deleted file mode 100644 index 58b88c7..0000000 --- a/src/dav/calencoder.rs +++ /dev/null @@ -1,886 +0,0 @@ -use quick_xml::Error as QError; -use quick_xml::events::{Event, BytesEnd, BytesStart, BytesText}; -use quick_xml::name::PrefixDeclaration; -use tokio::io::AsyncWrite; - -use super::caltypes::*; -use super::xml::{Node, QWrite, IWrite, Writer}; -use super::types::Extension; - -const ICAL_DATETIME_FMT: &str = "%Y%m%dT%H%M%SZ"; - -// ==================== Calendar Types Serialization ========================= - -// -------------------- MKCALENDAR METHOD ------------------------------------ -impl QWrite for MkCalendar { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let start = xml.create_cal_element("mkcalendar"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - self.0.qwrite(xml).await?; - xml.q.write_event_async(Event::End(end)).await - } -} - -impl> QWrite for MkCalendarResponse { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let start = xml.create_cal_element("mkcalendar-response"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - for propstat in self.0.iter() { - propstat.qwrite(xml).await?; - } - xml.q.write_event_async(Event::End(end)).await - } -} - -// ----------------------- REPORT METHOD ------------------------------------- - -impl QWrite for CalendarQuery { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let start = xml.create_cal_element("calendar-query"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - if let Some(selector) = &self.selector { - selector.qwrite(xml).await?; - } - self.filter.qwrite(xml).await?; - if let Some(tz) = &self.timezone { - tz.qwrite(xml).await?; - } - xml.q.write_event_async(Event::End(end)).await - } -} - -impl QWrite for CalendarMultiget { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let start = xml.create_cal_element("calendar-multiget"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - if let Some(selector) = &self.selector { - selector.qwrite(xml).await?; - } - for href in self.href.iter() { - href.qwrite(xml).await?; - } - xml.q.write_event_async(Event::End(end)).await - } -} - -impl QWrite for FreeBusyQuery { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let start = xml.create_cal_element("free-busy-query"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - self.0.qwrite(xml).await?; - xml.q.write_event_async(Event::End(end)).await - } -} - -// -------------------------- 
DAV::prop -------------------------------------- -impl QWrite for PropertyRequest { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let mut atom = async |c| { - let empty_tag = xml.create_cal_element(c); - xml.q.write_event_async(Event::Empty(empty_tag)).await - }; - - match self { - Self::CalendarDescription => atom("calendar-description").await, - Self::CalendarTimezone => atom("calendar-timezone").await, - Self::SupportedCalendarComponentSet => atom("supported-calendar-component-set").await, - Self::SupportedCalendarData => atom("supported-calendar-data").await, - Self::MaxResourceSize => atom("max-resource-size").await, - Self::MinDateTime => atom("min-date-time").await, - Self::MaxDateTime => atom("max-date-time").await, - Self::MaxInstances => atom("max-instances").await, - Self::MaxAttendeesPerInstance => atom("max-attendees-per-instance").await, - Self::SupportedCollationSet => atom("supported-collation-set").await, - Self::CalendarData(req) => req.qwrite(xml).await, - } - } -} -impl QWrite for Property { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - match self { - Self::CalendarDescription { lang, text } => { - let mut start = xml.create_cal_element("calendar-description"); - if let Some(the_lang) = lang { - start.push_attribute(("xml:lang", the_lang.as_str())); - } - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - xml.q.write_event_async(Event::Text(BytesText::new(text))).await?; - xml.q.write_event_async(Event::End(end)).await - }, - Self::CalendarTimezone(payload) => { - let start = xml.create_cal_element("calendar-timezone"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - xml.q.write_event_async(Event::Text(BytesText::new(payload))).await?; - xml.q.write_event_async(Event::End(end)).await - }, - Self::SupportedCalendarComponentSet(many_comp) => { - let start = xml.create_cal_element("supported-calendar-component-set"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - for comp in many_comp.iter() { - comp.qwrite(xml).await?; - } - xml.q.write_event_async(Event::End(end)).await - }, - Self::SupportedCalendarData(many_mime) => { - let start = xml.create_cal_element("supported-calendar-data"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - for mime in many_mime.iter() { - mime.qwrite(xml).await?; - } - xml.q.write_event_async(Event::End(end)).await - }, - Self::MaxResourceSize(bytes) => { - let start = xml.create_cal_element("max-resource-size"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - xml.q.write_event_async(Event::Text(BytesText::new(bytes.to_string().as_str()))).await?; - xml.q.write_event_async(Event::End(end)).await - }, - Self::MinDateTime(dt) => { - let start = xml.create_cal_element("min-date-time"); - let end = start.to_end(); - - let dtstr = format!("{}", dt.format(ICAL_DATETIME_FMT)); - xml.q.write_event_async(Event::Start(start.clone())).await?; - xml.q.write_event_async(Event::Text(BytesText::new(dtstr.as_str()))).await?; - xml.q.write_event_async(Event::End(end)).await - }, - Self::MaxDateTime(dt) => { - let start = xml.create_cal_element("max-date-time"); - let end = start.to_end(); - - let dtstr = format!("{}", dt.format(ICAL_DATETIME_FMT)); - xml.q.write_event_async(Event::Start(start.clone())).await?; - xml.q.write_event_async(Event::Text(BytesText::new(dtstr.as_str()))).await?; - 
xml.q.write_event_async(Event::End(end)).await - }, - Self::MaxInstances(count) => { - let start = xml.create_cal_element("max-instances"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - xml.q.write_event_async(Event::Text(BytesText::new(count.to_string().as_str()))).await?; - xml.q.write_event_async(Event::End(end)).await - }, - Self::MaxAttendeesPerInstance(count) => { - let start = xml.create_cal_element("max-attendees-per-instance"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - xml.q.write_event_async(Event::Text(BytesText::new(count.to_string().as_str()))).await?; - xml.q.write_event_async(Event::End(end)).await - }, - Self::SupportedCollationSet(many_collations) => { - let start = xml.create_cal_element("supported-collation-set"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - for collation in many_collations.iter() { - collation.qwrite(xml).await?; - } - xml.q.write_event_async(Event::End(end)).await - }, - Self::CalendarData(inner) => inner.qwrite(xml).await, - } - } -} - -// ---------------------- DAV::resourcetype ---------------------------------- -impl QWrite for ResourceType { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - match self { - Self::Calendar => { - let empty_tag = xml.create_dav_element("calendar"); - xml.q.write_event_async(Event::Empty(empty_tag)).await - }, - } - } -} - -// --------------------------- DAV::error ------------------------------------ -impl QWrite for Violation { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let mut atom = async |c| { - let empty_tag = xml.create_cal_element(c); - xml.q.write_event_async(Event::Empty(empty_tag)).await - }; - - match self { - //@FIXME - // DAV elements, should not be here but in RFC3744 on ACLs - // (we do not use atom as this error is in the DAV namespace, not the caldav one) - Self::NeedPrivileges => { - let empty_tag = xml.create_dav_element("need-privileges"); - xml.q.write_event_async(Event::Empty(empty_tag)).await - }, - - // Regular CalDAV errors - Self::ResourceMustBeNull => atom("resource-must-be-null").await, - Self::CalendarCollectionLocationOk => atom("calendar-collection-location-ok").await, - Self::ValidCalendarData => atom("valid-calendar-data").await, - Self::InitializeCalendarCollection => atom("initialize-calendar-collection").await, - Self::SupportedCalendarData => atom("supported-calendar-data").await, - Self::ValidCalendarObjectResource => atom("valid-calendar-object-resource").await, - Self::SupportedCalendarComponent => atom("supported-calendar-component").await, - Self::NoUidConflict(href) => { - let start = xml.create_cal_element("no-uid-conflict"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - href.qwrite(xml).await?; - xml.q.write_event_async(Event::End(end)).await - }, - Self::MaxResourceSize => atom("max-resource-size").await, - Self::MinDateTime => atom("min-date-time").await, - Self::MaxDateTime => atom("max-date-time").await, - Self::MaxInstances => atom("max-instances").await, - Self::MaxAttendeesPerInstance => atom("max-attendees-per-instance").await, - Self::ValidFilter => atom("valid-filter").await, - Self::SupportedFilter { comp, prop, param } => { - let start = xml.create_cal_element("supported-filter"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - for comp_item in comp.iter() { - 
comp_item.qwrite(xml).await?; - } - for prop_item in prop.iter() { - prop_item.qwrite(xml).await?; - } - for param_item in param.iter() { - param_item.qwrite(xml).await?; - } - xml.q.write_event_async(Event::End(end)).await - }, - Self::NumberOfMatchesWithinLimits => atom("number-of-matches-within-limits").await, - } - } -} - - -// ---------------------------- Inner XML ------------------------------------ -impl QWrite for SupportedCollation { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let start = xml.create_cal_element("supported-collation"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - self.0.qwrite(xml).await?; - xml.q.write_event_async(Event::End(end)).await - - } -} - -impl QWrite for Collation { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let col = match self { - Self::AsciiCaseMap => "i;ascii-casemap", - Self::Octet => "i;octet", - Self::Unknown(v) => v.as_str(), - }; - - xml.q.write_event_async(Event::Text(BytesText::new(col))).await - } -} - -impl QWrite for CalendarDataPayload { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let mut start = xml.create_cal_element("calendar-data"); - if let Some(mime) = &self.mime { - start.push_attribute(("content-type", mime.content_type.as_str())); - start.push_attribute(("version", mime.version.as_str())); - } - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - xml.q.write_event_async(Event::Text(BytesText::new(self.payload.as_str()))).await?; - xml.q.write_event_async(Event::End(end)).await - } -} - -impl QWrite for CalendarDataRequest { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let mut start = xml.create_cal_element("calendar-data"); - if let Some(mime) = &self.mime { - start.push_attribute(("content-type", mime.content_type.as_str())); - start.push_attribute(("version", mime.version.as_str())); - } - let end = start.to_end(); - xml.q.write_event_async(Event::Start(start.clone())).await?; - if let Some(comp) = &self.comp { - comp.qwrite(xml).await?; - } - if let Some(recurrence) = &self.recurrence { - recurrence.qwrite(xml).await?; - } - if let Some(freebusy) = &self.limit_freebusy_set { - freebusy.qwrite(xml).await?; - } - xml.q.write_event_async(Event::End(end)).await - } -} - -impl QWrite for CalendarDataEmpty { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let mut empty = xml.create_cal_element("calendar-data"); - if let Some(mime) = &self.0 { - empty.push_attribute(("content-type", mime.content_type.as_str())); - empty.push_attribute(("version", mime.version.as_str())); - } - xml.q.write_event_async(Event::Empty(empty)).await - } -} - -impl QWrite for Comp { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let mut start = xml.create_cal_element("comp"); - start.push_attribute(("name", self.name.as_str())); - match &self.additional_rules { - None => xml.q.write_event_async(Event::Empty(start)).await, - Some(rules) => { - let end = start.to_end(); - xml.q.write_event_async(Event::Start(start.clone())).await?; - rules.prop_kind.qwrite(xml).await?; - rules.comp_kind.qwrite(xml).await?; - xml.q.write_event_async(Event::End(end)).await - }, - } - } -} - -impl QWrite for CompSupport { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let mut empty = xml.create_cal_element("comp"); - empty.push_attribute(("name", self.0.as_str())); - xml.q.write_event_async(Event::Empty(empty)).await - } -} - 
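// ---------------------------------------------------------------------------
// Editor's note (illustrative sketch, not part of the patch): the QWrite impls
// above all follow the same shape: build a namespaced start tag, then emit a
// single `Event::Empty` when the element has no content, or a `Start` /
// children / `End` triple when it does. The standalone function below
// reproduces that shape with quick-xml directly; it assumes quick-xml's
// `async-tokio` feature (already needed by the deleted code) and uses a
// hypothetical `C:demo` element rather than a real CalDAV one.
use quick_xml::events::{BytesStart, BytesText, Event};
use quick_xml::{Error as QError, Writer};
use tokio::io::AsyncWrite;

async fn write_demo_element<W: AsyncWrite + Unpin>(
    q: &mut Writer<W>,
    name: &str,
    children: &[&str],
) -> Result<(), QError> {
    let mut start = BytesStart::new("C:demo");
    start.push_attribute(("name", name));

    if children.is_empty() {
        // <C:demo name="..."/>
        q.write_event_async(Event::Empty(start)).await
    } else {
        // <C:demo name="...">child text...</C:demo>
        let end = start.to_end();
        q.write_event_async(Event::Start(start.clone())).await?;
        for child in children.iter().copied() {
            q.write_event_async(Event::Text(BytesText::new(child))).await?;
        }
        q.write_event_async(Event::End(end)).await
    }
}
// ---------------------------------------------------------------------------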
-impl QWrite for CompKind { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - match self { - Self::AllComp => { - let empty_tag = xml.create_cal_element("allcomp"); - xml.q.write_event_async(Event::Empty(empty_tag)).await - }, - Self::Comp(many_comp) => { - for comp in many_comp.iter() { - // Required: recursion in an async fn requires boxing - // rustc --explain E0733 - Box::pin(comp.qwrite(xml)).await?; - } - Ok(()) - } - } - } -} - -impl QWrite for PropKind { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - match self { - Self::AllProp => { - let empty_tag = xml.create_cal_element("allprop"); - xml.q.write_event_async(Event::Empty(empty_tag)).await - }, - Self::Prop(many_prop) => { - for prop in many_prop.iter() { - prop.qwrite(xml).await?; - } - Ok(()) - } - } - } -} - -impl QWrite for CalProp { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let mut empty = xml.create_cal_element("prop"); - empty.push_attribute(("name", self.name.0.as_str())); - match self.novalue { - None => (), - Some(true) => empty.push_attribute(("novalue", "yes")), - Some(false) => empty.push_attribute(("novalue", "no")), - } - xml.q.write_event_async(Event::Empty(empty)).await - } -} - -impl QWrite for RecurrenceModifier { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - match self { - Self::Expand(exp) => exp.qwrite(xml).await, - Self::LimitRecurrenceSet(lrs) => lrs.qwrite(xml).await, - } - } -} - -impl QWrite for Expand { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let mut empty = xml.create_cal_element("expand"); - empty.push_attribute(("start", format!("{}", self.0.format(ICAL_DATETIME_FMT)).as_str())); - empty.push_attribute(("end", format!("{}", self.1.format(ICAL_DATETIME_FMT)).as_str())); - xml.q.write_event_async(Event::Empty(empty)).await - } -} - -impl QWrite for LimitRecurrenceSet { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let mut empty = xml.create_cal_element("limit-recurrence-set"); - empty.push_attribute(("start", format!("{}", self.0.format(ICAL_DATETIME_FMT)).as_str())); - empty.push_attribute(("end", format!("{}", self.1.format(ICAL_DATETIME_FMT)).as_str())); - xml.q.write_event_async(Event::Empty(empty)).await - } -} - -impl QWrite for LimitFreebusySet { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let mut empty = xml.create_cal_element("limit-freebusy-set"); - empty.push_attribute(("start", format!("{}", self.0.format(ICAL_DATETIME_FMT)).as_str())); - empty.push_attribute(("end", format!("{}", self.1.format(ICAL_DATETIME_FMT)).as_str())); - xml.q.write_event_async(Event::Empty(empty)).await - } -} - -impl QWrite for CalendarSelector { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - match self { - Self::AllProp => { - let empty_tag = xml.create_dav_element("allprop"); - xml.q.write_event_async(Event::Empty(empty_tag)).await - }, - Self::PropName => { - let empty_tag = xml.create_dav_element("propname"); - xml.q.write_event_async(Event::Empty(empty_tag)).await - }, - Self::Prop(prop) => prop.qwrite(xml).await, - } - } -} - -impl QWrite for CompFilter { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let mut start = xml.create_cal_element("comp-filter"); - start.push_attribute(("name", self.name.as_str())); - - match &self.additional_rules { - None => xml.q.write_event_async(Event::Empty(start)).await, - Some(rules) => { - let end = start.to_end(); - - 
xml.q.write_event_async(Event::Start(start.clone())).await?; - rules.qwrite(xml).await?; - xml.q.write_event_async(Event::End(end)).await - } - } - } -} - -impl QWrite for CompFilterRules { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - match self { - Self::IsNotDefined => { - let empty_tag = xml.create_dav_element("is-not-defined"); - xml.q.write_event_async(Event::Empty(empty_tag)).await - }, - Self::Matches(cfm) => cfm.qwrite(xml).await, - } - } -} - -impl QWrite for CompFilterMatch { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - if let Some(time_range) = &self.time_range { - time_range.qwrite(xml).await?; - } - - for prop_item in self.prop_filter.iter() { - prop_item.qwrite(xml).await?; - } - for comp_item in self.comp_filter.iter() { - // Required: recursion in an async fn requires boxing - // rustc --explain E0733 - Box::pin(comp_item.qwrite(xml)).await?; - } - Ok(()) - } -} - -impl QWrite for PropFilter { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let mut start = xml.create_cal_element("prop-filter"); - start.push_attribute(("name", self.name.as_str())); - - match &self.additional_rules { - None => xml.q.write_event_async(Event::Empty(start.clone())).await, - Some(rules) => { - let end = start.to_end(); - xml.q.write_event_async(Event::Start(start.clone())).await?; - rules.qwrite(xml).await?; - xml.q.write_event_async(Event::End(end)).await - } - } - } -} - -impl QWrite for PropFilterRules { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - match self { - Self::IsNotDefined => { - let empty_tag = xml.create_dav_element("is-not-defined"); - xml.q.write_event_async(Event::Empty(empty_tag)).await - }, - Self::Match(prop_match) => prop_match.qwrite(xml).await, - } - } -} - -impl QWrite for PropFilterMatch { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - if let Some(time_range) = &self.time_range { - time_range.qwrite(xml).await?; - } - if let Some(time_or_text) = &self.time_or_text { - time_or_text.qwrite(xml).await?; - } - for param_item in self.param_filter.iter() { - param_item.qwrite(xml).await?; - } - Ok(()) - } -} - -impl QWrite for TimeOrText { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - match self { - Self::Time(time) => time.qwrite(xml).await, - Self::Text(txt) => txt.qwrite(xml).await, - } - } -} - -impl QWrite for TextMatch { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let mut start = xml.create_cal_element("text-match"); - if let Some(collation) = &self.collation { - start.push_attribute(("collation", collation.as_str())); - } - match self.negate_condition { - None => (), - Some(true) => start.push_attribute(("negate-condition", "yes")), - Some(false) => start.push_attribute(("negate-condition", "no")), - } - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - xml.q.write_event_async(Event::Text(BytesText::new(self.text.as_str()))).await?; - xml.q.write_event_async(Event::End(end)).await - } -} - -impl QWrite for ParamFilter { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let mut start = xml.create_cal_element("param-filter"); - start.push_attribute(("name", self.name.as_str())); - - match &self.additional_rules { - None => xml.q.write_event_async(Event::Empty(start)).await, - Some(rules) => { - let end = start.to_end(); - xml.q.write_event_async(Event::Start(start.clone())).await?; - rules.qwrite(xml).await?; - 
xml.q.write_event_async(Event::End(end)).await - } - } - } -} - -impl QWrite for ParamFilterMatch { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - match self { - Self::IsNotDefined => { - let empty_tag = xml.create_dav_element("is-not-defined"); - xml.q.write_event_async(Event::Empty(empty_tag)).await - }, - Self::Match(tm) => tm.qwrite(xml).await, - } - } -} - -impl QWrite for TimeZone { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let mut start = xml.create_cal_element("timezone"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - xml.q.write_event_async(Event::Text(BytesText::new(self.0.as_str()))).await?; - xml.q.write_event_async(Event::End(end)).await - } -} - -impl QWrite for Filter { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let mut start = xml.create_cal_element("filter"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - self.0.qwrite(xml).await?; - xml.q.write_event_async(Event::End(end)).await - } -} - -impl QWrite for TimeRange { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let mut empty = xml.create_cal_element("time-range"); - match self { - Self::OnlyStart(start) => empty.push_attribute(("start", format!("{}", start.format(ICAL_DATETIME_FMT)).as_str())), - Self::OnlyEnd(end) => empty.push_attribute(("end", format!("{}", end.format(ICAL_DATETIME_FMT)).as_str())), - Self::FullRange(start, end) => { - empty.push_attribute(("start", format!("{}", start.format(ICAL_DATETIME_FMT)).as_str())); - empty.push_attribute(("end", format!("{}", end.format(ICAL_DATETIME_FMT)).as_str())); - } - } - xml.q.write_event_async(Event::Empty(empty)).await - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::dav::types as dav; - use crate::dav::realization::Calendar; - use tokio::io::AsyncWriteExt; - use chrono::{Utc,TimeZone,DateTime}; - - async fn serialize(elem: &impl QWrite) -> String { - let mut buffer = Vec::new(); - let mut tokio_buffer = tokio::io::BufWriter::new(&mut buffer); - let q = quick_xml::writer::Writer::new_with_indent(&mut tokio_buffer, b' ', 4); - let ns_to_apply = vec![ - ("xmlns:D".into(), "DAV:".into()), - ("xmlns:C".into(), "urn:ietf:params:xml:ns:caldav".into()), - ]; - let mut writer = Writer { q, ns_to_apply }; - - elem.qwrite(&mut writer).await.expect("xml serialization"); - tokio_buffer.flush().await.expect("tokio buffer flush"); - let got = std::str::from_utf8(buffer.as_slice()).unwrap(); - - return got.into() - } - - #[tokio::test] - async fn basic_violation() { - let got = serialize( - &dav::Error::(vec![ - dav::Violation::Extension(Violation::ResourceMustBeNull), - ]) - ).await; - - let expected = r#" - -"#; - - assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); - } - - #[tokio::test] - async fn rfc_calendar_query1_req() { - let got = serialize( - &CalendarQuery:: { - selector: Some(CalendarSelector::Prop(dav::PropName(vec![ - dav::PropertyRequest::GetEtag, - dav::PropertyRequest::Extension(PropertyRequest::CalendarData(CalendarDataRequest { - mime: None, - comp: Some(Comp { - name: Component::VCalendar, - additional_rules: Some(CompInner { - prop_kind: PropKind::Prop(vec![ - CalProp { - name: ComponentProperty("VERSION".into()), - novalue: None, - } - ]), - comp_kind: CompKind::Comp(vec![ - Comp { - name: Component::VEvent, - additional_rules: Some(CompInner { - prop_kind: PropKind::Prop(vec![ - CalProp { name: 
ComponentProperty("SUMMARY".into()), novalue: None }, - CalProp { name: ComponentProperty("UID".into()), novalue: None }, - CalProp { name: ComponentProperty("DTSTART".into()), novalue: None }, - CalProp { name: ComponentProperty("DTEND".into()), novalue: None }, - CalProp { name: ComponentProperty("DURATION".into()), novalue: None }, - CalProp { name: ComponentProperty("RRULE".into()), novalue: None }, - CalProp { name: ComponentProperty("RDATE".into()), novalue: None }, - CalProp { name: ComponentProperty("EXRULE".into()), novalue: None }, - CalProp { name: ComponentProperty("EXDATE".into()), novalue: None }, - CalProp { name: ComponentProperty("RECURRENCE-ID".into()), novalue: None }, - ]), - comp_kind: CompKind::Comp(vec![]), - }), - }, - Comp { - name: Component::VTimeZone, - additional_rules: None, - } - ]), - }), - }), - recurrence: None, - limit_freebusy_set: None, - })), - ]))), - filter: Filter(CompFilter { - name: Component::VCalendar, - additional_rules: Some(CompFilterRules::Matches(CompFilterMatch { - time_range: None, - prop_filter: vec![], - comp_filter: vec![ - CompFilter { - name: Component::VEvent, - additional_rules: Some(CompFilterRules::Matches(CompFilterMatch { - time_range: Some(TimeRange::FullRange( - Utc.with_ymd_and_hms(2006,1,4,0,0,0).unwrap(), - Utc.with_ymd_and_hms(2006,1,5,0,0,0).unwrap(), - )), - prop_filter: vec![], - comp_filter: vec![], - })), - }, - ], - })), - }), - timezone: None, - } - ).await; - - let expected = r#" - - - - - - - - - - - - - - - - - - - - - - - - - - - - -"#; - - assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); - } - - #[tokio::test] - async fn rfc_calendar_query1_res() { - let got = serialize( - &dav::Multistatus::> { - responses: vec![ - dav::Response { - status_or_propstat: dav::StatusOrPropstat::PropStat( - dav::Href("http://cal.example.com/bernard/work/abcd2.ics".into()), - vec![dav::PropStat { - prop: dav::PropValue(vec![ - dav::Property::GetEtag("\"fffff-abcd2\"".into()), - dav::Property::Extension(Property::CalendarData(CalendarDataPayload { - mime: None, - payload: "PLACEHOLDER".into() - })), - ]), - status: dav::Status(http::status::StatusCode::OK), - error: None, - responsedescription: None, - }] - ), - location: None, - error: None, - responsedescription: None, - }, - dav::Response { - status_or_propstat: dav::StatusOrPropstat::PropStat( - dav::Href("http://cal.example.com/bernard/work/abcd3.ics".into()), - vec![dav::PropStat { - prop: dav::PropValue(vec![ - dav::Property::GetEtag("\"fffff-abcd3\"".into()), - dav::Property::Extension(Property::CalendarData(CalendarDataPayload{ - mime: None, - payload: "PLACEHOLDER".into(), - })), - ]), - status: dav::Status(http::status::StatusCode::OK), - error: None, - responsedescription: None, - }] - ), - location: None, - error: None, - responsedescription: None, - }, - ], - responsedescription: None, - }, - ).await; - - let expected = r#" - - http://cal.example.com/bernard/work/abcd2.ics - - - "fffff-abcd2" - PLACEHOLDER - - HTTP/1.1 200 OK - - - - http://cal.example.com/bernard/work/abcd3.ics - - - "fffff-abcd3" - PLACEHOLDER - - HTTP/1.1 200 OK - - -"#; - - - assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); - } -} diff --git a/src/dav/caltypes.rs b/src/dav/caltypes.rs deleted file mode 100644 index befecef..0000000 --- a/src/dav/caltypes.rs +++ /dev/null @@ -1,1440 +0,0 @@ -#![allow(dead_code)] - -use chrono::{DateTime,Utc}; -use super::types as dav; -use super::xml; - -//@FIXME ACL (rfc3744) is missing, required -//@FIXME 
Versioning (rfc3253) is missing, required -//@FIXME WebDAV sync (rfc6578) is missing, optional -// For reference, SabreDAV guide gives high-level & real-world overview: -// https://sabre.io/dav/building-a-caldav-client/ -// For reference, non-official extensions documented by SabreDAV: -// https://github.com/apple/ccs-calendarserver/tree/master/doc/Extensions - - -// ----- Root elements ----- - -// --- (MKCALENDAR PART) --- - -/// If a request body is included, it MUST be a CALDAV:mkcalendar XML -/// element. Instruction processing MUST occur in the order -/// instructions are received (i.e., from top to bottom). -/// Instructions MUST either all be executed or none executed. Thus, -/// if any error occurs during processing, all executed instructions -/// MUST be undone and a proper error result returned. Instruction -/// processing details can be found in the definition of the DAV:set -/// instruction in Section 12.13.2 of [RFC2518]. -/// -/// -#[derive(Debug, PartialEq)] -pub struct MkCalendar(pub dav::Set); - - -/// If a response body for a successful request is included, it MUST -/// be a CALDAV:mkcalendar-response XML element. -/// -/// -/// -/// ---- -/// -/// ANY is not satisfying, so looking at RFC5689 -/// https://www.rfc-editor.org/rfc/rfc5689.html#section-5.2 -/// -/// Definition: -/// -/// -#[derive(Debug, PartialEq)] -pub struct MkCalendarResponse>(pub Vec>); - -// --- (REPORT PART) --- - -/// Name: calendar-query -/// -/// Namespace: urn:ietf:params:xml:ns:caldav -/// -/// Purpose: Defines a report for querying calendar object resources. -/// -/// Description: See Section 7.8. -/// -/// Definition: -/// -/// -#[derive(Debug, PartialEq)] -pub struct CalendarQuery { - pub selector: Option>, - pub filter: Filter, - pub timezone: Option, -} - -/// Name: calendar-multiget -/// -/// Namespace: urn:ietf:params:xml:ns:caldav -/// -/// Purpose: CalDAV report used to retrieve specific calendar object -/// resources. -/// -/// Description: See Section 7.9. -/// -/// Definition: -/// -/// -#[derive(Debug, PartialEq)] -pub struct CalendarMultiget { - pub selector: Option>, - pub href: Vec, -} - -/// Name: free-busy-query -/// -/// Namespace: urn:ietf:params:xml:ns:caldav -/// -/// Purpose: CalDAV report used to generate a VFREEBUSY to determine -/// busy time over a specific time range. -/// -/// Description: See Section 7.10. -/// -/// Definition: -/// -#[derive(Debug, PartialEq)] -pub struct FreeBusyQuery(pub TimeRange); - -// ----- Hooks ----- -#[derive(Debug, PartialEq)] -pub enum ResourceType { - Calendar, -} - -/// Check the matching Property object for documentation -#[derive(Debug, PartialEq)] -pub enum PropertyRequest { - CalendarDescription, - CalendarTimezone, - SupportedCalendarComponentSet, - SupportedCalendarData, - MaxResourceSize, - MinDateTime, - MaxDateTime, - MaxInstances, - MaxAttendeesPerInstance, - SupportedCollationSet, - CalendarData(CalendarDataRequest), -} - -#[derive(Debug, PartialEq)] -pub enum Property { - /// Name: calendar-description - /// - /// Namespace: urn:ietf:params:xml:ns:caldav - /// - /// Purpose: Provides a human-readable description of the calendar - /// collection. - /// - /// Conformance: This property MAY be defined on any calendar - /// collection. If defined, it MAY be protected and SHOULD NOT be - /// returned by a PROPFIND DAV:allprop request (as defined in Section - /// 12.14.1 of [RFC2518]). 
An xml:lang attribute indicating the human - /// language of the description SHOULD be set for this property by - /// clients or through server provisioning. Servers MUST return any - /// xml:lang attribute if set for the property. - /// - /// Description: If present, the property contains a description of the - /// calendar collection that is suitable for presentation to a user. - /// If not present, the client should assume no description for the - /// calendar collection. - /// - /// Definition: - /// - /// - /// PCDATA value: string - /// - /// Example: - /// - /// Calendrier de Mathilde Desruisseaux - CalendarDescription { - lang: Option, - text: String, - }, - - /// 5.2.2. CALDAV:calendar-timezone Property - /// - /// Name: calendar-timezone - /// - /// Namespace: urn:ietf:params:xml:ns:caldav - /// - /// Purpose: Specifies a time zone on a calendar collection. - /// - /// Conformance: This property SHOULD be defined on all calendar - /// collections. If defined, it SHOULD NOT be returned by a PROPFIND - /// DAV:allprop request (as defined in Section 12.14.1 of [RFC2518]). - /// - /// Description: The CALDAV:calendar-timezone property is used to - /// specify the time zone the server should rely on to resolve "date" - /// values and "date with local time" values (i.e., floating time) to - /// "date with UTC time" values. The server will require this - /// information to determine if a calendar component scheduled with - /// "date" values or "date with local time" values overlaps a CALDAV: - /// time-range specified in a CALDAV:calendar-query REPORT. The - /// server will also require this information to compute the proper - /// FREEBUSY time period as "date with UTC time" in the VFREEBUSY - /// component returned in a response to a CALDAV:free-busy-query - /// REPORT request that takes into account calendar components - /// scheduled with "date" values or "date with local time" values. In - /// the absence of this property, the server MAY rely on the time zone - /// of their choice. - /// - /// Note: The iCalendar data embedded within the CALDAV:calendar- - /// timezone XML element MUST follow the standard XML character data - /// encoding rules, including use of <, >, & etc. entity - /// encoding or the use of a construct. In the - /// later case, the iCalendar data cannot contain the character - /// sequence "]]>", which is the end delimiter for the CDATA section. - /// - /// Definition: - /// - /// - /// PCDATA value: an iCalendar object with exactly one VTIMEZONE component. - /// - /// Example: - /// - /// BEGIN:VCALENDAR - /// PRODID:-//Example Corp.//CalDAV Client//EN - /// VERSION:2.0 - /// BEGIN:VTIMEZONE - /// TZID:US-Eastern - /// LAST-MODIFIED:19870101T000000Z - /// BEGIN:STANDARD - /// DTSTART:19671029T020000 - /// RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10 - /// TZOFFSETFROM:-0400 - /// TZOFFSETTO:-0500 - /// TZNAME:Eastern Standard Time (US & Canada) - /// END:STANDARD - /// BEGIN:DAYLIGHT - /// DTSTART:19870405T020000 - /// RRULE:FREQ=YEARLY;BYDAY=1SU;BYMONTH=4 - /// TZOFFSETFROM:-0500 - /// TZOFFSETTO:-0400 - /// TZNAME:Eastern Daylight Time (US & Canada) - /// END:DAYLIGHT - /// END:VTIMEZONE - /// END:VCALENDAR - /// - //@FIXME we might want to put a buffer here or an iCal parsed object - CalendarTimezone(String), - - /// Name: supported-calendar-component-set - /// - /// Namespace: urn:ietf:params:xml:ns:caldav - /// - /// Purpose: Specifies the calendar component types (e.g., VEVENT, - /// VTODO, etc.) 
that calendar object resources can contain in the - /// calendar collection. - /// - /// Conformance: This property MAY be defined on any calendar - /// collection. If defined, it MUST be protected and SHOULD NOT be - /// returned by a PROPFIND DAV:allprop request (as defined in Section - /// 12.14.1 of [RFC2518]). - /// - /// Description: The CALDAV:supported-calendar-component-set property is - /// used to specify restrictions on the calendar component types that - /// calendar object resources may contain in a calendar collection. - /// Any attempt by the client to store calendar object resources with - /// component types not listed in this property, if it exists, MUST - /// result in an error, with the CALDAV:supported-calendar-component - /// precondition (Section 5.3.2.1) being violated. Since this - /// property is protected, it cannot be changed by clients using a - /// PROPPATCH request. However, clients can initialize the value of - /// this property when creating a new calendar collection with - /// MKCALENDAR. The empty-element tag MUST - /// only be specified if support for calendar object resources that - /// only contain VTIMEZONE components is provided or desired. Support - /// for VTIMEZONE components in calendar object resources that contain - /// VEVENT or VTODO components is always assumed. In the absence of - /// this property, the server MUST accept all component types, and the - /// client can assume that all component types are accepted. - /// - /// Definition: - /// - /// - /// - /// Example: - /// - /// - /// - /// - /// - SupportedCalendarComponentSet(Vec), - - /// Name: supported-calendar-data - /// - /// Namespace: urn:ietf:params:xml:ns:caldav - /// - /// Purpose: Specifies what media types are allowed for calendar object - /// resources in a calendar collection. - /// - /// Conformance: This property MAY be defined on any calendar - /// collection. If defined, it MUST be protected and SHOULD NOT be - /// returned by a PROPFIND DAV:allprop request (as defined in Section - /// 12.14.1 of [RFC2518]). - /// - /// Description: The CALDAV:supported-calendar-data property is used to - /// specify the media type supported for the calendar object resources - /// contained in a given calendar collection (e.g., iCalendar version - /// 2.0). Any attempt by the client to store calendar object - /// resources with a media type not listed in this property MUST - /// result in an error, with the CALDAV:supported-calendar-data - /// precondition (Section 5.3.2.1) being violated. In the absence of - /// this property, the server MUST only accept data with the media - /// type "text/calendar" and iCalendar version 2.0, and clients can - /// assume that the server will only accept this data. - /// - /// Definition: - /// - /// - /// - /// Example: - /// - /// - /// - /// - /// - /// ----- - /// - /// - /// - /// when nested in the CALDAV:supported-calendar-data property - /// to specify a supported media type for calendar object - /// resources; - SupportedCalendarData(Vec), - - /// Name: max-resource-size - /// - /// Namespace: urn:ietf:params:xml:ns:caldav - /// - /// Purpose: Provides a numeric value indicating the maximum size of a - /// resource in octets that the server is willing to accept when a - /// calendar object resource is stored in a calendar collection. - /// - /// Conformance: This property MAY be defined on any calendar - /// collection. 
If defined, it MUST be protected and SHOULD NOT be - /// returned by a PROPFIND DAV:allprop request (as defined in Section - /// 12.14.1 of [RFC2518]). - /// - /// Description: The CALDAV:max-resource-size is used to specify a - /// numeric value that represents the maximum size in octets that the - /// server is willing to accept when a calendar object resource is - /// stored in a calendar collection. Any attempt to store a calendar - /// object resource exceeding this size MUST result in an error, with - /// the CALDAV:max-resource-size precondition (Section 5.3.2.1) being - /// violated. In the absence of this property, the client can assume - /// that the server will allow storing a resource of any reasonable - /// size. - /// - /// Definition: - /// - /// - /// PCDATA value: a numeric value (positive integer) - /// - /// Example: - /// - /// - /// 102400 - /// - MaxResourceSize(u64), - - /// CALDAV:min-date-time Property - /// - /// Name: min-date-time - /// - /// Namespace: urn:ietf:params:xml:ns:caldav - /// - /// Purpose: Provides a DATE-TIME value indicating the earliest date and - /// time (in UTC) that the server is willing to accept for any DATE or - /// DATE-TIME value in a calendar object resource stored in a calendar - /// collection. - /// - /// Conformance: This property MAY be defined on any calendar - /// collection. If defined, it MUST be protected and SHOULD NOT be - /// returned by a PROPFIND DAV:allprop request (as defined in Section - /// 12.14.1 of [RFC2518]). - /// - /// Description: The CALDAV:min-date-time is used to specify an - /// iCalendar DATE-TIME value in UTC that indicates the earliest - /// inclusive date that the server is willing to accept for any - /// explicit DATE or DATE-TIME value in a calendar object resource - /// stored in a calendar collection. Any attempt to store a calendar - /// object resource using a DATE or DATE-TIME value earlier than this - /// value MUST result in an error, with the CALDAV:min-date-time - /// precondition (Section 5.3.2.1) being violated. Note that servers - /// MUST accept recurring components that specify instances beyond - /// this limit, provided none of those instances have been overridden. - /// In that case, the server MAY simply ignore those instances outside - /// of the acceptable range when processing reports on the calendar - /// object resource. In the absence of this property, the client can - /// assume any valid iCalendar date may be used at least up to the - /// CALDAV:max-date-time value, if that is defined. - /// - /// Definition: - /// - /// - /// PCDATA value: an iCalendar format DATE-TIME value in UTC - /// - /// Example: - /// - /// - /// 19000101T000000Z - /// - MinDateTime(DateTime), - - /// CALDAV:max-date-time Property - /// - /// Name: max-date-time - /// - /// Namespace: urn:ietf:params:xml:ns:caldav - /// - /// Purpose: Provides a DATE-TIME value indicating the latest date and - /// time (in UTC) that the server is willing to accept for any DATE or - /// DATE-TIME value in a calendar object resource stored in a calendar - /// collection. - /// - /// Conformance: This property MAY be defined on any calendar - /// collection. If defined, it MUST be protected and SHOULD NOT be - /// returned by a PROPFIND DAV:allprop request (as defined in Section - /// 12.14.1 of [RFC2518]). 
- /// - /// Description: The CALDAV:max-date-time is used to specify an - /// iCalendar DATE-TIME value in UTC that indicates the inclusive - /// latest date that the server is willing to accept for any date or - /// time value in a calendar object resource stored in a calendar - /// collection. Any attempt to store a calendar object resource using - /// a DATE or DATE-TIME value later than this value MUST result in an - /// error, with the CALDAV:max-date-time precondition - /// (Section 5.3.2.1) being violated. Note that servers MUST accept - /// recurring components that specify instances beyond this limit, - /// provided none of those instances have been overridden. In that - /// case, the server MAY simply ignore those instances outside of the - /// acceptable range when processing reports on the calendar object - /// resource. In the absence of this property, the client can assume - /// any valid iCalendar date may be used at least down to the CALDAV: - /// min-date-time value, if that is defined. - /// - /// Definition: - /// - /// - /// PCDATA value: an iCalendar format DATE-TIME value in UTC - /// - /// Example: - /// - /// - /// 20491231T235959Z - /// - MaxDateTime(DateTime), - - /// CALDAV:max-instances Property - /// - /// Name: max-instances - /// - /// Namespace: urn:ietf:params:xml:ns:caldav - /// - /// Purpose: Provides a numeric value indicating the maximum number of - /// recurrence instances that a calendar object resource stored in a - /// calendar collection can generate. - /// - /// Conformance: This property MAY be defined on any calendar - /// collection. If defined, it MUST be protected and SHOULD NOT be - /// returned by a PROPFIND DAV:allprop request (as defined in Section - /// 12.14.1 of [RFC2518]). - /// - /// Description: The CALDAV:max-instances is used to specify a numeric - /// value that indicates the maximum number of recurrence instances - /// that a calendar object resource stored in a calendar collection - /// can generate. Any attempt to store a calendar object resource - /// with a recurrence pattern that generates more instances than this - /// value MUST result in an error, with the CALDAV:max-instances - /// precondition (Section 5.3.2.1) being violated. In the absence of - /// this property, the client can assume that the server has no limits - /// on the number of recurrence instances it can handle or expand. - /// - /// Definition: - /// - /// - /// PCDATA value: a numeric value (integer greater than zero) - /// - /// Example: - /// - /// - /// 100 - /// - MaxInstances(u64), - - /// CALDAV:max-attendees-per-instance Property - /// - /// Name: max-attendees-per-instance - /// - /// Namespace: urn:ietf:params:xml:ns:caldav - /// - /// Purpose: Provides a numeric value indicating the maximum number of - /// ATTENDEE properties in any instance of a calendar object resource - /// stored in a calendar collection. - /// - /// Conformance: This property MAY be defined on any calendar - /// collection. If defined, it MUST be protected and SHOULD NOT be - /// returned by a PROPFIND DAV:allprop request (as defined in Section - /// 12.14.1 of [RFC2518]). - /// - /// Description: The CALDAV:max-attendees-per-instance is used to - /// specify a numeric value that indicates the maximum number of - /// iCalendar ATTENDEE properties on any one instance of a calendar - /// object resource stored in a calendar collection. 
Any attempt to - /// store a calendar object resource with more ATTENDEE properties per - /// instance than this value MUST result in an error, with the CALDAV: - /// max-attendees-per-instance precondition (Section 5.3.2.1) being - /// violated. In the absence of this property, the client can assume - /// that the server can handle any number of ATTENDEE properties in a - /// calendar component. - /// - /// Definition: - /// - /// - /// PCDATA value: a numeric value (integer greater than zero) - /// - /// Example: - /// - /// - /// 25 - /// - MaxAttendeesPerInstance(u64), - - /// Name: supported-collation-set - /// - /// Namespace: urn:ietf:params:xml:ns:caldav - /// - /// Purpose: Identifies the set of collations supported by the server - /// for text matching operations. - /// - /// Conformance: This property MUST be defined on any resource that - /// supports a report that does text matching. If defined, it MUST be - /// protected and SHOULD NOT be returned by a PROPFIND DAV:allprop - /// request (as defined in Section 12.14.1 of [RFC2518]). - /// - /// Description: The CALDAV:supported-collation-set property contains - /// zero or more CALDAV:supported-collation elements, which specify - /// the collection identifiers of the collations supported by the - /// server. - /// - /// Definition: - /// - /// - /// - /// - /// Example: - /// - /// - /// i;ascii-casemap - /// i;octet - /// - SupportedCollationSet(Vec), - - /// Name: calendar-data - /// - /// Namespace: urn:ietf:params:xml:ns:caldav - /// - /// Purpose: Specified one of the following: - /// - /// 1. A supported media type for calendar object resources when - /// nested in the CALDAV:supported-calendar-data property; - /// - /// 2. The parts of a calendar object resource should be returned by - /// a calendaring report; - /// - /// 3. The content of a calendar object resource in a response to a - /// calendaring report. - /// - /// Description: When nested in the CALDAV:supported-calendar-data - /// property, the CALDAV:calendar-data XML element specifies a media - /// type supported by the CalDAV server for calendar object resources. - /// - /// When used in a calendaring REPORT request, the CALDAV:calendar- - /// data XML element specifies which parts of calendar object - /// resources need to be returned in the response. If the CALDAV: - /// calendar-data XML element doesn't contain any CALDAV:comp element, - /// calendar object resources will be returned in their entirety. - /// - /// Finally, when used in a calendaring REPORT response, the CALDAV: - /// calendar-data XML element specifies the content of a calendar - /// object resource. Given that XML parsers normalize the two- - /// character sequence CRLF (US-ASCII decimal 13 and US-ASCII decimal - /// 10) to a single LF character (US-ASCII decimal 10), the CR - /// character (US-ASCII decimal 13) MAY be omitted in calendar object - /// resources specified in the CALDAV:calendar-data XML element. - /// Furthermore, calendar object resources specified in the CALDAV: - /// calendar-data XML element MAY be invalid per their media type - /// specification if the CALDAV:calendar-data XML element part of the - /// calendaring REPORT request did not specify required properties - /// (e.g., UID, DTSTAMP, etc.), or specified a CALDAV:prop XML element - /// with the "novalue" attribute set to "yes". - /// - /// Note: The CALDAV:calendar-data XML element is specified in requests - /// and responses inside the DAV:prop XML element as if it were a - /// WebDAV property. 
However, the CALDAV:calendar-data XML element is - /// not a WebDAV property and, as such, is not returned in PROPFIND - /// responses, nor used in PROPPATCH requests. - /// - /// Note: The iCalendar data embedded within the CALDAV:calendar-data - /// XML element MUST follow the standard XML character data encoding - /// rules, including use of <, >, & etc. entity encoding or - /// the use of a construct. In the later case, the - /// iCalendar data cannot contain the character sequence "]]>", which - /// is the end delimiter for the CDATA section. - CalendarData(CalendarDataPayload), -} - -#[derive(Debug, PartialEq)] -pub enum Violation { - /// (DAV:resource-must-be-null): A resource MUST NOT exist at the - /// Request-URI; - ResourceMustBeNull, - - /// (CALDAV:calendar-collection-location-ok): The Request-URI MUST - /// identify a location where a calendar collection can be created; - CalendarCollectionLocationOk, - - /// (CALDAV:valid-calendar-data): The time zone specified in CALDAV: - /// calendar-timezone property MUST be a valid iCalendar object - /// containing a single valid VTIMEZONE component. - ValidCalendarData, - - ///@FIXME should not be here but in RFC3744 - /// !!! ERRATA 1002 !!! - /// (DAV:need-privileges): The DAV:bind privilege MUST be granted to - /// the current user on the parent collection of the Request-URI. - NeedPrivileges, - - /// (CALDAV:initialize-calendar-collection): A new calendar collection - /// exists at the Request-URI. The DAV:resourcetype of the calendar - /// collection MUST contain both DAV:collection and CALDAV:calendar - /// XML elements. - InitializeCalendarCollection, - - /// (CALDAV:supported-calendar-data): The resource submitted in the - /// PUT request, or targeted by a COPY or MOVE request, MUST be a - /// supported media type (i.e., iCalendar) for calendar object - /// resources; - SupportedCalendarData, - - /// (CALDAV:valid-calendar-object-resource): The resource submitted in - /// the PUT request, or targeted by a COPY or MOVE request, MUST obey - /// all restrictions specified in Section 4.1 (e.g., calendar object - /// resources MUST NOT contain more than one type of calendar - /// component, calendar object resources MUST NOT specify the - /// iCalendar METHOD property, etc.); - ValidCalendarObjectResource, - - /// (CALDAV:supported-calendar-component): The resource submitted in - /// the PUT request, or targeted by a COPY or MOVE request, MUST - /// contain a type of calendar component that is supported in the - /// targeted calendar collection; - SupportedCalendarComponent, - - /// (CALDAV:no-uid-conflict): The resource submitted in the PUT - /// request, or targeted by a COPY or MOVE request, MUST NOT specify - /// an iCalendar UID property value already in use in the targeted - /// calendar collection or overwrite an existing calendar object - /// resource with one that has a different UID property value. 
- /// Servers SHOULD report the URL of the resource that is already - /// making use of the same UID property value in the DAV:href element; - /// - /// - NoUidConflict(dav::Href), - - /// (CALDAV:max-resource-size): The resource submitted in the PUT - /// request, or targeted by a COPY or MOVE request, MUST have an octet - /// size less than or equal to the value of the CALDAV:max-resource- - /// size property value (Section 5.2.5) on the calendar collection - /// where the resource will be stored; - MaxResourceSize, - - /// (CALDAV:min-date-time): The resource submitted in the PUT request, - /// or targeted by a COPY or MOVE request, MUST have all of its - /// iCalendar DATE or DATE-TIME property values (for each recurring - /// instance) greater than or equal to the value of the CALDAV:min- - /// date-time property value (Section 5.2.6) on the calendar - /// collection where the resource will be stored; - MinDateTime, - - /// (CALDAV:max-date-time): The resource submitted in the PUT request, - /// or targeted by a COPY or MOVE request, MUST have all of its - /// iCalendar DATE or DATE-TIME property values (for each recurring - /// instance) less than the value of the CALDAV:max-date-time property - /// value (Section 5.2.7) on the calendar collection where the - /// resource will be stored; - MaxDateTime, - - /// (CALDAV:max-instances): The resource submitted in the PUT request, - /// or targeted by a COPY or MOVE request, MUST generate a number of - /// recurring instances less than or equal to the value of the CALDAV: - /// max-instances property value (Section 5.2.8) on the calendar - /// collection where the resource will be stored; - MaxInstances, - - /// (CALDAV:max-attendees-per-instance): The resource submitted in the - /// PUT request, or targeted by a COPY or MOVE request, MUST have a - /// number of ATTENDEE properties on any one instance less than or - /// equal to the value of the CALDAV:max-attendees-per-instance - /// property value (Section 5.2.9) on the calendar collection where - /// the resource will be stored; - MaxAttendeesPerInstance, - - /// (CALDAV:valid-filter): The CALDAV:filter XML element (see - /// Section 9.7) specified in the REPORT request MUST be valid. For - /// instance, a CALDAV:filter cannot nest a - /// element in a element, and a CALDAV:filter - /// cannot nest a element in a - /// element. - ValidFilter, - - /// (CALDAV:supported-filter): The CALDAV:comp-filter (see - /// Section 9.7.1), CALDAV:prop-filter (see Section 9.7.2), and - /// CALDAV:param-filter (see Section 9.7.3) XML elements used in the - /// CALDAV:filter XML element (see Section 9.7) in the REPORT request - /// only make reference to components, properties, and parameters for - /// which queries are supported by the server, i.e., if the CALDAV: - /// filter element attempts to reference an unsupported component, - /// property, or parameter, this precondition is violated. Servers - /// SHOULD report the CALDAV:comp-filter, CALDAV:prop-filter, or - /// CALDAV:param-filter for which it does not provide support. - /// - /// - SupportedFilter { - comp: Vec, - prop: Vec, - param: Vec, - }, - - /// (DAV:number-of-matches-within-limits): The number of matching - /// calendar object resources must fall within server-specific, - /// predefined limits. For example, this condition might be triggered - /// if a search specification would cause the return of an extremely - /// large number of responses. 
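For the CALDAV:no-uid-conflict precondition described above, a store could keep a per-collection UID index and consult it before accepting a PUT. A minimal sketch, assuming a hypothetical HashMap index keyed by iCalendar UID:

use std::collections::HashMap;

// Hypothetical per-collection index: iCalendar UID -> href of the calendar
// object resource that already uses it.
fn check_no_uid_conflict(
    uid_index: &HashMap<String, String>,
    uid: &str,
    target_href: &str,
) -> Result<(), String> {
    match uid_index.get(uid) {
        // Overwriting the same resource with the same UID is allowed.
        Some(existing) if existing == target_href => Ok(()),
        // Conflict: report the href already using this UID, as the
        // CALDAV:no-uid-conflict element suggests.
        Some(existing) => Err(existing.clone()),
        None => Ok(()),
    }
}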
- NumberOfMatchesWithinLimits, -} - -// -------- Inner XML elements --------- - -/// Some of the reports defined in this section do text matches of -/// character strings provided by the client and are compared to stored -/// calendar data. Since iCalendar data is, by default, encoded in the -/// UTF-8 charset and may include characters outside the US-ASCII charset -/// range in some property and parameter values, there is a need to -/// ensure that text matching follows well-defined rules. -/// -/// To deal with this, this specification makes use of the IANA Collation -/// Registry defined in [RFC4790] to specify collations that may be used -/// to carry out the text comparison operations with a well-defined rule. -/// -/// The comparisons used in CalDAV are all "substring" matches, as per -/// [RFC4790], Section 4.2. Collations supported by the server MUST -/// support "substring" match operations. -/// -/// CalDAV servers are REQUIRED to support the "i;ascii-casemap" and -/// "i;octet" collations, as described in [RFC4790], and MAY support -/// other collations. -/// -/// Servers MUST advertise the set of collations that they support via -/// the CALDAV:supported-collation-set property defined on any resource -/// that supports reports that use collations. -/// -/// Clients MUST only use collations from the list advertised by the -/// server. -/// -/// In the absence of a collation explicitly specified by the client, or -/// if the client specifies the "default" collation identifier (as -/// defined in [RFC4790], Section 3.1), the server MUST default to using -/// "i;ascii-casemap" as the collation. -/// -/// Wildcards (as defined in [RFC4790], Section 3.2) MUST NOT be used in -/// the collation identifier. -/// -/// If the client chooses a collation not supported by the server, the -/// server MUST respond with a CALDAV:supported-collation precondition -/// error response. -#[derive(Debug, PartialEq)] -pub struct SupportedCollation(pub Collation); - -/// -/// PCDATA value: iCalendar object -/// -/// when nested in the DAV:prop XML element in a calendaring -/// REPORT response to specify the content of a returned -/// calendar object resource. -#[derive(Debug, PartialEq)] -pub struct CalendarDataPayload { - pub mime: Option, - pub payload: String, -} - -/// -/// -/// when nested in the DAV:prop XML element in a calendaring -/// REPORT request to specify which parts of calendar object -/// resources should be returned in the response; -#[derive(Debug, PartialEq)] -pub struct CalendarDataRequest { - pub mime: Option, - pub comp: Option, - pub recurrence: Option, - pub limit_freebusy_set: Option, -} - -/// calendar-data specialization for Property -/// -/// -/// -/// when nested in the CALDAV:supported-calendar-data property -/// to specify a supported media type for calendar object -/// resources; -#[derive(Debug, PartialEq)] -pub struct CalendarDataEmpty(pub Option); - -/// -/// content-type value: a MIME media type -/// version value: a version string -/// attributes can be used on all three variants of the -/// CALDAV:calendar-data XML element. -#[derive(Debug, PartialEq)] -pub struct CalendarDataSupport { - pub content_type: String, - pub version: String, -} - -/// Name: comp -/// -/// Namespace: urn:ietf:params:xml:ns:caldav -/// -/// Purpose: Defines which component types to return. -/// -/// Description: The name value is a calendar component name (e.g., -/// VEVENT). 
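The collation rules quoted above boil down to substring matching under i;octet or i;ascii-casemap, the latter also being the default. A minimal sketch, not part of this patch:

// The two collations every CalDAV server must support, applied as
// "substring" matches (RFC 4790). "default" maps to i;ascii-casemap.
fn substring_match(collation: &str, haystack: &str, needle: &str) -> bool {
    match collation {
        // i;octet: exact byte-wise comparison.
        "i;octet" => haystack.contains(needle),
        // i;ascii-casemap: ASCII case folding before comparison.
        "i;ascii-casemap" | "default" => haystack
            .to_ascii_lowercase()
            .contains(&needle.to_ascii_lowercase()),
        // Anything else should trigger a CALDAV:supported-collation
        // precondition error rather than silently failing the match.
        _ => false,
    }
}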
-/// -/// Definition: -/// -/// -/// -/// name value: a calendar component name -/// -/// Note: The CALDAV:prop and CALDAV:allprop elements have the same name -/// as the DAV:prop and DAV:allprop elements defined in [RFC2518]. -/// However, the CALDAV:prop and CALDAV:allprop elements are defined -/// in the "urn:ietf:params:xml:ns:caldav" namespace instead of the -/// "DAV:" namespace. -#[derive(Debug, PartialEq)] -pub struct Comp { - pub name: Component, - pub additional_rules: Option, -} - -#[derive(Debug, PartialEq)] -pub struct CompInner { - pub prop_kind: PropKind, - pub comp_kind: CompKind, -} - -/// For SupportedCalendarComponentSet -/// -/// Definition: -/// -/// -/// -/// Example: -/// -/// -/// -/// -/// -#[derive(Debug, PartialEq)] -pub struct CompSupport(pub Component); - -/// Name: allcomp -/// -/// Namespace: urn:ietf:params:xml:ns:caldav -/// -/// Purpose: Specifies that all components shall be returned. -/// -/// Description: The CALDAV:allcomp XML element can be used when the -/// client wants all types of components returned by a calendaring -/// REPORT request. -/// -/// Definition: -/// -/// -#[derive(Debug, PartialEq)] -pub enum CompKind { - AllComp, - Comp(Vec), -} - -/// Name: allprop -/// -/// Namespace: urn:ietf:params:xml:ns:caldav -/// -/// Purpose: Specifies that all properties shall be returned. -/// -/// Description: The CALDAV:allprop XML element can be used when the -/// client wants all properties of components returned by a -/// calendaring REPORT request. -/// -/// Definition: -/// -/// -/// -/// Note: The CALDAV:allprop element has the same name as the DAV: -/// allprop element defined in [RFC2518]. However, the CALDAV:allprop -/// element is defined in the "urn:ietf:params:xml:ns:caldav" -/// namespace instead of the "DAV:" namespace. -#[derive(Debug, PartialEq)] -pub enum PropKind { - AllProp, - Prop(Vec), -} - -/// Name: prop -/// -/// Namespace: urn:ietf:params:xml:ns:caldav -/// -/// Purpose: Defines which properties to return in the response. -/// -/// Description: The "name" attribute specifies the name of the calendar -/// property to return (e.g., ATTENDEE). The "novalue" attribute can -/// be used by clients to request that the actual value of the -/// property not be returned (if the "novalue" attribute is set to -/// "yes"). In that case, the server will return just the iCalendar -/// property name and any iCalendar parameters and a trailing ":" -/// without the subsequent value data. -/// -/// Definition: -/// -/// -/// name value: a calendar property name -/// novalue value: "yes" or "no" -/// -/// Note: The CALDAV:prop element has the same name as the DAV:prop -/// element defined in [RFC2518]. However, the CALDAV:prop element is -/// defined in the "urn:ietf:params:xml:ns:caldav" namespace instead -/// of the "DAV:" namespace. -#[derive(Debug, PartialEq)] -pub struct CalProp { - pub name: ComponentProperty, - pub novalue: Option, -} - -#[derive(Debug, PartialEq)] -pub enum RecurrenceModifier { - Expand(Expand), - LimitRecurrenceSet(LimitRecurrenceSet), -} - -/// Name: expand -/// -/// Namespace: urn:ietf:params:xml:ns:caldav -/// -/// Purpose: Forces the server to expand recurring components into -/// individual recurrence instances. 
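The "novalue" behaviour described for CALDAV:prop above (return only the property name, its parameters, and the trailing ':') can be illustrated with a small, hypothetical renderer:

// Hypothetical renderer showing the effect of novalue="yes": only the
// property name, its parameters and the trailing ':' are emitted.
fn render_property(name: &str, params: &[(&str, &str)], value: &str, novalue: bool) -> String {
    let mut line = String::from(name);
    for (key, val) in params {
        line.push(';');
        line.push_str(key);
        line.push('=');
        line.push_str(val);
    }
    line.push(':');
    if !novalue {
        line.push_str(value);
    }
    line
}

// render_property("ATTENDEE", &[("PARTSTAT", "ACCEPTED")], "mailto:a@example.com", true)
//   yields "ATTENDEE;PARTSTAT=ACCEPTED:"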
-/// -/// Description: The CALDAV:expand XML element specifies that for a -/// given calendaring REPORT request, the server MUST expand the -/// recurrence set into calendar components that define exactly one -/// recurrence instance, and MUST return only those whose scheduled -/// time intersect a specified time range. -/// -/// The "start" attribute specifies the inclusive start of the time -/// range, and the "end" attribute specifies the non-inclusive end of -/// the time range. Both attributes are specified as date with UTC -/// time value. The value of the "end" attribute MUST be greater than -/// the value of the "start" attribute. -/// -/// The server MUST use the same logic as defined for CALDAV:time- -/// range to determine if a recurrence instance intersects the -/// specified time range. -/// -/// Recurring components, other than the initial instance, MUST -/// include a RECURRENCE-ID property indicating which instance they -/// refer to. -/// -/// The returned calendar components MUST NOT use recurrence -/// properties (i.e., EXDATE, EXRULE, RDATE, and RRULE) and MUST NOT -/// have reference to or include VTIMEZONE components. Date and local -/// time with reference to time zone information MUST be converted -/// into date with UTC time. -/// -/// Definition: -/// -/// -/// -/// start value: an iCalendar "date with UTC time" -/// end value: an iCalendar "date with UTC time" -#[derive(Debug, PartialEq)] -pub struct Expand(pub DateTime, pub DateTime); - -/// CALDAV:limit-recurrence-set XML Element -/// -/// Name: limit-recurrence-set -/// -/// Namespace: urn:ietf:params:xml:ns:caldav -/// -/// Purpose: Specifies a time range to limit the set of "overridden -/// components" returned by the server. -/// -/// Description: The CALDAV:limit-recurrence-set XML element specifies -/// that for a given calendaring REPORT request, the server MUST -/// return, in addition to the "master component", only the -/// "overridden components" that impact a specified time range. An -/// overridden component impacts a time range if its current start and -/// end times overlap the time range, or if the original start and end -/// times -- the ones that would have been used if the instance were -/// not overridden -- overlap the time range. -/// -/// The "start" attribute specifies the inclusive start of the time -/// range, and the "end" attribute specifies the non-inclusive end of -/// the time range. Both attributes are specified as date with UTC -/// time value. The value of the "end" attribute MUST be greater than -/// the value of the "start" attribute. -/// -/// The server MUST use the same logic as defined for CALDAV:time- -/// range to determine if the current or original scheduled time of an -/// "overridden" recurrence instance intersects the specified time -/// range. -/// -/// Overridden components that have a RANGE parameter on their -/// RECURRENCE-ID property may specify one or more instances in the -/// recurrence set, and some of those instances may fall within the -/// specified time range or may have originally fallen within the -/// specified time range prior to being overridden. If that is the -/// case, the overridden component MUST be included in the results, as -/// it has a direct impact on the interpretation of instances within -/// the specified time range. 
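Both CALDAV:expand and CALDAV:limit-recurrence-set take an inclusive "start" and an exclusive "end", and "end" MUST be strictly greater than "start". A decoder could enforce that with a check along these lines; this is a sketch only, using the chrono types already in the dependency tree:

use chrono::{DateTime, Utc};

// Shared validity rule for the expand / limit-recurrence-set time ranges:
// "end" is exclusive and MUST be strictly greater than the inclusive "start".
fn validate_time_range(start: DateTime<Utc>, end: DateTime<Utc>) -> Result<(), &'static str> {
    if end > start {
        Ok(())
    } else {
        Err("invalid time range: end must be greater than start")
    }
}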
-/// -/// Definition: -/// -/// -/// -/// start value: an iCalendar "date with UTC time" -/// end value: an iCalendar "date with UTC time" -#[derive(Debug, PartialEq)] -pub struct LimitRecurrenceSet(pub DateTime, pub DateTime); - -/// Name: limit-freebusy-set -/// -/// Namespace: urn:ietf:params:xml:ns:caldav -/// -/// Purpose: Specifies a time range to limit the set of FREEBUSY values -/// returned by the server. -/// -/// Description: The CALDAV:limit-freebusy-set XML element specifies -/// that for a given calendaring REPORT request, the server MUST only -/// return the FREEBUSY property values of a VFREEBUSY component that -/// intersects a specified time range. -/// -/// The "start" attribute specifies the inclusive start of the time -/// range, and the "end" attribute specifies the non-inclusive end of -/// the time range. Both attributes are specified as "date with UTC -/// time" value. The value of the "end" attribute MUST be greater -/// than the value of the "start" attribute. -/// -/// The server MUST use the same logic as defined for CALDAV:time- -/// range to determine if a FREEBUSY property value intersects the -/// specified time range. -/// -/// Definition: -/// -/// -/// start value: an iCalendar "date with UTC time" -/// end value: an iCalendar "date with UTC time" -#[derive(Debug, PartialEq)] -pub struct LimitFreebusySet(pub DateTime, pub DateTime); - -/// Used by CalendarQuery & CalendarMultiget -#[derive(Debug, PartialEq)] -pub enum CalendarSelector { - AllProp, - PropName, - Prop(dav::PropName), -} - -/// Name: comp-filter -/// -/// Namespace: urn:ietf:params:xml:ns:caldav -/// -/// Purpose: Specifies search criteria on calendar components. -/// -/// Description: The CALDAV:comp-filter XML element specifies a query -/// targeted at the calendar object (i.e., VCALENDAR) or at a specific -/// calendar component type (e.g., VEVENT). The scope of the -/// CALDAV:comp-filter XML element is the calendar object when used as -/// a child of the CALDAV:filter XML element. The scope of the -/// CALDAV:comp-filter XML element is the enclosing calendar component -/// when used as a child of another CALDAV:comp-filter XML element. A -/// CALDAV:comp-filter is said to match if: -/// -/// * The CALDAV:comp-filter XML element is empty and the calendar -/// object or calendar component type specified by the "name" -/// attribute exists in the current scope; -/// -/// or: -/// -/// * The CALDAV:comp-filter XML element contains a CALDAV:is-not- -/// defined XML element and the calendar object or calendar -/// component type specified by the "name" attribute does not exist -/// in the current scope; -/// -/// or: -/// -/// * The CALDAV:comp-filter XML element contains a CALDAV:time-range -/// XML element and at least one recurrence instance in the -/// targeted calendar component is scheduled to overlap the -/// specified time range, and all specified CALDAV:prop-filter and -/// CALDAV:comp-filter child XML elements also match the targeted -/// calendar component; -/// -/// or: -/// -/// * The CALDAV:comp-filter XML element only contains CALDAV:prop- -/// filter and CALDAV:comp-filter child XML elements that all match -/// the targeted calendar component. 
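The four CALDAV:comp-filter matching options above reduce to "component absent" (is-not-defined) versus "component present and all child filters match". The sketch below models that with deliberately simplified stand-in types and leaves out time-range and prop-filter handling:

// Very simplified model of the CALDAV:comp-filter matching rules
// (illustration only; not the types defined in this file).
struct SimpleCompFilter {
    name: String,
    is_not_defined: bool,
    children: Vec<SimpleCompFilter>,
}

struct SimpleComponent {
    name: String,
    children: Vec<SimpleComponent>,
}

fn comp_matches(filter: &SimpleCompFilter, scope: &[SimpleComponent]) -> bool {
    let present: Vec<&SimpleComponent> =
        scope.iter().filter(|c| c.name == filter.name).collect();
    if filter.is_not_defined {
        // Option 2: matches when the named component does NOT exist in scope.
        return present.is_empty();
    }
    // Options 1, 3, 4 (without time-range): the component must exist and
    // every child filter must match inside it.
    present.iter().any(|component| {
        filter
            .children
            .iter()
            .all(|child| comp_matches(child, &component.children))
    })
}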
-/// -/// Definition: -/// -/// -/// -/// name value: a calendar object or calendar component -/// type (e.g., VEVENT) -#[derive(Debug, PartialEq)] -pub struct CompFilter { - pub name: Component, - // Option 1 = None, Option 2, 3, 4 = Some - pub additional_rules: Option, -} -#[derive(Debug, PartialEq)] -pub enum CompFilterRules { - // Option 2 - IsNotDefined, - // Options 3 & 4 - Matches(CompFilterMatch), -} -#[derive(Debug, PartialEq)] -pub struct CompFilterMatch { - pub time_range: Option, - pub prop_filter: Vec, - pub comp_filter: Vec, -} - -/// Name: prop-filter -/// -/// Namespace: urn:ietf:params:xml:ns:caldav -/// -/// Purpose: Specifies search criteria on calendar properties. -/// -/// Description: The CALDAV:prop-filter XML element specifies a query -/// targeted at a specific calendar property (e.g., CATEGORIES) in the -/// scope of the enclosing calendar component. A calendar property is -/// said to match a CALDAV:prop-filter if: -/// -/// * The CALDAV:prop-filter XML element is empty and a property of -/// the type specified by the "name" attribute exists in the -/// enclosing calendar component; -/// -/// or: -/// -/// * The CALDAV:prop-filter XML element contains a CALDAV:is-not- -/// defined XML element and no property of the type specified by -/// the "name" attribute exists in the enclosing calendar -/// component; -/// -/// or: -/// -/// * The CALDAV:prop-filter XML element contains a CALDAV:time-range -/// XML element and the property value overlaps the specified time -/// range, and all specified CALDAV:param-filter child XML elements -/// also match the targeted property; -/// -/// or: -/// -/// * The CALDAV:prop-filter XML element contains a CALDAV:text-match -/// XML element and the property value matches it, and all -/// specified CALDAV:param-filter child XML elements also match the -/// targeted property; -/// -/// Definition: -/// -/// -/// -/// -/// name value: a calendar property name (e.g., ATTENDEE) -#[derive(Debug, PartialEq)] -pub struct PropFilter { - pub name: Component, - // None = Option 1, Some() = Option 2, 3 & 4 - pub additional_rules: Option, -} -#[derive(Debug, PartialEq)] -pub enum PropFilterRules { - // Option 2 - IsNotDefined, - // Options 3 & 4 - Match(PropFilterMatch), -} -#[derive(Debug, PartialEq)] -pub struct PropFilterMatch { - pub time_range: Option, - pub time_or_text: Option, - pub param_filter: Vec, -} -#[derive(Debug, PartialEq)] -pub enum TimeOrText { - Time(TimeRange), - Text(TextMatch), -} - -/// Name: text-match -/// -/// Namespace: urn:ietf:params:xml:ns:caldav -/// -/// Purpose: Specifies a substring match on a property or parameter -/// value. -/// -/// Description: The CALDAV:text-match XML element specifies text used -/// for a substring match against the property or parameter value -/// specified in a calendaring REPORT request. -/// -/// The "collation" attribute is used to select the collation that the -/// server MUST use for character string matching. In the absence of -/// this attribute, the server MUST use the "i;ascii-casemap" -/// collation. -/// -/// The "negate-condition" attribute is used to indicate that this -/// test returns a match if the text matches when the attribute value -/// is set to "no", or return a match if the text does not match, if -/// the attribute value is set to "yes". For example, this can be -/// used to match components with a STATUS property not set to -/// CANCELLED. 
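The CALDAV:text-match semantics described above (substring match under the selected collation, optionally inverted by negate-condition="yes") can be summarised by a small helper; this is an illustrative sketch under the default collation, not the decoder's code:

// Sketch of CALDAV:text-match under the default i;ascii-casemap collation,
// with the negate-condition flag inverting the outcome.
fn text_match(value: &str, needle: &str, negate: bool) -> bool {
    let hit = value
        .to_ascii_lowercase()
        .contains(&needle.to_ascii_lowercase());
    if negate { !hit } else { hit }
}

// Matching components whose STATUS is not CANCELLED, as in the description:
// text_match("CONFIRMED", "CANCELLED", true) == true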
-/// -/// Definition: -/// -/// PCDATA value: string -/// -#[derive(Debug, PartialEq)] -pub struct TextMatch { - pub collation: Option, - pub negate_condition: Option, - pub text: String, -} - -/// Name: param-filter -/// -/// Namespace: urn:ietf:params:xml:ns:caldav -/// -/// Purpose: Limits the search to specific parameter values. -/// -/// Description: The CALDAV:param-filter XML element specifies a query -/// targeted at a specific calendar property parameter (e.g., -/// PARTSTAT) in the scope of the calendar property on which it is -/// defined. A calendar property parameter is said to match a CALDAV: -/// param-filter if: -/// -/// * The CALDAV:param-filter XML element is empty and a parameter of -/// the type specified by the "name" attribute exists on the -/// calendar property being examined; -/// -/// or: -/// -/// * The CALDAV:param-filter XML element contains a CALDAV:is-not- -/// defined XML element and no parameter of the type specified by -/// the "name" attribute exists on the calendar property being -/// examined; -/// -/// Definition: -/// -/// -/// -/// -/// name value: a property parameter name (e.g., PARTSTAT) -#[derive(Debug, PartialEq)] -pub struct ParamFilter { - pub name: PropertyParameter, - pub additional_rules: Option, -} -#[derive(Debug, PartialEq)] -pub enum ParamFilterMatch { - IsNotDefined, - Match(TextMatch), -} - -/// CALDAV:is-not-defined XML Element -/// -/// Name: is-not-defined -/// -/// Namespace: urn:ietf:params:xml:ns:caldav -/// -/// Purpose: Specifies that a match should occur if the enclosing -/// component, property, or parameter does not exist. -/// -/// Description: The CALDAV:is-not-defined XML element specifies that a -/// match occurs if the enclosing component, property, or parameter -/// value specified in a calendaring REPORT request does not exist in -/// the calendar data being tested. -/// -/// Definition: -/// -/* CURRENTLY INLINED */ - - - -/// Name: timezone -/// -/// Namespace: urn:ietf:params:xml:ns:caldav -/// -/// Purpose: Specifies the time zone component to use when determining -/// the results of a report. -/// -/// Description: The CALDAV:timezone XML element specifies that for a -/// given calendaring REPORT request, the server MUST rely on the -/// specified VTIMEZONE component instead of the CALDAV:calendar- -/// timezone property of the calendar collection, in which the -/// calendar object resource is contained to resolve "date" values and -/// "date with local time" values (i.e., floating time) to "date with -/// UTC time" values. The server will require this information to -/// determine if a calendar component scheduled with "date" values or -/// "date with local time" values intersects a CALDAV:time-range -/// specified in a CALDAV:calendar-query REPORT. -/// -/// Note: The iCalendar data embedded within the CALDAV:timezone XML -/// element MUST follow the standard XML character data encoding -/// rules, including use of <, >, & etc. entity encoding or -/// the use of a construct. In the later case, the -/// -/// iCalendar data cannot contain the character sequence "]]>", which -/// is the end delimiter for the CDATA section. -/// -/// Definition: -/// -/// -/// PCDATA value: an iCalendar object with exactly one VTIMEZONE -#[derive(Debug, PartialEq)] -pub struct TimeZone(pub String); - -/// Name: filter -/// -/// Namespace: urn:ietf:params:xml:ns:caldav -/// -/// Purpose: Specifies a filter to limit the set of calendar components -/// returned by the server. 
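As the CALDAV:timezone description above says, a VTIMEZONE supplied in the REPORT body takes precedence over the collection's CALDAV:calendar-timezone property when resolving floating times. A trivial sketch of that precedence; the function and its string-based representation are hypothetical:

// Precedence rule from the CALDAV:timezone description: the VTIMEZONE given
// in the REPORT body wins over the collection's CALDAV:calendar-timezone
// property; both are raw iCalendar text here.
fn effective_timezone<'a>(
    report_timezone: Option<&'a str>,
    collection_timezone: Option<&'a str>,
) -> Option<&'a str> {
    report_timezone.or(collection_timezone)
}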
-/// -/// Description: The CALDAV:filter XML element specifies the search -/// filter used to limit the calendar components returned by a -/// calendaring REPORT request. -/// -/// Definition: -/// -#[derive(Debug, PartialEq)] -pub struct Filter(pub CompFilter); - -/// Name: time-range -/// -/// Definition: -/// -/// -/// -/// start value: an iCalendar "date with UTC time" -/// end value: an iCalendar "date with UTC time" -#[derive(Debug, PartialEq)] -pub enum TimeRange { - OnlyStart(DateTime), - OnlyEnd(DateTime), - FullRange(DateTime, DateTime), -} - -// ----------------------- ENUM ATTRIBUTES --------------------- - -/// Known components -#[derive(Debug, PartialEq)] -pub enum Component { - VCalendar, - VJournal, - VFreeBusy, - VEvent, - VTodo, - VAlarm, - VTimeZone, - Unknown(String), -} -impl Component { - pub fn as_str<'a>(&'a self) -> &'a str { - match self { - Self::VCalendar => "VCALENDAR", - Self::VJournal => "VJOURNAL", - Self::VFreeBusy => "VFREEBUSY", - Self::VEvent => "VEVENT", - Self::VTodo => "VTODO", - Self::VAlarm => "VALARM", - Self::VTimeZone => "VTIMEZONE", - Self::Unknown(c) => c, - } - } -} - -/// name="VERSION", name="SUMMARY", etc. -/// Can be set on different objects: VCalendar, VEvent, etc. -/// Might be replaced by an enum later -#[derive(Debug, PartialEq)] -pub struct ComponentProperty(pub String); - -/// like PARSTAT -#[derive(Debug, PartialEq)] -pub struct PropertyParameter(pub String); -impl PropertyParameter { - pub fn as_str<'a>(&'a self) -> &'a str { - self.0.as_str() - } -} - -#[derive(Default,Debug,PartialEq)] -pub enum Collation { - #[default] - AsciiCaseMap, - Octet, - Unknown(String), -} -impl Collation { - pub fn as_str<'a>(&'a self) -> &'a str { - match self { - Self::AsciiCaseMap => "i;ascii-casemap", - Self::Octet => "i;octet", - Self::Unknown(c) => c.as_str(), - } - } -} diff --git a/src/dav/decoder.rs b/src/dav/decoder.rs deleted file mode 100644 index aa3c7e5..0000000 --- a/src/dav/decoder.rs +++ /dev/null @@ -1,948 +0,0 @@ -use std::borrow::Cow; -use std::future::Future; - -use quick_xml::events::{Event, BytesStart, BytesDecl, BytesText}; -use quick_xml::events::attributes::AttrError; -use quick_xml::name::{Namespace, QName, PrefixDeclaration, ResolveResult, ResolveResult::*}; -use quick_xml::reader::NsReader; -use tokio::io::AsyncBufRead; - -use super::types::*; -use super::error::ParsingError; -use super::xml::{Node, QRead, Reader, IRead, DAV_URN, CAL_URN}; - -//@TODO (1) Rewrite all objects as Href, -// where we return Ok(None) instead of trying to find the object at any cost. -// Add a xml.find() -> Result, ParsingError> or similar for the cases we -// really need the object -// (2) Rewrite QRead and replace Result, _> with Result<_, _>, not found being a possible -// error. -// (3) Rewrite vectors with xml.collect() -> Result, _> -// (4) Something for alternatives would be great but no idea yet - -// ---- ROOT ---- - -/// Propfind request -impl QRead> for PropFind { - async fn qread(xml: &mut Reader) -> Result { - xml.open(DAV_URN, "propfind").await?; - let propfind: PropFind = loop { - // allprop - if let Some(_) = xml.maybe_open(DAV_URN, "allprop").await? { - let includ = xml.maybe_find::>().await?; - xml.close().await?; - break PropFind::AllProp(includ) - } - - // propname - if let Some(_) = xml.maybe_open(DAV_URN, "propname").await? 
{ - xml.close().await?; - break PropFind::PropName - } - - // prop - let (mut maybe_prop, mut dirty) = (None, false); - xml.maybe_read::>(&mut maybe_prop, &mut dirty).await?; - if let Some(prop) = maybe_prop { - break PropFind::Prop(prop) - } - - // not found, skipping - xml.skip().await?; - }; - xml.close().await?; - - Ok(propfind) - } -} - -/// PROPPATCH request -impl QRead> for PropertyUpdate { - async fn qread(xml: &mut Reader) -> Result { - xml.open(DAV_URN, "propertyupdate").await?; - let collected_items = xml.collect::>().await?; - xml.close().await?; - Ok(PropertyUpdate(collected_items)) - } -} - -/// Generic response -impl> QRead> for Multistatus { - async fn qread(xml: &mut Reader) -> Result { - xml.open(DAV_URN, "multistatus").await?; - let mut responses = Vec::new(); - let mut responsedescription = None; - - loop { - let mut dirty = false; - xml.maybe_push(&mut responses, &mut dirty).await?; - xml.maybe_read(&mut responsedescription, &mut dirty).await?; - if !dirty { - match xml.peek() { - Event::End(_) => break, - _ => xml.skip().await?, - }; - } - } - - xml.close().await?; - Ok(Multistatus { responses, responsedescription }) - } -} - -// LOCK REQUEST -impl QRead for LockInfo { - async fn qread(xml: &mut Reader) -> Result { - xml.open(DAV_URN, "lockinfo").await?; - let (mut m_scope, mut m_type, mut owner) = (None, None, None); - loop { - let mut dirty = false; - xml.maybe_read::(&mut m_scope, &mut dirty).await?; - xml.maybe_read::(&mut m_type, &mut dirty).await?; - xml.maybe_read::(&mut owner, &mut dirty).await?; - - if !dirty { - match xml.peek() { - Event::End(_) => break, - _ => xml.skip().await?, - }; - } - } - xml.close().await?; - match (m_scope, m_type) { - (Some(lockscope), Some(locktype)) => Ok(LockInfo { lockscope, locktype, owner }), - _ => Err(ParsingError::MissingChild), - } - } -} - -// LOCK RESPONSE -impl QRead> for PropValue { - async fn qread(xml: &mut Reader) -> Result { - xml.open(DAV_URN, "prop").await?; - let mut acc = xml.collect::>().await?; - xml.close().await?; - Ok(PropValue(acc)) - } -} - - -/// Error response -impl QRead> for Error { - async fn qread(xml: &mut Reader) -> Result { - xml.open(DAV_URN, "error").await?; - let violations = xml.collect::>().await?; - xml.close().await?; - Ok(Error(violations)) - } -} - - - -// ---- INNER XML -impl> QRead> for Response { - async fn qread(xml: &mut Reader) -> Result { - xml.open(DAV_URN, "response").await?; - let (mut status, mut error, mut responsedescription, mut location) = (None, None, None, None); - let mut href = Vec::new(); - let mut propstat = Vec::new(); - - loop { - let mut dirty = false; - xml.maybe_read::(&mut status, &mut dirty).await?; - xml.maybe_push::(&mut href, &mut dirty).await?; - xml.maybe_push::>(&mut propstat, &mut dirty).await?; - xml.maybe_read::>(&mut error, &mut dirty).await?; - xml.maybe_read::(&mut responsedescription, &mut dirty).await?; - xml.maybe_read::(&mut location, &mut dirty).await?; - - if !dirty { - match xml.peek() { - Event::End(_) => break, - _ => { xml.skip().await? 
}, - }; - } - } - - xml.close().await?; - match (status, &propstat[..], &href[..]) { - (Some(status), &[], &[_, ..]) => Ok(Response { - status_or_propstat: StatusOrPropstat::Status(href, status), - error, responsedescription, location, - }), - (None, &[_, ..], &[_, ..]) => Ok(Response { - status_or_propstat: StatusOrPropstat::PropStat(href.into_iter().next().unwrap(), propstat), - error, responsedescription, location, - }), - (Some(_), &[_, ..], _) => Err(ParsingError::InvalidValue), - _ => Err(ParsingError::MissingChild), - } - } -} - -impl> QRead> for PropStat { - async fn qread(xml: &mut Reader) -> Result { - xml.open(DAV_URN, "propstat").await?; - - let (mut m_prop, mut m_status, mut error, mut responsedescription) = (None, None, None, None); - - loop { - let mut dirty = false; - xml.maybe_read::(&mut m_prop, &mut dirty).await?; - xml.maybe_read::(&mut m_status, &mut dirty).await?; - xml.maybe_read::>(&mut error, &mut dirty).await?; - xml.maybe_read::(&mut responsedescription, &mut dirty).await?; - - if !dirty { - match xml.peek() { - Event::End(_) => break, - _ => xml.skip().await?, - }; - } - } - - xml.close().await?; - match (m_prop, m_status) { - (Some(prop), Some(status)) => Ok(PropStat { prop, status, error, responsedescription }), - _ => Err(ParsingError::MissingChild), - } - } -} - -impl QRead for Status { - async fn qread(xml: &mut Reader) -> Result { - xml.open(DAV_URN, "status").await?; - let fullcode = xml.tag_string().await?; - let txtcode = fullcode.splitn(3, ' ').nth(1).ok_or(ParsingError::InvalidValue)?; - let code = http::status::StatusCode::from_bytes(txtcode.as_bytes()).or(Err(ParsingError::InvalidValue))?; - xml.close().await?; - Ok(Status(code)) - } -} - -impl QRead for ResponseDescription { - async fn qread(xml: &mut Reader) -> Result { - xml.open(DAV_URN, "responsedescription").await?; - let cnt = xml.tag_string().await?; - xml.close().await?; - Ok(ResponseDescription(cnt)) - } -} - -impl QRead for Location { - async fn qread(xml: &mut Reader) -> Result { - xml.open(DAV_URN, "location").await?; - let href = xml.find::().await?; - xml.close().await?; - Ok(Location(href)) - } -} - -impl QRead> for PropertyUpdateItem { - async fn qread(xml: &mut Reader) -> Result { - match Remove::qread(xml).await { - Err(ParsingError::Recoverable) => (), - otherwise => return otherwise.map(PropertyUpdateItem::Remove), - } - Set::qread(xml).await.map(PropertyUpdateItem::Set) - } -} - -impl QRead> for Remove { - async fn qread(xml: &mut Reader) -> Result { - xml.open(DAV_URN, "remove").await?; - let propname = xml.find::>().await?; - xml.close().await?; - Ok(Remove(propname)) - } -} - -impl QRead> for Set { - async fn qread(xml: &mut Reader) -> Result { - xml.open(DAV_URN, "set").await?; - let propvalue = xml.find::>().await?; - xml.close().await?; - Ok(Set(propvalue)) - } -} - -impl QRead> for Violation { - async fn qread(xml: &mut Reader) -> Result { - if xml.maybe_open(DAV_URN, "lock-token-matches-request-uri").await?.is_some() { - xml.close().await?; - Ok(Violation::LockTokenMatchesRequestUri) - } else if xml.maybe_open(DAV_URN, "lock-token-submitted").await?.is_some() { - let links = xml.collect::().await?; - xml.close().await?; - Ok(Violation::LockTokenSubmitted(links)) - } else if xml.maybe_open(DAV_URN, "no-conflicting-lock").await?.is_some() { - let links = xml.collect::().await?; - xml.close().await?; - Ok(Violation::NoConflictingLock(links)) - } else if xml.maybe_open(DAV_URN, "no-external-entities").await?.is_some() { - xml.close().await?; - 
Ok(Violation::NoExternalEntities) - } else if xml.maybe_open(DAV_URN, "preserved-live-properties").await?.is_some() { - xml.close().await?; - Ok(Violation::PreservedLiveProperties) - } else if xml.maybe_open(DAV_URN, "propfind-finite-depth").await?.is_some() { - xml.close().await?; - Ok(Violation::PropfindFiniteDepth) - } else if xml.maybe_open(DAV_URN, "cannot-modify-protected-property").await?.is_some() { - xml.close().await?; - Ok(Violation::CannotModifyProtectedProperty) - } else { - E::Error::qread(xml).await.map(Violation::Extension) - } - } -} - -impl QRead> for Include { - async fn qread(xml: &mut Reader) -> Result { - xml.open(DAV_URN, "include").await?; - let acc = xml.collect::>().await?; - xml.close().await?; - Ok(Include(acc)) - } -} - -impl QRead> for PropName { - async fn qread(xml: &mut Reader) -> Result { - xml.open(DAV_URN, "prop").await?; - let acc = xml.collect::>().await?; - xml.close().await?; - Ok(PropName(acc)) - } -} - -impl QRead> for PropertyRequest { - async fn qread(xml: &mut Reader) -> Result { - let maybe = if xml.maybe_open(DAV_URN, "creationdate").await?.is_some() { - Some(PropertyRequest::CreationDate) - } else if xml.maybe_open(DAV_URN, "displayname").await?.is_some() { - Some(PropertyRequest::DisplayName) - } else if xml.maybe_open(DAV_URN, "getcontentlanguage").await?.is_some() { - Some(PropertyRequest::GetContentLanguage) - } else if xml.maybe_open(DAV_URN, "getcontentlength").await?.is_some() { - Some(PropertyRequest::GetContentLength) - } else if xml.maybe_open(DAV_URN, "getcontenttype").await?.is_some() { - Some(PropertyRequest::GetContentType) - } else if xml.maybe_open(DAV_URN, "getetag").await?.is_some() { - Some(PropertyRequest::GetEtag) - } else if xml.maybe_open(DAV_URN, "getlastmodified").await?.is_some() { - Some(PropertyRequest::GetLastModified) - } else if xml.maybe_open(DAV_URN, "lockdiscovery").await?.is_some() { - Some(PropertyRequest::LockDiscovery) - } else if xml.maybe_open(DAV_URN, "resourcetype").await?.is_some() { - Some(PropertyRequest::ResourceType) - } else if xml.maybe_open(DAV_URN, "supportedlock").await?.is_some() { - Some(PropertyRequest::SupportedLock) - } else { - None - }; - - match maybe { - Some(pr) => { - xml.close().await?; - Ok(pr) - }, - None => E::PropertyRequest::qread(xml).await.map(PropertyRequest::Extension), - } - } -} - -impl QRead> for Property { - async fn qread(xml: &mut Reader) -> Result { - use chrono::{DateTime, FixedOffset, TimeZone}; - - // Core WebDAV properties - if xml.maybe_open(DAV_URN, "creationdate").await?.is_some() { - let datestr = xml.tag_string().await?; - xml.close().await?; - return Ok(Property::CreationDate(DateTime::parse_from_rfc3339(datestr.as_str())?)) - } else if xml.maybe_open(DAV_URN, "displayname").await?.is_some() { - let name = xml.tag_string().await?; - xml.close().await?; - return Ok(Property::DisplayName(name)) - } else if xml.maybe_open(DAV_URN, "getcontentlanguage").await?.is_some() { - let lang = xml.tag_string().await?; - xml.close().await?; - return Ok(Property::GetContentLanguage(lang)) - } else if xml.maybe_open(DAV_URN, "getcontentlength").await?.is_some() { - let cl = xml.tag_string().await?.parse::()?; - xml.close().await?; - return Ok(Property::GetContentLength(cl)) - } else if xml.maybe_open(DAV_URN, "getcontenttype").await?.is_some() { - let ct = xml.tag_string().await?; - xml.close().await?; - return Ok(Property::GetContentType(ct)) - } else if xml.maybe_open(DAV_URN, "getetag").await?.is_some() { - let etag = xml.tag_string().await?; - xml.close().await?; - 
return Ok(Property::GetEtag(etag)) - } else if xml.maybe_open(DAV_URN, "getlastmodified").await?.is_some() { - let datestr = xml.tag_string().await?; - xml.close().await?; - return Ok(Property::GetLastModified(DateTime::parse_from_rfc2822(datestr.as_str())?)) - } else if xml.maybe_open(DAV_URN, "lockdiscovery").await?.is_some() { - let acc = xml.collect::().await?; - xml.close().await?; - return Ok(Property::LockDiscovery(acc)) - } else if xml.maybe_open(DAV_URN, "resourcetype").await?.is_some() { - let acc = xml.collect::>().await?; - xml.close().await?; - return Ok(Property::ResourceType(acc)) - } else if xml.maybe_open(DAV_URN, "supportedlock").await?.is_some() { - let acc = xml.collect::().await?; - xml.close().await?; - return Ok(Property::SupportedLock(acc)) - } - - // Option 2: an extension property, delegating - E::Property::qread(xml).await.map(Property::Extension) - } -} - -impl QRead for ActiveLock { - async fn qread(xml: &mut Reader) -> Result { - xml.open(DAV_URN, "activelock").await?; - let (mut m_scope, mut m_type, mut m_depth, mut owner, mut timeout, mut locktoken, mut m_root) = - (None, None, None, None, None, None, None); - - loop { - let mut dirty = false; - xml.maybe_read::(&mut m_scope, &mut dirty).await?; - xml.maybe_read::(&mut m_type, &mut dirty).await?; - xml.maybe_read::(&mut m_depth, &mut dirty).await?; - xml.maybe_read::(&mut owner, &mut dirty).await?; - xml.maybe_read::(&mut timeout, &mut dirty).await?; - xml.maybe_read::(&mut locktoken, &mut dirty).await?; - xml.maybe_read::(&mut m_root, &mut dirty).await?; - - if !dirty { - match xml.peek() { - Event::End(_) => break, - _ => { xml.skip().await?; }, - } - } - } - - xml.close().await?; - match (m_scope, m_type, m_depth, m_root) { - (Some(lockscope), Some(locktype), Some(depth), Some(lockroot)) => - Ok(ActiveLock { lockscope, locktype, depth, owner, timeout, locktoken, lockroot }), - _ => Err(ParsingError::MissingChild), - } - } -} - -impl QRead for Depth { - async fn qread(xml: &mut Reader) -> Result { - xml.open(DAV_URN, "depth").await?; - let depth_str = xml.tag_string().await?; - xml.close().await?; - match depth_str.as_str() { - "0" => Ok(Depth::Zero), - "1" => Ok(Depth::One), - "infinity" => Ok(Depth::Infinity), - _ => Err(ParsingError::WrongToken), - } - } -} - -impl QRead for Owner { - async fn qread(xml: &mut Reader) -> Result { - xml.open(DAV_URN, "owner").await?; - - let mut owner = Owner::Unknown; - loop { - match xml.peek() { - Event::Text(_) | Event::CData(_) => { - let txt = xml.tag_string().await?; - if matches!(owner, Owner::Unknown) { - owner = Owner::Txt(txt); - } - } - Event::Start(_) | Event::Empty(_) => { - match Href::qread(xml).await { - Ok(href) => { owner = Owner::Href(href); }, - Err(ParsingError::Recoverable) => { xml.skip().await?; }, - Err(e) => return Err(e), - } - } - Event::End(_) => break, - _ => { xml.skip().await?; }, - } - }; - xml.close().await?; - Ok(owner) - } -} - -impl QRead for Timeout { - async fn qread(xml: &mut Reader) -> Result { - const SEC_PFX: &str = "SEC_PFX"; - xml.open(DAV_URN, "timeout").await?; - - let timeout = match xml.tag_string().await?.as_str() { - "Infinite" => Timeout::Infinite, - seconds => match seconds.strip_prefix(SEC_PFX) { - Some(secs) => Timeout::Seconds(secs.parse::()?), - None => return Err(ParsingError::InvalidValue), - }, - }; - - xml.close().await?; - Ok(timeout) - } -} - -impl QRead for LockToken { - async fn qread(xml: &mut Reader) -> Result { - xml.open(DAV_URN, "locktoken").await?; - let href = Href::qread(xml).await?; - 
xml.close().await?; - Ok(LockToken(href)) - } -} - -impl QRead for LockRoot { - async fn qread(xml: &mut Reader) -> Result { - xml.open(DAV_URN, "lockroot").await?; - let href = Href::qread(xml).await?; - xml.close().await?; - Ok(LockRoot(href)) - } -} - -impl QRead> for ResourceType { - async fn qread(xml: &mut Reader) -> Result { - if xml.maybe_open(DAV_URN, "collection").await?.is_some() { - xml.close().await?; - return Ok(ResourceType::Collection) - } - - E::ResourceType::qread(xml).await.map(ResourceType::Extension) - } -} - -impl QRead for LockEntry { - async fn qread(xml: &mut Reader) -> Result { - xml.open(DAV_URN, "lockentry").await?; - let (mut maybe_scope, mut maybe_type) = (None, None); - - loop { - let mut dirty = false; - xml.maybe_read::(&mut maybe_scope, &mut dirty).await?; - xml.maybe_read::(&mut maybe_type, &mut dirty).await?; - if !dirty { - match xml.peek() { - Event::End(_) => break, - _ => xml.skip().await?, - }; - } - } - - xml.close().await?; - match (maybe_scope, maybe_type) { - (Some(lockscope), Some(locktype)) => Ok(LockEntry { lockscope, locktype }), - _ => Err(ParsingError::MissingChild), - } - } -} - -impl QRead for LockScope { - async fn qread(xml: &mut Reader) -> Result { - xml.open(DAV_URN, "lockscope").await?; - - let lockscope = loop { - if xml.maybe_open(DAV_URN, "exclusive").await?.is_some() { - xml.close().await?; - break LockScope::Exclusive - } else if xml.maybe_open(DAV_URN, "shared").await?.is_some() { - xml.close().await?; - break LockScope::Shared - } - - xml.skip().await?; - }; - - xml.close().await?; - Ok(lockscope) - } -} - -impl QRead for LockType { - async fn qread(xml: &mut Reader) -> Result { - xml.open(DAV_URN, "locktype").await?; - - let locktype = loop { - if xml.maybe_open(DAV_URN, "write").await?.is_some() { - xml.close().await?; - break LockType::Write - } - - xml.skip().await?; - }; - - xml.close().await?; - Ok(locktype) - } -} - -impl QRead for Href { - async fn qread(xml: &mut Reader) -> Result { - xml.open(DAV_URN, "href").await?; - let mut url = xml.tag_string().await?; - xml.close().await?; - Ok(Href(url)) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use chrono::{FixedOffset, DateTime, TimeZone, Utc}; - use crate::dav::realization::Core; - - #[tokio::test] - async fn basic_propfind_propname() { - let src = r#" - - - - - -"#; - - let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); - let got = rdr.find::>().await.unwrap(); - - assert_eq!(got, PropFind::::PropName); - } - - #[tokio::test] - async fn basic_propfind_prop() { - let src = r#" - - - - - - - - - - - - - -"#; - - let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); - let got = rdr.find::>().await.unwrap(); - - assert_eq!(got, PropFind::Prop(PropName(vec![ - PropertyRequest::DisplayName, - PropertyRequest::GetContentLength, - PropertyRequest::GetContentType, - PropertyRequest::GetEtag, - PropertyRequest::GetLastModified, - PropertyRequest::ResourceType, - PropertyRequest::SupportedLock, - ]))); - } - - #[tokio::test] - async fn rfc_lock_error() { - let src = r#" - - - /locked/ - - "#; - - let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); - let got = rdr.find::>().await.unwrap(); - - assert_eq!(got, Error(vec![ - Violation::LockTokenSubmitted(vec![ - Href("/locked/".into()) - ]) - ])); - } - - - #[tokio::test] - async fn rfc_propertyupdate() { - let src = r#" - - - - - Jim Whitehead - Roy Fielding - - - - - - - "#; - - let mut rdr = 
Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); - let got = rdr.find::>().await.unwrap(); - - assert_eq!(got, PropertyUpdate(vec![ - PropertyUpdateItem::Set(Set(PropValue(vec![]))), - PropertyUpdateItem::Remove(Remove(PropName(vec![]))), - ])); - } - - #[tokio::test] - async fn rfc_lockinfo() { - let src = r#" - - - - - - http://example.org/~ejw/contact.html - - -"#; - - let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); - let got = rdr.find::().await.unwrap(); - - assert_eq!(got, LockInfo { - lockscope: LockScope::Exclusive, - locktype: LockType::Write, - owner: Some(Owner::Href(Href("http://example.org/~ejw/contact.html".into()))), - }); - } - - #[tokio::test] - async fn rfc_multistatus_name() { - let src = r#" - - - - http://www.example.com/container/ - - - - - - - - - - HTTP/1.1 200 OK - - - - http://www.example.com/container/front.html - - - - - - - - - - - - - HTTP/1.1 200 OK - - - -"#; - - let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); - let got = rdr.find::>>().await.unwrap(); - - assert_eq!(got, Multistatus { - responses: vec![ - Response { - status_or_propstat: StatusOrPropstat::PropStat( - Href("http://www.example.com/container/".into()), - vec![PropStat { - prop: PropName(vec![ - PropertyRequest::CreationDate, - PropertyRequest::DisplayName, - PropertyRequest::ResourceType, - PropertyRequest::SupportedLock, - ]), - status: Status(http::status::StatusCode::OK), - error: None, - responsedescription: None, - }], - ), - error: None, - responsedescription: None, - location: None, - }, - Response { - status_or_propstat: StatusOrPropstat::PropStat( - Href("http://www.example.com/container/front.html".into()), - vec![PropStat { - prop: PropName(vec![ - PropertyRequest::CreationDate, - PropertyRequest::DisplayName, - PropertyRequest::GetContentLength, - PropertyRequest::GetContentType, - PropertyRequest::GetEtag, - PropertyRequest::GetLastModified, - PropertyRequest::ResourceType, - PropertyRequest::SupportedLock, - ]), - status: Status(http::status::StatusCode::OK), - error: None, - responsedescription: None, - }], - ), - error: None, - responsedescription: None, - location: None, - }, - ], - responsedescription: None, - }); - } - - - #[tokio::test] - async fn rfc_multistatus_value() { - let src = r#" - - - - /container/ - - - Box type A - Hadrian - 1997-12-01T17:42:21-08:00 - Example collection - - - - - - - - - - - - - HTTP/1.1 200 OK - - - - /container/front.html - - - Box type B - - 1997-12-01T18:27:21-08:00 - Example HTML resource - 4525 - text/html - "zzyzx" - Mon, 12 Jan 1998 09:25:56 GMT - - - - - - - - - - - - - HTTP/1.1 200 OK - - - "#; - - let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); - let got = rdr.find::>>().await.unwrap(); - - assert_eq!(got, Multistatus { - responses: vec![ - Response { - status_or_propstat: StatusOrPropstat::PropStat( - Href("/container/".into()), - vec![PropStat { - prop: PropValue(vec![ - Property::CreationDate(FixedOffset::west_opt(8 * 3600).unwrap().with_ymd_and_hms(1997, 12, 01, 17, 42, 21).unwrap()), - Property::DisplayName("Example collection".into()), - Property::ResourceType(vec![ResourceType::Collection]), - Property::SupportedLock(vec![ - LockEntry { - lockscope: LockScope::Exclusive, - locktype: LockType::Write, - }, - LockEntry { - lockscope: LockScope::Shared, - locktype: LockType::Write, - }, - ]), - ]), - status: Status(http::status::StatusCode::OK), - error: None, - responsedescription: None, - }], - ), - error: None, - 
responsedescription: None, - location: None, - - }, - Response { - status_or_propstat: StatusOrPropstat::PropStat( - Href("/container/front.html".into()), - vec![PropStat { - prop: PropValue(vec![ - Property::CreationDate(FixedOffset::west_opt(8 * 3600).unwrap().with_ymd_and_hms(1997, 12, 01, 18, 27, 21).unwrap()), - Property::DisplayName("Example HTML resource".into()), - Property::GetContentLength(4525), - Property::GetContentType("text/html".into()), - Property::GetEtag(r#""zzyzx""#.into()), - Property::GetLastModified(FixedOffset::west_opt(0).unwrap().with_ymd_and_hms(1998, 01, 12, 09, 25, 56).unwrap()), - //Property::ResourceType(vec![]), - Property::SupportedLock(vec![ - LockEntry { - lockscope: LockScope::Exclusive, - locktype: LockType::Write, - }, - LockEntry { - lockscope: LockScope::Shared, - locktype: LockType::Write, - }, - ]), - ]), - status: Status(http::status::StatusCode::OK), - error: None, - responsedescription: None, - }], - ), - error: None, - responsedescription: None, - location: None, - - }, - ], - responsedescription: None, - }); - } - -} diff --git a/src/dav/encoder.rs b/src/dav/encoder.rs deleted file mode 100644 index 4de5440..0000000 --- a/src/dav/encoder.rs +++ /dev/null @@ -1,1117 +0,0 @@ -use std::io::Cursor; - -use quick_xml::Error as QError; -use quick_xml::events::{Event, BytesEnd, BytesStart, BytesText}; -use quick_xml::writer::ElementWriter; -use quick_xml::name::PrefixDeclaration; -use tokio::io::AsyncWrite; -use super::types::*; -use super::xml::{Node, Writer,QWrite,IWrite}; - - -// --- XML ROOTS - -/// PROPFIND REQUEST -impl QWrite for PropFind { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let start = xml.create_dav_element("propfind"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - match self { - Self::PropName => { - let empty_propname = xml.create_dav_element("propname"); - xml.q.write_event_async(Event::Empty(empty_propname)).await? 
- }, - Self::AllProp(maybe_include) => { - let empty_allprop = xml.create_dav_element("allprop"); - xml.q.write_event_async(Event::Empty(empty_allprop)).await?; - if let Some(include) = maybe_include { - include.qwrite(xml).await?; - } - }, - Self::Prop(propname) => propname.qwrite(xml).await?, - } - xml.q.write_event_async(Event::End(end)).await - } -} - -/// PROPPATCH REQUEST -impl QWrite for PropertyUpdate { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let start = xml.create_dav_element("propertyupdate"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - for update in self.0.iter() { - update.qwrite(xml).await?; - } - xml.q.write_event_async(Event::End(end)).await - } -} - - -/// PROPFIND RESPONSE, PROPPATCH RESPONSE, COPY RESPONSE, MOVE RESPONSE -/// DELETE RESPONSE, -impl> QWrite for Multistatus { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let start = xml.create_dav_element("multistatus"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - for response in self.responses.iter() { - response.qwrite(xml).await?; - } - if let Some(description) = &self.responsedescription { - description.qwrite(xml).await?; - } - - xml.q.write_event_async(Event::End(end)).await?; - Ok(()) - } -} - -/// LOCK REQUEST -impl QWrite for LockInfo { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let start = xml.create_dav_element("lockinfo"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - self.lockscope.qwrite(xml).await?; - self.locktype.qwrite(xml).await?; - if let Some(owner) = &self.owner { - owner.qwrite(xml).await?; - } - xml.q.write_event_async(Event::End(end)).await - } -} - -/// SOME LOCK RESPONSES -impl QWrite for PropValue { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let start = xml.create_dav_element("prop"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - for propval in &self.0 { - propval.qwrite(xml).await?; - } - xml.q.write_event_async(Event::End(end)).await - } -} - -/// Error response -impl QWrite for Error { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let start = xml.create_dav_element("error"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - for violation in &self.0 { - violation.qwrite(xml).await?; - } - xml.q.write_event_async(Event::End(end)).await - } -} - -// --- XML inner elements -impl QWrite for PropertyUpdateItem { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - match self { - Self::Set(set) => set.qwrite(xml).await, - Self::Remove(rm) => rm.qwrite(xml).await, - } - } -} - -impl QWrite for Set { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let start = xml.create_dav_element("set"); - let end = start.to_end(); - xml.q.write_event_async(Event::Start(start.clone())).await?; - self.0.qwrite(xml).await?; - xml.q.write_event_async(Event::End(end)).await - } -} - -impl QWrite for Remove { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let start = xml.create_dav_element("remove"); - let end = start.to_end(); - xml.q.write_event_async(Event::Start(start.clone())).await?; - self.0.qwrite(xml).await?; - xml.q.write_event_async(Event::End(end)).await - } -} - - -impl QWrite for PropName { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let start = 
xml.create_dav_element("prop"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - for propname in &self.0 { - propname.qwrite(xml).await?; - } - xml.q.write_event_async(Event::End(end)).await - } -} - - -impl QWrite for Href { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let start = xml.create_dav_element("href"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - xml.q.write_event_async(Event::Text(BytesText::new(&self.0))).await?; - xml.q.write_event_async(Event::End(end)).await - } -} - -impl> QWrite for Response { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let start = xml.create_dav_element("response"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - self.status_or_propstat.qwrite(xml).await?; - if let Some(error) = &self.error { - error.qwrite(xml).await?; - } - if let Some(responsedescription) = &self.responsedescription { - responsedescription.qwrite(xml).await?; - } - if let Some(location) = &self.location { - location.qwrite(xml).await?; - } - xml.q.write_event_async(Event::End(end)).await - } -} - -impl> QWrite for StatusOrPropstat { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - match self { - Self::Status(many_href, status) => { - for href in many_href.iter() { - href.qwrite(xml).await?; - } - status.qwrite(xml).await - }, - Self::PropStat(href, propstat_list) => { - href.qwrite(xml).await?; - for propstat in propstat_list.iter() { - propstat.qwrite(xml).await?; - } - Ok(()) - } - } - } -} - -impl QWrite for Status { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let start = xml.create_dav_element("status"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - - let txt = format!("HTTP/1.1 {} {}", self.0.as_str(), self.0.canonical_reason().unwrap_or("No reason")); - xml.q.write_event_async(Event::Text(BytesText::new(&txt))).await?; - - xml.q.write_event_async(Event::End(end)).await?; - - Ok(()) - } -} - -impl QWrite for ResponseDescription { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let start = xml.create_dav_element("responsedescription"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - xml.q.write_event_async(Event::Text(BytesText::new(&self.0))).await?; - xml.q.write_event_async(Event::End(end)).await - } -} - -impl QWrite for Location { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let start = xml.create_dav_element("location"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - self.0.qwrite(xml).await?; - xml.q.write_event_async(Event::End(end)).await - } -} - -impl> QWrite for PropStat { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let start = xml.create_dav_element("propstat"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - self.prop.qwrite(xml).await?; - self.status.qwrite(xml).await?; - if let Some(error) = &self.error { - error.qwrite(xml).await?; - } - if let Some(description) = &self.responsedescription { - description.qwrite(xml).await?; - } - xml.q.write_event_async(Event::End(end)).await?; - - Ok(()) - } -} - -impl QWrite for Property { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - use Property::*; - match self { - CreationDate(date) => { - // 
1997-12-01T17:42:21-08:00 - let start = xml.create_dav_element("creationdate"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - xml.q.write_event_async(Event::Text(BytesText::new(&date.to_rfc3339()))).await?; - xml.q.write_event_async(Event::End(end)).await?; - }, - DisplayName(name) => { - // Example collection - let start = xml.create_dav_element("displayname"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - xml.q.write_event_async(Event::Text(BytesText::new(name))).await?; - xml.q.write_event_async(Event::End(end)).await?; - }, - GetContentLanguage(lang) => { - let start = xml.create_dav_element("getcontentlanguage"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - xml.q.write_event_async(Event::Text(BytesText::new(lang))).await?; - xml.q.write_event_async(Event::End(end)).await?; - }, - GetContentLength(len) => { - // 4525 - let start = xml.create_dav_element("getcontentlength"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - xml.q.write_event_async(Event::Text(BytesText::new(&len.to_string()))).await?; - xml.q.write_event_async(Event::End(end)).await?; - }, - GetContentType(ct) => { - // text/html - let start = xml.create_dav_element("getcontenttype"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - xml.q.write_event_async(Event::Text(BytesText::new(&ct))).await?; - xml.q.write_event_async(Event::End(end)).await?; - }, - GetEtag(et) => { - // "zzyzx" - let start = xml.create_dav_element("getetag"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - xml.q.write_event_async(Event::Text(BytesText::new(et))).await?; - xml.q.write_event_async(Event::End(end)).await?; - }, - GetLastModified(date) => { - // Mon, 12 Jan 1998 09:25:56 GMT - let start = xml.create_dav_element("getlastmodified"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - xml.q.write_event_async(Event::Text(BytesText::new(&date.to_rfc2822()))).await?; - xml.q.write_event_async(Event::End(end)).await?; - }, - LockDiscovery(many_locks) => { - // ... - let start = xml.create_dav_element("lockdiscovery"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - for lock in many_locks.iter() { - lock.qwrite(xml).await?; - } - xml.q.write_event_async(Event::End(end)).await?; - }, - ResourceType(many_types) => { - // - - // - - // - // - // - // - - let start = xml.create_dav_element("resourcetype"); - if many_types.is_empty() { - xml.q.write_event_async(Event::Empty(start)).await?; - } else { - let end = start.to_end(); - xml.q.write_event_async(Event::Start(start.clone())).await?; - for restype in many_types.iter() { - restype.qwrite(xml).await?; - } - xml.q.write_event_async(Event::End(end)).await?; - } - }, - SupportedLock(many_entries) => { - // - - // ... 
- - let start = xml.create_dav_element("supportedlock"); - if many_entries.is_empty() { - xml.q.write_event_async(Event::Empty(start)).await?; - } else { - let end = start.to_end(); - xml.q.write_event_async(Event::Start(start.clone())).await?; - for entry in many_entries.iter() { - entry.qwrite(xml).await?; - } - xml.q.write_event_async(Event::End(end)).await?; - } - }, - Extension(inner) => inner.qwrite(xml).await?, - }; - Ok(()) - } -} - -impl QWrite for ResourceType { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - match self { - Self::Collection => { - let empty_collection = xml.create_dav_element("collection"); - xml.q.write_event_async(Event::Empty(empty_collection)).await - }, - Self::Extension(inner) => inner.qwrite(xml).await, - } - } -} - -impl QWrite for Include { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let start = xml.create_dav_element("include"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - for prop in self.0.iter() { - prop.qwrite(xml).await?; - } - xml.q.write_event_async(Event::End(end)).await - } -} - -impl QWrite for PropertyRequest { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - use PropertyRequest::*; - let mut atom = async |c| { - let empty_tag = xml.create_dav_element(c); - xml.q.write_event_async(Event::Empty(empty_tag)).await - }; - - match self { - CreationDate => atom("creationdate").await, - DisplayName => atom("displayname").await, - GetContentLanguage => atom("getcontentlanguage").await, - GetContentLength => atom("getcontentlength").await, - GetContentType => atom("getcontenttype").await, - GetEtag => atom("getetag").await, - GetLastModified => atom("getlastmodified").await, - LockDiscovery => atom("lockdiscovery").await, - ResourceType => atom("resourcetype").await, - SupportedLock => atom("supportedlock").await, - Extension(inner) => inner.qwrite(xml).await, - } - } -} - -impl QWrite for ActiveLock { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - // - // - // - // infinity - // - // http://example.org/~ejw/contact.html - // - // Second-604800 - // - // urn:uuid:e71d4fae-5dec-22d6-fea5-00a0c91e6be4 - // - // - // http://example.com/workspace/webdav/proposal.doc - // - // - let start = xml.create_dav_element("activelock"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - self.locktype.qwrite(xml).await?; - self.lockscope.qwrite(xml).await?; - self.depth.qwrite(xml).await?; - if let Some(owner) = &self.owner { - owner.qwrite(xml).await?; - } - if let Some(timeout) = &self.timeout { - timeout.qwrite(xml).await?; - } - if let Some(locktoken) = &self.locktoken { - locktoken.qwrite(xml).await?; - } - self.lockroot.qwrite(xml).await?; - xml.q.write_event_async(Event::End(end)).await - } -} - -impl QWrite for LockType { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let start = xml.create_dav_element("locktype"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - match self { - Self::Write => { - let empty_write = xml.create_dav_element("write"); - xml.q.write_event_async(Event::Empty(empty_write)).await? 
-            },
-        };
-        xml.q.write_event_async(Event::End(end)).await
-    }
-}
-
-impl QWrite for LockScope {
-    async fn qwrite(&self, xml: &mut Writer<impl IWrite>) -> Result<(), QError> {
-        let start = xml.create_dav_element("lockscope");
-        let end = start.to_end();
-
-        xml.q.write_event_async(Event::Start(start.clone())).await?;
-        match self {
-            Self::Exclusive => {
-                let empty_tag = xml.create_dav_element("exclusive");
-                xml.q.write_event_async(Event::Empty(empty_tag)).await?
-            },
-            Self::Shared => {
-                let empty_tag = xml.create_dav_element("shared");
-                xml.q.write_event_async(Event::Empty(empty_tag)).await?
-            },
-        };
-        xml.q.write_event_async(Event::End(end)).await
-    }
-}
-
-impl QWrite for Owner {
-    async fn qwrite(&self, xml: &mut Writer<impl IWrite>) -> Result<(), QError> {
-        let start = xml.create_dav_element("owner");
-        let end = start.to_end();
-
-        xml.q.write_event_async(Event::Start(start.clone())).await?;
-        match self {
-            Self::Txt(txt) => xml.q.write_event_async(Event::Text(BytesText::new(&txt))).await?,
-            Self::Href(href) => href.qwrite(xml).await?,
-            Self::Unknown => (),
-        }
-        xml.q.write_event_async(Event::End(end)).await
-    }
-}
-
-impl QWrite for Depth {
-    async fn qwrite(&self, xml: &mut Writer<impl IWrite>) -> Result<(), QError> {
-        let start = xml.create_dav_element("depth");
-        let end = start.to_end();
-
-        xml.q.write_event_async(Event::Start(start.clone())).await?;
-        match self {
-            Self::Zero => xml.q.write_event_async(Event::Text(BytesText::new("0"))).await?,
-            Self::One => xml.q.write_event_async(Event::Text(BytesText::new("1"))).await?,
-            Self::Infinity => xml.q.write_event_async(Event::Text(BytesText::new("infinity"))).await?,
-        };
-        xml.q.write_event_async(Event::End(end)).await
-    }
-}
-
-impl QWrite for Timeout {
-    async fn qwrite(&self, xml: &mut Writer<impl IWrite>) -> Result<(), QError> {
-        let start = xml.create_dav_element("timeout");
-        let end = start.to_end();
-
-        xml.q.write_event_async(Event::Start(start.clone())).await?;
-        match self {
-            Self::Seconds(count) => {
-                let txt = format!("Second-{}", count);
-                xml.q.write_event_async(Event::Text(BytesText::new(&txt))).await?
-            },
-            Self::Infinite => xml.q.write_event_async(Event::Text(BytesText::new("Infinite"))).await?
- }; - xml.q.write_event_async(Event::End(end)).await - } -} - -impl QWrite for LockToken { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let start = xml.create_dav_element("locktoken"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - self.0.qwrite(xml).await?; - xml.q.write_event_async(Event::End(end)).await - } -} - -impl QWrite for LockRoot { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let start = xml.create_dav_element("lockroot"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - self.0.qwrite(xml).await?; - xml.q.write_event_async(Event::End(end)).await - } -} - -impl QWrite for LockEntry { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let start = xml.create_dav_element("lockentry"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - self.lockscope.qwrite(xml).await?; - self.locktype.qwrite(xml).await?; - xml.q.write_event_async(Event::End(end)).await - } -} - -impl QWrite for Violation { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let mut atom = async |c| { - let empty_tag = xml.create_dav_element(c); - xml.q.write_event_async(Event::Empty(empty_tag)).await - }; - - match self { - Violation::LockTokenMatchesRequestUri => atom("lock-token-matches-request-uri").await, - Violation::LockTokenSubmitted(hrefs) if hrefs.is_empty() => atom("lock-token-submitted").await, - Violation::LockTokenSubmitted(hrefs) => { - let start = xml.create_dav_element("lock-token-submitted"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - for href in hrefs { - href.qwrite(xml).await?; - } - xml.q.write_event_async(Event::End(end)).await - }, - Violation::NoConflictingLock(hrefs) if hrefs.is_empty() => atom("no-conflicting-lock").await, - Violation::NoConflictingLock(hrefs) => { - let start = xml.create_dav_element("no-conflicting-lock"); - let end = start.to_end(); - - xml.q.write_event_async(Event::Start(start.clone())).await?; - for href in hrefs { - href.qwrite(xml).await?; - } - xml.q.write_event_async(Event::End(end)).await - }, - Violation::NoExternalEntities => atom("no-external-entities").await, - Violation::PreservedLiveProperties => atom("preserved-live-properties").await, - Violation::PropfindFiniteDepth => atom("propfind-finite-depth").await, - Violation::CannotModifyProtectedProperty => atom("cannot-modify-protected-property").await, - Violation::Extension(inner) => inner.qwrite(xml).await, - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::dav::realization::Core; - use tokio::io::AsyncWriteExt; - - /// To run only the unit tests and avoid the behavior ones: - /// cargo test --bin aerogramme - - async fn serialize(elem: &impl QWrite) -> String { - let mut buffer = Vec::new(); - let mut tokio_buffer = tokio::io::BufWriter::new(&mut buffer); - let q = quick_xml::writer::Writer::new_with_indent(&mut tokio_buffer, b' ', 4); - let ns_to_apply = vec![ ("xmlns:D".into(), "DAV:".into()) ]; - let mut writer = Writer { q, ns_to_apply }; - - elem.qwrite(&mut writer).await.expect("xml serialization"); - tokio_buffer.flush().await.expect("tokio buffer flush"); - let got = std::str::from_utf8(buffer.as_slice()).unwrap(); - - return got.into() - } - - #[tokio::test] - async fn basic_href() { - - let got = serialize( - &Href("/SOGo/dav/so/".into()) - ).await; - let expected = r#"/SOGo/dav/so/"#; - - assert_eq!(&got, 
expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); - } - - #[tokio::test] - async fn basic_multistatus() { - let got = serialize( - &Multistatus::> { - responses: vec![], - responsedescription: Some(ResponseDescription("Hello world".into())) - }, - ).await; - - let expected = r#" - Hello world -"#; - - assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); - } - - - #[tokio::test] - async fn rfc_error_delete_locked() { - let got = serialize( - &Error::(vec![ - Violation::LockTokenSubmitted(vec![ - Href("/locked/".into()) - ]) - ]), - ).await; - - let expected = r#" - - /locked/ - -"#; - - assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); - } - - #[tokio::test] - async fn rfc_propname_req() { - let got = serialize( - &PropFind::::PropName, - ).await; - - let expected = r#" - -"#; - - assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); - } - - #[tokio::test] - async fn rfc_propname_res() { - let got = serialize( - &Multistatus::> { - responses: vec![ - Response { - status_or_propstat: StatusOrPropstat::PropStat( - Href("http://www.example.com/container/".into()), - vec![PropStat { - prop: PropName(vec![ - PropertyRequest::CreationDate, - PropertyRequest::DisplayName, - PropertyRequest::ResourceType, - PropertyRequest::SupportedLock, - ]), - status: Status(http::status::StatusCode::OK), - error: None, - responsedescription: None, - }] - ), - error: None, - responsedescription: None, - location: None, - }, - Response { - status_or_propstat: StatusOrPropstat::PropStat( - Href("http://www.example.com/container/front.html".into()), - vec![PropStat { - prop: PropName(vec![ - PropertyRequest::CreationDate, - PropertyRequest::DisplayName, - PropertyRequest::GetContentLength, - PropertyRequest::GetContentType, - PropertyRequest::GetEtag, - PropertyRequest::GetLastModified, - PropertyRequest::ResourceType, - PropertyRequest::SupportedLock, - ]), - status: Status(http::status::StatusCode::OK), - error: None, - responsedescription: None, - } - ]), - error: None, - responsedescription: None, - location: None, - }, - ], - responsedescription: None, - }, - ).await; - - let expected = r#" - - http://www.example.com/container/ - - - - - - - - HTTP/1.1 200 OK - - - - http://www.example.com/container/front.html - - - - - - - - - - - - HTTP/1.1 200 OK - - -"#; - - - assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); - } - - #[tokio::test] - async fn rfc_allprop_req() { - let got = serialize( - &PropFind::::AllProp(None), - ).await; - - let expected = r#" - -"#; - - assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); - } - - #[tokio::test] - async fn rfc_allprop_res() { - use chrono::{DateTime,FixedOffset,TimeZone}; - let got = serialize( - &Multistatus::> { - responses: vec![ - Response { - status_or_propstat: StatusOrPropstat::PropStat( - Href("/container/".into()), - vec![PropStat { - prop: PropValue(vec![ - Property::CreationDate(FixedOffset::west_opt(8 * 3600) - .unwrap() - .with_ymd_and_hms(1997, 12, 1, 17, 42, 21) - .unwrap()), - Property::DisplayName("Example collection".into()), - Property::ResourceType(vec![ResourceType::Collection]), - Property::SupportedLock(vec![ - LockEntry { - lockscope: LockScope::Exclusive, - locktype: LockType::Write, - }, - LockEntry { - lockscope: LockScope::Shared, - locktype: LockType::Write, - }, - ]), - ]), - status: Status(http::status::StatusCode::OK), - error: None, - responsedescription: None, - }] - ), - error: None, - responsedescription: None, - 
location: None, - }, - Response { - status_or_propstat: StatusOrPropstat::PropStat( - Href("/container/front.html".into()), - vec![PropStat { - prop: PropValue(vec![ - Property::CreationDate(FixedOffset::west_opt(8 * 3600) - .unwrap() - .with_ymd_and_hms(1997, 12, 1, 18, 27, 21) - .unwrap()), - Property::DisplayName("Example HTML resource".into()), - Property::GetContentLength(4525), - Property::GetContentType("text/html".into()), - Property::GetEtag(r#""zzyzx""#.into()), - Property::GetLastModified(FixedOffset::east_opt(0) - .unwrap() - .with_ymd_and_hms(1998, 1, 12, 9, 25, 56) - .unwrap()), - Property::ResourceType(vec![]), - Property::SupportedLock(vec![ - LockEntry { - lockscope: LockScope::Exclusive, - locktype: LockType::Write, - }, - LockEntry { - lockscope: LockScope::Shared, - locktype: LockType::Write, - }, - ]), - ]), - status: Status(http::status::StatusCode::OK), - error: None, - responsedescription: None, - }] - ), - error: None, - responsedescription: None, - location: None, - }, - ], - responsedescription: None, - } - ).await; - - let expected = r#" - - /container/ - - - 1997-12-01T17:42:21-08:00 - Example collection - - - - - - - - - - - - - - - - - - - - - - - HTTP/1.1 200 OK - - - - /container/front.html - - - 1997-12-01T18:27:21-08:00 - Example HTML resource - 4525 - text/html - "zzyzx" - Mon, 12 Jan 1998 09:25:56 +0000 - - - - - - - - - - - - - - - - - - - - - HTTP/1.1 200 OK - - -"#; - - assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); - } - - #[tokio::test] - async fn rfc_allprop_include() { - let got = serialize( - &PropFind::::AllProp(Some(Include(vec![ - PropertyRequest::DisplayName, - PropertyRequest::ResourceType, - ]))), - ).await; - - let expected = r#" - - - - - -"#; - - assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); - } - - #[tokio::test] - async fn rfc_propertyupdate() { - let got = serialize( - &PropertyUpdate::(vec![ - PropertyUpdateItem::Set(Set(PropValue(vec![ - Property::GetContentLanguage("fr-FR".into()), - ]))), - PropertyUpdateItem::Remove(Remove(PropName(vec![ - PropertyRequest::DisplayName, - ]))), - ]), - ).await; - - let expected = r#" - - - fr-FR - - - - - - - -"#; - - assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); - } - - #[tokio::test] - async fn rfc_delete_locked2() { - let got = serialize( - &Multistatus::> { - responses: vec![Response { - status_or_propstat: StatusOrPropstat::Status( - vec![Href("http://www.example.com/container/resource3".into())], - Status(http::status::StatusCode::from_u16(423).unwrap()) - ), - error: Some(Error(vec![Violation::LockTokenSubmitted(vec![])])), - responsedescription: None, - location: None, - }], - responsedescription: None, - }, - ).await; - - let expected = r#" - - http://www.example.com/container/resource3 - HTTP/1.1 423 Locked - - - - -"#; - - assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); - } - - #[tokio::test] - async fn rfc_simple_lock_request() { - let got = serialize( - &LockInfo { - lockscope: LockScope::Exclusive, - locktype: LockType::Write, - owner: Some(Owner::Href(Href("http://example.org/~ejw/contact.html".into()))), - }, - ).await; - - let expected = r#" - - - - - - - - http://example.org/~ejw/contact.html - -"#; - - assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); - } - - #[tokio::test] - async fn rfc_simple_lock_response() { - let got = serialize( - &PropValue::(vec![ - Property::LockDiscovery(vec![ActiveLock { - lockscope: LockScope::Exclusive, - 
locktype: LockType::Write, - depth: Depth::Infinity, - owner: Some(Owner::Href(Href("http://example.org/~ejw/contact.html".into()))), - timeout: Some(Timeout::Seconds(604800)), - locktoken: Some(LockToken(Href("urn:uuid:e71d4fae-5dec-22d6-fea5-00a0c91e6be4".into()))), - lockroot: LockRoot(Href("http://example.com/workspace/webdav/proposal.doc".into())), - }]), - ]), - ).await; - - let expected = r#" - - - - - - - - - infinity - - http://example.org/~ejw/contact.html - - Second-604800 - - urn:uuid:e71d4fae-5dec-22d6-fea5-00a0c91e6be4 - - - http://example.com/workspace/webdav/proposal.doc - - - -"#; - - assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); - } -} diff --git a/src/dav/error.rs b/src/dav/error.rs deleted file mode 100644 index 78c6d6b..0000000 --- a/src/dav/error.rs +++ /dev/null @@ -1,42 +0,0 @@ -use quick_xml::events::attributes::AttrError; - -#[derive(Debug)] -pub enum ParsingError { - Recoverable, - MissingChild, - NamespacePrefixAlreadyUsed, - WrongToken, - TagNotFound, - InvalidValue, - Utf8Error(std::str::Utf8Error), - QuickXml(quick_xml::Error), - Chrono(chrono::format::ParseError), - Int(std::num::ParseIntError), - Eof -} -impl From for ParsingError { - fn from(value: AttrError) -> Self { - Self::QuickXml(value.into()) - } -} -impl From for ParsingError { - fn from(value: quick_xml::Error) -> Self { - Self::QuickXml(value) - } -} -impl From for ParsingError { - fn from(value: std::str::Utf8Error) -> Self { - Self::Utf8Error(value) - } -} -impl From for ParsingError { - fn from(value: chrono::format::ParseError) -> Self { - Self::Chrono(value) - } -} - -impl From for ParsingError { - fn from(value: std::num::ParseIntError) -> Self { - Self::Int(value) - } -} diff --git a/src/dav/mod.rs b/src/dav/mod.rs deleted file mode 100644 index 906cfdd..0000000 --- a/src/dav/mod.rs +++ /dev/null @@ -1,167 +0,0 @@ -// utils -pub mod error; -pub mod xml; - -// webdav -pub mod types; -pub mod encoder; -pub mod decoder; - -// calendar -mod caltypes; -mod calencoder; -mod caldecoder; - -// wip -mod acltypes; -mod versioningtypes; - -// final type -pub mod realization; - - -use std::net::SocketAddr; - -use anyhow::{anyhow, Result}; -use base64::Engine; -use hyper::service::service_fn; -use hyper::{Request, Response, body::Bytes}; -use hyper::server::conn::http1 as http; -use hyper_util::rt::TokioIo; -use http_body_util::Full; -use futures::stream::{FuturesUnordered, StreamExt}; -use tokio::net::TcpListener; -use tokio::sync::watch; - -use crate::config::DavUnsecureConfig; -use crate::login::ArcLoginProvider; -use crate::user::User; - -pub struct Server { - bind_addr: SocketAddr, - login_provider: ArcLoginProvider, -} - -pub fn new_unsecure(config: DavUnsecureConfig, login: ArcLoginProvider) -> Server { - Server { - bind_addr: config.bind_addr, - login_provider: login, - } -} - -impl Server { - pub async fn run(self: Self, mut must_exit: watch::Receiver) -> Result<()> { - let tcp = TcpListener::bind(self.bind_addr).await?; - tracing::info!("DAV server listening on {:#}", self.bind_addr); - - let mut connections = FuturesUnordered::new(); - while !*must_exit.borrow() { - let wait_conn_finished = async { - if connections.is_empty() { - futures::future::pending().await - } else { - connections.next().await - } - }; - let (socket, remote_addr) = tokio::select! 
{ - a = tcp.accept() => a?, - _ = wait_conn_finished => continue, - _ = must_exit.changed() => continue, - }; - tracing::info!("Accepted connection from {}", remote_addr); - let stream = TokioIo::new(socket); - let login = self.login_provider.clone(); - let conn = tokio::spawn(async move { - //@FIXME should create a generic "public web" server on which "routers" could be - //abitrarily bound - //@FIXME replace with a handler supporting http2 and TLS - match http::Builder::new().serve_connection(stream, service_fn(|req: Request| { - let login = login.clone(); - async move { - auth(login, req).await - } - })).await { - Err(e) => tracing::warn!(err=?e, "connection failed"), - Ok(()) => tracing::trace!("connection terminated with success"), - } - }); - connections.push(conn); - } - drop(tcp); - - tracing::info!("Server shutting down, draining remaining connections..."); - while connections.next().await.is_some() {} - - Ok(()) - } -} - -//@FIXME We should not support only BasicAuth -async fn auth( - login: ArcLoginProvider, - req: Request, -) -> Result>> { - - let auth_val = match req.headers().get("Authorization") { - Some(hv) => hv.to_str()?, - None => return Ok(Response::builder() - .status(401) - .body(Full::new(Bytes::from("Missing Authorization field")))?), - }; - - let b64_creds_maybe_padded = match auth_val.split_once(" ") { - Some(("Basic", b64)) => b64, - _ => return Ok(Response::builder() - .status(400) - .body(Full::new(Bytes::from("Unsupported Authorization field")))?), - }; - - // base64urlencoded may have trailing equals, base64urlsafe has not - // theoretically authorization is padded but "be liberal in what you accept" - let b64_creds_clean = b64_creds_maybe_padded.trim_end_matches('='); - - // Decode base64 - let creds = base64::engine::general_purpose::STANDARD_NO_PAD.decode(b64_creds_clean)?; - let str_creds = std::str::from_utf8(&creds)?; - - // Split username and password - let (username, password) = str_creds - .split_once(':') - .ok_or(anyhow!("Missing colon in Authorization, can't split decoded value into a username/password pair"))?; - - // Call login provider - let creds = match login.login(username, password).await { - Ok(c) => c, - Err(e) => return Ok(Response::builder() - .status(401) - .body(Full::new(Bytes::from("Wrong credentials")))?), - }; - - // Build a user - let user = User::new(username.into(), creds).await?; - - // Call router with user - router(user, req).await -} - -async fn router(user: std::sync::Arc, req: Request) -> Result>> { - let path_segments: Vec<_> = req.uri().path().split("/").filter(|s| *s != "").collect(); - match path_segments.as_slice() { - [] => tracing::info!("root"), - [ username, ..] 
if *username != user.username => return Ok(Response::builder() - .status(403) - .body(Full::new(Bytes::from("Accessing other user ressources is not allowed")))?), - [ _ ] => tracing::info!("user home"), - [ _, "calendar" ] => tracing::info!("user calendars"), - [ _, "calendar", colname ] => tracing::info!(name=colname, "selected calendar"), - [ _, "calendar", colname, member ] => tracing::info!(name=colname, obj=member, "selected event"), - _ => return Ok(Response::builder() - .status(404) - .body(Full::new(Bytes::from("Resource not found")))?), - } - Ok(Response::new(Full::new(Bytes::from("Hello World!")))) -} - -async fn collections(user: std::sync::Arc, req: Request) -> Result>> { - unimplemented!(); -} diff --git a/src/dav/realization.rs b/src/dav/realization.rs deleted file mode 100644 index 33a556e..0000000 --- a/src/dav/realization.rs +++ /dev/null @@ -1,42 +0,0 @@ -use super::types as dav; -use super::caltypes as cal; -use super::xml; -use super::error; - -#[derive(Debug, PartialEq)] -pub struct Disabled(()); -impl xml::QRead for Disabled { - async fn qread(xml: &mut xml::Reader) -> Result { - Err(error::ParsingError::Recoverable) - } -} -impl xml::QWrite for Disabled { - async fn qwrite(&self, xml: &mut xml::Writer) -> Result<(), quick_xml::Error> { - unreachable!(); - } -} - -/// The base WebDAV -/// -/// Any extension is kooh is disabled through an object we can't build -/// due to a private inner element. -#[derive(Debug, PartialEq)] -pub struct Core {} -impl dav::Extension for Core { - type Error = Disabled; - type Property = Disabled; - type PropertyRequest = Disabled; - type ResourceType = Disabled; -} - -// WebDAV with the base Calendar implementation (RFC4791) -#[derive(Debug, PartialEq)] -pub struct Calendar {} -impl dav::Extension for Calendar -{ - type Error = cal::Violation; - type Property = cal::Property; - type PropertyRequest = cal::PropertyRequest; - type ResourceType = cal::ResourceType; -} - diff --git a/src/dav/types.rs b/src/dav/types.rs deleted file mode 100644 index 5ea38d1..0000000 --- a/src/dav/types.rs +++ /dev/null @@ -1,950 +0,0 @@ -#![allow(dead_code)] -use std::fmt::Debug; - -use chrono::{DateTime,FixedOffset}; -use super::xml; -use super::error; - -/// It's how we implement a DAV extension -/// (That's the dark magic part...) -pub trait Extension: std::fmt::Debug + PartialEq { - type Error: xml::Node; - type Property: xml::Node; - type PropertyRequest: xml::Node; - type ResourceType: xml::Node; -} - -/// 14.1. activelock XML Element -/// -/// Name: activelock -/// -/// Purpose: Describes a lock on a resource. -/// -#[derive(Debug, PartialEq)] -pub struct ActiveLock { - pub lockscope: LockScope, - pub locktype: LockType, - pub depth: Depth, - pub owner: Option, - pub timeout: Option, - pub locktoken: Option, - pub lockroot: LockRoot, -} - -/// 14.3 collection XML Element -/// -/// Name: collection -/// -/// Purpose: Identifies the associated resource as a collection. The -/// DAV:resourcetype property of a collection resource MUST contain -/// this element. It is normally empty but extensions may add sub- -/// elements. -/// -/// -#[derive(Debug, PartialEq)] -pub struct Collection{} - -/// 14.4 depth XML Element -/// -/// Name: depth -/// -/// Purpose: Used for representing depth values in XML content (e.g., -/// in lock information). 
-/// -/// Value: "0" | "1" | "infinity" -/// -/// -#[derive(Debug, PartialEq)] -pub enum Depth { - Zero, - One, - Infinity -} - -/// 14.5 error XML Element -/// -/// Name: error -/// -/// Purpose: Error responses, particularly 403 Forbidden and 409 -/// Conflict, sometimes need more information to indicate what went -/// wrong. In these cases, servers MAY return an XML response body -/// with a document element of 'error', containing child elements -/// identifying particular condition codes. -/// -/// Description: Contains at least one XML element, and MUST NOT -/// contain text or mixed content. Any element that is a child of the -/// 'error' element is considered to be a precondition or -/// postcondition code. Unrecognized elements MUST be ignored. -/// -/// -#[derive(Debug, PartialEq)] -pub struct Error(pub Vec>); -#[derive(Debug, PartialEq)] -pub enum Violation { - /// Name: lock-token-matches-request-uri - /// - /// Use with: 409 Conflict - /// - /// Purpose: (precondition) -- A request may include a Lock-Token header - /// to identify a lock for the UNLOCK method. However, if the - /// Request-URI does not fall within the scope of the lock identified - /// by the token, the server SHOULD use this error. The lock may have - /// a scope that does not include the Request-URI, or the lock could - /// have disappeared, or the token may be invalid. - LockTokenMatchesRequestUri, - - /// Name: lock-token-submitted (precondition) - /// - /// Use with: 423 Locked - /// - /// Purpose: The request could not succeed because a lock token should - /// have been submitted. This element, if present, MUST contain at - /// least one URL of a locked resource that prevented the request. In - /// cases of MOVE, COPY, and DELETE where collection locks are - /// involved, it can be difficult for the client to find out which - /// locked resource made the request fail -- but the server is only - /// responsible for returning one such locked resource. The server - /// MAY return every locked resource that prevented the request from - /// succeeding if it knows them all. - /// - /// - LockTokenSubmitted(Vec), - - /// Name: no-conflicting-lock (precondition) - /// - /// Use with: Typically 423 Locked - /// - /// Purpose: A LOCK request failed due the presence of an already - /// existing conflicting lock. Note that a lock can be in conflict - /// although the resource to which the request was directed is only - /// indirectly locked. In this case, the precondition code can be - /// used to inform the client about the resource that is the root of - /// the conflicting lock, avoiding a separate lookup of the - /// "lockdiscovery" property. - /// - /// - NoConflictingLock(Vec), - - /// Name: no-external-entities - /// - /// Use with: 403 Forbidden - /// - /// Purpose: (precondition) -- If the server rejects a client request - /// because the request body contains an external entity, the server - /// SHOULD use this error. - NoExternalEntities, - - /// Name: preserved-live-properties - /// - /// Use with: 409 Conflict - /// - /// Purpose: (postcondition) -- The server received an otherwise-valid - /// MOVE or COPY request, but cannot maintain the live properties with - /// the same behavior at the destination. It may be that the server - /// only supports some live properties in some parts of the - /// repository, or simply has an internal error. 
- PreservedLiveProperties, - - /// Name: propfind-finite-depth - /// - /// Use with: 403 Forbidden - /// - /// Purpose: (precondition) -- This server does not allow infinite-depth - /// PROPFIND requests on collections. - PropfindFiniteDepth, - - - /// Name: cannot-modify-protected-property - /// - /// Use with: 403 Forbidden - /// - /// Purpose: (precondition) -- The client attempted to set a protected - /// property in a PROPPATCH (such as DAV:getetag). See also - /// [RFC3253], Section 3.12. - CannotModifyProtectedProperty, - - /// Specific errors - Extension(E::Error), -} - -/// 14.6. exclusive XML Element -/// -/// Name: exclusive -/// -/// Purpose: Specifies an exclusive lock. -/// -/// -#[derive(Debug, PartialEq)] -pub struct Exclusive {} - -/// 14.7. href XML Element -/// -/// Name: href -/// -/// Purpose: MUST contain a URI or a relative reference. -/// -/// Description: There may be limits on the value of 'href' depending -/// on the context of its use. Refer to the specification text where -/// 'href' is used to see what limitations apply in each case. -/// -/// Value: Simple-ref -/// -/// -#[derive(Debug, PartialEq)] -pub struct Href(pub String); - - -/// 14.8. include XML Element -/// -/// Name: include -/// -/// Purpose: Any child element represents the name of a property to be -/// included in the PROPFIND response. All elements inside an -/// 'include' XML element MUST define properties related to the -/// resource, although possible property names are in no way limited -/// to those property names defined in this document or other -/// standards. This element MUST NOT contain text or mixed content. -/// -/// -#[derive(Debug, PartialEq)] -pub struct Include(pub Vec>); - -/// 14.9. location XML Element -/// -/// Name: location -/// -/// Purpose: HTTP defines the "Location" header (see [RFC2616], Section -/// 14.30) for use with some status codes (such as 201 and the 300 -/// series codes). When these codes are used inside a 'multistatus' -/// element, the 'location' element can be used to provide the -/// accompanying Location header value. -/// -/// Description: Contains a single href element with the same value -/// that would be used in a Location header. -/// -/// -#[derive(Debug, PartialEq)] -pub struct Location(pub Href); - -/// 14.10. lockentry XML Element -/// -/// Name: lockentry -/// -/// Purpose: Defines the types of locks that can be used with the -/// resource. -/// -/// -#[derive(Debug, PartialEq)] -pub struct LockEntry { - pub lockscope: LockScope, - pub locktype: LockType, -} - -/// 14.11. lockinfo XML Element -/// -/// Name: lockinfo -/// -/// Purpose: The 'lockinfo' XML element is used with a LOCK method to -/// specify the type of lock the client wishes to have created. -/// -/// -#[derive(Debug, PartialEq)] -pub struct LockInfo { - pub lockscope: LockScope, - pub locktype: LockType, - pub owner: Option, -} - -/// 14.12. lockroot XML Element -/// -/// Name: lockroot -/// -/// Purpose: Contains the root URL of the lock, which is the URL -/// through which the resource was addressed in the LOCK request. -/// -/// Description: The href element contains the root of the lock. The -/// server SHOULD include this in all DAV:lockdiscovery property -/// values and the response to LOCK requests. -/// -/// -#[derive(Debug, PartialEq)] -pub struct LockRoot(pub Href); - -/// 14.13. lockscope XML Element -/// -/// Name: lockscope -/// -/// Purpose: Specifies whether a lock is an exclusive lock, or a shared -/// lock. 
-/// -#[derive(Debug, PartialEq)] -pub enum LockScope { - Exclusive, - Shared -} - -/// 14.14. locktoken XML Element -/// -/// Name: locktoken -/// -/// Purpose: The lock token associated with a lock. -/// -/// Description: The href contains a single lock token URI, which -/// refers to the lock. -/// -/// -#[derive(Debug, PartialEq)] -pub struct LockToken(pub Href); - -/// 14.15. locktype XML Element -/// -/// Name: locktype -/// -/// Purpose: Specifies the access type of a lock. At present, this -/// specification only defines one lock type, the write lock. -/// -/// -#[derive(Debug, PartialEq)] -pub enum LockType { - /// 14.30. write XML Element - /// - /// Name: write - /// - /// Purpose: Specifies a write lock. - /// - /// - /// - Write -} - -/// 14.16. multistatus XML Element -/// -/// Name: multistatus -/// -/// Purpose: Contains multiple response messages. -/// -/// Description: The 'responsedescription' element at the top level is -/// used to provide a general message describing the overarching -/// nature of the response. If this value is available, an -/// application may use it instead of presenting the individual -/// response descriptions contained within the responses. -/// -/// -#[derive(Debug, PartialEq)] -pub struct Multistatus> { - pub responses: Vec>, - pub responsedescription: Option, -} - -/// 14.17. owner XML Element -/// -/// Name: owner -/// -/// Purpose: Holds client-supplied information about the creator of a -/// lock. -/// -/// Description: Allows a client to provide information sufficient for -/// either directly contacting a principal (such as a telephone number -/// or Email URI), or for discovering the principal (such as the URL -/// of a homepage) who created a lock. The value provided MUST be -/// treated as a dead property in terms of XML Information Item -/// preservation. The server MUST NOT alter the value unless the -/// owner value provided by the client is empty. For a certain amount -/// of interoperability between different client implementations, if -/// clients have URI-formatted contact information for the lock -/// creator suitable for user display, then clients SHOULD put those -/// URIs in 'href' child elements of the 'owner' element. -/// -/// Extensibility: MAY be extended with child elements, mixed content, -/// text content or attributes. -/// -/// -//@FIXME might need support for an extension -#[derive(Debug, PartialEq)] -pub enum Owner { - Txt(String), - Href(Href), - Unknown, -} - -/// 14.18. prop XML Element -/// -/// Name: prop -/// -/// Purpose: Contains properties related to a resource. -/// -/// Description: A generic container for properties defined on -/// resources. All elements inside a 'prop' XML element MUST define -/// properties related to the resource, although possible property -/// names are in no way limited to those property names defined in -/// this document or other standards. This element MUST NOT contain -/// text or mixed content. -/// -/// -#[derive(Debug, PartialEq)] -pub struct PropName(pub Vec>); - -#[derive(Debug, PartialEq)] -pub struct PropValue(pub Vec>); - -/// 14.19. propertyupdate XML Element -/// -/// Name: propertyupdate -/// -/// Purpose: Contains a request to alter the properties on a resource. -/// -/// Description: This XML element is a container for the information -/// required to modify the properties on the resource. 
-/// -/// -#[derive(Debug, PartialEq)] -pub struct PropertyUpdate(pub Vec>); - -#[derive(Debug, PartialEq)] -pub enum PropertyUpdateItem { - Remove(Remove), - Set(Set), -} - -/// 14.2 allprop XML Element -/// -/// Name: allprop -/// -/// Purpose: Specifies that all names and values of dead properties and -/// the live properties defined by this document existing on the -/// resource are to be returned. -/// -/// -/// -/// --- -/// -/// 14.21. propname XML Element -/// -/// Name: propname -/// -/// Purpose: Specifies that only a list of property names on the -/// resource is to be returned. -/// -/// -/// -/// --- -/// -/// 14.20. propfind XML Element -/// -/// Name: propfind -/// -/// Purpose: Specifies the properties to be returned from a PROPFIND -/// method. Four special elements are specified for use with -/// 'propfind': 'prop', 'allprop', 'include', and 'propname'. If -/// 'prop' is used inside 'propfind', it MUST NOT contain property -/// values. -/// -/// -#[derive(Debug, PartialEq)] -pub enum PropFind { - PropName, - AllProp(Option>), - Prop(PropName), -} - -/// 14.22 propstat XML Element -/// -/// Name: propstat -/// -/// Purpose: Groups together a prop and status element that is -/// associated with a particular 'href' element. -/// -/// Description: The propstat XML element MUST contain one prop XML -/// element and one status XML element. The contents of the prop XML -/// element MUST only list the names of properties to which the result -/// in the status element applies. The optional precondition/ -/// postcondition element and 'responsedescription' text also apply to -/// the properties named in 'prop'. -/// -/// -#[derive(Debug, PartialEq)] -pub struct PropStat> { - pub prop: N, - pub status: Status, - pub error: Option>, - pub responsedescription: Option, -} - -/// 14.23. remove XML Element -/// -/// Name: remove -/// -/// Purpose: Lists the properties to be removed from a resource. -/// -/// Description: Remove instructs that the properties specified in prop -/// should be removed. Specifying the removal of a property that does -/// not exist is not an error. All the XML elements in a 'prop' XML -/// element inside of a 'remove' XML element MUST be empty, as only -/// the names of properties to be removed are required. -/// -/// -#[derive(Debug, PartialEq)] -pub struct Remove(pub PropName); - -/// 14.24. response XML Element -/// -/// Name: response -/// -/// Purpose: Holds a single response describing the effect of a method -/// on resource and/or its properties. -/// -/// Description: The 'href' element contains an HTTP URL pointing to a -/// WebDAV resource when used in the 'response' container. A -/// particular 'href' value MUST NOT appear more than once as the -/// child of a 'response' XML element under a 'multistatus' XML -/// element. This requirement is necessary in order to keep -/// processing costs for a response to linear time. Essentially, this -/// prevents having to search in order to group together all the -/// responses by 'href'. There are, however, no requirements -/// regarding ordering based on 'href' values. The optional -/// precondition/postcondition element and 'responsedescription' text -/// can provide additional information about this resource relative to -/// the request or result. -/// -/// -/// -/// --- rewritten as --- -/// -#[derive(Debug, PartialEq)] -pub enum StatusOrPropstat> { - // One status, multiple hrefs... - Status(Vec, Status), - // A single href, multiple properties... 
- PropStat(Href, Vec>), -} - -#[derive(Debug, PartialEq)] -pub struct Response> { - pub status_or_propstat: StatusOrPropstat, - pub error: Option>, - pub responsedescription: Option, - pub location: Option, -} - -/// 14.25. responsedescription XML Element -/// -/// Name: responsedescription -/// -/// Purpose: Contains information about a status response within a -/// Multi-Status. -/// -/// Description: Provides information suitable to be presented to a -/// user. -/// -/// -#[derive(Debug, PartialEq)] -pub struct ResponseDescription(pub String); - -/// 14.26. set XML Element -/// -/// Name: set -/// -/// Purpose: Lists the property values to be set for a resource. -/// -/// Description: The 'set' element MUST contain only a 'prop' element. -/// The elements contained by the 'prop' element inside the 'set' -/// element MUST specify the name and value of properties that are set -/// on the resource identified by Request-URI. If a property already -/// exists, then its value is replaced. Language tagging information -/// appearing in the scope of the 'prop' element (in the "xml:lang" -/// attribute, if present) MUST be persistently stored along with the -/// property, and MUST be subsequently retrievable using PROPFIND. -/// -/// -#[derive(Debug, PartialEq)] -pub struct Set(pub PropValue); - -/// 14.27. shared XML Element -/// -/// Name: shared -/// -/// Purpose: Specifies a shared lock. -/// -/// -/// -#[derive(Debug, PartialEq)] -pub struct Shared {} - - -/// 14.28. status XML Element -/// -/// Name: status -/// -/// Purpose: Holds a single HTTP status-line. -/// -/// Value: status-line (defined in Section 6.1 of [RFC2616]) -/// -/// -//@FIXME: Better typing is possible with an enum for example -#[derive(Debug, PartialEq)] -pub struct Status(pub http::status::StatusCode); - -/// 14.29. timeout XML Element -/// -/// Name: timeout -/// -/// Purpose: The number of seconds remaining before a lock expires. -/// -/// Value: TimeType (defined in Section 10.7) -/// -/// -/// -/// -/// TimeOut = "Timeout" ":" 1#TimeType -/// TimeType = ("Second-" DAVTimeOutVal | "Infinite") -/// ; No LWS allowed within TimeType -/// DAVTimeOutVal = 1*DIGIT -/// -/// Clients MAY include Timeout request headers in their LOCK requests. -/// However, the server is not required to honor or even consider these -/// requests. Clients MUST NOT submit a Timeout request header with any -/// method other than a LOCK method. -/// -/// The "Second" TimeType specifies the number of seconds that will -/// elapse between granting of the lock at the server, and the automatic -/// removal of the lock. The timeout value for TimeType "Second" MUST -/// NOT be greater than 2^32-1. -#[derive(Debug, PartialEq)] -pub enum Timeout { - Seconds(u32), - Infinite, -} - - -/// 15. DAV Properties -/// -/// For DAV properties, the name of the property is also the same as the -/// name of the XML element that contains its value. In the section -/// below, the final line of each section gives the element type -/// declaration using the format defined in [REC-XML]. The "Value" -/// field, where present, specifies further restrictions on the allowable -/// contents of the XML element using BNF (i.e., to further restrict the -/// values of a PCDATA element). -/// -/// A protected property is one that cannot be changed with a PROPPATCH -/// request. There may be other requests that would result in a change -/// to a protected property (as when a LOCK request affects the value of -/// DAV:lockdiscovery). 
Note that a given property could be protected on -/// one type of resource, but not protected on another type of resource. -/// -/// A computed property is one with a value defined in terms of a -/// computation (based on the content and other properties of that -/// resource, or even of some other resource). A computed property is -/// always a protected property. -/// -/// COPY and MOVE behavior refers to local COPY and MOVE operations. -/// -/// For properties defined based on HTTP GET response headers (DAV:get*), -/// the header value could include LWS as defined in [RFC2616], Section -/// 4.2. Server implementors SHOULD strip LWS from these values before -/// using as WebDAV property values. -#[derive(Debug, PartialEq)] -pub enum PropertyRequest { - CreationDate, - DisplayName, - GetContentLanguage, - GetContentLength, - GetContentType, - GetEtag, - GetLastModified, - LockDiscovery, - ResourceType, - SupportedLock, - Extension(E::PropertyRequest), -} - -#[derive(Debug, PartialEq)] -pub enum Property { - /// 15.1. creationdate Property - /// - /// Name: creationdate - /// - /// Purpose: Records the time and date the resource was created. - /// - /// Value: date-time (defined in [RFC3339], see the ABNF in Section - /// 5.6.) - /// - /// Protected: MAY be protected. Some servers allow DAV:creationdate - /// to be changed to reflect the time the document was created if that - /// is more meaningful to the user (rather than the time it was - /// uploaded). Thus, clients SHOULD NOT use this property in - /// synchronization logic (use DAV:getetag instead). - /// - /// COPY/MOVE behavior: This property value SHOULD be kept during a - /// MOVE operation, but is normally re-initialized when a resource is - /// created with a COPY. It should not be set in a COPY. - /// - /// Description: The DAV:creationdate property SHOULD be defined on all - /// DAV compliant resources. If present, it contains a timestamp of - /// the moment when the resource was created. Servers that are - /// incapable of persistently recording the creation date SHOULD - /// instead leave it undefined (i.e. report "Not Found"). - /// - /// - CreationDate(DateTime), - - /// 15.2. displayname Property - /// - /// Name: displayname - /// - /// Purpose: Provides a name for the resource that is suitable for - /// presentation to a user. - /// - /// Value: Any text. - /// - /// Protected: SHOULD NOT be protected. Note that servers implementing - /// [RFC2518] might have made this a protected property as this is a - /// new requirement. - /// - /// COPY/MOVE behavior: This property value SHOULD be preserved in COPY - /// and MOVE operations. - /// - /// Description: Contains a description of the resource that is - /// suitable for presentation to a user. This property is defined on - /// the resource, and hence SHOULD have the same value independent of - /// the Request-URI used to retrieve it (thus, computing this property - /// based on the Request-URI is deprecated). While generic clients - /// might display the property value to end users, client UI designers - /// must understand that the method for identifying resources is still - /// the URL. Changes to DAV:displayname do not issue moves or copies - /// to the server, but simply change a piece of meta-data on the - /// individual resource. Two resources can have the same DAV: - /// displayname value even within the same collection. - /// - /// - DisplayName(String), - - - /// 15.3. 
getcontentlanguage Property - /// - /// Name: getcontentlanguage - /// - /// Purpose: Contains the Content-Language header value (from Section - /// 14.12 of [RFC2616]) as it would be returned by a GET without - /// accept headers. - /// - /// Value: language-tag (language-tag is defined in Section 3.10 of - /// [RFC2616]) - /// - /// Protected: SHOULD NOT be protected, so that clients can reset the - /// language. Note that servers implementing [RFC2518] might have - /// made this a protected property as this is a new requirement. - /// - /// COPY/MOVE behavior: This property value SHOULD be preserved in COPY - /// and MOVE operations. - /// - /// Description: The DAV:getcontentlanguage property MUST be defined on - /// any DAV-compliant resource that returns the Content-Language - /// header on a GET. - /// - /// - GetContentLanguage(String), - - /// 15.4. getcontentlength Property - /// - /// Name: getcontentlength - /// - /// Purpose: Contains the Content-Length header returned by a GET - /// without accept headers. - /// - /// Value: See Section 14.13 of [RFC2616]. - /// - /// Protected: This property is computed, therefore protected. - /// - /// Description: The DAV:getcontentlength property MUST be defined on - /// any DAV-compliant resource that returns the Content-Length header - /// in response to a GET. - /// - /// COPY/MOVE behavior: This property value is dependent on the size of - /// the destination resource, not the value of the property on the - /// source resource. - /// - /// - GetContentLength(u64), - - /// 15.5. getcontenttype Property - /// - /// Name: getcontenttype - /// - /// Purpose: Contains the Content-Type header value (from Section 14.17 - /// of [RFC2616]) as it would be returned by a GET without accept - /// headers. - /// - /// Value: media-type (defined in Section 3.7 of [RFC2616]) - /// - /// Protected: Potentially protected if the server prefers to assign - /// content types on its own (see also discussion in Section 9.7.1). - /// - /// COPY/MOVE behavior: This property value SHOULD be preserved in COPY - /// and MOVE operations. - /// - /// Description: This property MUST be defined on any DAV-compliant - /// resource that returns the Content-Type header in response to a - /// GET. - /// - /// - GetContentType(String), - - /// 15.6. getetag Property - /// - /// Name: getetag - /// - /// Purpose: Contains the ETag header value (from Section 14.19 of - /// [RFC2616]) as it would be returned by a GET without accept - /// headers. - /// - /// Value: entity-tag (defined in Section 3.11 of [RFC2616]) - /// - /// Protected: MUST be protected because this value is created and - /// controlled by the server. - /// - /// COPY/MOVE behavior: This property value is dependent on the final - /// state of the destination resource, not the value of the property - /// on the source resource. Also note the considerations in - /// Section 8.8. - /// - /// Description: The getetag property MUST be defined on any DAV- - /// compliant resource that returns the Etag header. Refer to Section - /// 3.11 of RFC 2616 for a complete definition of the semantics of an - /// ETag, and to Section 8.6 for a discussion of ETags in WebDAV. - /// - /// - GetEtag(String), - - /// 15.7. getlastmodified Property - /// - /// Name: getlastmodified - /// - /// Purpose: Contains the Last-Modified header value (from Section - /// 14.29 of [RFC2616]) as it would be returned by a GET method - /// without accept headers. 
- /// - /// Value: rfc1123-date (defined in Section 3.3.1 of [RFC2616]) - /// - /// Protected: SHOULD be protected because some clients may rely on the - /// value for appropriate caching behavior, or on the value of the - /// Last-Modified header to which this property is linked. - /// - /// COPY/MOVE behavior: This property value is dependent on the last - /// modified date of the destination resource, not the value of the - /// property on the source resource. Note that some server - /// implementations use the file system date modified value for the - /// DAV:getlastmodified value, and this can be preserved in a MOVE - /// even when the HTTP Last-Modified value SHOULD change. Note that - /// since [RFC2616] requires clients to use ETags where provided, a - /// server implementing ETags can count on clients using a much better - /// mechanism than modification dates for offline synchronization or - /// cache control. Also note the considerations in Section 8.8. - /// - /// Description: The last-modified date on a resource SHOULD only - /// reflect changes in the body (the GET responses) of the resource. - /// A change in a property only SHOULD NOT cause the last-modified - /// date to change, because clients MAY rely on the last-modified date - /// to know when to overwrite the existing body. The DAV: - /// getlastmodified property MUST be defined on any DAV-compliant - /// resource that returns the Last-Modified header in response to a - /// GET. - /// - /// - GetLastModified(DateTime), - - /// 15.8. lockdiscovery Property - /// - /// Name: lockdiscovery - /// - /// Purpose: Describes the active locks on a resource - /// - /// Protected: MUST be protected. Clients change the list of locks - /// through LOCK and UNLOCK, not through PROPPATCH. - /// - /// COPY/MOVE behavior: The value of this property depends on the lock - /// state of the destination, not on the locks of the source resource. - /// Recall that locks are not moved in a MOVE operation. - /// - /// Description: Returns a listing of who has a lock, what type of lock - /// he has, the timeout type and the time remaining on the timeout, - /// and the associated lock token. Owner information MAY be omitted - /// if it is considered sensitive. If there are no locks, but the - /// server supports locks, the property will be present but contain - /// zero 'activelock' elements. If there are one or more locks, an - /// 'activelock' element appears for each lock on the resource. This - /// property is NOT lockable with respect to write locks (Section 7). - /// - /// - LockDiscovery(Vec), - - - /// 15.9. resourcetype Property - /// - /// Name: resourcetype - /// - /// Purpose: Specifies the nature of the resource. - /// - /// Protected: SHOULD be protected. Resource type is generally decided - /// through the operation creating the resource (MKCOL vs PUT), not by - /// PROPPATCH. - /// - /// COPY/MOVE behavior: Generally a COPY/MOVE of a resource results in - /// the same type of resource at the destination. - /// - /// Description: MUST be defined on all DAV-compliant resources. Each - /// child element identifies a specific type the resource belongs to, - /// such as 'collection', which is the only resource type defined by - /// this specification (see Section 14.3). If the element contains - /// the 'collection' child element plus additional unrecognized - /// elements, it should generally be treated as a collection. 
If the - /// element contains no recognized child elements, it should be - /// treated as a non-collection resource. The default value is empty. - /// This element MUST NOT contain text or mixed content. Any custom - /// child element is considered to be an identifier for a resource - /// type. - /// - /// Example: (fictional example to show extensibility) - /// - /// - /// - /// - /// - ResourceType(Vec>), - - /// 15.10. supportedlock Property - /// - /// Name: supportedlock - /// - /// Purpose: To provide a listing of the lock capabilities supported by - /// the resource. - /// - /// Protected: MUST be protected. Servers, not clients, determine what - /// lock mechanisms are supported. - /// COPY/MOVE behavior: This property value is dependent on the kind of - /// locks supported at the destination, not on the value of the - /// property at the source resource. Servers attempting to COPY to a - /// destination should not attempt to set this property at the - /// destination. - /// - /// Description: Returns a listing of the combinations of scope and - /// access types that may be specified in a lock request on the - /// resource. Note that the actual contents are themselves controlled - /// by access controls, so a server is not required to provide - /// information the client is not authorized to see. This property is - /// NOT lockable with respect to write locks (Section 7). - /// - /// - SupportedLock(Vec), - - /// Any extension - Extension(E::Property), -} - -#[derive(Debug, PartialEq)] -pub enum ResourceType { - Collection, - Extension(E::ResourceType), -} diff --git a/src/dav/versioningtypes.rs b/src/dav/versioningtypes.rs deleted file mode 100644 index 6c1c204..0000000 --- a/src/dav/versioningtypes.rs +++ /dev/null @@ -1,3 +0,0 @@ -//@FIXME required for a full DAV implementation -// See section 7.1 of the CalDAV RFC -// It seems it's mainly due to the fact that the REPORT method is re-used. 
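The Extension trait above only fixes four associated node types; realization.rs binds them either to Disabled (plain WebDAV, the Core flavour) or to the caltypes equivalents (the Calendar flavour). As a minimal sketch of how one more flavour would plug in, assuming a hypothetical card module that is not part of this patch:

// Hypothetical third realization, following the same pattern as Core and Calendar.
// The `cardtypes` module and its node types are assumptions for illustration only.
use super::types as dav;
use super::cardtypes as card; // does not exist in this patch

#[derive(Debug, PartialEq)]
pub struct AddressBook {}

impl dav::Extension for AddressBook {
    type Error = card::Violation;
    type Property = card::Property;
    type PropertyRequest = card::PropertyRequest;
    type ResourceType = card::ResourceType;
}

Any generic container in types.rs (Multistatus, PropFind, Error, and so on) then takes such a marker type as its E parameter, exactly as Core and Calendar do in the encoder tests.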
diff --git a/src/dav/xml.rs b/src/dav/xml.rs deleted file mode 100644 index 02263fd..0000000 --- a/src/dav/xml.rs +++ /dev/null @@ -1,273 +0,0 @@ -use tokio::io::{AsyncWrite, AsyncBufRead}; -use quick_xml::events::{Event, BytesEnd, BytesStart, BytesText}; -use quick_xml::name::{Namespace, QName, PrefixDeclaration, ResolveResult, ResolveResult::*}; -use quick_xml::reader::NsReader; - -use super::error::ParsingError; - -// Constants -pub const DAV_URN: &[u8] = b"DAV:"; -pub const CAL_URN: &[u8] = b"urn:ietf:params:xml:ns:caldav"; -pub const CARD_URN: &[u8] = b"urn:ietf:params:xml:ns:carddav"; - -// Async traits -pub trait IWrite = AsyncWrite + Unpin; -pub trait IRead = AsyncBufRead + Unpin; - -// Serialization/Deserialization traits -pub trait QWrite { - async fn qwrite(&self, xml: &mut Writer) -> Result<(), quick_xml::Error>; -} -pub trait QRead { - async fn qread(xml: &mut Reader) -> Result; -} - -// The representation of an XML node in Rust -pub trait Node = QRead + QWrite + std::fmt::Debug + PartialEq; - -// --------------- - -/// Transform a Rust object into an XML stream of characters -pub struct Writer { - pub q: quick_xml::writer::Writer, - pub ns_to_apply: Vec<(String, String)>, -} -impl Writer { - pub fn create_dav_element(&mut self, name: &str) -> BytesStart<'static> { - self.create_ns_element("D", name) - } - pub fn create_cal_element(&mut self, name: &str) -> BytesStart<'static> { - self.create_ns_element("C", name) - } - - fn create_ns_element(&mut self, ns: &str, name: &str) -> BytesStart<'static> { - let mut start = BytesStart::new(format!("{}:{}", ns, name)); - if !self.ns_to_apply.is_empty() { - start.extend_attributes(self.ns_to_apply.iter().map(|(k, n)| (k.as_str(), n.as_str()))); - self.ns_to_apply.clear() - } - start - } -} - -/// Transform an XML stream of characters into a Rust object -pub struct Reader { - pub rdr: NsReader, - cur: Event<'static>, - parents: Vec>, - buf: Vec, -} -impl Reader { - pub async fn new(mut rdr: NsReader) -> Result { - let mut buf: Vec = vec![]; - let cur = rdr.read_event_into_async(&mut buf).await?.into_owned(); - let parents = vec![]; - buf.clear(); - Ok(Self { cur, parents, rdr, buf }) - } - - /// read one more tag - /// do not expose it publicly - async fn next(&mut self) -> Result, ParsingError> { - let evt = self.rdr.read_event_into_async(&mut self.buf).await?.into_owned(); - self.buf.clear(); - let old_evt = std::mem::replace(&mut self.cur, evt); - Ok(old_evt) - } - - /// skip a node at current level - /// I would like to make this one private but not ready - pub async fn skip(&mut self) -> Result, ParsingError> { - //println!("skipping inside node {:?}", self.parents.last()); - match &self.cur { - Event::Start(b) => { - let _span = self.rdr.read_to_end_into_async(b.to_end().name(), &mut self.buf).await?; - self.next().await - }, - Event::End(_) => Err(ParsingError::WrongToken), - Event::Eof => Err(ParsingError::Eof), - _ => self.next().await, - } - } - - /// check if this is the desired tag - fn is_tag(&self, ns: &[u8], key: &str) -> bool { - let qname = match self.peek() { - Event::Start(bs) | Event::Empty(bs) => bs.name(), - Event::End(be) => be.name(), - _ => return false, - }; - - let (extr_ns, local) = self.rdr.resolve_element(qname); - - if local.into_inner() != key.as_bytes() { - return false - } - - match extr_ns { - ResolveResult::Bound(v) => v.into_inner() == ns, - _ => false, - } - } - - fn parent_has_child(&self) -> bool { - matches!(self.parents.last(), Some(Event::Start(_)) | None) - } - - fn 
ensure_parent_has_child(&self) -> Result<(), ParsingError> { - match self.parent_has_child() { - true => Ok(()), - false => Err(ParsingError::Recoverable), - } - } - - pub fn peek(&self) -> &Event<'static> { - &self.cur - } - - // NEW API - pub async fn tag_string(&mut self) -> Result { - self.ensure_parent_has_child()?; - - let mut acc = String::new(); - loop { - match self.peek() { - Event::CData(unescaped) => { - acc.push_str(std::str::from_utf8(unescaped.as_ref())?); - self.next().await? - }, - Event::Text(escaped) => { - acc.push_str(escaped.unescape()?.as_ref()); - self.next().await? - } - Event::End(_) | Event::Start(_) | Event::Empty(_) => return Ok(acc), - _ => self.next().await?, - }; - } - } - - pub async fn maybe_read>(&mut self, t: &mut Option, dirty: &mut bool) -> Result<(), ParsingError> { - if !self.parent_has_child() { - return Ok(()) - } - - match N::qread(self).await { - Ok(v) => { - *t = Some(v); - *dirty = true; - Ok(()) - }, - Err(ParsingError::Recoverable) => Ok(()), - Err(e) => Err(e), - } - } - - pub async fn maybe_push>(&mut self, t: &mut Vec, dirty: &mut bool) -> Result<(), ParsingError> { - if !self.parent_has_child() { - return Ok(()) - } - - match N::qread(self).await { - Ok(v) => { - t.push(v); - *dirty = true; - Ok(()) - }, - Err(ParsingError::Recoverable) => Ok(()), - Err(e) => Err(e), - } - } - - pub async fn find>(&mut self) -> Result { - self.ensure_parent_has_child()?; - - loop { - // Try parse - match N::qread(self).await { - Err(ParsingError::Recoverable) => (), - otherwise => return otherwise, - } - - // If recovered, skip the element - self.skip().await?; - } - } - - pub async fn maybe_find>(&mut self) -> Result, ParsingError> { - self.ensure_parent_has_child()?; - - loop { - // Try parse - match N::qread(self).await { - Err(ParsingError::Recoverable) => (), - otherwise => return otherwise.map(Some), - } - - match self.peek() { - Event::End(_) => return Ok(None), - _ => self.skip().await?, - }; - } - } - - pub async fn collect>(&mut self) -> Result, ParsingError> { - self.ensure_parent_has_child()?; - let mut acc = Vec::new(); - - loop { - match N::qread(self).await { - Err(ParsingError::Recoverable) => match self.peek() { - Event::End(_) => return Ok(acc), - _ => { - self.skip().await?; - }, - }, - Ok(v) => acc.push(v), - Err(e) => return Err(e), - } - } - } - - pub async fn open(&mut self, ns: &[u8], key: &str) -> Result, ParsingError> { - let evt = match self.peek() { - Event::Empty(_) if self.is_tag(ns, key) => self.cur.clone(), - Event::Start(_) if self.is_tag(ns, key) => self.next().await?, - _ => return Err(ParsingError::Recoverable), - }; - - //println!("open tag {:?}", evt); - self.parents.push(evt.clone()); - Ok(evt) - } - - pub async fn maybe_open(&mut self, ns: &[u8], key: &str) -> Result>, ParsingError> { - match self.open(ns, key).await { - Ok(v) => Ok(Some(v)), - Err(ParsingError::Recoverable) => Ok(None), - Err(e) => Err(e), - } - } - - // find stop tag - pub async fn close(&mut self) -> Result, ParsingError> { - //println!("close tag {:?}", self.parents.last()); - - // Handle the empty case - if !self.parent_has_child() { - self.parents.pop(); - return self.next().await - } - - // Handle the start/end case - loop { - match self.peek() { - Event::End(_) => { - self.parents.pop(); - return self.next().await - }, - _ => self.skip().await?, - }; - } - } -} - diff --git a/src/imap/attributes.rs b/src/imap/attributes.rs deleted file mode 100644 index 89446a8..0000000 --- a/src/imap/attributes.rs +++ /dev/null @@ -1,77 +0,0 @@ -use 
imap_codec::imap_types::command::FetchModifier; -use imap_codec::imap_types::fetch::{MacroOrMessageDataItemNames, MessageDataItemName, Section}; - -/// Internal decisions based on fetched attributes -/// passed by the client - -pub struct AttributesProxy { - pub attrs: Vec>, -} -impl AttributesProxy { - pub fn new( - attrs: &MacroOrMessageDataItemNames<'static>, - modifiers: &[FetchModifier], - is_uid_fetch: bool, - ) -> Self { - // Expand macros - let mut fetch_attrs = match attrs { - MacroOrMessageDataItemNames::Macro(m) => { - use imap_codec::imap_types::fetch::Macro; - use MessageDataItemName::*; - match m { - Macro::All => vec![Flags, InternalDate, Rfc822Size, Envelope], - Macro::Fast => vec![Flags, InternalDate, Rfc822Size], - Macro::Full => vec![Flags, InternalDate, Rfc822Size, Envelope, Body], - _ => { - tracing::error!("unimplemented macro"); - vec![] - } - } - } - MacroOrMessageDataItemNames::MessageDataItemNames(a) => a.clone(), - }; - - // Handle uids - if is_uid_fetch && !fetch_attrs.contains(&MessageDataItemName::Uid) { - fetch_attrs.push(MessageDataItemName::Uid); - } - - // Handle inferred MODSEQ tag - let is_changed_since = modifiers - .iter() - .any(|m| matches!(m, FetchModifier::ChangedSince(..))); - if is_changed_since && !fetch_attrs.contains(&MessageDataItemName::ModSeq) { - fetch_attrs.push(MessageDataItemName::ModSeq); - } - - Self { attrs: fetch_attrs } - } - - pub fn is_enabling_condstore(&self) -> bool { - self.attrs - .iter() - .any(|x| matches!(x, MessageDataItemName::ModSeq)) - } - - pub fn need_body(&self) -> bool { - self.attrs.iter().any(|x| match x { - MessageDataItemName::Body - | MessageDataItemName::Rfc822 - | MessageDataItemName::Rfc822Text - | MessageDataItemName::BodyStructure => true, - - MessageDataItemName::BodyExt { - section: Some(section), - partial: _, - peek: _, - } => match section { - Section::Header(None) - | Section::HeaderFields(None, _) - | Section::HeaderFieldsNot(None, _) => false, - _ => true, - }, - MessageDataItemName::BodyExt { .. 
} => true, - _ => false, - }) - } -} diff --git a/src/imap/capability.rs b/src/imap/capability.rs deleted file mode 100644 index c76b51c..0000000 --- a/src/imap/capability.rs +++ /dev/null @@ -1,159 +0,0 @@ -use imap_codec::imap_types::command::{FetchModifier, SelectExamineModifier, StoreModifier}; -use imap_codec::imap_types::core::Vec1; -use imap_codec::imap_types::extensions::enable::{CapabilityEnable, Utf8Kind}; -use imap_codec::imap_types::response::Capability; -use std::collections::HashSet; - -use crate::imap::attributes::AttributesProxy; - -fn capability_unselect() -> Capability<'static> { - Capability::try_from("UNSELECT").unwrap() -} - -fn capability_condstore() -> Capability<'static> { - Capability::try_from("CONDSTORE").unwrap() -} - -fn capability_uidplus() -> Capability<'static> { - Capability::try_from("UIDPLUS").unwrap() -} - -fn capability_liststatus() -> Capability<'static> { - Capability::try_from("LIST-STATUS").unwrap() -} - -/* -fn capability_qresync() -> Capability<'static> { - Capability::try_from("QRESYNC").unwrap() -} -*/ - -#[derive(Debug, Clone)] -pub struct ServerCapability(HashSet>); - -impl Default for ServerCapability { - fn default() -> Self { - Self(HashSet::from([ - Capability::Imap4Rev1, - Capability::Enable, - Capability::Move, - Capability::LiteralPlus, - Capability::Idle, - capability_unselect(), - capability_condstore(), - capability_uidplus(), - capability_liststatus(), - //capability_qresync(), - ])) - } -} - -impl ServerCapability { - pub fn to_vec(&self) -> Vec1> { - self.0 - .iter() - .map(|v| v.clone()) - .collect::>() - .try_into() - .unwrap() - } - - #[allow(dead_code)] - pub fn support(&self, cap: &Capability<'static>) -> bool { - self.0.contains(cap) - } -} - -#[derive(Clone)] -pub enum ClientStatus { - NotSupportedByServer, - Disabled, - Enabled, -} -impl ClientStatus { - pub fn is_enabled(&self) -> bool { - matches!(self, Self::Enabled) - } - - pub fn enable(&self) -> Self { - match self { - Self::Disabled => Self::Enabled, - other => other.clone(), - } - } -} - -pub struct ClientCapability { - pub condstore: ClientStatus, - pub utf8kind: Option, -} - -impl ClientCapability { - pub fn new(sc: &ServerCapability) -> Self { - Self { - condstore: match sc.0.contains(&capability_condstore()) { - true => ClientStatus::Disabled, - _ => ClientStatus::NotSupportedByServer, - }, - utf8kind: None, - } - } - - pub fn enable_condstore(&mut self) { - self.condstore = self.condstore.enable(); - } - - pub fn attributes_enable(&mut self, ap: &AttributesProxy) { - if ap.is_enabling_condstore() { - self.enable_condstore() - } - } - - pub fn fetch_modifiers_enable(&mut self, mods: &[FetchModifier]) { - if mods - .iter() - .any(|x| matches!(x, FetchModifier::ChangedSince(..))) - { - self.enable_condstore() - } - } - - pub fn store_modifiers_enable(&mut self, mods: &[StoreModifier]) { - if mods - .iter() - .any(|x| matches!(x, StoreModifier::UnchangedSince(..))) - { - self.enable_condstore() - } - } - - pub fn select_enable(&mut self, mods: &[SelectExamineModifier]) { - for m in mods.iter() { - match m { - SelectExamineModifier::Condstore => self.enable_condstore(), - } - } - } - - pub fn try_enable( - &mut self, - caps: &[CapabilityEnable<'static>], - ) -> Vec> { - let mut enabled = vec![]; - for cap in caps { - match cap { - CapabilityEnable::CondStore if matches!(self.condstore, ClientStatus::Disabled) => { - self.condstore = ClientStatus::Enabled; - enabled.push(cap.clone()); - } - CapabilityEnable::Utf8(kind) if Some(kind) != self.utf8kind.as_ref() => { - 
self.utf8kind = Some(kind.clone()); - enabled.push(cap.clone()); - } - _ => (), - } - } - - enabled - } -} diff --git a/src/imap/command/anonymous.rs b/src/imap/command/anonymous.rs deleted file mode 100644 index 811d1e4..0000000 --- a/src/imap/command/anonymous.rs +++ /dev/null @@ -1,83 +0,0 @@ -use anyhow::Result; -use imap_codec::imap_types::command::{Command, CommandBody}; -use imap_codec::imap_types::core::AString; -use imap_codec::imap_types::response::Code; -use imap_codec::imap_types::secret::Secret; - -use crate::imap::capability::ServerCapability; -use crate::imap::command::anystate; -use crate::imap::flow; -use crate::imap::response::Response; -use crate::login::ArcLoginProvider; -use crate::user::User; - -//--- dispatching - -pub struct AnonymousContext<'a> { - pub req: &'a Command<'static>, - pub server_capabilities: &'a ServerCapability, - pub login_provider: &'a ArcLoginProvider, -} - -pub async fn dispatch(ctx: AnonymousContext<'_>) -> Result<(Response<'static>, flow::Transition)> { - match &ctx.req.body { - // Any State - CommandBody::Noop => anystate::noop_nothing(ctx.req.tag.clone()), - CommandBody::Capability => { - anystate::capability(ctx.req.tag.clone(), ctx.server_capabilities) - } - CommandBody::Logout => anystate::logout(), - - // Specific to anonymous context (3 commands) - CommandBody::Login { username, password } => ctx.login(username, password).await, - CommandBody::Authenticate { .. } => { - anystate::not_implemented(ctx.req.tag.clone(), "authenticate") - } - //StartTLS is not implemented for now, we will probably go full TLS. - - // Collect other commands - _ => anystate::wrong_state(ctx.req.tag.clone()), - } -} - -//--- Command controllers, private - -impl<'a> AnonymousContext<'a> { - async fn login( - self, - username: &AString<'a>, - password: &Secret>, - ) -> Result<(Response<'static>, flow::Transition)> { - let (u, p) = ( - std::str::from_utf8(username.as_ref())?, - std::str::from_utf8(password.declassify().as_ref())?, - ); - tracing::info!(user = %u, "command.login"); - - let creds = match self.login_provider.login(&u, &p).await { - Err(e) => { - tracing::debug!(error=%e, "authentication failed"); - return Ok(( - Response::build() - .to_req(self.req) - .message("Authentication failed") - .no()?, - flow::Transition::None, - )); - } - Ok(c) => c, - }; - - let user = User::new(u.to_string(), creds).await?; - - tracing::info!(username=%u, "connected"); - Ok(( - Response::build() - .to_req(self.req) - .code(Code::Capability(self.server_capabilities.to_vec())) - .message("Completed") - .ok()?, - flow::Transition::Authenticate(user), - )) - } -} diff --git a/src/imap/command/anystate.rs b/src/imap/command/anystate.rs deleted file mode 100644 index 718ba3f..0000000 --- a/src/imap/command/anystate.rs +++ /dev/null @@ -1,54 +0,0 @@ -use anyhow::Result; -use imap_codec::imap_types::core::Tag; -use imap_codec::imap_types::response::Data; - -use crate::imap::capability::ServerCapability; -use crate::imap::flow; -use crate::imap::response::Response; - -pub(crate) fn capability( - tag: Tag<'static>, - cap: &ServerCapability, -) -> Result<(Response<'static>, flow::Transition)> { - let res = Response::build() - .tag(tag) - .message("Server capabilities") - .data(Data::Capability(cap.to_vec())) - .ok()?; - - Ok((res, flow::Transition::None)) -} - -pub(crate) fn noop_nothing(tag: Tag<'static>) -> Result<(Response<'static>, flow::Transition)> { - Ok(( - Response::build().tag(tag).message("Noop completed.").ok()?, - flow::Transition::None, - )) -} - -pub(crate) fn logout() 
-> Result<(Response<'static>, flow::Transition)> { - Ok((Response::bye()?, flow::Transition::Logout)) -} - -pub(crate) fn not_implemented<'a>( - tag: Tag<'a>, - what: &str, -) -> Result<(Response<'a>, flow::Transition)> { - Ok(( - Response::build() - .tag(tag) - .message(format!("Command not implemented {}", what)) - .bad()?, - flow::Transition::None, - )) -} - -pub(crate) fn wrong_state(tag: Tag<'static>) -> Result<(Response<'static>, flow::Transition)> { - Ok(( - Response::build() - .tag(tag) - .message("Command not authorized in this state") - .bad()?, - flow::Transition::None, - )) -} diff --git a/src/imap/command/authenticated.rs b/src/imap/command/authenticated.rs deleted file mode 100644 index 3d332ec..0000000 --- a/src/imap/command/authenticated.rs +++ /dev/null @@ -1,683 +0,0 @@ -use std::collections::BTreeMap; -use std::sync::Arc; -use thiserror::Error; - -use anyhow::{anyhow, bail, Result}; -use imap_codec::imap_types::command::{ - Command, CommandBody, ListReturnItem, SelectExamineModifier, -}; -use imap_codec::imap_types::core::{Atom, Literal, QuotedChar, Vec1}; -use imap_codec::imap_types::datetime::DateTime; -use imap_codec::imap_types::extensions::enable::CapabilityEnable; -use imap_codec::imap_types::flag::{Flag, FlagNameAttribute}; -use imap_codec::imap_types::mailbox::{ListMailbox, Mailbox as MailboxCodec}; -use imap_codec::imap_types::response::{Code, CodeOther, Data}; -use imap_codec::imap_types::status::{StatusDataItem, StatusDataItemName}; - -use crate::imap::capability::{ClientCapability, ServerCapability}; -use crate::imap::command::{anystate, MailboxName}; -use crate::imap::flow; -use crate::imap::mailbox_view::{MailboxView, UpdateParameters}; -use crate::imap::response::Response; -use crate::imap::Body; - -use crate::mail::uidindex::*; -use crate::user::User; -use crate::mail::IMF; -use crate::mail::namespace::MAILBOX_HIERARCHY_DELIMITER as MBX_HIER_DELIM_RAW; - -pub struct AuthenticatedContext<'a> { - pub req: &'a Command<'static>, - pub server_capabilities: &'a ServerCapability, - pub client_capabilities: &'a mut ClientCapability, - pub user: &'a Arc, -} - -pub async fn dispatch<'a>( - mut ctx: AuthenticatedContext<'a>, -) -> Result<(Response<'static>, flow::Transition)> { - match &ctx.req.body { - // Any state - CommandBody::Noop => anystate::noop_nothing(ctx.req.tag.clone()), - CommandBody::Capability => { - anystate::capability(ctx.req.tag.clone(), ctx.server_capabilities) - } - CommandBody::Logout => anystate::logout(), - - // Specific to this state (11 commands) - CommandBody::Create { mailbox } => ctx.create(mailbox).await, - CommandBody::Delete { mailbox } => ctx.delete(mailbox).await, - CommandBody::Rename { from, to } => ctx.rename(from, to).await, - CommandBody::Lsub { - reference, - mailbox_wildcard, - } => ctx.list(reference, mailbox_wildcard, &[], true).await, - CommandBody::List { - reference, - mailbox_wildcard, - r#return, - } => ctx.list(reference, mailbox_wildcard, r#return, false).await, - CommandBody::Status { - mailbox, - item_names, - } => ctx.status(mailbox, item_names).await, - CommandBody::Subscribe { mailbox } => ctx.subscribe(mailbox).await, - CommandBody::Unsubscribe { mailbox } => ctx.unsubscribe(mailbox).await, - CommandBody::Select { mailbox, modifiers } => ctx.select(mailbox, modifiers).await, - CommandBody::Examine { mailbox, modifiers } => ctx.examine(mailbox, modifiers).await, - CommandBody::Append { - mailbox, - flags, - date, - message, - } => ctx.append(mailbox, flags, date, message).await, - - // rfc5161 ENABLE - 
CommandBody::Enable { capabilities } => ctx.enable(capabilities), - - // Collect other commands - _ => anystate::wrong_state(ctx.req.tag.clone()), - } -} - -// --- PRIVATE --- -impl<'a> AuthenticatedContext<'a> { - async fn create( - self, - mailbox: &MailboxCodec<'a>, - ) -> Result<(Response<'static>, flow::Transition)> { - let name = match mailbox { - MailboxCodec::Inbox => { - return Ok(( - Response::build() - .to_req(self.req) - .message("Cannot create INBOX") - .bad()?, - flow::Transition::None, - )); - } - MailboxCodec::Other(aname) => std::str::from_utf8(aname.as_ref())?, - }; - - match self.user.create_mailbox(&name).await { - Ok(()) => Ok(( - Response::build() - .to_req(self.req) - .message("CREATE complete") - .ok()?, - flow::Transition::None, - )), - Err(e) => Ok(( - Response::build() - .to_req(self.req) - .message(&e.to_string()) - .no()?, - flow::Transition::None, - )), - } - } - - async fn delete( - self, - mailbox: &MailboxCodec<'a>, - ) -> Result<(Response<'static>, flow::Transition)> { - let name: &str = MailboxName(mailbox).try_into()?; - - match self.user.delete_mailbox(&name).await { - Ok(()) => Ok(( - Response::build() - .to_req(self.req) - .message("DELETE complete") - .ok()?, - flow::Transition::None, - )), - Err(e) => Ok(( - Response::build() - .to_req(self.req) - .message(e.to_string()) - .no()?, - flow::Transition::None, - )), - } - } - - async fn rename( - self, - from: &MailboxCodec<'a>, - to: &MailboxCodec<'a>, - ) -> Result<(Response<'static>, flow::Transition)> { - let name: &str = MailboxName(from).try_into()?; - let new_name: &str = MailboxName(to).try_into()?; - - match self.user.rename_mailbox(&name, &new_name).await { - Ok(()) => Ok(( - Response::build() - .to_req(self.req) - .message("RENAME complete") - .ok()?, - flow::Transition::None, - )), - Err(e) => Ok(( - Response::build() - .to_req(self.req) - .message(e.to_string()) - .no()?, - flow::Transition::None, - )), - } - } - - async fn list( - &mut self, - reference: &MailboxCodec<'a>, - mailbox_wildcard: &ListMailbox<'a>, - must_return: &[ListReturnItem], - is_lsub: bool, - ) -> Result<(Response<'static>, flow::Transition)> { - let mbx_hier_delim: QuotedChar = QuotedChar::unvalidated(MBX_HIER_DELIM_RAW); - - let reference: &str = MailboxName(reference).try_into()?; - if !reference.is_empty() { - return Ok(( - Response::build() - .to_req(self.req) - .message("References not supported") - .bad()?, - flow::Transition::None, - )); - } - - let status_item_names = must_return.iter().find_map(|m| match m { - ListReturnItem::Status(v) => Some(v), - _ => None, - }); - - // @FIXME would probably need a rewrite to better use the imap_codec library - let wildcard = match mailbox_wildcard { - ListMailbox::Token(v) => std::str::from_utf8(v.as_ref())?, - ListMailbox::String(v) => std::str::from_utf8(v.as_ref())?, - }; - if wildcard.is_empty() { - if is_lsub { - return Ok(( - Response::build() - .to_req(self.req) - .message("LSUB complete") - .data(Data::Lsub { - items: vec![], - delimiter: Some(mbx_hier_delim), - mailbox: "".try_into().unwrap(), - }) - .ok()?, - flow::Transition::None, - )); - } else { - return Ok(( - Response::build() - .to_req(self.req) - .message("LIST complete") - .data(Data::List { - items: vec![], - delimiter: Some(mbx_hier_delim), - mailbox: "".try_into().unwrap(), - }) - .ok()?, - flow::Transition::None, - )); - } - } - - let mailboxes = self.user.list_mailboxes().await?; - let mut vmailboxes = BTreeMap::new(); - for mb in mailboxes.iter() { - for (i, _) in 
mb.match_indices(MBX_HIER_DELIM_RAW) { - if i > 0 { - let smb = &mb[..i]; - vmailboxes.entry(smb).or_insert(false); - } - } - vmailboxes.insert(mb, true); - } - - let mut ret = vec![]; - for (mb, is_real) in vmailboxes.iter() { - if matches_wildcard(&wildcard, mb) { - let mailbox: MailboxCodec = mb - .to_string() - .try_into() - .map_err(|_| anyhow!("invalid mailbox name"))?; - let mut items = vec![FlagNameAttribute::from(Atom::unvalidated("Subscribed"))]; - - // Decoration - if !*is_real { - items.push(FlagNameAttribute::Noselect); - } else { - match *mb { - "Drafts" => items.push(Atom::unvalidated("Drafts").into()), - "Archive" => items.push(Atom::unvalidated("Archive").into()), - "Sent" => items.push(Atom::unvalidated("Sent").into()), - "Trash" => items.push(Atom::unvalidated("Trash").into()), - _ => (), - }; - } - - // Result type - if is_lsub { - ret.push(Data::Lsub { - items, - delimiter: Some(mbx_hier_delim), - mailbox: mailbox.clone(), - }); - } else { - ret.push(Data::List { - items, - delimiter: Some(mbx_hier_delim), - mailbox: mailbox.clone(), - }); - } - - // Also collect status - if let Some(sin) = status_item_names { - let ret_attrs = match self.status_items(mb, sin).await { - Ok(a) => a, - Err(e) => { - tracing::error!(err=?e, mailbox=%mb, "Unable to fetch status for mailbox"); - continue; - } - }; - - let data = Data::Status { - mailbox, - items: ret_attrs.into(), - }; - - ret.push(data); - } - } - } - - let msg = if is_lsub { - "LSUB completed" - } else { - "LIST completed" - }; - Ok(( - Response::build() - .to_req(self.req) - .message(msg) - .many_data(ret) - .ok()?, - flow::Transition::None, - )) - } - - async fn status( - &mut self, - mailbox: &MailboxCodec<'static>, - attributes: &[StatusDataItemName], - ) -> Result<(Response<'static>, flow::Transition)> { - let name: &str = MailboxName(mailbox).try_into()?; - - let ret_attrs = match self.status_items(name, attributes).await { - Ok(v) => v, - Err(e) => match e.downcast_ref::() { - Some(CommandError::MailboxNotFound) => { - return Ok(( - Response::build() - .to_req(self.req) - .message("Mailbox does not exist") - .no()?, - flow::Transition::None, - )) - } - _ => return Err(e.into()), - }, - }; - - let data = Data::Status { - mailbox: mailbox.clone(), - items: ret_attrs.into(), - }; - - Ok(( - Response::build() - .to_req(self.req) - .message("STATUS completed") - .data(data) - .ok()?, - flow::Transition::None, - )) - } - - async fn status_items( - &mut self, - name: &str, - attributes: &[StatusDataItemName], - ) -> Result> { - let mb_opt = self.user.open_mailbox(name).await?; - let mb = match mb_opt { - Some(mb) => mb, - None => return Err(CommandError::MailboxNotFound.into()), - }; - - let view = MailboxView::new(mb, self.client_capabilities.condstore.is_enabled()).await; - - let mut ret_attrs = vec![]; - for attr in attributes.iter() { - ret_attrs.push(match attr { - StatusDataItemName::Messages => StatusDataItem::Messages(view.exists()?), - StatusDataItemName::Unseen => StatusDataItem::Unseen(view.unseen_count() as u32), - StatusDataItemName::Recent => StatusDataItem::Recent(view.recent()?), - StatusDataItemName::UidNext => StatusDataItem::UidNext(view.uidnext()), - StatusDataItemName::UidValidity => { - StatusDataItem::UidValidity(view.uidvalidity()) - } - StatusDataItemName::Deleted => { - bail!("quota not implemented, can't return deleted elements waiting for EXPUNGE"); - }, - StatusDataItemName::DeletedStorage => { - bail!("quota not implemented, can't return freed storage after EXPUNGE will be run"); - }, - 
StatusDataItemName::HighestModSeq => { - self.client_capabilities.enable_condstore(); - StatusDataItem::HighestModSeq(view.highestmodseq().get()) - }, - }); - } - Ok(ret_attrs) - } - - async fn subscribe( - self, - mailbox: &MailboxCodec<'a>, - ) -> Result<(Response<'static>, flow::Transition)> { - let name: &str = MailboxName(mailbox).try_into()?; - - if self.user.has_mailbox(&name).await? { - Ok(( - Response::build() - .to_req(self.req) - .message("SUBSCRIBE complete") - .ok()?, - flow::Transition::None, - )) - } else { - Ok(( - Response::build() - .to_req(self.req) - .message(format!("Mailbox {} does not exist", name)) - .bad()?, - flow::Transition::None, - )) - } - } - - async fn unsubscribe( - self, - mailbox: &MailboxCodec<'a>, - ) -> Result<(Response<'static>, flow::Transition)> { - let name: &str = MailboxName(mailbox).try_into()?; - - if self.user.has_mailbox(&name).await? { - Ok(( - Response::build() - .to_req(self.req) - .message(format!( - "Cannot unsubscribe from mailbox {}: not supported by Aerogramme", - name - )) - .bad()?, - flow::Transition::None, - )) - } else { - Ok(( - Response::build() - .to_req(self.req) - .message(format!("Mailbox {} does not exist", name)) - .no()?, - flow::Transition::None, - )) - } - } - - /* - * TRACE BEGIN --- - - - Example: C: A142 SELECT INBOX - S: * 172 EXISTS - S: * 1 RECENT - S: * OK [UNSEEN 12] Message 12 is first unseen - S: * OK [UIDVALIDITY 3857529045] UIDs valid - S: * OK [UIDNEXT 4392] Predicted next UID - S: * FLAGS (\Answered \Flagged \Deleted \Seen \Draft) - S: * OK [PERMANENTFLAGS (\Deleted \Seen \*)] Limited - S: A142 OK [READ-WRITE] SELECT completed - - --- a mailbox with no unseen message -> no unseen entry - NOTES: - RFC3501 (imap4rev1) says if there is no OK [UNSEEN] response, client must make no assumption, - it is therefore correct to not return it even if there are unseen messages - RFC9051 (imap4rev2) says that OK [UNSEEN] responses are deprecated after SELECT and EXAMINE - For Aerogramme, we just don't send the OK [UNSEEN], it's correct to do in both specifications. - - - 20 select "INBOX.achats" - * FLAGS (\Answered \Flagged \Deleted \Seen \Draft $Forwarded JUNK $label1) - * OK [PERMANENTFLAGS (\Answered \Flagged \Deleted \Seen \Draft $Forwarded JUNK $label1 \*)] Flags permitted. - * 88 EXISTS - * 0 RECENT - * OK [UIDVALIDITY 1347986788] UIDs valid - * OK [UIDNEXT 91] Predicted next UID - * OK [HIGHESTMODSEQ 72] Highest - 20 OK [READ-WRITE] Select completed (0.001 + 0.000 secs). 
- - * TRACE END --- - */ - async fn select( - self, - mailbox: &MailboxCodec<'a>, - modifiers: &[SelectExamineModifier], - ) -> Result<(Response<'static>, flow::Transition)> { - self.client_capabilities.select_enable(modifiers); - - let name: &str = MailboxName(mailbox).try_into()?; - - let mb_opt = self.user.open_mailbox(&name).await?; - let mb = match mb_opt { - Some(mb) => mb, - None => { - return Ok(( - Response::build() - .to_req(self.req) - .message("Mailbox does not exist") - .no()?, - flow::Transition::None, - )) - } - }; - tracing::info!(username=%self.user.username, mailbox=%name, "mailbox.selected"); - - let mb = MailboxView::new(mb, self.client_capabilities.condstore.is_enabled()).await; - let data = mb.summary()?; - - Ok(( - Response::build() - .message("Select completed") - .to_req(self.req) - .code(Code::ReadWrite) - .set_body(data) - .ok()?, - flow::Transition::Select(mb, flow::MailboxPerm::ReadWrite), - )) - } - - async fn examine( - self, - mailbox: &MailboxCodec<'a>, - modifiers: &[SelectExamineModifier], - ) -> Result<(Response<'static>, flow::Transition)> { - self.client_capabilities.select_enable(modifiers); - - let name: &str = MailboxName(mailbox).try_into()?; - - let mb_opt = self.user.open_mailbox(&name).await?; - let mb = match mb_opt { - Some(mb) => mb, - None => { - return Ok(( - Response::build() - .to_req(self.req) - .message("Mailbox does not exist") - .no()?, - flow::Transition::None, - )) - } - }; - tracing::info!(username=%self.user.username, mailbox=%name, "mailbox.examined"); - - let mb = MailboxView::new(mb, self.client_capabilities.condstore.is_enabled()).await; - let data = mb.summary()?; - - Ok(( - Response::build() - .to_req(self.req) - .message("Examine completed") - .code(Code::ReadOnly) - .set_body(data) - .ok()?, - flow::Transition::Select(mb, flow::MailboxPerm::ReadOnly), - )) - } - - //@FIXME we should write a specific version for the "selected" state - //that returns some unsollicited responses - async fn append( - self, - mailbox: &MailboxCodec<'a>, - flags: &[Flag<'a>], - date: &Option, - message: &Literal<'a>, - ) -> Result<(Response<'static>, flow::Transition)> { - let append_tag = self.req.tag.clone(); - match self.append_internal(mailbox, flags, date, message).await { - Ok((_mb_view, uidvalidity, uid, _modseq)) => Ok(( - Response::build() - .tag(append_tag) - .message("APPEND completed") - .code(Code::Other(CodeOther::unvalidated( - format!("APPENDUID {} {}", uidvalidity, uid).into_bytes(), - ))) - .ok()?, - flow::Transition::None, - )), - Err(e) => Ok(( - Response::build() - .tag(append_tag) - .message(e.to_string()) - .no()?, - flow::Transition::None, - )), - } - } - - fn enable( - self, - cap_enable: &Vec1>, - ) -> Result<(Response<'static>, flow::Transition)> { - let mut response_builder = Response::build().to_req(self.req); - let capabilities = self.client_capabilities.try_enable(cap_enable.as_ref()); - if capabilities.len() > 0 { - response_builder = response_builder.data(Data::Enabled { capabilities }); - } - Ok(( - response_builder.message("ENABLE completed").ok()?, - flow::Transition::None, - )) - } - - //@FIXME should be refactored and integrated to the mailbox view - pub(crate) async fn append_internal( - self, - mailbox: &MailboxCodec<'a>, - flags: &[Flag<'a>], - date: &Option, - message: &Literal<'a>, - ) -> Result<(MailboxView, ImapUidvalidity, ImapUid, ModSeq)> { - let name: &str = MailboxName(mailbox).try_into()?; - - let mb_opt = self.user.open_mailbox(&name).await?; - let mb = match mb_opt { - Some(mb) => mb, - None 
=> bail!("Mailbox does not exist"), - }; - let mut view = MailboxView::new(mb, self.client_capabilities.condstore.is_enabled()).await; - - if date.is_some() { - tracing::warn!("Cannot set date when appending message"); - } - - let msg = - IMF::try_from(message.data()).map_err(|_| anyhow!("Could not parse e-mail message"))?; - let flags = flags.iter().map(|x| x.to_string()).collect::>(); - // TODO: filter allowed flags? ping @Quentin - - let (uidvalidity, uid, modseq) = - view.internal.mailbox.append(msg, None, &flags[..]).await?; - //let unsollicited = view.update(UpdateParameters::default()).await?; - - Ok((view, uidvalidity, uid, modseq)) - } -} - -fn matches_wildcard(wildcard: &str, name: &str) -> bool { - let wildcard = wildcard.chars().collect::>(); - let name = name.chars().collect::>(); - - let mut matches = vec![vec![false; wildcard.len() + 1]; name.len() + 1]; - - for i in 0..=name.len() { - for j in 0..=wildcard.len() { - matches[i][j] = (i == 0 && j == 0) - || (j > 0 - && matches[i][j - 1] - && (wildcard[j - 1] == '%' || wildcard[j - 1] == '*')) - || (i > 0 - && j > 0 - && matches[i - 1][j - 1] - && wildcard[j - 1] == name[i - 1] - && wildcard[j - 1] != '%' - && wildcard[j - 1] != '*') - || (i > 0 - && j > 0 - && matches[i - 1][j] - && (wildcard[j - 1] == '*' - || (wildcard[j - 1] == '%' && name[i - 1] != MBX_HIER_DELIM_RAW))); - } - } - - matches[name.len()][wildcard.len()] -} - -#[derive(Error, Debug)] -pub enum CommandError { - #[error("Mailbox not found")] - MailboxNotFound, -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_wildcard_matches() { - assert!(matches_wildcard("INBOX", "INBOX")); - assert!(matches_wildcard("*", "INBOX")); - assert!(matches_wildcard("%", "INBOX")); - assert!(!matches_wildcard("%", "Test.Azerty")); - assert!(!matches_wildcard("INBOX.*", "INBOX")); - assert!(matches_wildcard("Sent.*", "Sent.A")); - assert!(matches_wildcard("Sent.*", "Sent.A.B")); - assert!(!matches_wildcard("Sent.%", "Sent.A.B")); - } -} diff --git a/src/imap/command/mod.rs b/src/imap/command/mod.rs deleted file mode 100644 index f201eb6..0000000 --- a/src/imap/command/mod.rs +++ /dev/null @@ -1,20 +0,0 @@ -pub mod anonymous; -pub mod anystate; -pub mod authenticated; -pub mod selected; - -use crate::mail::namespace::INBOX; -use imap_codec::imap_types::mailbox::Mailbox as MailboxCodec; - -/// Convert an IMAP mailbox name/identifier representation -/// to an utf-8 string that is used internally in Aerogramme -struct MailboxName<'a>(&'a MailboxCodec<'a>); -impl<'a> TryInto<&'a str> for MailboxName<'a> { - type Error = std::str::Utf8Error; - fn try_into(self) -> Result<&'a str, Self::Error> { - match self.0 { - MailboxCodec::Inbox => Ok(INBOX), - MailboxCodec::Other(aname) => Ok(std::str::from_utf8(aname.as_ref())?), - } - } -} diff --git a/src/imap/command/selected.rs b/src/imap/command/selected.rs deleted file mode 100644 index eedfbd6..0000000 --- a/src/imap/command/selected.rs +++ /dev/null @@ -1,424 +0,0 @@ -use std::num::NonZeroU64; -use std::sync::Arc; - -use anyhow::Result; -use imap_codec::imap_types::command::{Command, CommandBody, FetchModifier, StoreModifier}; -use imap_codec::imap_types::core::{Charset, Vec1}; -use imap_codec::imap_types::fetch::MacroOrMessageDataItemNames; -use imap_codec::imap_types::flag::{Flag, StoreResponse, StoreType}; -use imap_codec::imap_types::mailbox::Mailbox as MailboxCodec; -use imap_codec::imap_types::response::{Code, CodeOther}; -use imap_codec::imap_types::search::SearchKey; -use 
imap_codec::imap_types::sequence::SequenceSet; - -use crate::imap::attributes::AttributesProxy; -use crate::imap::capability::{ClientCapability, ServerCapability}; -use crate::imap::command::{anystate, authenticated, MailboxName}; -use crate::imap::flow; -use crate::imap::mailbox_view::{MailboxView, UpdateParameters}; -use crate::imap::response::Response; -use crate::user::User; - -pub struct SelectedContext<'a> { - pub req: &'a Command<'static>, - pub user: &'a Arc, - pub mailbox: &'a mut MailboxView, - pub server_capabilities: &'a ServerCapability, - pub client_capabilities: &'a mut ClientCapability, - pub perm: &'a flow::MailboxPerm, -} - -pub async fn dispatch<'a>( - ctx: SelectedContext<'a>, -) -> Result<(Response<'static>, flow::Transition)> { - match &ctx.req.body { - // Any State - // noop is specific to this state - CommandBody::Capability => { - anystate::capability(ctx.req.tag.clone(), ctx.server_capabilities) - } - CommandBody::Logout => anystate::logout(), - - // Specific to this state (7 commands + NOOP) - CommandBody::Close => match ctx.perm { - flow::MailboxPerm::ReadWrite => ctx.close().await, - flow::MailboxPerm::ReadOnly => ctx.examine_close().await, - }, - CommandBody::Noop | CommandBody::Check => ctx.noop().await, - CommandBody::Fetch { - sequence_set, - macro_or_item_names, - modifiers, - uid, - } => { - ctx.fetch(sequence_set, macro_or_item_names, modifiers, uid) - .await - } - //@FIXME SearchKey::And is a legacy hack, should be refactored - CommandBody::Search { - charset, - criteria, - uid, - } => { - ctx.search(charset, &SearchKey::And(criteria.clone()), uid) - .await - } - CommandBody::Expunge { - // UIDPLUS (rfc4315) - uid_sequence_set, - } => ctx.expunge(uid_sequence_set).await, - CommandBody::Store { - sequence_set, - kind, - response, - flags, - modifiers, - uid, - } => { - ctx.store(sequence_set, kind, response, flags, modifiers, uid) - .await - } - CommandBody::Copy { - sequence_set, - mailbox, - uid, - } => ctx.copy(sequence_set, mailbox, uid).await, - CommandBody::Move { - sequence_set, - mailbox, - uid, - } => ctx.r#move(sequence_set, mailbox, uid).await, - - // UNSELECT extension (rfc3691) - CommandBody::Unselect => ctx.unselect().await, - - // In selected mode, we fallback to authenticated when needed - _ => { - authenticated::dispatch(authenticated::AuthenticatedContext { - req: ctx.req, - server_capabilities: ctx.server_capabilities, - client_capabilities: ctx.client_capabilities, - user: ctx.user, - }) - .await - } - } -} - -// --- PRIVATE --- - -impl<'a> SelectedContext<'a> { - async fn close(self) -> Result<(Response<'static>, flow::Transition)> { - // We expunge messages, - // but we don't send the untagged EXPUNGE responses - let tag = self.req.tag.clone(); - self.expunge(&None).await?; - Ok(( - Response::build().tag(tag).message("CLOSE completed").ok()?, - flow::Transition::Unselect, - )) - } - - /// CLOSE in examined state is not the same as in selected state - /// (in selected state it also does an EXPUNGE, here it doesn't) - async fn examine_close(self) -> Result<(Response<'static>, flow::Transition)> { - Ok(( - Response::build() - .to_req(self.req) - .message("CLOSE completed") - .ok()?, - flow::Transition::Unselect, - )) - } - - async fn unselect(self) -> Result<(Response<'static>, flow::Transition)> { - Ok(( - Response::build() - .to_req(self.req) - .message("UNSELECT completed") - .ok()?, - flow::Transition::Unselect, - )) - } - - pub async fn fetch( - self, - sequence_set: &SequenceSet, - attributes: &'a 
MacroOrMessageDataItemNames<'static>, - modifiers: &[FetchModifier], - uid: &bool, - ) -> Result<(Response<'static>, flow::Transition)> { - let ap = AttributesProxy::new(attributes, modifiers, *uid); - let mut changed_since: Option = None; - modifiers.iter().for_each(|m| match m { - FetchModifier::ChangedSince(val) => { - changed_since = Some(*val); - } - }); - - match self - .mailbox - .fetch(sequence_set, &ap, changed_since, uid) - .await - { - Ok(resp) => { - // Capabilities enabling logic only on successful command - // (according to my understanding of the spec) - self.client_capabilities.attributes_enable(&ap); - self.client_capabilities.fetch_modifiers_enable(modifiers); - - // Response to the client - Ok(( - Response::build() - .to_req(self.req) - .message("FETCH completed") - .set_body(resp) - .ok()?, - flow::Transition::None, - )) - } - Err(e) => Ok(( - Response::build() - .to_req(self.req) - .message(e.to_string()) - .no()?, - flow::Transition::None, - )), - } - } - - pub async fn search( - self, - charset: &Option>, - criteria: &SearchKey<'a>, - uid: &bool, - ) -> Result<(Response<'static>, flow::Transition)> { - let (found, enable_condstore) = self.mailbox.search(charset, criteria, *uid).await?; - if enable_condstore { - self.client_capabilities.enable_condstore(); - } - Ok(( - Response::build() - .to_req(self.req) - .set_body(found) - .message("SEARCH completed") - .ok()?, - flow::Transition::None, - )) - } - - pub async fn noop(self) -> Result<(Response<'static>, flow::Transition)> { - self.mailbox.internal.mailbox.force_sync().await?; - - let updates = self.mailbox.update(UpdateParameters::default()).await?; - Ok(( - Response::build() - .to_req(self.req) - .message("NOOP completed.") - .set_body(updates) - .ok()?, - flow::Transition::None, - )) - } - - async fn expunge( - self, - uid_sequence_set: &Option, - ) -> Result<(Response<'static>, flow::Transition)> { - if let Some(failed) = self.fail_read_only() { - return Ok((failed, flow::Transition::None)); - } - - let tag = self.req.tag.clone(); - let data = self.mailbox.expunge(uid_sequence_set).await?; - - Ok(( - Response::build() - .tag(tag) - .message("EXPUNGE completed") - .set_body(data) - .ok()?, - flow::Transition::None, - )) - } - - async fn store( - self, - sequence_set: &SequenceSet, - kind: &StoreType, - response: &StoreResponse, - flags: &[Flag<'a>], - modifiers: &[StoreModifier], - uid: &bool, - ) -> Result<(Response<'static>, flow::Transition)> { - if let Some(failed) = self.fail_read_only() { - return Ok((failed, flow::Transition::None)); - } - - let mut unchanged_since: Option = None; - modifiers.iter().for_each(|m| match m { - StoreModifier::UnchangedSince(val) => { - unchanged_since = Some(*val); - } - }); - - let (data, modified) = self - .mailbox - .store(sequence_set, kind, response, flags, unchanged_since, uid) - .await?; - - let mut ok_resp = Response::build() - .to_req(self.req) - .message("STORE completed") - .set_body(data); - - match modified[..] { - [] => (), - [_head, ..] 
=> { - let modified_str = format!( - "MODIFIED {}", - modified - .into_iter() - .map(|x| x.to_string()) - .collect::>() - .join(",") - ); - ok_resp = ok_resp.code(Code::Other(CodeOther::unvalidated( - modified_str.into_bytes(), - ))); - } - }; - - self.client_capabilities.store_modifiers_enable(modifiers); - - Ok((ok_resp.ok()?, flow::Transition::None)) - } - - async fn copy( - self, - sequence_set: &SequenceSet, - mailbox: &MailboxCodec<'a>, - uid: &bool, - ) -> Result<(Response<'static>, flow::Transition)> { - //@FIXME Could copy be valid in EXAMINE mode? - if let Some(failed) = self.fail_read_only() { - return Ok((failed, flow::Transition::None)); - } - - let name: &str = MailboxName(mailbox).try_into()?; - - let mb_opt = self.user.open_mailbox(&name).await?; - let mb = match mb_opt { - Some(mb) => mb, - None => { - return Ok(( - Response::build() - .to_req(self.req) - .message("Destination mailbox does not exist") - .code(Code::TryCreate) - .no()?, - flow::Transition::None, - )) - } - }; - - let (uidval, uid_map) = self.mailbox.copy(sequence_set, mb, uid).await?; - - let copyuid_str = format!( - "{} {} {}", - uidval, - uid_map - .iter() - .map(|(sid, _)| format!("{}", sid)) - .collect::>() - .join(","), - uid_map - .iter() - .map(|(_, tuid)| format!("{}", tuid)) - .collect::>() - .join(",") - ); - - Ok(( - Response::build() - .to_req(self.req) - .message("COPY completed") - .code(Code::Other(CodeOther::unvalidated( - format!("COPYUID {}", copyuid_str).into_bytes(), - ))) - .ok()?, - flow::Transition::None, - )) - } - - async fn r#move( - self, - sequence_set: &SequenceSet, - mailbox: &MailboxCodec<'a>, - uid: &bool, - ) -> Result<(Response<'static>, flow::Transition)> { - if let Some(failed) = self.fail_read_only() { - return Ok((failed, flow::Transition::None)); - } - - let name: &str = MailboxName(mailbox).try_into()?; - - let mb_opt = self.user.open_mailbox(&name).await?; - let mb = match mb_opt { - Some(mb) => mb, - None => { - return Ok(( - Response::build() - .to_req(self.req) - .message("Destination mailbox does not exist") - .code(Code::TryCreate) - .no()?, - flow::Transition::None, - )) - } - }; - - let (uidval, uid_map, data) = self.mailbox.r#move(sequence_set, mb, uid).await?; - - // compute code - let copyuid_str = format!( - "{} {} {}", - uidval, - uid_map - .iter() - .map(|(sid, _)| format!("{}", sid)) - .collect::>() - .join(","), - uid_map - .iter() - .map(|(_, tuid)| format!("{}", tuid)) - .collect::>() - .join(",") - ); - - Ok(( - Response::build() - .to_req(self.req) - .message("COPY completed") - .code(Code::Other(CodeOther::unvalidated( - format!("COPYUID {}", copyuid_str).into_bytes(), - ))) - .set_body(data) - .ok()?, - flow::Transition::None, - )) - } - - fn fail_read_only(&self) -> Option> { - match self.perm { - flow::MailboxPerm::ReadWrite => None, - flow::MailboxPerm::ReadOnly => Some( - Response::build() - .to_req(self.req) - .message("Write command are forbidden while exmining mailbox") - .no() - .unwrap(), - ), - } - } -} diff --git a/src/imap/flags.rs b/src/imap/flags.rs deleted file mode 100644 index 0f6ec64..0000000 --- a/src/imap/flags.rs +++ /dev/null @@ -1,30 +0,0 @@ -use imap_codec::imap_types::core::Atom; -use imap_codec::imap_types::flag::{Flag, FlagFetch}; - -pub fn from_str(f: &str) -> Option> { - match f.chars().next() { - Some('\\') => match f { - "\\Seen" => Some(FlagFetch::Flag(Flag::Seen)), - "\\Answered" => Some(FlagFetch::Flag(Flag::Answered)), - "\\Flagged" => Some(FlagFetch::Flag(Flag::Flagged)), - "\\Deleted" => 
Some(FlagFetch::Flag(Flag::Deleted)), - "\\Draft" => Some(FlagFetch::Flag(Flag::Draft)), - "\\Recent" => Some(FlagFetch::Recent), - _ => match Atom::try_from(f.strip_prefix('\\').unwrap().to_string()) { - Err(_) => { - tracing::error!(flag=%f, "Unable to encode flag as IMAP atom"); - None - } - Ok(a) => Some(FlagFetch::Flag(Flag::system(a))), - }, - }, - Some(_) => match Atom::try_from(f.to_string()) { - Err(_) => { - tracing::error!(flag=%f, "Unable to encode flag as IMAP atom"); - None - } - Ok(a) => Some(FlagFetch::Flag(Flag::keyword(a))), - }, - None => None, - } -} diff --git a/src/imap/flow.rs b/src/imap/flow.rs deleted file mode 100644 index 86eb12e..0000000 --- a/src/imap/flow.rs +++ /dev/null @@ -1,114 +0,0 @@ -use std::error::Error as StdError; -use std::fmt; -use std::sync::Arc; - -use imap_codec::imap_types::core::Tag; -use tokio::sync::Notify; - -use crate::imap::mailbox_view::MailboxView; -use crate::user::User; - -#[derive(Debug)] -pub enum Error { - ForbiddenTransition, -} -impl fmt::Display for Error { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "Forbidden Transition") - } -} -impl StdError for Error {} - -pub enum State { - NotAuthenticated, - Authenticated(Arc), - Selected(Arc, MailboxView, MailboxPerm), - Idle( - Arc, - MailboxView, - MailboxPerm, - Tag<'static>, - Arc, - ), - Logout, -} -impl State { - pub fn notify(&self) -> Option> { - match self { - Self::Idle(_, _, _, _, anotif) => Some(anotif.clone()), - _ => None, - } - } -} -impl fmt::Display for State { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - use State::*; - match self { - NotAuthenticated => write!(f, "NotAuthenticated"), - Authenticated(..) => write!(f, "Authenticated"), - Selected(..) => write!(f, "Selected"), - Idle(..) => write!(f, "Idle"), - Logout => write!(f, "Logout"), - } - } -} - -#[derive(Clone)] -pub enum MailboxPerm { - ReadOnly, - ReadWrite, -} - -pub enum Transition { - None, - Authenticate(Arc), - Select(MailboxView, MailboxPerm), - Idle(Tag<'static>, Notify), - UnIdle, - Unselect, - Logout, -} -impl fmt::Display for Transition { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - use Transition::*; - match self { - None => write!(f, "None"), - Authenticate(..) => write!(f, "Authenticated"), - Select(..) => write!(f, "Selected"), - Idle(..) => write!(f, "Idle"), - UnIdle => write!(f, "UnIdle"), - Unselect => write!(f, "Unselect"), - Logout => write!(f, "Logout"), - } - } -} - -// See RFC3501 section 3. 
-// https://datatracker.ietf.org/doc/html/rfc3501#page-13 -impl State { - pub fn apply(&mut self, tr: Transition) -> Result<(), Error> { - tracing::debug!(state=%self, transition=%tr, "try change state"); - - let new_state = match (std::mem::replace(self, State::Logout), tr) { - (s, Transition::None) => s, - (State::NotAuthenticated, Transition::Authenticate(u)) => State::Authenticated(u), - (State::Authenticated(u) | State::Selected(u, _, _), Transition::Select(m, p)) => { - State::Selected(u, m, p) - } - (State::Selected(u, _, _), Transition::Unselect) => State::Authenticated(u.clone()), - (State::Selected(u, m, p), Transition::Idle(t, s)) => { - State::Idle(u, m, p, t, Arc::new(s)) - } - (State::Idle(u, m, p, _, _), Transition::UnIdle) => State::Selected(u, m, p), - (_, Transition::Logout) => State::Logout, - (s, t) => { - tracing::error!(state=%s, transition=%t, "forbidden transition"); - return Err(Error::ForbiddenTransition); - } - }; - *self = new_state; - tracing::debug!(state=%self, "transition succeeded"); - - Ok(()) - } -} diff --git a/src/imap/imf_view.rs b/src/imap/imf_view.rs deleted file mode 100644 index a4ca2e8..0000000 --- a/src/imap/imf_view.rs +++ /dev/null @@ -1,109 +0,0 @@ -use anyhow::{anyhow, Result}; -use chrono::naive::NaiveDate; - -use imap_codec::imap_types::core::{IString, NString}; -use imap_codec::imap_types::envelope::{Address, Envelope}; - -use eml_codec::imf; - -pub struct ImfView<'a>(pub &'a imf::Imf<'a>); - -impl<'a> ImfView<'a> { - pub fn naive_date(&self) -> Result { - Ok(self.0.date.ok_or(anyhow!("date is not set"))?.date_naive()) - } - - /// Envelope rules are defined in RFC 3501, section 7.4.2 - /// https://datatracker.ietf.org/doc/html/rfc3501#section-7.4.2 - /// - /// Some important notes: - /// - /// If the Sender or Reply-To lines are absent in the [RFC-2822] - /// header, or are present but empty, the server sets the - /// corresponding member of the envelope to be the same value as - /// the from member (the client is not expected to know to do - /// this). Note: [RFC-2822] requires that all messages have a valid - /// From header. Therefore, the from, sender, and reply-to - /// members in the envelope can not be NIL. - /// - /// If the Date, Subject, In-Reply-To, and Message-ID header lines - /// are absent in the [RFC-2822] header, the corresponding member - /// of the envelope is NIL; if these header lines are present but - /// empty the corresponding member of the envelope is the empty - /// string. - - //@FIXME return an error if the envelope is invalid instead of panicking - //@FIXME some fields must be defaulted if there are not set. 
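To make the defaulting rules described above concrete, here is a small sketch with deliberately simplified types: plain `String` addresses and a hypothetical `SimpleEnvelope` struct stand in for the imap_codec `Envelope`. An absent header maps to `None` (NIL in the ENVELOPE), a present-but-empty header stays an empty value, and Sender/Reply-To fall back to From so they can never be NIL when From is valid.

/// Sketch only: `None` models an absent header, `Some("")` a present-but-empty one.
#[derive(Debug)]
struct SimpleEnvelope {
    subject: Option<String>,
    from: Vec<String>,
    sender: Vec<String>,
    reply_to: Vec<String>,
}

fn build_envelope(
    subject: Option<String>,
    from: Vec<String>,
    sender: Option<String>,
    reply_to: Vec<String>,
) -> SimpleEnvelope {
    SimpleEnvelope {
        // Absent => None (NIL); present but empty => Some("").
        subject,
        // Sender and Reply-To fall back to From when missing or empty.
        sender: sender.map(|s| vec![s]).unwrap_or_else(|| from.clone()),
        reply_to: if reply_to.is_empty() { from.clone() } else { reply_to },
        from,
    }
}

fn main() {
    let env = build_envelope(
        None,                                  // no Subject header => NIL
        vec!["alice@example.com".to_string()], // From
        None,                                  // no Sender header
        vec![],                                // empty Reply-To
    );
    assert_eq!(env.sender, env.from);
    assert_eq!(env.reply_to, env.from);
    println!("{:?}", env);
}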
- pub fn message_envelope(&self) -> Envelope<'static> { - let msg = self.0; - let from = msg.from.iter().map(convert_mbx).collect::>(); - - Envelope { - date: NString( - msg.date - .as_ref() - .map(|d| IString::try_from(d.to_rfc3339()).unwrap()), - ), - subject: NString( - msg.subject - .as_ref() - .map(|d| IString::try_from(d.to_string()).unwrap()), - ), - sender: msg - .sender - .as_ref() - .map(|v| vec![convert_mbx(v)]) - .unwrap_or(from.clone()), - reply_to: if msg.reply_to.is_empty() { - from.clone() - } else { - convert_addresses(&msg.reply_to) - }, - from, - to: convert_addresses(&msg.to), - cc: convert_addresses(&msg.cc), - bcc: convert_addresses(&msg.bcc), - in_reply_to: NString( - msg.in_reply_to - .iter() - .next() - .map(|d| IString::try_from(d.to_string()).unwrap()), - ), - message_id: NString( - msg.msg_id - .as_ref() - .map(|d| IString::try_from(d.to_string()).unwrap()), - ), - } - } -} - -pub fn convert_addresses(addrlist: &Vec) -> Vec> { - let mut acc = vec![]; - for item in addrlist { - match item { - imf::address::AddressRef::Single(a) => acc.push(convert_mbx(a)), - imf::address::AddressRef::Many(l) => acc.extend(l.participants.iter().map(convert_mbx)), - } - } - return acc; -} - -pub fn convert_mbx(addr: &imf::mailbox::MailboxRef) -> Address<'static> { - Address { - name: NString( - addr.name - .as_ref() - .map(|x| IString::try_from(x.to_string()).unwrap()), - ), - // SMTP at-domain-list (source route) seems obsolete since at least 1991 - // https://www.mhonarc.org/archive/html/ietf-822/1991-06/msg00060.html - adl: NString(None), - mailbox: NString(Some( - IString::try_from(addr.addrspec.local_part.to_string()).unwrap(), - )), - host: NString(Some( - IString::try_from(addr.addrspec.domain.to_string()).unwrap(), - )), - } -} diff --git a/src/imap/index.rs b/src/imap/index.rs deleted file mode 100644 index 9b794b8..0000000 --- a/src/imap/index.rs +++ /dev/null @@ -1,211 +0,0 @@ -use std::num::{NonZeroU32, NonZeroU64}; - -use anyhow::{anyhow, Result}; -use imap_codec::imap_types::sequence::{SeqOrUid, Sequence, SequenceSet}; - -use crate::mail::uidindex::{ImapUid, ModSeq, UidIndex}; -use crate::mail::unique_ident::UniqueIdent; - -pub struct Index<'a> { - pub imap_index: Vec>, - pub internal: &'a UidIndex, -} -impl<'a> Index<'a> { - pub fn new(internal: &'a UidIndex) -> Result { - let imap_index = internal - .idx_by_uid - .iter() - .enumerate() - .map(|(i_enum, (&uid, &uuid))| { - let (_, modseq, flags) = internal - .table - .get(&uuid) - .ok_or(anyhow!("mail is missing from index"))?; - let i_int: u32 = (i_enum + 1).try_into()?; - let i: NonZeroU32 = i_int.try_into()?; - - Ok(MailIndex { - i, - uid, - uuid, - modseq: *modseq, - flags, - }) - }) - .collect::>>()?; - - Ok(Self { - imap_index, - internal, - }) - } - - pub fn last(&'a self) -> Option<&'a MailIndex<'a>> { - self.imap_index.last() - } - - /// Fetch mail descriptors based on a sequence of UID - /// - /// Complexity analysis: - /// - Sort is O(n * log n) where n is the number of uid generated by the sequence - /// - Finding the starting point in the index O(log m) where m is the size of the mailbox - /// While n =< m, it's not clear if the difference is big or not. - /// - /// For now, the algorithm tries to be fast for small values of n, - /// as it is what is expected by clients. - /// - /// So we assume for our implementation that : n << m. - /// It's not true for full mailbox searches for example... 
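The jump-then-slide strategy described above can be shown in isolation. This sketch uses plain `u32` UIDs instead of `MailIndex` entries (an assumed simplification): `partition_point` performs the O(log m) jump to the first candidate, and the window then only ever moves forward while collecting the requested UIDs, so the scan stays cheap when n << m.

/// Sketch with plain u32 UIDs; both the mailbox index and the requested
/// UIDs are assumed sorted in ascending order.
fn fetch_on_uid_sketch(index_uids: &[u32], wanted: &[u32]) -> Vec<u32> {
    let Some(&first_wanted) = wanted.first() else {
        return vec![];
    };

    // O(log m) jump to the first entry that could possibly match.
    let start = index_uids.partition_point(|&uid| uid < first_wanted);
    let mut window = &index_uids[start..];

    let mut acc = vec![];
    for &want in wanted {
        // Slide forward only; the total scan cost is bounded by the
        // window size, not n * m.
        let pos = match window.iter().position(|&uid| uid >= want) {
            Some(p) => p,
            None => break,
        };
        window = &window[pos..];
        if window[0] == want {
            acc.push(want);
        }
    }
    acc
}

fn main() {
    let mailbox = [1, 3, 4, 7, 9, 12];
    let wanted = [3, 5, 9, 40];
    // 5 and 40 are absent from the mailbox, so only 3 and 9 come back.
    assert_eq!(fetch_on_uid_sketch(&mailbox, &wanted), vec![3, 9]);
    println!("ok");
}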
- pub fn fetch_on_uid(&'a self, sequence_set: &SequenceSet) -> Vec<&'a MailIndex<'a>> { - if self.imap_index.is_empty() { - return vec![]; - } - let largest = self.last().expect("The mailbox is not empty").uid; - let mut unroll_seq = sequence_set.iter(largest).collect::>(); - unroll_seq.sort(); - - let start_seq = match unroll_seq.iter().next() { - Some(elem) => elem, - None => return vec![], - }; - - // Quickly jump to the right point in the mailbox vector O(log m) instead - // of iterating one by one O(m). Works only because both unroll_seq & imap_index are sorted per uid. - let mut imap_idx = { - let start_idx = self - .imap_index - .partition_point(|mail_idx| &mail_idx.uid < start_seq); - &self.imap_index[start_idx..] - }; - - let mut acc = vec![]; - for wanted_uid in unroll_seq.iter() { - // Slide the window forward as long as its first element is lower than our wanted uid. - let start_idx = match imap_idx.iter().position(|midx| &midx.uid >= wanted_uid) { - Some(v) => v, - None => break, - }; - imap_idx = &imap_idx[start_idx..]; - - // If the beginning of our new window is the uid we want, we collect it - if &imap_idx[0].uid == wanted_uid { - acc.push(&imap_idx[0]); - } - } - - acc - } - - pub fn fetch_on_id(&'a self, sequence_set: &SequenceSet) -> Result>> { - if self.imap_index.is_empty() { - return Ok(vec![]); - } - let largest = NonZeroU32::try_from(self.imap_index.len() as u32)?; - let mut acc = sequence_set - .iter(largest) - .map(|wanted_id| { - self.imap_index - .get((wanted_id.get() as usize) - 1) - .ok_or(anyhow!("Mail not found")) - }) - .collect::>>()?; - - // Sort the result to be consistent with UID - acc.sort_by(|a, b| a.i.cmp(&b.i)); - - Ok(acc) - } - - pub fn fetch( - self: &'a Index<'a>, - sequence_set: &SequenceSet, - by_uid: bool, - ) -> Result>> { - match by_uid { - true => Ok(self.fetch_on_uid(sequence_set)), - _ => self.fetch_on_id(sequence_set), - } - } - - pub fn fetch_changed_since( - self: &'a Index<'a>, - sequence_set: &SequenceSet, - maybe_modseq: Option, - by_uid: bool, - ) -> Result>> { - let raw = self.fetch(sequence_set, by_uid)?; - let res = match maybe_modseq { - Some(pit) => raw.into_iter().filter(|midx| midx.modseq > pit).collect(), - None => raw, - }; - - Ok(res) - } - - pub fn fetch_unchanged_since( - self: &'a Index<'a>, - sequence_set: &SequenceSet, - maybe_modseq: Option, - by_uid: bool, - ) -> Result<(Vec<&'a MailIndex<'a>>, Vec<&'a MailIndex<'a>>)> { - let raw = self.fetch(sequence_set, by_uid)?; - let res = match maybe_modseq { - Some(pit) => raw.into_iter().partition(|midx| midx.modseq <= pit), - None => (raw, vec![]), - }; - - Ok(res) - } -} - -#[derive(Clone, Debug)] -pub struct MailIndex<'a> { - pub i: NonZeroU32, - pub uid: ImapUid, - pub uuid: UniqueIdent, - pub modseq: ModSeq, - pub flags: &'a Vec, -} - -impl<'a> MailIndex<'a> { - // The following functions are used to implement the SEARCH command - pub fn is_in_sequence_i(&self, seq: &Sequence) -> bool { - match seq { - Sequence::Single(SeqOrUid::Asterisk) => true, - Sequence::Single(SeqOrUid::Value(target)) => target == &self.i, - Sequence::Range(SeqOrUid::Asterisk, SeqOrUid::Value(x)) - | Sequence::Range(SeqOrUid::Value(x), SeqOrUid::Asterisk) => x <= &self.i, - Sequence::Range(SeqOrUid::Value(x1), SeqOrUid::Value(x2)) => { - if x1 < x2 { - x1 <= &self.i && &self.i <= x2 - } else { - x1 >= &self.i && &self.i >= x2 - } - } - Sequence::Range(SeqOrUid::Asterisk, SeqOrUid::Asterisk) => true, - } - } - - pub fn is_in_sequence_uid(&self, seq: &Sequence) -> bool { - match seq { - 
Sequence::Single(SeqOrUid::Asterisk) => true, - Sequence::Single(SeqOrUid::Value(target)) => target == &self.uid, - Sequence::Range(SeqOrUid::Asterisk, SeqOrUid::Value(x)) - | Sequence::Range(SeqOrUid::Value(x), SeqOrUid::Asterisk) => x <= &self.uid, - Sequence::Range(SeqOrUid::Value(x1), SeqOrUid::Value(x2)) => { - if x1 < x2 { - x1 <= &self.uid && &self.uid <= x2 - } else { - x1 >= &self.uid && &self.uid >= x2 - } - } - Sequence::Range(SeqOrUid::Asterisk, SeqOrUid::Asterisk) => true, - } - } - - pub fn is_flag_set(&self, flag: &str) -> bool { - self.flags - .iter() - .any(|candidate| candidate.as_str() == flag) - } -} diff --git a/src/imap/mail_view.rs b/src/imap/mail_view.rs deleted file mode 100644 index a8db733..0000000 --- a/src/imap/mail_view.rs +++ /dev/null @@ -1,306 +0,0 @@ -use std::num::NonZeroU32; - -use anyhow::{anyhow, bail, Result}; -use chrono::{naive::NaiveDate, DateTime as ChronoDateTime, Local, Offset, TimeZone, Utc}; - -use imap_codec::imap_types::core::NString; -use imap_codec::imap_types::datetime::DateTime; -use imap_codec::imap_types::fetch::{ - MessageDataItem, MessageDataItemName, Section as FetchSection, -}; -use imap_codec::imap_types::flag::Flag; -use imap_codec::imap_types::response::Data; - -use eml_codec::{ - imf, - part::{composite::Message, AnyPart}, -}; - -use crate::mail::query::QueryResult; - -use crate::imap::attributes::AttributesProxy; -use crate::imap::flags; -use crate::imap::imf_view::ImfView; -use crate::imap::index::MailIndex; -use crate::imap::mime_view; -use crate::imap::response::Body; - -pub struct MailView<'a> { - pub in_idx: &'a MailIndex<'a>, - pub query_result: &'a QueryResult, - pub content: FetchedMail<'a>, -} - -impl<'a> MailView<'a> { - pub fn new(query_result: &'a QueryResult, in_idx: &'a MailIndex<'a>) -> Result> { - Ok(Self { - in_idx, - query_result, - content: match query_result { - QueryResult::FullResult { content, .. } => { - let (_, parsed) = - eml_codec::parse_message(&content).or(Err(anyhow!("Invalid mail body")))?; - FetchedMail::full_from_message(parsed) - } - QueryResult::PartialResult { metadata, .. } => { - let (_, parsed) = eml_codec::parse_message(&metadata.headers) - .or(Err(anyhow!("unable to parse email headers")))?; - FetchedMail::partial_from_message(parsed) - } - QueryResult::IndexResult { .. 
} => FetchedMail::IndexOnly, - }, - }) - } - - pub fn imf(&self) -> Option { - self.content.as_imf().map(ImfView) - } - - pub fn selected_mime(&'a self) -> Option> { - self.content.as_anypart().ok().map(mime_view::SelectedMime) - } - - pub fn filter(&self, ap: &AttributesProxy) -> Result<(Body<'static>, SeenFlag)> { - let mut seen = SeenFlag::DoNothing; - let res_attrs = ap - .attrs - .iter() - .map(|attr| match attr { - MessageDataItemName::Uid => Ok(self.uid()), - MessageDataItemName::Flags => Ok(self.flags()), - MessageDataItemName::Rfc822Size => self.rfc_822_size(), - MessageDataItemName::Rfc822Header => self.rfc_822_header(), - MessageDataItemName::Rfc822Text => self.rfc_822_text(), - MessageDataItemName::Rfc822 => { - if self.is_not_yet_seen() { - seen = SeenFlag::MustAdd; - } - self.rfc822() - } - MessageDataItemName::Envelope => Ok(self.envelope()), - MessageDataItemName::Body => self.body(), - MessageDataItemName::BodyStructure => self.body_structure(), - MessageDataItemName::BodyExt { - section, - partial, - peek, - } => { - let (body, has_seen) = self.body_ext(section, partial, peek)?; - seen = has_seen; - Ok(body) - } - MessageDataItemName::InternalDate => self.internal_date(), - MessageDataItemName::ModSeq => Ok(self.modseq()), - }) - .collect::, _>>()?; - - Ok(( - Body::Data(Data::Fetch { - seq: self.in_idx.i, - items: res_attrs.try_into()?, - }), - seen, - )) - } - - pub fn stored_naive_date(&self) -> Result { - let mail_meta = self.query_result.metadata().expect("metadata were fetched"); - let mail_ts: i64 = mail_meta.internaldate.try_into()?; - let msg_date: ChronoDateTime = ChronoDateTime::from_timestamp(mail_ts, 0) - .ok_or(anyhow!("unable to parse timestamp"))? - .with_timezone(&Local); - - Ok(msg_date.date_naive()) - } - - pub fn is_header_contains_pattern(&self, hdr: &[u8], pattern: &[u8]) -> bool { - let mime = match self.selected_mime() { - None => return false, - Some(x) => x, - }; - - let val = match mime.header_value(hdr) { - None => return false, - Some(x) => x, - }; - - val.windows(pattern.len()).any(|win| win == pattern) - } - - // Private function, mainly for filter! - fn uid(&self) -> MessageDataItem<'static> { - MessageDataItem::Uid(self.in_idx.uid.clone()) - } - - fn flags(&self) -> MessageDataItem<'static> { - MessageDataItem::Flags( - self.in_idx - .flags - .iter() - .filter_map(|f| flags::from_str(f)) - .collect(), - ) - } - - fn rfc_822_size(&self) -> Result> { - let sz = self - .query_result - .metadata() - .ok_or(anyhow!("mail metadata are required"))? - .rfc822_size; - Ok(MessageDataItem::Rfc822Size(sz as u32)) - } - - fn rfc_822_header(&self) -> Result> { - let hdrs: NString = self - .query_result - .metadata() - .ok_or(anyhow!("mail metadata are required"))? 
- .headers - .to_vec() - .try_into()?; - Ok(MessageDataItem::Rfc822Header(hdrs)) - } - - fn rfc_822_text(&self) -> Result> { - let txt: NString = self.content.as_msg()?.raw_body.to_vec().try_into()?; - Ok(MessageDataItem::Rfc822Text(txt)) - } - - fn rfc822(&self) -> Result> { - let full: NString = self.content.as_msg()?.raw_part.to_vec().try_into()?; - Ok(MessageDataItem::Rfc822(full)) - } - - fn envelope(&self) -> MessageDataItem<'static> { - MessageDataItem::Envelope( - self.imf() - .expect("an imf object is derivable from fetchedmail") - .message_envelope(), - ) - } - - fn body(&self) -> Result> { - Ok(MessageDataItem::Body(mime_view::bodystructure( - self.content.as_msg()?.child.as_ref(), - false, - )?)) - } - - fn body_structure(&self) -> Result> { - Ok(MessageDataItem::BodyStructure(mime_view::bodystructure( - self.content.as_msg()?.child.as_ref(), - true, - )?)) - } - - fn is_not_yet_seen(&self) -> bool { - let seen_flag = Flag::Seen.to_string(); - !self.in_idx.flags.iter().any(|x| *x == seen_flag) - } - - /// maps to BODY[
<section>]<<partial>> and BODY.PEEK[<section>
]<> - /// peek does not implicitly set the \Seen flag - /// eg. BODY[HEADER.FIELDS (DATE FROM)] - /// eg. BODY[]<0.2048> - fn body_ext( - &self, - section: &Option>, - partial: &Option<(u32, NonZeroU32)>, - peek: &bool, - ) -> Result<(MessageDataItem<'static>, SeenFlag)> { - // Manage Seen flag - let mut seen = SeenFlag::DoNothing; - if !peek && self.is_not_yet_seen() { - // Add \Seen flag - //self.mailbox.add_flags(uuid, &[seen_flag]).await?; - seen = SeenFlag::MustAdd; - } - - // Process message - let (text, origin) = - match mime_view::body_ext(self.content.as_anypart()?, section, partial)? { - mime_view::BodySection::Full(body) => (body, None), - mime_view::BodySection::Slice { body, origin_octet } => (body, Some(origin_octet)), - }; - - let data: NString = text.to_vec().try_into()?; - - return Ok(( - MessageDataItem::BodyExt { - section: section.as_ref().map(|fs| fs.clone()), - origin, - data, - }, - seen, - )); - } - - fn internal_date(&self) -> Result> { - let dt = Utc - .fix() - .timestamp_opt( - i64::try_from( - self.query_result - .metadata() - .ok_or(anyhow!("mail metadata were not fetched"))? - .internaldate - / 1000, - )?, - 0, - ) - .earliest() - .ok_or(anyhow!("Unable to parse internal date"))?; - Ok(MessageDataItem::InternalDate(DateTime::unvalidated(dt))) - } - - fn modseq(&self) -> MessageDataItem<'static> { - MessageDataItem::ModSeq(self.in_idx.modseq) - } -} - -pub enum SeenFlag { - DoNothing, - MustAdd, -} - -// ------------------- - -pub enum FetchedMail<'a> { - IndexOnly, - Partial(AnyPart<'a>), - Full(AnyPart<'a>), -} -impl<'a> FetchedMail<'a> { - pub fn full_from_message(msg: Message<'a>) -> Self { - Self::Full(AnyPart::Msg(msg)) - } - - pub fn partial_from_message(msg: Message<'a>) -> Self { - Self::Partial(AnyPart::Msg(msg)) - } - - pub fn as_anypart(&self) -> Result<&AnyPart<'a>> { - match self { - FetchedMail::Full(x) => Ok(&x), - FetchedMail::Partial(x) => Ok(&x), - _ => bail!("The full message must be fetched, not only its headers"), - } - } - - pub fn as_msg(&self) -> Result<&Message<'a>> { - match self { - FetchedMail::Full(AnyPart::Msg(x)) => Ok(&x), - FetchedMail::Partial(AnyPart::Msg(x)) => Ok(&x), - _ => bail!("The full message must be fetched, not only its headers AND it must be an AnyPart::Msg."), - } - } - - pub fn as_imf(&self) -> Option<&imf::Imf<'a>> { - match self { - FetchedMail::Full(AnyPart::Msg(x)) => Some(&x.imf), - FetchedMail::Partial(AnyPart::Msg(x)) => Some(&x.imf), - _ => None, - } - } -} diff --git a/src/imap/mailbox_view.rs b/src/imap/mailbox_view.rs deleted file mode 100644 index 1c53b93..0000000 --- a/src/imap/mailbox_view.rs +++ /dev/null @@ -1,772 +0,0 @@ -use std::collections::HashSet; -use std::num::{NonZeroU32, NonZeroU64}; -use std::sync::Arc; - -use anyhow::{anyhow, Error, Result}; - -use futures::stream::{StreamExt, TryStreamExt}; - -use imap_codec::imap_types::core::{Charset, Vec1}; -use imap_codec::imap_types::fetch::MessageDataItem; -use imap_codec::imap_types::flag::{Flag, FlagFetch, FlagPerm, StoreResponse, StoreType}; -use imap_codec::imap_types::response::{Code, CodeOther, Data, Status}; -use imap_codec::imap_types::search::SearchKey; -use imap_codec::imap_types::sequence::SequenceSet; - -use crate::mail::mailbox::Mailbox; -use crate::mail::query::QueryScope; -use crate::mail::snapshot::FrozenMailbox; -use crate::mail::uidindex::{ImapUid, ImapUidvalidity, ModSeq}; -use crate::mail::unique_ident::UniqueIdent; - -use crate::imap::attributes::AttributesProxy; -use crate::imap::flags; -use crate::imap::index::Index; -use 
crate::imap::mail_view::{MailView, SeenFlag}; -use crate::imap::response::Body; -use crate::imap::search; - -const DEFAULT_FLAGS: [Flag; 5] = [ - Flag::Seen, - Flag::Answered, - Flag::Flagged, - Flag::Deleted, - Flag::Draft, -]; - -pub struct UpdateParameters { - pub silence: HashSet, - pub with_modseq: bool, - pub with_uid: bool, -} -impl Default for UpdateParameters { - fn default() -> Self { - Self { - silence: HashSet::new(), - with_modseq: false, - with_uid: false, - } - } -} - -/// A MailboxView is responsible for giving the client the information -/// it needs about a mailbox, such as an initial summary of the mailbox's -/// content and continuous updates indicating when the content -/// of the mailbox has been changed. -/// To do this, it keeps a variable `known_state` that corresponds to -/// what the client knows, and produces IMAP messages to be sent to the -/// client that go along updates to `known_state`. -pub struct MailboxView { - pub internal: FrozenMailbox, - pub is_condstore: bool, -} - -impl MailboxView { - /// Creates a new IMAP view into a mailbox. - pub async fn new(mailbox: Arc, is_cond: bool) -> Self { - Self { - internal: mailbox.frozen().await, - is_condstore: is_cond, - } - } - - /// Create an updated view, useful to make a diff - /// between what the client knows and new stuff - /// Produces a set of IMAP responses describing the change between - /// what the client knows and what is actually in the mailbox. - /// This does NOT trigger a sync, it bases itself on what is currently - /// loaded in RAM by Bayou. - pub async fn update(&mut self, params: UpdateParameters) -> Result>> { - let old_snapshot = self.internal.update().await; - let new_snapshot = &self.internal.snapshot; - - let mut data = Vec::::new(); - - // Calculate diff between two mailbox states - // See example in IMAP RFC in section on NOOP command: - // we want to produce something like this: - // C: a047 NOOP - // S: * 22 EXPUNGE - // S: * 23 EXISTS - // S: * 14 FETCH (UID 1305 FLAGS (\Seen \Deleted)) - // S: a047 OK Noop completed - // In other words: - // - notify client of expunged mails - // - if new mails arrived, notify client of number of existing mails - // - if flags changed for existing mails, tell client - // (for this last step: if uidvalidity changed, do nothing, - // just notify of new uidvalidity and they will resync) - - // - notify client of expunged mails - let mut n_expunge = 0; - for (i, (_uid, uuid)) in old_snapshot.idx_by_uid.iter().enumerate() { - if !new_snapshot.table.contains_key(uuid) { - data.push(Body::Data(Data::Expunge( - NonZeroU32::try_from((i + 1 - n_expunge) as u32).unwrap(), - ))); - n_expunge += 1; - } - } - - // - if new mails arrived, notify client of number of existing mails - if new_snapshot.table.len() != old_snapshot.table.len() - n_expunge - || new_snapshot.uidvalidity != old_snapshot.uidvalidity - { - data.push(self.exists_status()?); - } - - if new_snapshot.uidvalidity != old_snapshot.uidvalidity { - // TODO: do we want to push less/more info than this? 
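The EXPUNGE/EXISTS arithmetic in `update()` above is easy to get wrong, so here is a dependency-free sketch of the same diff idea over plain `std` maps; `MailboxEvent` and `diff_snapshots` are illustrative names, and the real code additionally emits FETCH, UIDVALIDITY and UIDNEXT data:

```rust
use std::collections::BTreeMap;

/// Untagged responses a client needs after a refresh (simplified).
#[derive(Debug, PartialEq)]
enum MailboxEvent {
    Expunge(u32), // sequence number of a message that disappeared
    Exists(u32),  // new total number of messages
}

/// Diff two snapshots (UID -> message identifier), lowest UID first.
/// Sequence numbers are 1-based and shift down as earlier entries are
/// expunged, hence the `i + 1 - n_expunge` arithmetic.
fn diff_snapshots(old: &BTreeMap<u32, String>, new: &BTreeMap<u32, String>) -> Vec<MailboxEvent> {
    let mut events = Vec::new();
    let mut n_expunge = 0u32;

    for (i, (_uid, ident)) in old.iter().enumerate() {
        if !new.values().any(|other| other == ident) {
            events.push(MailboxEvent::Expunge(i as u32 + 1 - n_expunge));
            n_expunge += 1;
        }
    }

    // Any change in the message count (new arrivals or expunges) is
    // reported through EXISTS.
    if new.len() != old.len() - n_expunge as usize {
        events.push(MailboxEvent::Exists(new.len() as u32));
    }

    events
}

fn main() {
    let old = BTreeMap::from([(10u32, "a".to_string()), (11, "b".into()), (12, "c".into())]);
    let new = BTreeMap::from([(10u32, "a".to_string()), (12, "c".into()), (13, "d".into())]);
    // "b" (2nd message) was expunged and "d" arrived: * 2 EXPUNGE, * 3 EXISTS
    assert_eq!(
        diff_snapshots(&old, &new),
        vec![MailboxEvent::Expunge(2), MailboxEvent::Exists(3)]
    );
}
```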
- data.push(self.uidvalidity_status()?); - data.push(self.uidnext_status()?); - } else { - // - if flags changed for existing mails, tell client - for (i, (_uid, uuid)) in new_snapshot.idx_by_uid.iter().enumerate() { - if params.silence.contains(uuid) { - continue; - } - - let old_mail = old_snapshot.table.get(uuid); - let new_mail = new_snapshot.table.get(uuid); - if old_mail.is_some() && old_mail != new_mail { - if let Some((uid, modseq, flags)) = new_mail { - let mut items = vec![MessageDataItem::Flags( - flags.iter().filter_map(|f| flags::from_str(f)).collect(), - )]; - - if params.with_uid { - items.push(MessageDataItem::Uid(*uid)); - } - - if params.with_modseq { - items.push(MessageDataItem::ModSeq(*modseq)); - } - - data.push(Body::Data(Data::Fetch { - seq: NonZeroU32::try_from((i + 1) as u32).unwrap(), - items: items.try_into()?, - })); - } - } - } - } - Ok(data) - } - - /// Generates the necessary IMAP messages so that the client - /// has a satisfactory summary of the current mailbox's state. - /// These are the messages that are sent in response to a SELECT command. - pub fn summary(&self) -> Result>> { - let mut data = Vec::::new(); - data.push(self.exists_status()?); - data.push(self.recent_status()?); - data.extend(self.flags_status()?.into_iter()); - data.push(self.uidvalidity_status()?); - data.push(self.uidnext_status()?); - if self.is_condstore { - data.push(self.highestmodseq_status()?); - } - /*self.unseen_first_status()? - .map(|unseen_status| data.push(unseen_status));*/ - - Ok(data) - } - - pub async fn store<'a>( - &mut self, - sequence_set: &SequenceSet, - kind: &StoreType, - response: &StoreResponse, - flags: &[Flag<'a>], - unchanged_since: Option, - is_uid_store: &bool, - ) -> Result<(Vec>, Vec)> { - self.internal.sync().await?; - - let flags = flags.iter().map(|x| x.to_string()).collect::>(); - - let idx = self.index()?; - let (editable, in_conflict) = - idx.fetch_unchanged_since(sequence_set, unchanged_since, *is_uid_store)?; - - for mi in editable.iter() { - match kind { - StoreType::Add => { - self.internal.mailbox.add_flags(mi.uuid, &flags[..]).await?; - } - StoreType::Remove => { - self.internal.mailbox.del_flags(mi.uuid, &flags[..]).await?; - } - StoreType::Replace => { - self.internal.mailbox.set_flags(mi.uuid, &flags[..]).await?; - } - } - } - - let silence = match response { - StoreResponse::Answer => HashSet::new(), - StoreResponse::Silent => editable.iter().map(|midx| midx.uuid).collect(), - }; - - let conflict_id_or_uid = match is_uid_store { - true => in_conflict.into_iter().map(|midx| midx.uid).collect(), - _ => in_conflict.into_iter().map(|midx| midx.i).collect(), - }; - - let summary = self - .update(UpdateParameters { - with_uid: *is_uid_store, - with_modseq: unchanged_since.is_some(), - silence, - }) - .await?; - - Ok((summary, conflict_id_or_uid)) - } - - pub async fn idle_sync(&mut self) -> Result>> { - self.internal - .mailbox - .notify() - .await - .upgrade() - .ok_or(anyhow!("test"))? 
- .notified() - .await; - self.internal.mailbox.opportunistic_sync().await?; - self.update(UpdateParameters::default()).await - } - - pub async fn expunge( - &mut self, - maybe_seq_set: &Option, - ) -> Result>> { - // Get a recent view to apply our change - self.internal.sync().await?; - let state = self.internal.peek().await; - let idx = Index::new(&state)?; - - // Build a default sequence set for the default case - use imap_codec::imap_types::sequence::{SeqOrUid, Sequence}; - let seq = match maybe_seq_set { - Some(s) => s.clone(), - None => SequenceSet( - vec![Sequence::Range( - SeqOrUid::Value(NonZeroU32::MIN), - SeqOrUid::Asterisk, - )] - .try_into() - .unwrap(), - ), - }; - - let deleted_flag = Flag::Deleted.to_string(); - let msgs = idx - .fetch_on_uid(&seq) - .into_iter() - .filter(|midx| midx.flags.iter().any(|x| *x == deleted_flag)) - .map(|midx| midx.uuid); - - for msg in msgs { - self.internal.mailbox.delete(msg).await?; - } - - self.update(UpdateParameters::default()).await - } - - pub async fn copy( - &self, - sequence_set: &SequenceSet, - to: Arc, - is_uid_copy: &bool, - ) -> Result<(ImapUidvalidity, Vec<(ImapUid, ImapUid)>)> { - let idx = self.index()?; - let mails = idx.fetch(sequence_set, *is_uid_copy)?; - - let mut new_uuids = vec![]; - for mi in mails.iter() { - new_uuids.push(to.copy_from(&self.internal.mailbox, mi.uuid).await?); - } - - let mut ret = vec![]; - let to_state = to.current_uid_index().await; - for (mi, new_uuid) in mails.iter().zip(new_uuids.iter()) { - let dest_uid = to_state - .table - .get(new_uuid) - .ok_or(anyhow!("copied mail not in destination mailbox"))? - .0; - ret.push((mi.uid, dest_uid)); - } - - Ok((to_state.uidvalidity, ret)) - } - - pub async fn r#move( - &mut self, - sequence_set: &SequenceSet, - to: Arc, - is_uid_copy: &bool, - ) -> Result<(ImapUidvalidity, Vec<(ImapUid, ImapUid)>, Vec>)> { - let idx = self.index()?; - let mails = idx.fetch(sequence_set, *is_uid_copy)?; - - for mi in mails.iter() { - to.move_from(&self.internal.mailbox, mi.uuid).await?; - } - - let mut ret = vec![]; - let to_state = to.current_uid_index().await; - for mi in mails.iter() { - let dest_uid = to_state - .table - .get(&mi.uuid) - .ok_or(anyhow!("moved mail not in destination mailbox"))? - .0; - ret.push((mi.uid, dest_uid)); - } - - let update = self - .update(UpdateParameters { - with_uid: *is_uid_copy, - ..UpdateParameters::default() - }) - .await?; - - Ok((to_state.uidvalidity, ret, update)) - } - - /// Looks up state changes in the mailbox and produces a set of IMAP - /// responses describing the new state. - pub async fn fetch<'b>( - &self, - sequence_set: &SequenceSet, - ap: &AttributesProxy, - changed_since: Option, - is_uid_fetch: &bool, - ) -> Result>> { - // [1/6] Pre-compute data - // a. what are the uuids of the emails we want? - // b. do we need to fetch the full body? 
- //let ap = AttributesProxy::new(attributes, *is_uid_fetch); - let query_scope = match ap.need_body() { - true => QueryScope::Full, - _ => QueryScope::Partial, - }; - tracing::debug!("Query scope {:?}", query_scope); - let idx = self.index()?; - let mail_idx_list = idx.fetch_changed_since(sequence_set, changed_since, *is_uid_fetch)?; - - // [2/6] Fetch the emails - let uuids = mail_idx_list - .iter() - .map(|midx| midx.uuid) - .collect::>(); - - let query = self.internal.query(&uuids, query_scope); - //let query_result = self.internal.query(&uuids, query_scope).fetch().await?; - - let query_stream = query - .fetch() - .zip(futures::stream::iter(mail_idx_list)) - // [3/6] Derive an IMAP-specific view from the results, apply the filters - .map(|(maybe_qr, midx)| match maybe_qr { - Ok(qr) => Ok((MailView::new(&qr, midx)?.filter(&ap)?, midx)), - Err(e) => Err(e), - }) - // [4/6] Apply the IMAP transformation - .then(|maybe_ret| async move { - let ((body, seen), midx) = maybe_ret?; - - // [5/6] Register the \Seen flags - if matches!(seen, SeenFlag::MustAdd) { - let seen_flag = Flag::Seen.to_string(); - self.internal - .mailbox - .add_flags(midx.uuid, &[seen_flag]) - .await?; - } - - Ok::<_, anyhow::Error>(body) - }); - - // [6/6] Build the final result that will be sent to the client. - query_stream.try_collect().await - } - - /// A naive search implementation... - pub async fn search<'a>( - &self, - _charset: &Option>, - search_key: &SearchKey<'a>, - uid: bool, - ) -> Result<(Vec>, bool)> { - // 1. Compute the subset of sequence identifiers we need to fetch - // based on the search query - let crit = search::Criteria(search_key); - let (seq_set, seq_type) = crit.to_sequence_set(); - - // 2. Get the selection - let idx = self.index()?; - let selection = idx.fetch(&seq_set, seq_type.is_uid())?; - - // 3. Filter the selection based on the ID / UID / Flags - let (kept_idx, to_fetch) = crit.filter_on_idx(&selection); - - // 4.a Fetch additional info about the emails - let query_scope = crit.query_scope(); - let uuids = to_fetch.iter().map(|midx| midx.uuid).collect::>(); - let query = self.internal.query(&uuids, query_scope); - - // 4.b We don't want to keep all data in memory, so we do the computing in a stream - let query_stream = query - .fetch() - .zip(futures::stream::iter(&to_fetch)) - // 5.a Build a mailview with the body, might fail with an error - // 5.b If needed, filter the selection based on the body, but keep the errors - // 6. Drop the query+mailbox, keep only the mail index - // Here we release a lot of memory, this is the most important part ^^ - .filter_map(|(maybe_qr, midx)| { - let r = match maybe_qr { - Ok(qr) => match MailView::new(&qr, midx).map(|mv| crit.is_keep_on_query(&mv)) { - Ok(true) => Some(Ok(*midx)), - Ok(_) => None, - Err(e) => Some(Err(e)), - }, - Err(e) => Some(Err(e)), - }; - futures::future::ready(r) - }); - - // 7. Chain both streams (part resolved from index, part resolved from metadata+body) - let main_stream = futures::stream::iter(kept_idx) - .map(Ok) - .chain(query_stream) - .map_ok(|idx| match uid { - true => (idx.uid, idx.modseq), - _ => (idx.i, idx.modseq), - }); - - // 8. Do the actual computation - let internal_result: Vec<_> = main_stream.try_collect().await?; - let (selection, modseqs): (Vec<_>, Vec<_>) = internal_result.into_iter().unzip(); - - // 9. Aggregate the maximum modseq value - let maybe_modseq = match crit.is_modseq() { - true => modseqs.into_iter().max(), - _ => None, - }; - - // 10. 
Return the final result - Ok(( - vec![Body::Data(Data::Search(selection, maybe_modseq))], - maybe_modseq.is_some(), - )) - } - - // ---- - /// @FIXME index should be stored for longer than a single request - /// Instead they should be tied to the FrozenMailbox refresh - /// It's not trivial to refactor the code to do that, so we are doing - /// some useless computation for now... - fn index<'a>(&'a self) -> Result> { - Index::new(&self.internal.snapshot) - } - - /// Produce an OK [UIDVALIDITY _] message corresponding to `known_state` - fn uidvalidity_status(&self) -> Result> { - let uid_validity = Status::ok( - None, - Some(Code::UidValidity(self.uidvalidity())), - "UIDs valid", - ) - .map_err(Error::msg)?; - Ok(Body::Status(uid_validity)) - } - - pub(crate) fn uidvalidity(&self) -> ImapUidvalidity { - self.internal.snapshot.uidvalidity - } - - /// Produce an OK [UIDNEXT _] message corresponding to `known_state` - fn uidnext_status(&self) -> Result> { - let next_uid = Status::ok( - None, - Some(Code::UidNext(self.uidnext())), - "Predict next UID", - ) - .map_err(Error::msg)?; - Ok(Body::Status(next_uid)) - } - - pub(crate) fn uidnext(&self) -> ImapUid { - self.internal.snapshot.uidnext - } - - pub(crate) fn highestmodseq_status(&self) -> Result> { - Ok(Body::Status(Status::ok( - None, - Some(Code::Other(CodeOther::unvalidated( - format!("HIGHESTMODSEQ {}", self.highestmodseq()).into_bytes(), - ))), - "Highest", - )?)) - } - - pub(crate) fn highestmodseq(&self) -> ModSeq { - self.internal.snapshot.highestmodseq - } - - /// Produce an EXISTS message corresponding to the number of mails - /// in `known_state` - fn exists_status(&self) -> Result> { - Ok(Body::Data(Data::Exists(self.exists()?))) - } - - pub(crate) fn exists(&self) -> Result { - Ok(u32::try_from(self.internal.snapshot.idx_by_uid.len())?) - } - - /// Produce a RECENT message corresponding to the number of - /// recent mails in `known_state` - fn recent_status(&self) -> Result> { - Ok(Body::Data(Data::Recent(self.recent()?))) - } - - #[allow(dead_code)] - fn unseen_first_status(&self) -> Result>> { - Ok(self - .unseen_first()? - .map(|unseen_id| { - Status::ok(None, Some(Code::Unseen(unseen_id)), "First unseen.").map(Body::Status) - }) - .transpose()?) - } - - #[allow(dead_code)] - fn unseen_first(&self) -> Result> { - Ok(self - .internal - .snapshot - .table - .values() - .enumerate() - .find(|(_i, (_imap_uid, _modseq, flags))| !flags.contains(&"\\Seen".to_string())) - .map(|(i, _)| NonZeroU32::try_from(i as u32 + 1)) - .transpose()?) - } - - pub(crate) fn recent(&self) -> Result { - let recent = self - .internal - .snapshot - .idx_by_flag - .get(&"\\Recent".to_string()) - .map(|os| os.len()) - .unwrap_or(0); - Ok(u32::try_from(recent)?) - } - - /// Produce a FLAGS and a PERMANENTFLAGS message that indicates - /// the flags that are in `known_state` + default flags - fn flags_status(&self) -> Result>> { - let mut body = vec![]; - - // 1. Collecting all the possible flags in the mailbox - // 1.a Fetch them from our index - let mut known_flags: Vec = self - .internal - .snapshot - .idx_by_flag - .flags() - .filter_map(|f| match flags::from_str(f) { - Some(FlagFetch::Flag(fl)) => Some(fl), - _ => None, - }) - .collect(); - // 1.b Merge it with our default flags list - for f in DEFAULT_FLAGS.iter() { - if !known_flags.contains(f) { - known_flags.push(f.clone()); - } - } - // 1.c Create the IMAP message - body.push(Body::Data(Data::Flags(known_flags.clone()))); - - // 2. 
Returning flags that are persisted - // 2.a Always advertise our default flags - let mut permanent = DEFAULT_FLAGS - .iter() - .map(|f| FlagPerm::Flag(f.clone())) - .collect::>(); - // 2.b Say that we support any keyword flag - permanent.push(FlagPerm::Asterisk); - // 2.c Create the IMAP message - let permanent_flags = Status::ok( - None, - Some(Code::PermanentFlags(permanent)), - "Flags permitted", - ) - .map_err(Error::msg)?; - body.push(Body::Status(permanent_flags)); - - // Done! - Ok(body) - } - - pub(crate) fn unseen_count(&self) -> usize { - let total = self.internal.snapshot.table.len(); - let seen = self - .internal - .snapshot - .idx_by_flag - .get(&Flag::Seen.to_string()) - .map(|x| x.len()) - .unwrap_or(0); - total - seen - } -} - -#[cfg(test)] -mod tests { - use super::*; - use imap_codec::encode::Encoder; - use imap_codec::imap_types::core::Vec1; - use imap_codec::imap_types::fetch::Section; - use imap_codec::imap_types::fetch::{MacroOrMessageDataItemNames, MessageDataItemName}; - use imap_codec::imap_types::response::Response; - use imap_codec::ResponseCodec; - use std::fs; - - use crate::cryptoblob; - use crate::imap::index::MailIndex; - use crate::imap::mail_view::MailView; - use crate::imap::mime_view; - use crate::mail::mailbox::MailMeta; - use crate::mail::query::QueryResult; - use crate::mail::unique_ident; - - #[test] - fn mailview_body_ext() -> Result<()> { - let ap = AttributesProxy::new( - &MacroOrMessageDataItemNames::MessageDataItemNames(vec![ - MessageDataItemName::BodyExt { - section: Some(Section::Header(None)), - partial: None, - peek: false, - }, - ]), - &[], - false, - ); - - let key = cryptoblob::gen_key(); - let meta = MailMeta { - internaldate: 0u64, - headers: vec![], - message_key: key, - rfc822_size: 8usize, - }; - - let index_entry = (NonZeroU32::MIN, NonZeroU64::MIN, vec![]); - let mail_in_idx = MailIndex { - i: NonZeroU32::MIN, - uid: index_entry.0, - modseq: index_entry.1, - uuid: unique_ident::gen_ident(), - flags: &index_entry.2, - }; - let rfc822 = b"Subject: hello\r\nFrom: a@a.a\r\nTo: b@b.b\r\nDate: Thu, 12 Oct 2023 08:45:28 +0000\r\n\r\nhello world"; - let qr = QueryResult::FullResult { - uuid: mail_in_idx.uuid.clone(), - metadata: meta, - content: rfc822.to_vec(), - }; - - let mv = MailView::new(&qr, &mail_in_idx)?; - let (res_body, _seen) = mv.filter(&ap)?; - - let fattr = match res_body { - Body::Data(Data::Fetch { - seq: _seq, - items: attr, - }) => Ok(attr), - _ => Err(anyhow!("Not a fetch body")), - }?; - - assert_eq!(fattr.as_ref().len(), 1); - - let (sec, _orig, _data) = match &fattr.as_ref()[0] { - MessageDataItem::BodyExt { - section, - origin, - data, - } => Ok((section, origin, data)), - _ => Err(anyhow!("not a body ext message attribute")), - }?; - - assert_eq!(sec.as_ref().unwrap(), &Section::Header(None)); - - Ok(()) - } - - /// Future automated test. We use lossy utf8 conversion + lowercase everything, - /// so this test might allow invalid results. But at least it allows us to quickly test a - /// large variety of emails. - /// Keep in mind that special cases must still be tested manually! 
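A small illustration of the flag-advertisement rule above (index flags first, then any missing default system flag, no duplicates); `advertised_flags` is an invented helper, the real code builds imap-codec `Flag` values:

```rust
/// Flags advertised on SELECT: everything seen in the index, plus the
/// standard system flags, without duplicates.
fn advertised_flags(known: &[&str]) -> Vec<String> {
    const DEFAULT_FLAGS: [&str; 5] = ["\\Seen", "\\Answered", "\\Flagged", "\\Deleted", "\\Draft"];
    let mut out: Vec<String> = known.iter().map(|f| f.to_string()).collect();
    for f in DEFAULT_FLAGS {
        if !out.iter().any(|k| k == f) {
            out.push(f.to_string());
        }
    }
    out
}

fn main() {
    let flags = advertised_flags(&["\\Seen", "$Important"]);
    // \Seen is not duplicated, the four missing system flags are appended.
    assert_eq!(flags.len(), 6);
    assert!(flags.contains(&"\\Draft".to_string()));
}
```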
- #[test] - fn fetch_body() -> Result<()> { - let prefixes = [ - /* *** MY OWN DATASET *** */ - "tests/emails/dxflrs/0001_simple", - "tests/emails/dxflrs/0002_mime", - "tests/emails/dxflrs/0003_mime-in-mime", - "tests/emails/dxflrs/0004_msg-in-msg", - // eml_codec do not support continuation for the moment - //"tests/emails/dxflrs/0005_mail-parser-readme", - "tests/emails/dxflrs/0006_single-mime", - "tests/emails/dxflrs/0007_raw_msg_in_rfc822", - /* *** (STRANGE) RFC *** */ - //"tests/emails/rfc/000", // must return text/enriched, we return text/plain - //"tests/emails/rfc/001", // does not recognize the multipart/external-body, breaks the - // whole parsing - //"tests/emails/rfc/002", // wrong date in email - - //"tests/emails/rfc/003", // dovecot fixes \r\r: the bytes number is wrong + text/enriched - - /* *** THIRD PARTY *** */ - //"tests/emails/thirdparty/000", // dovecot fixes \r\r: the bytes number is wrong - //"tests/emails/thirdparty/001", // same - "tests/emails/thirdparty/002", // same - - /* *** LEGACY *** */ - //"tests/emails/legacy/000", // same issue with \r\r - ]; - - for pref in prefixes.iter() { - println!("{}", pref); - let txt = fs::read(format!("{}.eml", pref))?; - let oracle = fs::read(format!("{}.dovecot.body", pref))?; - let message = eml_codec::parse_message(&txt).unwrap().1; - - let test_repr = Response::Data(Data::Fetch { - seq: NonZeroU32::new(1).unwrap(), - items: Vec1::from(MessageDataItem::Body(mime_view::bodystructure( - &message.child, - false, - )?)), - }); - let test_bytes = ResponseCodec::new().encode(&test_repr).dump(); - let test_str = String::from_utf8_lossy(&test_bytes).to_lowercase(); - - let oracle_str = - format!("* 1 FETCH {}\r\n", String::from_utf8_lossy(&oracle)).to_lowercase(); - - println!("aerogramme: {}\n\ndovecot: {}\n\n", test_str, oracle_str); - //println!("\n\n {} \n\n", String::from_utf8_lossy(&resp)); - assert_eq!(test_str, oracle_str); - } - - Ok(()) - } -} diff --git a/src/imap/mime_view.rs b/src/imap/mime_view.rs deleted file mode 100644 index 8bbbd2d..0000000 --- a/src/imap/mime_view.rs +++ /dev/null @@ -1,580 +0,0 @@ -use std::borrow::Cow; -use std::collections::HashSet; -use std::num::NonZeroU32; - -use anyhow::{anyhow, bail, Result}; - -use imap_codec::imap_types::body::{ - BasicFields, Body as FetchBody, BodyStructure, MultiPartExtensionData, SinglePartExtensionData, - SpecificFields, -}; -use imap_codec::imap_types::core::{AString, IString, NString, Vec1}; -use imap_codec::imap_types::fetch::{Part as FetchPart, Section as FetchSection}; - -use eml_codec::{ - header, mime, mime::r#type::Deductible, part::composite, part::discrete, part::AnyPart, -}; - -use crate::imap::imf_view::ImfView; - -pub enum BodySection<'a> { - Full(Cow<'a, [u8]>), - Slice { - body: Cow<'a, [u8]>, - origin_octet: u32, - }, -} - -/// Logic for BODY[
]<> -/// Works in 3 times: -/// 1. Find the section (RootMime::subset) -/// 2. Apply the extraction logic (SelectedMime::extract), like TEXT, HEADERS, etc. -/// 3. Keep only the given subset provided by partial -/// -/// Example of message sections: -/// -/// ``` -/// HEADER ([RFC-2822] header of the message) -/// TEXT ([RFC-2822] text body of the message) MULTIPART/MIXED -/// 1 TEXT/PLAIN -/// 2 APPLICATION/OCTET-STREAM -/// 3 MESSAGE/RFC822 -/// 3.HEADER ([RFC-2822] header of the message) -/// 3.TEXT ([RFC-2822] text body of the message) MULTIPART/MIXED -/// 3.1 TEXT/PLAIN -/// 3.2 APPLICATION/OCTET-STREAM -/// 4 MULTIPART/MIXED -/// 4.1 IMAGE/GIF -/// 4.1.MIME ([MIME-IMB] header for the IMAGE/GIF) -/// 4.2 MESSAGE/RFC822 -/// 4.2.HEADER ([RFC-2822] header of the message) -/// 4.2.TEXT ([RFC-2822] text body of the message) MULTIPART/MIXED -/// 4.2.1 TEXT/PLAIN -/// 4.2.2 MULTIPART/ALTERNATIVE -/// 4.2.2.1 TEXT/PLAIN -/// 4.2.2.2 TEXT/RICHTEXT -/// ``` -pub fn body_ext<'a>( - part: &'a AnyPart<'a>, - section: &'a Option>, - partial: &'a Option<(u32, NonZeroU32)>, -) -> Result> { - let root_mime = NodeMime(part); - let (extractor, path) = SubsettedSection::from(section); - let selected_mime = root_mime.subset(path)?; - let extracted_full = selected_mime.extract(&extractor)?; - Ok(extracted_full.to_body_section(partial)) -} - -/// Logic for BODY and BODYSTRUCTURE -/// -/// ```raw -/// b fetch 29878:29879 (BODY) -/// * 29878 FETCH (BODY (("text" "plain" ("charset" "utf-8") NIL NIL "quoted-printable" 3264 82)("text" "html" ("charset" "utf-8") NIL NIL "quoted-printable" 31834 643) "alternative")) -/// * 29879 FETCH (BODY ("text" "html" ("charset" "us-ascii") NIL NIL "7bit" 4107 131)) -/// ^^^^^^^^^^^^^^^^^^^^^^ ^^^ ^^^ ^^^^^^ ^^^^ ^^^ -/// | | | | | | number of lines -/// | | | | | size -/// | | | | content transfer encoding -/// | | | description -/// | | id -/// | parameter list -/// b OK Fetch completed (0.001 + 0.000 secs). -/// ``` -pub fn bodystructure(part: &AnyPart, is_ext: bool) -> Result> { - NodeMime(part).structure(is_ext) -} - -/// NodeMime -/// -/// Used for recursive logic on MIME. -/// See SelectedMime for inspection. -struct NodeMime<'a>(&'a AnyPart<'a>); -impl<'a> NodeMime<'a> { - /// A MIME object is a tree of elements. - /// The path indicates which element must be picked. - /// This function returns the picked element as the new view - fn subset(self, path: Option<&'a FetchPart>) -> Result> { - match path { - None => Ok(SelectedMime(self.0)), - Some(v) => self.rec_subset(v.0.as_ref()), - } - } - - fn rec_subset(self, path: &'a [NonZeroU32]) -> Result { - if path.is_empty() { - Ok(SelectedMime(self.0)) - } else { - match self.0 { - AnyPart::Mult(x) => { - let next = Self(x.children - .get(path[0].get() as usize - 1) - .ok_or(anyhow!("Unable to resolve subpath {:?}, current multipart has only {} elements", path, x.children.len()))?); - next.rec_subset(&path[1..]) - }, - AnyPart::Msg(x) => { - let next = Self(x.child.as_ref()); - next.rec_subset(path) - }, - _ => bail!("You tried to access a subpart on an atomic part (text or binary). 
Unresolved subpath {:?}", path), - } - } - } - - fn structure(&self, is_ext: bool) -> Result> { - match self.0 { - AnyPart::Txt(x) => NodeTxt(self, x).structure(is_ext), - AnyPart::Bin(x) => NodeBin(self, x).structure(is_ext), - AnyPart::Mult(x) => NodeMult(self, x).structure(is_ext), - AnyPart::Msg(x) => NodeMsg(self, x).structure(is_ext), - } - } -} - -//---------------------------------------------------------- - -/// A FetchSection must be handled in 2 times: -/// - First we must extract the MIME part -/// - Then we must process it as desired -/// The given struct mixes both work, so -/// we separate this work here. -enum SubsettedSection<'a> { - Part, - Header, - HeaderFields(&'a Vec1>), - HeaderFieldsNot(&'a Vec1>), - Text, - Mime, -} -impl<'a> SubsettedSection<'a> { - fn from(section: &'a Option) -> (Self, Option<&'a FetchPart>) { - match section { - Some(FetchSection::Text(maybe_part)) => (Self::Text, maybe_part.as_ref()), - Some(FetchSection::Header(maybe_part)) => (Self::Header, maybe_part.as_ref()), - Some(FetchSection::HeaderFields(maybe_part, fields)) => { - (Self::HeaderFields(fields), maybe_part.as_ref()) - } - Some(FetchSection::HeaderFieldsNot(maybe_part, fields)) => { - (Self::HeaderFieldsNot(fields), maybe_part.as_ref()) - } - Some(FetchSection::Mime(part)) => (Self::Mime, Some(part)), - Some(FetchSection::Part(part)) => (Self::Part, Some(part)), - None => (Self::Part, None), - } - } -} - -/// Used for current MIME inspection -/// -/// See NodeMime for recursive logic -pub struct SelectedMime<'a>(pub &'a AnyPart<'a>); -impl<'a> SelectedMime<'a> { - pub fn header_value(&'a self, to_match_ext: &[u8]) -> Option<&'a [u8]> { - let to_match = to_match_ext.to_ascii_lowercase(); - - self.eml_mime() - .kv - .iter() - .filter_map(|field| match field { - header::Field::Good(header::Kv2(k, v)) => Some((k, v)), - _ => None, - }) - .find(|(k, _)| k.to_ascii_lowercase() == to_match) - .map(|(_, v)| v) - .copied() - } - - /// The subsetted fetch section basically tells us the - /// extraction logic to apply on our selected MIME. - /// This function acts as a router for these logic. - fn extract(&self, extractor: &SubsettedSection<'a>) -> Result> { - match extractor { - SubsettedSection::Text => self.text(), - SubsettedSection::Header => self.header(), - SubsettedSection::HeaderFields(fields) => self.header_fields(fields, false), - SubsettedSection::HeaderFieldsNot(fields) => self.header_fields(fields, true), - SubsettedSection::Part => self.part(), - SubsettedSection::Mime => self.mime(), - } - } - - fn mime(&self) -> Result> { - let bytes = match &self.0 { - AnyPart::Txt(p) => p.mime.fields.raw, - AnyPart::Bin(p) => p.mime.fields.raw, - AnyPart::Msg(p) => p.child.mime().raw, - AnyPart::Mult(p) => p.mime.fields.raw, - }; - Ok(ExtractedFull(bytes.into())) - } - - fn part(&self) -> Result> { - let bytes = match &self.0 { - AnyPart::Txt(p) => p.body, - AnyPart::Bin(p) => p.body, - AnyPart::Msg(p) => p.raw_part, - AnyPart::Mult(_) => bail!("Multipart part has no body"), - }; - Ok(ExtractedFull(bytes.to_vec().into())) - } - - fn eml_mime(&self) -> &eml_codec::mime::NaiveMIME<'_> { - match &self.0 { - AnyPart::Msg(msg) => msg.child.mime(), - other => other.mime(), - } - } - - /// The [...] HEADER.FIELDS, and HEADER.FIELDS.NOT part - /// specifiers refer to the [RFC-2822] header of the message or of - /// an encapsulated [MIME-IMT] MESSAGE/RFC822 message. 
- /// HEADER.FIELDS and HEADER.FIELDS.NOT are followed by a list of - /// field-name (as defined in [RFC-2822]) names, and return a - /// subset of the header. The subset returned by HEADER.FIELDS - /// contains only those header fields with a field-name that - /// matches one of the names in the list; similarly, the subset - /// returned by HEADER.FIELDS.NOT contains only the header fields - /// with a non-matching field-name. The field-matching is - /// case-insensitive but otherwise exact. - fn header_fields( - &self, - fields: &'a Vec1>, - invert: bool, - ) -> Result> { - // Build a lowercase ascii hashset with the fields to fetch - let index = fields - .as_ref() - .iter() - .map(|x| { - match x { - AString::Atom(a) => a.inner().as_bytes(), - AString::String(IString::Literal(l)) => l.as_ref(), - AString::String(IString::Quoted(q)) => q.inner().as_bytes(), - } - .to_ascii_lowercase() - }) - .collect::>(); - - // Extract MIME headers - let mime = self.eml_mime(); - - // Filter our MIME headers based on the field index - // 1. Keep only the correctly formatted headers - // 2. Keep only based on the index presence or absence - // 3. Reduce as a byte vector - let buffer = mime - .kv - .iter() - .filter_map(|field| match field { - header::Field::Good(header::Kv2(k, v)) => Some((k, v)), - _ => None, - }) - .filter(|(k, _)| index.contains(&k.to_ascii_lowercase()) ^ invert) - .fold(vec![], |mut acc, (k, v)| { - acc.extend(*k); - acc.extend(b": "); - acc.extend(*v); - acc.extend(b"\r\n"); - acc - }); - - Ok(ExtractedFull(buffer.into())) - } - - /// The HEADER [...] part specifiers refer to the [RFC-2822] header of the message or of - /// an encapsulated [MIME-IMT] MESSAGE/RFC822 message. - /// ```raw - /// HEADER ([RFC-2822] header of the message) - /// ``` - fn header(&self) -> Result> { - let msg = self - .0 - .as_message() - .ok_or(anyhow!("Selected part must be a message/rfc822"))?; - Ok(ExtractedFull(msg.raw_headers.into())) - } - - /// The TEXT part specifier refers to the text body of the message, omitting the [RFC-2822] header. 
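The HEADER.FIELDS / HEADER.FIELDS.NOT extraction above reduces to a case-insensitive set filter followed by re-serialization. A stand-alone sketch over plain byte slices (the `filter_headers` helper and its simplified types are ours; the real code consumes `eml_codec` header fields):

```rust
use std::collections::HashSet;

/// Keep (or, with `invert`, drop) the named header fields, matching names
/// case-insensitively, and re-serialize the subset as raw header bytes.
fn filter_headers(headers: &[(&[u8], &[u8])], wanted: &[&[u8]], invert: bool) -> Vec<u8> {
    let index: HashSet<Vec<u8>> = wanted.iter().map(|n| n.to_ascii_lowercase()).collect();

    headers
        .iter()
        .filter(|(name, _)| index.contains(&name.to_ascii_lowercase()) ^ invert)
        .fold(Vec::new(), |mut acc, (name, value)| {
            acc.extend_from_slice(name);
            acc.extend_from_slice(b": ");
            acc.extend_from_slice(value);
            acc.extend_from_slice(b"\r\n");
            acc
        })
}

fn main() {
    let headers: Vec<(&[u8], &[u8])> = vec![
        (&b"Date"[..], &b"Thu, 12 Oct 2023 08:45:28 +0000"[..]),
        (&b"From"[..], &b"a@a.a"[..]),
        (&b"Subject"[..], &b"hello"[..]),
    ];
    // BODY[HEADER.FIELDS (DATE FROM)] keeps only the matching fields,
    // in their original order.
    let subset = filter_headers(&headers, &[&b"date"[..], &b"from"[..]], false);
    assert_eq!(
        String::from_utf8(subset).unwrap(),
        "Date: Thu, 12 Oct 2023 08:45:28 +0000\r\nFrom: a@a.a\r\n"
    );
}
```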
- fn text(&self) -> Result> { - let msg = self - .0 - .as_message() - .ok_or(anyhow!("Selected part must be a message/rfc822"))?; - Ok(ExtractedFull(msg.raw_body.into())) - } - - // ------------ - - /// Basic field of a MIME part that is - /// common to all parts - fn basic_fields(&self) -> Result> { - let sz = match self.0 { - AnyPart::Txt(x) => x.body.len(), - AnyPart::Bin(x) => x.body.len(), - AnyPart::Msg(x) => x.raw_part.len(), - AnyPart::Mult(_) => 0, - }; - let m = self.0.mime(); - let parameter_list = m - .ctype - .as_ref() - .map(|x| { - x.params - .iter() - .map(|p| { - ( - IString::try_from(String::from_utf8_lossy(p.name).to_string()), - IString::try_from(p.value.to_string()), - ) - }) - .filter(|(k, v)| k.is_ok() && v.is_ok()) - .map(|(k, v)| (k.unwrap(), v.unwrap())) - .collect() - }) - .unwrap_or(vec![]); - - Ok(BasicFields { - parameter_list, - id: NString( - m.id.as_ref() - .and_then(|ci| IString::try_from(ci.to_string()).ok()), - ), - description: NString( - m.description - .as_ref() - .and_then(|cd| IString::try_from(cd.to_string()).ok()), - ), - content_transfer_encoding: match m.transfer_encoding { - mime::mechanism::Mechanism::_8Bit => unchecked_istring("8bit"), - mime::mechanism::Mechanism::Binary => unchecked_istring("binary"), - mime::mechanism::Mechanism::QuotedPrintable => { - unchecked_istring("quoted-printable") - } - mime::mechanism::Mechanism::Base64 => unchecked_istring("base64"), - _ => unchecked_istring("7bit"), - }, - // @FIXME we can't compute the size of the message currently... - size: u32::try_from(sz)?, - }) - } -} - -// --------------------------- -struct NodeMsg<'a>(&'a NodeMime<'a>, &'a composite::Message<'a>); -impl<'a> NodeMsg<'a> { - fn structure(&self, is_ext: bool) -> Result> { - let basic = SelectedMime(self.0 .0).basic_fields()?; - - Ok(BodyStructure::Single { - body: FetchBody { - basic, - specific: SpecificFields::Message { - envelope: Box::new(ImfView(&self.1.imf).message_envelope()), - body_structure: Box::new(NodeMime(&self.1.child).structure(is_ext)?), - number_of_lines: nol(self.1.raw_part), - }, - }, - extension_data: match is_ext { - true => Some(SinglePartExtensionData { - md5: NString(None), - tail: None, - }), - _ => None, - }, - }) - } -} -struct NodeMult<'a>(&'a NodeMime<'a>, &'a composite::Multipart<'a>); -impl<'a> NodeMult<'a> { - fn structure(&self, is_ext: bool) -> Result> { - let itype = &self.1.mime.interpreted_type; - let subtype = IString::try_from(itype.subtype.to_string()) - .unwrap_or(unchecked_istring("alternative")); - - let inner_bodies = self - .1 - .children - .iter() - .filter_map(|inner| NodeMime(&inner).structure(is_ext).ok()) - .collect::>(); - - Vec1::validate(&inner_bodies)?; - let bodies = Vec1::unvalidated(inner_bodies); - - Ok(BodyStructure::Multi { - bodies, - subtype, - extension_data: match is_ext { - true => Some(MultiPartExtensionData { - parameter_list: vec![( - IString::try_from("boundary").unwrap(), - IString::try_from(self.1.mime.interpreted_type.boundary.to_string())?, - )], - tail: None, - }), - _ => None, - }, - }) - } -} -struct NodeTxt<'a>(&'a NodeMime<'a>, &'a discrete::Text<'a>); -impl<'a> NodeTxt<'a> { - fn structure(&self, is_ext: bool) -> Result> { - let mut basic = SelectedMime(self.0 .0).basic_fields()?; - - // Get the interpreted content type, set it - let itype = match &self.1.mime.interpreted_type { - Deductible::Inferred(v) | Deductible::Explicit(v) => v, - }; - let subtype = - IString::try_from(itype.subtype.to_string()).unwrap_or(unchecked_istring("plain")); - - // Add charset 
to the list of parameters if we know it has been inferred as it will be - // missing from the parsed content. - if let Deductible::Inferred(charset) = &itype.charset { - basic.parameter_list.push(( - unchecked_istring("charset"), - IString::try_from(charset.to_string()).unwrap_or(unchecked_istring("us-ascii")), - )); - } - - Ok(BodyStructure::Single { - body: FetchBody { - basic, - specific: SpecificFields::Text { - subtype, - number_of_lines: nol(self.1.body), - }, - }, - extension_data: match is_ext { - true => Some(SinglePartExtensionData { - md5: NString(None), - tail: None, - }), - _ => None, - }, - }) - } -} - -struct NodeBin<'a>(&'a NodeMime<'a>, &'a discrete::Binary<'a>); -impl<'a> NodeBin<'a> { - fn structure(&self, is_ext: bool) -> Result> { - let basic = SelectedMime(self.0 .0).basic_fields()?; - - let default = mime::r#type::NaiveType { - main: &b"application"[..], - sub: &b"octet-stream"[..], - params: vec![], - }; - let ct = self.1.mime.fields.ctype.as_ref().unwrap_or(&default); - - let r#type = IString::try_from(String::from_utf8_lossy(ct.main).to_string()).or(Err( - anyhow!("Unable to build IString from given Content-Type type given"), - ))?; - - let subtype = IString::try_from(String::from_utf8_lossy(ct.sub).to_string()).or(Err( - anyhow!("Unable to build IString from given Content-Type subtype given"), - ))?; - - Ok(BodyStructure::Single { - body: FetchBody { - basic, - specific: SpecificFields::Basic { r#type, subtype }, - }, - extension_data: match is_ext { - true => Some(SinglePartExtensionData { - md5: NString(None), - tail: None, - }), - _ => None, - }, - }) - } -} - -// --------------------------- - -struct ExtractedFull<'a>(Cow<'a, [u8]>); -impl<'a> ExtractedFull<'a> { - /// It is possible to fetch a substring of the designated text. - /// This is done by appending an open angle bracket ("<"), the - /// octet position of the first desired octet, a period, the - /// maximum number of octets desired, and a close angle bracket - /// (">") to the part specifier. If the starting octet is beyond - /// the end of the text, an empty string is returned. - /// - /// Any partial fetch that attempts to read beyond the end of the - /// text is truncated as appropriate. A partial fetch that starts - /// at octet 0 is returned as a partial fetch, even if this - /// truncation happened. - /// - /// Note: This means that BODY[]<0.2048> of a 1500-octet message - /// will return BODY[]<0> with a literal of size 1500, not - /// BODY[]. - /// - /// Note: A substring fetch of a HEADER.FIELDS or - /// HEADER.FIELDS.NOT part specifier is calculated after - /// subsetting the header. 
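A worked example of the partial-fetch rules just quoted, as a simplified, borrow-only counterpart to the `partialize` method that follows (the `partial_slice` name is illustrative):

```rust
/// Apply the <origin.length> partial-fetch rules to an already extracted
/// section: clamp the requested window to the available bytes and report
/// the origin octet.
fn partial_slice(body: &[u8], begin: u32, len: u32) -> (&[u8], u32) {
    // A start beyond the end of the text clamps to an empty slice.
    let start = usize::min(begin as usize, body.len());
    // A window running past the end is truncated, never an error.
    let end = usize::min(start.saturating_add(len as usize), body.len());
    (&body[start..end], begin)
}

fn main() {
    let msg = vec![b'x'; 1500];

    // BODY[]<0.2048> of a 1500-octet message: all 1500 octets come back,
    // but still flagged as a partial response with origin octet 0.
    let (body, origin) = partial_slice(&msg, 0, 2048);
    assert_eq!((body.len(), origin), (1500, 0));

    // Start beyond the end of the text: empty string.
    assert!(partial_slice(&msg, 4096, 16).0.is_empty());
}
```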
- fn to_body_section(self, partial: &'_ Option<(u32, NonZeroU32)>) -> BodySection<'a> { - match partial { - Some((begin, len)) => self.partialize(*begin, *len), - None => BodySection::Full(self.0), - } - } - - fn partialize(self, begin: u32, len: NonZeroU32) -> BodySection<'a> { - // Asked range is starting after the end of the content, - // returning an empty buffer - if begin as usize > self.0.len() { - return BodySection::Slice { - body: Cow::Borrowed(&[][..]), - origin_octet: begin, - }; - } - - // Asked range is ending after the end of the content, - // slice only the beginning of the buffer - if (begin + len.get()) as usize >= self.0.len() { - return BodySection::Slice { - body: match self.0 { - Cow::Borrowed(body) => Cow::Borrowed(&body[begin as usize..]), - Cow::Owned(body) => Cow::Owned(body[begin as usize..].to_vec()), - }, - origin_octet: begin, - }; - } - - // Range is included inside the considered content, - // this is the "happy case" - BodySection::Slice { - body: match self.0 { - Cow::Borrowed(body) => { - Cow::Borrowed(&body[begin as usize..(begin + len.get()) as usize]) - } - Cow::Owned(body) => { - Cow::Owned(body[begin as usize..(begin + len.get()) as usize].to_vec()) - } - }, - origin_octet: begin, - } - } -} - -/// ---- LEGACY - -/// s is set to static to ensure that only compile time values -/// checked by developpers are passed. -fn unchecked_istring(s: &'static str) -> IString { - IString::try_from(s).expect("this value is expected to be a valid imap-codec::IString") -} - -// Number Of Lines -fn nol(input: &[u8]) -> u32 { - input - .iter() - .filter(|x| **x == b'\n') - .count() - .try_into() - .unwrap_or(0) -} diff --git a/src/imap/mod.rs b/src/imap/mod.rs deleted file mode 100644 index 02ab9ce..0000000 --- a/src/imap/mod.rs +++ /dev/null @@ -1,421 +0,0 @@ -mod attributes; -mod capability; -mod command; -mod flags; -mod flow; -mod imf_view; -mod index; -mod mail_view; -mod mailbox_view; -mod mime_view; -mod request; -mod response; -mod search; -mod session; - -use std::net::SocketAddr; - -use anyhow::{anyhow, bail, Context, Result}; -use futures::stream::{FuturesUnordered, StreamExt}; - -use tokio::net::TcpListener; -use tokio::sync::mpsc; -use tokio::sync::watch; - -use imap_codec::imap_types::response::{Code, CommandContinuationRequest, Response, Status}; -use imap_codec::imap_types::{core::Text, response::Greeting}; -use imap_flow::server::{ServerFlow, ServerFlowEvent, ServerFlowOptions}; -use imap_flow::stream::AnyStream; -use rustls_pemfile::{certs, private_key}; -use tokio_rustls::TlsAcceptor; - -use crate::config::{ImapConfig, ImapUnsecureConfig}; -use crate::imap::capability::ServerCapability; -use crate::imap::request::Request; -use crate::imap::response::{Body, ResponseOrIdle}; -use crate::imap::session::Instance; -use crate::login::ArcLoginProvider; - -/// Server is a thin wrapper to register our Services in BàL -pub struct Server { - bind_addr: SocketAddr, - login_provider: ArcLoginProvider, - capabilities: ServerCapability, - tls: Option, -} - -#[derive(Clone)] -struct ClientContext { - addr: SocketAddr, - login_provider: ArcLoginProvider, - must_exit: watch::Receiver, - server_capabilities: ServerCapability, -} - -pub fn new(config: ImapConfig, login: ArcLoginProvider) -> Result { - let loaded_certs = certs(&mut std::io::BufReader::new(std::fs::File::open( - config.certs, - )?)) - .collect::, _>>()?; - let loaded_key = private_key(&mut std::io::BufReader::new(std::fs::File::open( - config.key, - )?))? 
- .unwrap(); - - let tls_config = rustls::ServerConfig::builder() - .with_no_client_auth() - .with_single_cert(loaded_certs, loaded_key)?; - let acceptor = TlsAcceptor::from(Arc::new(tls_config)); - - Ok(Server { - bind_addr: config.bind_addr, - login_provider: login, - capabilities: ServerCapability::default(), - tls: Some(acceptor), - }) -} - -pub fn new_unsecure(config: ImapUnsecureConfig, login: ArcLoginProvider) -> Server { - Server { - bind_addr: config.bind_addr, - login_provider: login, - capabilities: ServerCapability::default(), - tls: None, - } -} - -impl Server { - pub async fn run(self: Self, mut must_exit: watch::Receiver) -> Result<()> { - let tcp = TcpListener::bind(self.bind_addr).await?; - tracing::info!("IMAP server listening on {:#}", self.bind_addr); - - let mut connections = FuturesUnordered::new(); - - while !*must_exit.borrow() { - let wait_conn_finished = async { - if connections.is_empty() { - futures::future::pending().await - } else { - connections.next().await - } - }; - let (socket, remote_addr) = tokio::select! { - a = tcp.accept() => a?, - _ = wait_conn_finished => continue, - _ = must_exit.changed() => continue, - }; - tracing::info!("IMAP: accepted connection from {}", remote_addr); - let stream = match self.tls.clone() { - Some(acceptor) => { - let stream = match acceptor.accept(socket).await { - Ok(v) => v, - Err(e) => { - tracing::error!(err=?e, "TLS negociation failed"); - continue; - } - }; - AnyStream::new(stream) - } - None => AnyStream::new(socket), - }; - - let client = ClientContext { - addr: remote_addr.clone(), - login_provider: self.login_provider.clone(), - must_exit: must_exit.clone(), - server_capabilities: self.capabilities.clone(), - }; - let conn = tokio::spawn(NetLoop::handler(client, stream)); - connections.push(conn); - } - drop(tcp); - - tracing::info!("IMAP server shutting down, draining remaining connections..."); - while connections.next().await.is_some() {} - - Ok(()) - } -} - -use std::sync::Arc; -use tokio::sync::mpsc::*; -use tokio::sync::Notify; -use tokio_util::bytes::BytesMut; - -const PIPELINABLE_COMMANDS: usize = 64; - -// @FIXME a full refactor of this part of the code will be needed sooner or later -struct NetLoop { - ctx: ClientContext, - server: ServerFlow, - cmd_tx: Sender, - resp_rx: UnboundedReceiver, -} - -impl NetLoop { - async fn handler(ctx: ClientContext, sock: AnyStream) { - let addr = ctx.addr.clone(); - - let mut nl = match Self::new(ctx, sock).await { - Ok(nl) => { - tracing::debug!(addr=?addr, "netloop successfully initialized"); - nl - } - Err(e) => { - tracing::error!(addr=?addr, err=?e, "netloop can not be initialized, closing session"); - return; - } - }; - - match nl.core().await { - Ok(()) => { - tracing::debug!("closing successful netloop core for {:?}", addr); - } - Err(e) => { - tracing::error!("closing errored netloop core for {:?}: {}", addr, e); - } - } - } - - async fn new(ctx: ClientContext, sock: AnyStream) -> Result { - let mut opts = ServerFlowOptions::default(); - opts.crlf_relaxed = false; - opts.literal_accept_text = Text::unvalidated("OK"); - opts.literal_reject_text = Text::unvalidated("Literal rejected"); - - // Send greeting - let (server, _) = ServerFlow::send_greeting( - sock, - opts, - Greeting::ok( - Some(Code::Capability(ctx.server_capabilities.to_vec())), - "Aerogramme", - ) - .unwrap(), - ) - .await?; - - // Start a mailbox session in background - let (cmd_tx, cmd_rx) = mpsc::channel::(PIPELINABLE_COMMANDS); - let (resp_tx, resp_rx) = mpsc::unbounded_channel::(); - 
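The two channels created above encode a simple backpressure policy: client commands go through a small bounded queue, responses through an unbounded one, and a client that overruns the bound gets a BYE. A dependency-free illustration of the bounded side, using `std::sync::mpsc` with a capacity of 2 in place of the tokio channel and the `PIPELINABLE_COMMANDS = 64` bound used here:

```rust
use std::sync::mpsc::{sync_channel, TrySendError};

fn main() {
    // Commands flow through a bounded queue so that a client pipelining
    // faster than the session can process is eventually pushed back.
    let (cmd_tx, cmd_rx) = sync_channel::<String>(2);

    cmd_tx.try_send("a NOOP".into()).unwrap();
    cmd_tx.try_send("b NOOP".into()).unwrap();

    // The third command does not fit: the server would answer BYE "Too fast".
    match cmd_tx.try_send("c NOOP".into()) {
        Err(TrySendError::Full(_)) => println!("queue full, closing the connection"),
        other => panic!("expected backpressure, got {:?}", other),
    }

    // The session task drains the queue at its own pace.
    assert_eq!(cmd_rx.recv().unwrap(), "a NOOP");
}
```

Keeping the response channel unbounded presumably ensures the session task itself never blocks while producing output; only the client-facing side is throttled.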
tokio::spawn(Self::session(ctx.clone(), cmd_rx, resp_tx)); - - // Return the object - Ok(NetLoop { - ctx, - server, - cmd_tx, - resp_rx, - }) - } - - /// Coms with the background session - async fn session( - ctx: ClientContext, - mut cmd_rx: Receiver, - resp_tx: UnboundedSender, - ) -> () { - let mut session = Instance::new(ctx.login_provider, ctx.server_capabilities); - loop { - let cmd = match cmd_rx.recv().await { - None => break, - Some(cmd_recv) => cmd_recv, - }; - - tracing::debug!(cmd=?cmd, sock=%ctx.addr, "command"); - let maybe_response = session.request(cmd).await; - tracing::debug!(cmd=?maybe_response, sock=%ctx.addr, "response"); - - match resp_tx.send(maybe_response) { - Err(_) => break, - Ok(_) => (), - }; - } - tracing::info!("runner is quitting"); - } - - async fn core(&mut self) -> Result<()> { - let mut maybe_idle: Option> = None; - loop { - tokio::select! { - // Managing imap_flow stuff - srv_evt = self.server.progress() => match srv_evt? { - ServerFlowEvent::ResponseSent { handle: _handle, response } => { - match response { - Response::Status(Status::Bye(_)) => return Ok(()), - _ => tracing::trace!("sent to {} content {:?}", self.ctx.addr, response), - } - }, - ServerFlowEvent::CommandReceived { command } => { - match self.cmd_tx.try_send(Request::ImapCommand(command)) { - Ok(_) => (), - Err(mpsc::error::TrySendError::Full(_)) => { - self.server.enqueue_status(Status::bye(None, "Too fast").unwrap()); - tracing::error!("client {:?} is sending commands too fast, closing.", self.ctx.addr); - } - _ => { - self.server.enqueue_status(Status::bye(None, "Internal session exited").unwrap()); - tracing::error!("session task exited for {:?}, quitting", self.ctx.addr); - } - } - }, - ServerFlowEvent::IdleCommandReceived { tag } => { - match self.cmd_tx.try_send(Request::IdleStart(tag)) { - Ok(_) => (), - Err(mpsc::error::TrySendError::Full(_)) => { - self.server.enqueue_status(Status::bye(None, "Too fast").unwrap()); - tracing::error!("client {:?} is sending commands too fast, closing.", self.ctx.addr); - } - _ => { - self.server.enqueue_status(Status::bye(None, "Internal session exited").unwrap()); - tracing::error!("session task exited for {:?}, quitting", self.ctx.addr); - } - } - } - ServerFlowEvent::IdleDoneReceived => { - tracing::trace!("client sent DONE and want to stop IDLE"); - maybe_idle.ok_or(anyhow!("Received IDLE done but not idling currently"))?.notify_one(); - maybe_idle = None; - } - flow => { - self.server.enqueue_status(Status::bye(None, "Unsupported server flow event").unwrap()); - tracing::error!("session task exited for {:?} due to unsupported flow {:?}", self.ctx.addr, flow); - } - }, - - // Managing response generated by Aerogramme - maybe_msg = self.resp_rx.recv() => match maybe_msg { - Some(ResponseOrIdle::Response(response)) => { - tracing::trace!("Interactive, server has a response for the client"); - for body_elem in response.body.into_iter() { - let _handle = match body_elem { - Body::Data(d) => self.server.enqueue_data(d), - Body::Status(s) => self.server.enqueue_status(s), - }; - } - self.server.enqueue_status(response.completion); - }, - Some(ResponseOrIdle::IdleAccept(stop)) => { - tracing::trace!("Interactive, server agreed to switch in idle mode"); - let cr = CommandContinuationRequest::basic(None, "Idling")?; - self.server.idle_accept(cr).or(Err(anyhow!("refused continuation for idle accept")))?; - self.cmd_tx.try_send(Request::IdlePoll)?; - if maybe_idle.is_some() { - bail!("Can't start IDLE if already idling"); - } - maybe_idle = Some(stop); 
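The `maybe_idle` bookkeeping above enforces a small invariant: IDLE cannot be entered twice, and DONE outside IDLE is an error. The same rule written as a tiny state machine (all names invented for illustration):

```rust
#[derive(Debug, PartialEq)]
enum ConnState {
    Interactive,
    Idling,
}

enum ClientInput {
    IdleStart,
    IdleDone,
}

fn step(state: ConnState, input: ClientInput) -> Result<ConnState, &'static str> {
    match (state, input) {
        (ConnState::Interactive, ClientInput::IdleStart) => Ok(ConnState::Idling),
        (ConnState::Idling, ClientInput::IdleDone) => Ok(ConnState::Interactive),
        // Mirrors "Can't start IDLE if already idling" and
        // "Received IDLE done but not idling currently".
        (ConnState::Idling, ClientInput::IdleStart) => Err("already idling"),
        (ConnState::Interactive, ClientInput::IdleDone) => Err("not idling"),
    }
}

fn main() {
    let s = step(ConnState::Interactive, ClientInput::IdleStart).unwrap();
    assert_eq!(s, ConnState::Idling);
    // DONE brings the connection back; a second IDLE while idling is refused.
    assert_eq!(step(s, ClientInput::IdleDone), Ok(ConnState::Interactive));
    assert!(step(ConnState::Idling, ClientInput::IdleStart).is_err());
}
```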
- }, - Some(ResponseOrIdle::IdleEvent(elems)) => { - tracing::trace!("server imap session has some change to communicate to the client"); - for body_elem in elems.into_iter() { - let _handle = match body_elem { - Body::Data(d) => self.server.enqueue_data(d), - Body::Status(s) => self.server.enqueue_status(s), - }; - } - self.cmd_tx.try_send(Request::IdlePoll)?; - }, - Some(ResponseOrIdle::IdleReject(response)) => { - tracing::trace!("inform client that session rejected idle"); - self.server - .idle_reject(response.completion) - .or(Err(anyhow!("wrong reject command")))?; - }, - None => { - self.server.enqueue_status(Status::bye(None, "Internal session exited").unwrap()); - tracing::error!("session task exited for {:?}, quitting", self.ctx.addr); - }, - Some(_) => unreachable!(), - - }, - - // When receiving a CTRL+C - _ = self.ctx.must_exit.changed() => { - tracing::trace!("Interactive, CTRL+C, exiting"); - self.server.enqueue_status(Status::bye(None, "Server is being shutdown").unwrap()); - }, - }; - } - } - - /* - async fn idle_mode(&mut self, mut buff: BytesMut, stop: Arc) -> Result { - // Flush send - loop { - tracing::trace!("flush server send"); - match self.server.progress_send().await? { - Some(..) => continue, - None => break, - } - } - - tokio::select! { - // Receiving IDLE event from background - maybe_msg = self.resp_rx.recv() => match maybe_msg { - // Session decided idle is terminated - Some(ResponseOrIdle::Response(response)) => { - tracing::trace!("server imap session said idle is done, sending response done, switching to interactive"); - for body_elem in response.body.into_iter() { - let _handle = match body_elem { - Body::Data(d) => self.server.enqueue_data(d), - Body::Status(s) => self.server.enqueue_status(s), - }; - } - self.server.enqueue_status(response.completion); - return Ok(LoopMode::Interactive) - }, - // Session has some information for user - Some(ResponseOrIdle::IdleEvent(elems)) => { - tracing::trace!("server imap session has some change to communicate to the client"); - for body_elem in elems.into_iter() { - let _handle = match body_elem { - Body::Data(d) => self.server.enqueue_data(d), - Body::Status(s) => self.server.enqueue_status(s), - }; - } - self.cmd_tx.try_send(Request::Idle)?; - return Ok(LoopMode::Idle(buff, stop)) - }, - - // Session crashed - None => { - self.server.enqueue_status(Status::bye(None, "Internal session exited").unwrap()); - tracing::error!("session task exited for {:?}, quitting", self.ctx.addr); - return Ok(LoopMode::Interactive) - }, - - // Session can't start idling while already idling, it's a logic error! - Some(ResponseOrIdle::StartIdle(..)) => bail!("can't start idling while already idling!"), - }, - - // User is trying to interact with us - read_client_result = self.server.stream.read(&mut buff) => { - let _bytes_read = read_client_result?; - use imap_codec::decode::Decoder; - let codec = imap_codec::IdleDoneCodec::new(); - tracing::trace!("client sent some data for the server IMAP session"); - match codec.decode(&buff) { - Ok(([], imap_codec::imap_types::extensions::idle::IdleDone)) => { - // Session will be informed that it must stop idle - // It will generate the "done" message and change the loop mode - tracing::trace!("client sent DONE and want to stop IDLE"); - stop.notify_one() - }, - Err(_) => { - tracing::trace!("Unable to decode DONE, maybe not enough data were sent?"); - }, - _ => bail!("Client sent data after terminating the continuation without waiting for the server. 
This is an unsupported behavior and bug in Aerogramme, quitting."), - }; - - return Ok(LoopMode::Idle(buff, stop)) - }, - - // When receiving a CTRL+C - _ = self.ctx.must_exit.changed() => { - tracing::trace!("CTRL+C sent, aborting IDLE for this session"); - self.server.enqueue_status(Status::bye(None, "Server is being shutdown").unwrap()); - return Ok(LoopMode::Interactive) - }, - }; - }*/ -} diff --git a/src/imap/request.rs b/src/imap/request.rs deleted file mode 100644 index cff18a3..0000000 --- a/src/imap/request.rs +++ /dev/null @@ -1,9 +0,0 @@ -use imap_codec::imap_types::command::Command; -use imap_codec::imap_types::core::Tag; - -#[derive(Debug)] -pub enum Request { - ImapCommand(Command<'static>), - IdleStart(Tag<'static>), - IdlePoll, -} diff --git a/src/imap/response.rs b/src/imap/response.rs deleted file mode 100644 index b6a0e98..0000000 --- a/src/imap/response.rs +++ /dev/null @@ -1,124 +0,0 @@ -use anyhow::Result; -use imap_codec::imap_types::command::Command; -use imap_codec::imap_types::core::Tag; -use imap_codec::imap_types::response::{Code, Data, Status}; -use std::sync::Arc; -use tokio::sync::Notify; - -#[derive(Debug)] -pub enum Body<'a> { - Data(Data<'a>), - Status(Status<'a>), -} - -pub struct ResponseBuilder<'a> { - tag: Option>, - code: Option>, - text: String, - body: Vec>, -} - -impl<'a> ResponseBuilder<'a> { - pub fn to_req(mut self, cmd: &Command<'a>) -> Self { - self.tag = Some(cmd.tag.clone()); - self - } - pub fn tag(mut self, tag: Tag<'a>) -> Self { - self.tag = Some(tag); - self - } - - pub fn message(mut self, txt: impl Into) -> Self { - self.text = txt.into(); - self - } - - pub fn code(mut self, code: Code<'a>) -> Self { - self.code = Some(code); - self - } - - pub fn data(mut self, data: Data<'a>) -> Self { - self.body.push(Body::Data(data)); - self - } - - pub fn many_data(mut self, data: Vec>) -> Self { - for d in data.into_iter() { - self = self.data(d); - } - self - } - - #[allow(dead_code)] - pub fn info(mut self, status: Status<'a>) -> Self { - self.body.push(Body::Status(status)); - self - } - - #[allow(dead_code)] - pub fn many_info(mut self, status: Vec>) -> Self { - for d in status.into_iter() { - self = self.info(d); - } - self - } - - pub fn set_body(mut self, body: Vec>) -> Self { - self.body = body; - self - } - - pub fn ok(self) -> Result> { - Ok(Response { - completion: Status::ok(self.tag, self.code, self.text)?, - body: self.body, - }) - } - - pub fn no(self) -> Result> { - Ok(Response { - completion: Status::no(self.tag, self.code, self.text)?, - body: self.body, - }) - } - - pub fn bad(self) -> Result> { - Ok(Response { - completion: Status::bad(self.tag, self.code, self.text)?, - body: self.body, - }) - } -} - -#[derive(Debug)] -pub struct Response<'a> { - pub body: Vec>, - pub completion: Status<'a>, -} - -impl<'a> Response<'a> { - pub fn build() -> ResponseBuilder<'a> { - ResponseBuilder { - tag: None, - code: None, - text: "".to_string(), - body: vec![], - } - } - - pub fn bye() -> Result> { - Ok(Response { - completion: Status::bye(None, "bye")?, - body: vec![], - }) - } -} - -#[derive(Debug)] -pub enum ResponseOrIdle { - Response(Response<'static>), - IdleAccept(Arc), - IdleReject(Response<'static>), - IdleEvent(Vec>), -} diff --git a/src/imap/search.rs b/src/imap/search.rs deleted file mode 100644 index 37a7e9e..0000000 --- a/src/imap/search.rs +++ /dev/null @@ -1,477 +0,0 @@ -use std::num::{NonZeroU32, NonZeroU64}; - -use imap_codec::imap_types::core::Vec1; -use imap_codec::imap_types::search::{MetadataItemSearch, 
SearchKey}; -use imap_codec::imap_types::sequence::{SeqOrUid, Sequence, SequenceSet}; - -use crate::imap::index::MailIndex; -use crate::imap::mail_view::MailView; -use crate::mail::query::QueryScope; - -pub enum SeqType { - Undefined, - NonUid, - Uid, -} -impl SeqType { - pub fn is_uid(&self) -> bool { - matches!(self, Self::Uid) - } -} - -pub struct Criteria<'a>(pub &'a SearchKey<'a>); -impl<'a> Criteria<'a> { - /// Returns a set of email identifiers that is greater or equal - /// to the set of emails to return - pub fn to_sequence_set(&self) -> (SequenceSet, SeqType) { - match self.0 { - SearchKey::All => (sequence_set_all(), SeqType::Undefined), - SearchKey::SequenceSet(seq_set) => (seq_set.clone(), SeqType::NonUid), - SearchKey::Uid(seq_set) => (seq_set.clone(), SeqType::Uid), - SearchKey::Not(_inner) => { - tracing::debug!( - "using NOT in a search request is slow: it selects all identifiers" - ); - (sequence_set_all(), SeqType::Undefined) - } - SearchKey::Or(left, right) => { - tracing::debug!("using OR in a search request is slow: no deduplication is done"); - let (base, base_seqtype) = Self(&left).to_sequence_set(); - let (ext, ext_seqtype) = Self(&right).to_sequence_set(); - - // Check if we have a UID/ID conflict in fetching: now we don't know how to handle them - match (base_seqtype, ext_seqtype) { - (SeqType::Uid, SeqType::NonUid) | (SeqType::NonUid, SeqType::Uid) => { - (sequence_set_all(), SeqType::Undefined) - } - (SeqType::Undefined, x) | (x, _) => { - let mut new_vec = base.0.into_inner(); - new_vec.extend_from_slice(ext.0.as_ref()); - let seq = SequenceSet( - Vec1::try_from(new_vec) - .expect("merging non empty vec lead to non empty vec"), - ); - (seq, x) - } - } - } - SearchKey::And(search_list) => { - tracing::debug!( - "using AND in a search request is slow: no intersection is performed" - ); - // As we perform no intersection, we don't care if we mix uid or id. - // We only keep the smallest range, being it ID or UID, depending of - // which one has the less items. This is an approximation as UID ranges - // can have holes while ID ones can't. - search_list - .as_ref() - .iter() - .map(|crit| Self(&crit).to_sequence_set()) - .min_by(|(x, _), (y, _)| { - let x_size = approx_sequence_set_size(x); - let y_size = approx_sequence_set_size(y); - x_size.cmp(&y_size) - }) - .unwrap_or((sequence_set_all(), SeqType::Undefined)) - } - _ => (sequence_set_all(), SeqType::Undefined), - } - } - - /// Not really clever as we can have cases where we filter out - /// the email before needing to inspect its meta. - /// But for now we are seeking the most basic/stupid algorithm. - pub fn query_scope(&self) -> QueryScope { - use SearchKey::*; - match self.0 { - // Combinators - And(and_list) => and_list - .as_ref() - .iter() - .fold(QueryScope::Index, |prev, sk| { - prev.union(&Criteria(sk).query_scope()) - }), - Not(inner) => Criteria(inner).query_scope(), - Or(left, right) => Criteria(left) - .query_scope() - .union(&Criteria(right).query_scope()), - All => QueryScope::Index, - - // IMF Headers - Bcc(_) | Cc(_) | From(_) | Header(..) | SentBefore(_) | SentOn(_) | SentSince(_) - | Subject(_) | To(_) => QueryScope::Partial, - // Internal Date is also stored in MailMeta - Before(_) | On(_) | Since(_) => QueryScope::Partial, - // Message size is also stored in MailMeta - Larger(_) | Smaller(_) => QueryScope::Partial, - // Text and Body require that we fetch the full content! 
- Text(_) | Body(_) => QueryScope::Full, - - _ => QueryScope::Index, - } - } - - pub fn is_modseq(&self) -> bool { - use SearchKey::*; - match self.0 { - And(and_list) => and_list - .as_ref() - .iter() - .any(|child| Criteria(child).is_modseq()), - Or(left, right) => Criteria(left).is_modseq() || Criteria(right).is_modseq(), - Not(child) => Criteria(child).is_modseq(), - ModSeq { .. } => true, - _ => false, - } - } - - /// Returns emails that we now for sure we want to keep - /// but also a second list of emails we need to investigate further by - /// fetching some remote data - pub fn filter_on_idx<'b>( - &self, - midx_list: &[&'b MailIndex<'b>], - ) -> (Vec<&'b MailIndex<'b>>, Vec<&'b MailIndex<'b>>) { - let (p1, p2): (Vec<_>, Vec<_>) = midx_list - .iter() - .map(|x| (x, self.is_keep_on_idx(x))) - .filter(|(_midx, decision)| decision.is_keep()) - .map(|(midx, decision)| (*midx, decision)) - .partition(|(_midx, decision)| matches!(decision, PartialDecision::Keep)); - - let to_keep = p1.into_iter().map(|(v, _)| v).collect(); - let to_fetch = p2.into_iter().map(|(v, _)| v).collect(); - (to_keep, to_fetch) - } - - // ---- - - /// Here we are doing a partial filtering: we do not have access - /// to the headers or to the body, so every time we encounter a rule - /// based on them, we need to keep it. - /// - /// @TODO Could be optimized on a per-email basis by also returning the QueryScope - /// when more information is needed! - fn is_keep_on_idx(&self, midx: &MailIndex) -> PartialDecision { - use SearchKey::*; - match self.0 { - // Combinator logic - And(expr_list) => expr_list - .as_ref() - .iter() - .fold(PartialDecision::Keep, |acc, cur| { - acc.and(&Criteria(cur).is_keep_on_idx(midx)) - }), - Or(left, right) => { - let left_decision = Criteria(left).is_keep_on_idx(midx); - let right_decision = Criteria(right).is_keep_on_idx(midx); - left_decision.or(&right_decision) - } - Not(expr) => Criteria(expr).is_keep_on_idx(midx).not(), - All => PartialDecision::Keep, - - // Sequence logic - maybe_seq if is_sk_seq(maybe_seq) => is_keep_seq(maybe_seq, midx).into(), - maybe_flag if is_sk_flag(maybe_flag) => is_keep_flag(maybe_flag, midx).into(), - ModSeq { - metadata_item, - modseq, - } => is_keep_modseq(metadata_item, modseq, midx).into(), - - // All the stuff we can't evaluate yet - Bcc(_) | Cc(_) | From(_) | Header(..) | SentBefore(_) | SentOn(_) | SentSince(_) - | Subject(_) | To(_) | Before(_) | On(_) | Since(_) | Larger(_) | Smaller(_) - | Text(_) | Body(_) => PartialDecision::Postpone, - - unknown => { - tracing::error!("Unknown filter {:?}", unknown); - PartialDecision::Discard - } - } - } - - /// @TODO we re-eveluate twice the same logic. The correct way would be, on each pass, - /// to simplify the searck query, by removing the elements that were already checked. - /// For example if we have AND(OR(seqid(X), body(Y)), body(X)), we can't keep for sure - /// the email, as body(x) might be false. So we need to check it. But as seqid(x) is true, - /// we could simplify the request to just body(x) and truncate the first OR. Today, we are - /// not doing that, and thus we reevaluate everything. 
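// Hedged sketch, not part of the original patch: filter_on_idx() above runs the
// search once over index-only data, drops Discard outright, keeps Keep as-is,
// and returns Postpone as the "fetch more data" set for a second pass. A
// self-contained model of that partition step (std only, a simplified tri-state
// instead of PartialDecision, u32 ids instead of MailIndex):
#[derive(Clone, Copy, PartialEq)]
enum Tri {
    Keep,
    Discard,
    Postpone,
}

fn split_candidates(decisions: Vec<(u32, Tri)>) -> (Vec<u32>, Vec<u32>) {
    let (keep, fetch): (Vec<_>, Vec<_>) = decisions
        .into_iter()
        .filter(|(_, d)| *d != Tri::Discard) // Discard never reaches either list
        .partition(|(_, d)| *d == Tri::Keep); // Keep vs. Postpone
    (
        keep.into_iter().map(|(id, _)| id).collect(),  // answerable from the index alone
        fetch.into_iter().map(|(id, _)| id).collect(), // needs headers/body from storage
    )
}

fn main() {
    let decided = vec![(1, Tri::Keep), (2, Tri::Discard), (3, Tri::Postpone)];
    assert_eq!(split_candidates(decided), (vec![1], vec![3]));
}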
- pub fn is_keep_on_query(&self, mail_view: &MailView) -> bool { - use SearchKey::*; - match self.0 { - // Combinator logic - And(expr_list) => expr_list - .as_ref() - .iter() - .all(|cur| Criteria(cur).is_keep_on_query(mail_view)), - Or(left, right) => { - Criteria(left).is_keep_on_query(mail_view) - || Criteria(right).is_keep_on_query(mail_view) - } - Not(expr) => !Criteria(expr).is_keep_on_query(mail_view), - All => true, - - //@FIXME Reevaluating our previous logic... - maybe_seq if is_sk_seq(maybe_seq) => is_keep_seq(maybe_seq, &mail_view.in_idx), - maybe_flag if is_sk_flag(maybe_flag) => is_keep_flag(maybe_flag, &mail_view.in_idx), - ModSeq { - metadata_item, - modseq, - } => is_keep_modseq(metadata_item, modseq, &mail_view.in_idx).into(), - - // Filter on mail meta - Before(search_naive) => match mail_view.stored_naive_date() { - Ok(msg_naive) => &msg_naive < search_naive.as_ref(), - _ => false, - }, - On(search_naive) => match mail_view.stored_naive_date() { - Ok(msg_naive) => &msg_naive == search_naive.as_ref(), - _ => false, - }, - Since(search_naive) => match mail_view.stored_naive_date() { - Ok(msg_naive) => &msg_naive > search_naive.as_ref(), - _ => false, - }, - - // Message size is also stored in MailMeta - Larger(size_ref) => { - mail_view - .query_result - .metadata() - .expect("metadata were fetched") - .rfc822_size - > *size_ref as usize - } - Smaller(size_ref) => { - mail_view - .query_result - .metadata() - .expect("metadata were fetched") - .rfc822_size - < *size_ref as usize - } - - // Filter on well-known headers - Bcc(txt) => mail_view.is_header_contains_pattern(&b"bcc"[..], txt.as_ref()), - Cc(txt) => mail_view.is_header_contains_pattern(&b"cc"[..], txt.as_ref()), - From(txt) => mail_view.is_header_contains_pattern(&b"from"[..], txt.as_ref()), - Subject(txt) => mail_view.is_header_contains_pattern(&b"subject"[..], txt.as_ref()), - To(txt) => mail_view.is_header_contains_pattern(&b"to"[..], txt.as_ref()), - Header(hdr, txt) => mail_view.is_header_contains_pattern(hdr.as_ref(), txt.as_ref()), - - // Filter on Date header - SentBefore(search_naive) => mail_view - .imf() - .map(|imf| imf.naive_date().ok()) - .flatten() - .map(|msg_naive| &msg_naive < search_naive.as_ref()) - .unwrap_or(false), - SentOn(search_naive) => mail_view - .imf() - .map(|imf| imf.naive_date().ok()) - .flatten() - .map(|msg_naive| &msg_naive == search_naive.as_ref()) - .unwrap_or(false), - SentSince(search_naive) => mail_view - .imf() - .map(|imf| imf.naive_date().ok()) - .flatten() - .map(|msg_naive| &msg_naive > search_naive.as_ref()) - .unwrap_or(false), - - // Filter on the full content of the email - Text(txt) => mail_view - .content - .as_msg() - .map(|msg| { - msg.raw_part - .windows(txt.as_ref().len()) - .any(|win| win == txt.as_ref()) - }) - .unwrap_or(false), - Body(txt) => mail_view - .content - .as_msg() - .map(|msg| { - msg.raw_body - .windows(txt.as_ref().len()) - .any(|win| win == txt.as_ref()) - }) - .unwrap_or(false), - - unknown => { - tracing::error!("Unknown filter {:?}", unknown); - false - } - } - } -} - -// ---- Sequence things ---- -fn sequence_set_all() -> SequenceSet { - SequenceSet::from(Sequence::Range( - SeqOrUid::Value(NonZeroU32::MIN), - SeqOrUid::Asterisk, - )) -} - -// This is wrong as sequences can overlap -fn approx_sequence_set_size(seq_set: &SequenceSet) -> u64 { - seq_set.0.as_ref().iter().fold(0u64, |acc, seq| { - acc.saturating_add(approx_sequence_size(seq)) - }) -} - -// This is wrong as sequence UID can have holes, -// as we don't know the number of 
messages in the mailbox also -// we gave to guess -fn approx_sequence_size(seq: &Sequence) -> u64 { - match seq { - Sequence::Single(_) => 1, - Sequence::Range(SeqOrUid::Asterisk, _) | Sequence::Range(_, SeqOrUid::Asterisk) => u64::MAX, - Sequence::Range(SeqOrUid::Value(x1), SeqOrUid::Value(x2)) => { - let x2 = x2.get() as i64; - let x1 = x1.get() as i64; - (x2 - x1).abs().try_into().unwrap_or(1) - } - } -} - -// --- Partial decision things ---- - -enum PartialDecision { - Keep, - Discard, - Postpone, -} -impl From for PartialDecision { - fn from(x: bool) -> Self { - match x { - true => PartialDecision::Keep, - _ => PartialDecision::Discard, - } - } -} -impl PartialDecision { - fn not(&self) -> Self { - match self { - Self::Keep => Self::Discard, - Self::Discard => Self::Keep, - Self::Postpone => Self::Postpone, - } - } - - fn or(&self, other: &Self) -> Self { - match (self, other) { - (Self::Keep, _) | (_, Self::Keep) => Self::Keep, - (Self::Postpone, _) | (_, Self::Postpone) => Self::Postpone, - (Self::Discard, Self::Discard) => Self::Discard, - } - } - - fn and(&self, other: &Self) -> Self { - match (self, other) { - (Self::Discard, _) | (_, Self::Discard) => Self::Discard, - (Self::Postpone, _) | (_, Self::Postpone) => Self::Postpone, - (Self::Keep, Self::Keep) => Self::Keep, - } - } - - fn is_keep(&self) -> bool { - !matches!(self, Self::Discard) - } -} - -// ----- Search Key things --- -fn is_sk_flag(sk: &SearchKey) -> bool { - use SearchKey::*; - match sk { - Answered | Deleted | Draft | Flagged | Keyword(..) | New | Old | Recent | Seen - | Unanswered | Undeleted | Undraft | Unflagged | Unkeyword(..) | Unseen => true, - _ => false, - } -} - -fn is_keep_flag(sk: &SearchKey, midx: &MailIndex) -> bool { - use SearchKey::*; - match sk { - Answered => midx.is_flag_set("\\Answered"), - Deleted => midx.is_flag_set("\\Deleted"), - Draft => midx.is_flag_set("\\Draft"), - Flagged => midx.is_flag_set("\\Flagged"), - Keyword(kw) => midx.is_flag_set(kw.inner()), - New => { - let is_recent = midx.is_flag_set("\\Recent"); - let is_seen = midx.is_flag_set("\\Seen"); - is_recent && !is_seen - } - Old => { - let is_recent = midx.is_flag_set("\\Recent"); - !is_recent - } - Recent => midx.is_flag_set("\\Recent"), - Seen => midx.is_flag_set("\\Seen"), - Unanswered => { - let is_answered = midx.is_flag_set("\\Recent"); - !is_answered - } - Undeleted => { - let is_deleted = midx.is_flag_set("\\Deleted"); - !is_deleted - } - Undraft => { - let is_draft = midx.is_flag_set("\\Draft"); - !is_draft - } - Unflagged => { - let is_flagged = midx.is_flag_set("\\Flagged"); - !is_flagged - } - Unkeyword(kw) => { - let is_keyword_set = midx.is_flag_set(kw.inner()); - !is_keyword_set - } - Unseen => { - let is_seen = midx.is_flag_set("\\Seen"); - !is_seen - } - - // Not flag logic - _ => unreachable!(), - } -} - -fn is_sk_seq(sk: &SearchKey) -> bool { - use SearchKey::*; - match sk { - SequenceSet(..) | Uid(..) 
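// Hedged sketch, not part of the original patch: approx_sequence_size() above
// maps a range a:b to |b - a| and saturates to u64::MAX as soon as `*` is
// involved, which is deliberately rough (3:7 counts as 4 even though it matches
// five ids). A std-only model of that arithmetic with simplified types (the
// real code uses imap_codec's Sequence/SeqOrUid):
#[derive(Clone, Copy)]
enum SimpleSeq {
    Single(u32),
    Range(Option<u32>, Option<u32>), // None stands for `*`
}

fn approx_size(seq: SimpleSeq) -> u64 {
    match seq {
        SimpleSeq::Single(_) => 1,
        SimpleSeq::Range(None, _) | SimpleSeq::Range(_, None) => u64::MAX,
        SimpleSeq::Range(Some(a), Some(b)) => (i64::from(b) - i64::from(a)).unsigned_abs(),
    }
}

fn main() {
    assert_eq!(approx_size(SimpleSeq::Single(42)), 1);
    assert_eq!(approx_size(SimpleSeq::Range(Some(3), Some(7))), 4); // under-counts by one, by design
    assert_eq!(approx_size(SimpleSeq::Range(Some(1), None)), u64::MAX); // 1:*
}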
=> true, - _ => false, - } -} -fn is_keep_seq(sk: &SearchKey, midx: &MailIndex) -> bool { - use SearchKey::*; - match sk { - SequenceSet(seq_set) => seq_set - .0 - .as_ref() - .iter() - .any(|seq| midx.is_in_sequence_i(seq)), - Uid(seq_set) => seq_set - .0 - .as_ref() - .iter() - .any(|seq| midx.is_in_sequence_uid(seq)), - _ => unreachable!(), - } -} - -fn is_keep_modseq( - filter: &Option, - modseq: &NonZeroU64, - midx: &MailIndex, -) -> bool { - if filter.is_some() { - tracing::warn!(filter=?filter, "Ignoring search metadata filter as it's not supported yet"); - } - modseq <= &midx.modseq -} diff --git a/src/imap/session.rs b/src/imap/session.rs deleted file mode 100644 index fa3232a..0000000 --- a/src/imap/session.rs +++ /dev/null @@ -1,173 +0,0 @@ -use crate::imap::capability::{ClientCapability, ServerCapability}; -use crate::imap::command::{anonymous, authenticated, selected}; -use crate::imap::flow; -use crate::imap::request::Request; -use crate::imap::response::{Response, ResponseOrIdle}; -use crate::login::ArcLoginProvider; -use anyhow::{anyhow, bail, Context, Result}; -use imap_codec::imap_types::{command::Command, core::Tag}; - -//----- -pub struct Instance { - pub login_provider: ArcLoginProvider, - pub server_capabilities: ServerCapability, - pub client_capabilities: ClientCapability, - pub state: flow::State, -} -impl Instance { - pub fn new(login_provider: ArcLoginProvider, cap: ServerCapability) -> Self { - let client_cap = ClientCapability::new(&cap); - Self { - login_provider, - state: flow::State::NotAuthenticated, - server_capabilities: cap, - client_capabilities: client_cap, - } - } - - pub async fn request(&mut self, req: Request) -> ResponseOrIdle { - match req { - Request::IdleStart(tag) => self.idle_init(tag), - Request::IdlePoll => self.idle_poll().await, - Request::ImapCommand(cmd) => self.command(cmd).await, - } - } - - pub fn idle_init(&mut self, tag: Tag<'static>) -> ResponseOrIdle { - // Build transition - //@FIXME the notifier should be hidden inside the state and thus not part of the transition! - let transition = flow::Transition::Idle(tag.clone(), tokio::sync::Notify::new()); - - // Try to apply the transition and get the stop notifier - let maybe_stop = self - .state - .apply(transition) - .context("IDLE transition failed") - .and_then(|_| { - self.state - .notify() - .ok_or(anyhow!("IDLE state has no Notify object")) - }); - - // Build an appropriate response - match maybe_stop { - Ok(stop) => ResponseOrIdle::IdleAccept(stop), - Err(e) => { - tracing::error!(err=?e, "unable to init idle due to a transition error"); - //ResponseOrIdle::IdleReject(tag) - let no = Response::build() - .tag(tag) - .message( - "Internal error, processing command triggered an illegal IMAP state transition", - ) - .no() - .unwrap(); - ResponseOrIdle::IdleReject(no) - } - } - } - - pub async fn idle_poll(&mut self) -> ResponseOrIdle { - match self.idle_poll_happy().await { - Ok(r) => r, - Err(e) => { - tracing::error!(err=?e, "something bad happened in idle"); - ResponseOrIdle::Response(Response::bye().unwrap()) - } - } - } - - pub async fn idle_poll_happy(&mut self) -> Result { - let (mbx, tag, stop) = match &mut self.state { - flow::State::Idle(_, ref mut mbx, _, tag, stop) => (mbx, tag.clone(), stop.clone()), - _ => bail!("Invalid session state, can't idle"), - }; - - tokio::select! 
{ - _ = stop.notified() => { - self.state.apply(flow::Transition::UnIdle)?; - return Ok(ResponseOrIdle::Response(Response::build() - .tag(tag.clone()) - .message("IDLE completed") - .ok()?)) - }, - change = mbx.idle_sync() => { - tracing::debug!("idle event"); - return Ok(ResponseOrIdle::IdleEvent(change?)); - } - } - } - - pub async fn command(&mut self, cmd: Command<'static>) -> ResponseOrIdle { - // Command behavior is modulated by the state. - // To prevent state error, we handle the same command in separate code paths. - let (resp, tr) = match &mut self.state { - flow::State::NotAuthenticated => { - let ctx = anonymous::AnonymousContext { - req: &cmd, - login_provider: &self.login_provider, - server_capabilities: &self.server_capabilities, - }; - anonymous::dispatch(ctx).await - } - flow::State::Authenticated(ref user) => { - let ctx = authenticated::AuthenticatedContext { - req: &cmd, - server_capabilities: &self.server_capabilities, - client_capabilities: &mut self.client_capabilities, - user, - }; - authenticated::dispatch(ctx).await - } - flow::State::Selected(ref user, ref mut mailbox, ref perm) => { - let ctx = selected::SelectedContext { - req: &cmd, - server_capabilities: &self.server_capabilities, - client_capabilities: &mut self.client_capabilities, - user, - mailbox, - perm, - }; - selected::dispatch(ctx).await - } - flow::State::Idle(..) => Err(anyhow!("can not receive command while idling")), - flow::State::Logout => Response::build() - .tag(cmd.tag.clone()) - .message("No commands are allowed in the LOGOUT state.") - .bad() - .map(|r| (r, flow::Transition::None)), - } - .unwrap_or_else(|err| { - tracing::error!("Command error {:?} occured while processing {:?}", err, cmd); - ( - Response::build() - .to_req(&cmd) - .message("Internal error while processing command") - .bad() - .unwrap(), - flow::Transition::None, - ) - }); - - if let Err(e) = self.state.apply(tr) { - tracing::error!( - "Transition error {:?} occured while processing on command {:?}", - e, - cmd - ); - return ResponseOrIdle::Response(Response::build() - .to_req(&cmd) - .message( - "Internal error, processing command triggered an illegal IMAP state transition", - ) - .bad() - .unwrap()); - } - ResponseOrIdle::Response(resp) - - /*match &self.state { - flow::State::Idle(_, _, _, _, n) => ResponseOrIdle::StartIdle(n.clone()), - _ => ResponseOrIdle::Response(resp), - }*/ - } -} diff --git a/src/k2v_util.rs b/src/k2v_util.rs deleted file mode 100644 index 3cd969b..0000000 --- a/src/k2v_util.rs +++ /dev/null @@ -1,26 +0,0 @@ -/* -use anyhow::Result; -// ---- UTIL: function to wait for a value to have changed in K2V ---- - -pub async fn k2v_wait_value_changed( - k2v: &storage::RowStore, - key: &storage::RowRef, -) -> Result { - loop { - if let Some(ct) = prev_ct { - match k2v.poll_item(pk, sk, ct.clone(), None).await? 
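// Hedged sketch, not part of the original patch: IDLE above is coordinated with
// a tokio Notify shared between the network task (which fires it when the
// client sends DONE) and the session task (which select!s between that stop
// signal and mailbox changes). Reduced model of the handshake, tokio only; the
// sleep stands in for mbx.idle_sync().
use std::sync::Arc;
use std::time::Duration;
use tokio::sync::Notify;

#[tokio::main]
async fn main() {
    let stop = Arc::new(Notify::new());

    let session = {
        let stop = stop.clone();
        tokio::spawn(async move {
            loop {
                tokio::select! {
                    // Client sent DONE -> leave idle and answer "IDLE completed".
                    _ = stop.notified() => break,
                    // Placeholder for mbx.idle_sync(): would push EXISTS/EXPUNGE updates.
                    _ = tokio::time::sleep(Duration::from_millis(50)) => (),
                }
            }
        })
    };

    tokio::time::sleep(Duration::from_millis(120)).await;
    stop.notify_one(); // what the core loop does on IdleDoneReceived
    session.await.unwrap();
}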
{ - None => continue, - Some(cv) => return Ok(cv), - } - } else { - match k2v.read_item(pk, sk).await { - Err(k2v_client::Error::NotFound) => { - k2v.insert_item(pk, sk, vec![0u8], None).await?; - } - Err(e) => return Err(e.into()), - Ok(cv) => return Ok(cv), - } - } - } -} -*/ diff --git a/src/lib.rs b/src/lib.rs deleted file mode 100644 index f065478..0000000 --- a/src/lib.rs +++ /dev/null @@ -1,19 +0,0 @@ -#![feature(type_alias_impl_trait)] -#![feature(async_fn_in_trait)] -#![feature(async_closure)] -#![feature(trait_alias)] - -pub mod auth; -pub mod bayou; -pub mod config; -pub mod cryptoblob; -pub mod dav; -pub mod imap; -pub mod k2v_util; -pub mod lmtp; -pub mod login; -pub mod mail; -pub mod server; -pub mod storage; -pub mod timestamp; -pub mod user; diff --git a/src/lmtp.rs b/src/lmtp.rs deleted file mode 100644 index dcd4bcc..0000000 --- a/src/lmtp.rs +++ /dev/null @@ -1,221 +0,0 @@ -use std::net::SocketAddr; -use std::{pin::Pin, sync::Arc}; - -use anyhow::Result; -use async_trait::async_trait; -use duplexify::Duplex; -use futures::{io, AsyncRead, AsyncReadExt, AsyncWrite}; -use futures::{ - stream, - stream::{FuturesOrdered, FuturesUnordered}, - StreamExt, -}; -use log::*; -use tokio::net::TcpListener; -use tokio::select; -use tokio::sync::watch; -use tokio_util::compat::*; - -use smtp_message::{DataUnescaper, Email, EscapedDataReader, Reply, ReplyCode}; -use smtp_server::{reply, Config, ConnectionMetadata, Decision, MailMetadata}; - -use crate::config::*; -use crate::login::*; -use crate::mail::incoming::EncryptedMessage; - -pub struct LmtpServer { - bind_addr: SocketAddr, - hostname: String, - login_provider: Arc, -} - -impl LmtpServer { - pub fn new( - config: LmtpConfig, - login_provider: Arc, - ) -> Arc { - Arc::new(Self { - bind_addr: config.bind_addr, - hostname: config.hostname, - login_provider, - }) - } - - pub async fn run(self: &Arc, mut must_exit: watch::Receiver) -> Result<()> { - let tcp = TcpListener::bind(self.bind_addr).await?; - info!("LMTP server listening on {:#}", self.bind_addr); - - let mut connections = FuturesUnordered::new(); - - while !*must_exit.borrow() { - let wait_conn_finished = async { - if connections.is_empty() { - futures::future::pending().await - } else { - connections.next().await - } - }; - let (socket, remote_addr) = select! 
{ - a = tcp.accept() => a?, - _ = wait_conn_finished => continue, - _ = must_exit.changed() => continue, - }; - info!("LMTP: accepted connection from {}", remote_addr); - - let conn = tokio::spawn(smtp_server::interact( - socket.compat(), - smtp_server::IsAlreadyTls::No, - (), - self.clone(), - )); - - connections.push(conn); - } - drop(tcp); - - info!("LMTP server shutting down, draining remaining connections..."); - while connections.next().await.is_some() {} - - Ok(()) - } -} - -// ---- - -pub struct Message { - to: Vec, -} - -#[async_trait] -impl Config for LmtpServer { - type Protocol = smtp_server::protocol::Lmtp; - - type ConnectionUserMeta = (); - type MailUserMeta = Message; - - fn hostname(&self, _conn_meta: &ConnectionMetadata<()>) -> &str { - &self.hostname - } - - async fn new_mail(&self, _conn_meta: &mut ConnectionMetadata<()>) -> Message { - Message { to: vec![] } - } - - async fn tls_accept( - &self, - _io: IO, - _conn_meta: &mut ConnectionMetadata<()>, - ) -> io::Result>, Pin>>> - where - IO: Send + AsyncRead + AsyncWrite, - { - Err(io::Error::new( - io::ErrorKind::InvalidInput, - "TLS not implemented for LMTP server", - )) - } - - async fn filter_from( - &self, - from: Option, - _meta: &mut MailMetadata, - _conn_meta: &mut ConnectionMetadata<()>, - ) -> Decision> { - Decision::Accept { - reply: reply::okay_from().convert(), - res: from, - } - } - - async fn filter_to( - &self, - to: Email, - meta: &mut MailMetadata, - _conn_meta: &mut ConnectionMetadata<()>, - ) -> Decision { - let to_str = match to.hostname.as_ref() { - Some(h) => format!("{}@{}", to.localpart, h), - None => to.localpart.to_string(), - }; - match self.login_provider.public_login(&to_str).await { - Ok(creds) => { - meta.user.to.push(creds); - Decision::Accept { - reply: reply::okay_to().convert(), - res: to, - } - } - Err(e) => Decision::Reject { - reply: Reply { - code: ReplyCode::POLICY_REASON, - ecode: None, - text: vec![smtp_message::MaybeUtf8::Utf8(e.to_string())], - }, - }, - } - } - - async fn handle_mail<'resp, R>( - &'resp self, - reader: &mut EscapedDataReader<'_, R>, - meta: MailMetadata, - _conn_meta: &'resp mut ConnectionMetadata<()>, - ) -> Pin> + Send + 'resp>> - where - R: Send + Unpin + AsyncRead, - { - let err_response_stream = |meta: MailMetadata, msg: String| { - Box::pin( - stream::iter(meta.user.to.into_iter()).map(move |_| Decision::Reject { - reply: Reply { - code: ReplyCode::POLICY_REASON, - ecode: None, - text: vec![smtp_message::MaybeUtf8::Utf8(msg.clone())], - }, - }), - ) - }; - - let mut text = Vec::new(); - if let Err(e) = reader.read_to_end(&mut text).await { - return err_response_stream(meta, format!("io error: {}", e)); - } - reader.complete(); - let raw_size = text.len(); - - // Unescape email, shrink it also to remove last dot - let unesc_res = DataUnescaper::new(true).unescape(&mut text); - text.truncate(unesc_res.written); - tracing::debug!(prev_sz = raw_size, new_sz = text.len(), "unescaped"); - - let encrypted_message = match EncryptedMessage::new(text) { - Ok(x) => Arc::new(x), - Err(e) => return err_response_stream(meta, e.to_string()), - }; - - Box::pin( - meta.user - .to - .into_iter() - .map(move |creds| { - let encrypted_message = encrypted_message.clone(); - async move { - match encrypted_message.deliver_to(creds).await { - Ok(()) => Decision::Accept { - reply: reply::okay_mail().convert(), - res: (), - }, - Err(e) => Decision::Reject { - reply: Reply { - code: ReplyCode::POLICY_REASON, - ecode: None, - text: 
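// Hedged sketch, not part of the original patch: the LMTP accept loop above
// keeps every spawned connection in a FuturesUnordered and, once must_exit
// flips, stops accepting and drains whatever is still in flight before
// returning. Reduced model of that shutdown sequence, assuming tokio + futures;
// the sleeps stand in for real connections.
use std::time::Duration;

use futures::stream::{FuturesUnordered, StreamExt};
use tokio::sync::watch;

#[tokio::main]
async fn main() {
    let (exit_tx, must_exit) = watch::channel(false);
    let mut connections = FuturesUnordered::new();

    // Pretend three clients were accepted before shutdown.
    for i in 1u64..=3 {
        connections.push(tokio::spawn(async move {
            tokio::time::sleep(Duration::from_millis(10 * i)).await;
        }));
    }

    exit_tx.send(true).unwrap(); // shutdown requested (CTRL+C)

    if *must_exit.borrow() {
        // Same as run(): stop accepting, then drain the remaining connections.
        while connections.next().await.is_some() {}
    }
}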
vec![smtp_message::MaybeUtf8::Utf8(e.to_string())], - }, - }, - } - } - }) - .collect::>(), - ) - } -} diff --git a/src/login/demo_provider.rs b/src/login/demo_provider.rs deleted file mode 100644 index 11c7d54..0000000 --- a/src/login/demo_provider.rs +++ /dev/null @@ -1,51 +0,0 @@ -use crate::login::*; -use crate::storage::*; - -pub struct DemoLoginProvider { - keys: CryptoKeys, - in_memory_store: in_memory::MemDb, -} - -impl DemoLoginProvider { - pub fn new() -> Self { - Self { - keys: CryptoKeys::init(), - in_memory_store: in_memory::MemDb::new(), - } - } -} - -#[async_trait] -impl LoginProvider for DemoLoginProvider { - async fn login(&self, username: &str, password: &str) -> Result { - tracing::debug!(user=%username, "login"); - - if username != "alice" { - bail!("user does not exist"); - } - - if password != "hunter2" { - bail!("wrong password"); - } - - let storage = self.in_memory_store.builder("alice").await; - let keys = self.keys.clone(); - - Ok(Credentials { storage, keys }) - } - - async fn public_login(&self, email: &str) -> Result { - tracing::debug!(user=%email, "public_login"); - if email != "alice@example.tld" { - bail!("invalid email address"); - } - - let storage = self.in_memory_store.builder("alice").await; - let public_key = self.keys.public.clone(); - - Ok(PublicCredentials { - storage, - public_key, - }) - } -} diff --git a/src/login/ldap_provider.rs b/src/login/ldap_provider.rs deleted file mode 100644 index 0af5676..0000000 --- a/src/login/ldap_provider.rs +++ /dev/null @@ -1,265 +0,0 @@ -use anyhow::Result; -use async_trait::async_trait; -use ldap3::{LdapConnAsync, Scope, SearchEntry}; -use log::debug; - -use crate::config::*; -use crate::login::*; -use crate::storage; - -pub struct LdapLoginProvider { - ldap_server: String, - - pre_bind_on_login: bool, - bind_dn_and_pw: Option<(String, String)>, - - search_base: String, - attrs_to_retrieve: Vec, - username_attr: String, - mail_attr: String, - crypto_root_attr: String, - - storage_specific: StorageSpecific, - in_memory_store: storage::in_memory::MemDb, - garage_store: storage::garage::GarageRoot, -} - -enum BucketSource { - Constant(String), - Attr(String), -} - -enum StorageSpecific { - InMemory, - Garage { - from_config: LdapGarageConfig, - bucket_source: BucketSource, - }, -} - -impl LdapLoginProvider { - pub fn new(config: LoginLdapConfig) -> Result { - let bind_dn_and_pw = match (config.bind_dn, config.bind_password) { - (Some(dn), Some(pw)) => Some((dn, pw)), - (None, None) => None, - _ => bail!( - "If either of `bind_dn` or `bind_password` is set, the other must be set as well." 
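// Hedged usage sketch, not part of the original patch: the demo provider above
// knows exactly one account, alice / hunter2, whose public address is
// alice@example.tld. This assumes it is called from inside the crate with the
// LoginProvider trait in scope; it only shows how the two entry points pair up.
async fn demo_login_example() -> anyhow::Result<()> {
    let provider = DemoLoginProvider::new();
    // Full credentials: storage builder + decrypted crypto keys.
    let _creds = provider.login("alice", "hunter2").await?;
    // Public credentials: enough to deliver mail into alice's inbox.
    let _pubcreds = provider.public_login("alice@example.tld").await?;
    // Anything else is rejected.
    assert!(provider.login("alice", "wrong").await.is_err());
    Ok(())
}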
- ), - }; - - if config.pre_bind_on_login && bind_dn_and_pw.is_none() { - bail!("Cannot use `pre_bind_on_login` without setting `bind_dn` and `bind_password`"); - } - - let mut attrs_to_retrieve = vec![ - config.username_attr.clone(), - config.mail_attr.clone(), - config.crypto_root_attr.clone(), - ]; - - // storage specific - let specific = match config.storage { - LdapStorage::InMemory => StorageSpecific::InMemory, - LdapStorage::Garage(grgconf) => { - attrs_to_retrieve.push(grgconf.aws_access_key_id_attr.clone()); - attrs_to_retrieve.push(grgconf.aws_secret_access_key_attr.clone()); - - let bucket_source = - match (grgconf.default_bucket.clone(), grgconf.bucket_attr.clone()) { - (Some(b), None) => BucketSource::Constant(b), - (None, Some(a)) => BucketSource::Attr(a), - _ => bail!("Must set `bucket` or `bucket_attr`, but not both"), - }; - - if let BucketSource::Attr(a) = &bucket_source { - attrs_to_retrieve.push(a.clone()); - } - - StorageSpecific::Garage { - from_config: grgconf, - bucket_source, - } - } - }; - - Ok(Self { - ldap_server: config.ldap_server, - pre_bind_on_login: config.pre_bind_on_login, - bind_dn_and_pw, - search_base: config.search_base, - attrs_to_retrieve, - username_attr: config.username_attr, - mail_attr: config.mail_attr, - crypto_root_attr: config.crypto_root_attr, - storage_specific: specific, - //@FIXME should be created outside of the login provider - //Login provider should return only a cryptoroot + a storage URI - //storage URI that should be resolved outside... - in_memory_store: storage::in_memory::MemDb::new(), - garage_store: storage::garage::GarageRoot::new()?, - }) - } - - async fn storage_creds_from_ldap_user(&self, user: &SearchEntry) -> Result { - let storage: Builder = match &self.storage_specific { - StorageSpecific::InMemory => { - self.in_memory_store - .builder(&get_attr(user, &self.username_attr)?) - .await - } - StorageSpecific::Garage { - from_config, - bucket_source, - } => { - let aws_access_key_id = get_attr(user, &from_config.aws_access_key_id_attr)?; - let aws_secret_access_key = - get_attr(user, &from_config.aws_secret_access_key_attr)?; - let bucket = match bucket_source { - BucketSource::Constant(b) => b.clone(), - BucketSource::Attr(a) => get_attr(user, &a)?, - }; - - self.garage_store.user(storage::garage::GarageConf { - region: from_config.aws_region.clone(), - s3_endpoint: from_config.s3_endpoint.clone(), - k2v_endpoint: from_config.k2v_endpoint.clone(), - aws_access_key_id, - aws_secret_access_key, - bucket, - })? - } - }; - - Ok(storage) - } -} - -#[async_trait] -impl LoginProvider for LdapLoginProvider { - async fn login(&self, username: &str, password: &str) -> Result { - check_identifier(username)?; - - let (conn, mut ldap) = LdapConnAsync::new(&self.ldap_server).await?; - ldap3::drive!(conn); - - if self.pre_bind_on_login { - let (dn, pw) = self.bind_dn_and_pw.as_ref().unwrap(); - ldap.simple_bind(dn, pw).await?.success()?; - } - - let (matches, _res) = ldap - .search( - &self.search_base, - Scope::Subtree, - &format!( - "(&(objectClass=inetOrgPerson)({}={}))", - self.username_attr, username - ), - &self.attrs_to_retrieve, - ) - .await? 
- .success()?; - - if matches.is_empty() { - bail!("Invalid username"); - } - if matches.len() > 1 { - bail!("Invalid username (multiple matching accounts)"); - } - let user = SearchEntry::construct(matches.into_iter().next().unwrap()); - debug!( - "Found matching LDAP user for username {}: {}", - username, user.dn - ); - - // Try to login against LDAP server with provided password - // to check user's password - ldap.simple_bind(&user.dn, password) - .await? - .success() - .context("Invalid password")?; - debug!("Ldap login with user name {} successfull", username); - - // cryptography - let crstr = get_attr(&user, &self.crypto_root_attr)?; - let cr = CryptoRoot(crstr); - let keys = cr.crypto_keys(password)?; - - // storage - let storage = self.storage_creds_from_ldap_user(&user).await?; - - drop(ldap); - - Ok(Credentials { storage, keys }) - } - - async fn public_login(&self, email: &str) -> Result { - check_identifier(email)?; - - let (dn, pw) = match self.bind_dn_and_pw.as_ref() { - Some(x) => x, - None => bail!("Missing bind_dn and bind_password in LDAP login provider config"), - }; - - let (conn, mut ldap) = LdapConnAsync::new(&self.ldap_server).await?; - ldap3::drive!(conn); - ldap.simple_bind(dn, pw).await?.success()?; - - let (matches, _res) = ldap - .search( - &self.search_base, - Scope::Subtree, - &format!( - "(&(objectClass=inetOrgPerson)({}={}))", - self.mail_attr, email - ), - &self.attrs_to_retrieve, - ) - .await? - .success()?; - - if matches.is_empty() { - bail!("No such user account"); - } - if matches.len() > 1 { - bail!("Multiple matching user accounts"); - } - let user = SearchEntry::construct(matches.into_iter().next().unwrap()); - debug!("Found matching LDAP user for email {}: {}", email, user.dn); - - // cryptography - let crstr = get_attr(&user, &self.crypto_root_attr)?; - let cr = CryptoRoot(crstr); - let public_key = cr.public_key()?; - - // storage - let storage = self.storage_creds_from_ldap_user(&user).await?; - drop(ldap); - - Ok(PublicCredentials { - storage, - public_key, - }) - } -} - -fn get_attr(user: &SearchEntry, attr: &str) -> Result { - Ok(user - .attrs - .get(attr) - .ok_or(anyhow!("Missing attr: {}", attr))? - .iter() - .next() - .ok_or(anyhow!("No value for attr: {}", attr))? - .clone()) -} - -fn check_identifier(id: &str) -> Result<()> { - let is_ok = id - .chars() - .all(|c| c.is_alphanumeric() || "-+_.@".contains(c)); - if !is_ok { - bail!("Invalid username/email address, must contain only a-z A-Z 0-9 - + _ . @"); - } - Ok(()) -} diff --git a/src/login/mod.rs b/src/login/mod.rs deleted file mode 100644 index 4a1dee1..0000000 --- a/src/login/mod.rs +++ /dev/null @@ -1,245 +0,0 @@ -pub mod demo_provider; -pub mod ldap_provider; -pub mod static_provider; - -use base64::Engine; -use std::sync::Arc; - -use anyhow::{anyhow, bail, Context, Result}; -use async_trait::async_trait; -use rand::prelude::*; - -use crate::cryptoblob::*; -use crate::storage::*; - -/// The trait LoginProvider defines the interface for a login provider that allows -/// to retrieve storage and cryptographic credentials for access to a user account -/// from their username and password. -#[async_trait] -pub trait LoginProvider { - /// The login method takes an account's password as an input to decypher - /// decryption keys and obtain full access to the user's account. - async fn login(&self, username: &str, password: &str) -> Result; - /// The public_login method takes an account's email address and returns - /// public credentials for adding mails to the user's inbox. 
- async fn public_login(&self, email: &str) -> Result; -} - -/// ArcLoginProvider is simply an alias on a structure that is used -/// in many places in the code -pub type ArcLoginProvider = Arc; - -/// The struct Credentials represent all of the necessary information to interact -/// with a user account's data after they are logged in. -#[derive(Clone, Debug)] -pub struct Credentials { - /// The storage credentials are used to authenticate access to the underlying storage (S3, K2V) - pub storage: Builder, - /// The cryptographic keys are used to encrypt and decrypt data stored in S3 and K2V - pub keys: CryptoKeys, -} - -#[derive(Clone, Debug)] -pub struct PublicCredentials { - /// The storage credentials are used to authenticate access to the underlying storage (S3, K2V) - pub storage: Builder, - pub public_key: PublicKey, -} - -use serde::{Deserialize, Serialize}; -#[derive(Serialize, Deserialize, Debug, Clone)] -pub struct CryptoRoot(pub String); - -impl CryptoRoot { - pub fn create_pass(password: &str, k: &CryptoKeys) -> Result { - let bytes = k.password_seal(password)?; - let b64 = base64::engine::general_purpose::STANDARD_NO_PAD.encode(bytes); - let cr = format!("aero:cryptoroot:pass:{}", b64); - Ok(Self(cr)) - } - - pub fn create_cleartext(k: &CryptoKeys) -> Self { - let bytes = k.serialize(); - let b64 = base64::engine::general_purpose::STANDARD_NO_PAD.encode(bytes); - let cr = format!("aero:cryptoroot:cleartext:{}", b64); - Self(cr) - } - - pub fn create_incoming(pk: &PublicKey) -> Self { - let bytes: &[u8] = &pk[..]; - let b64 = base64::engine::general_purpose::STANDARD_NO_PAD.encode(bytes); - let cr = format!("aero:cryptoroot:incoming:{}", b64); - Self(cr) - } - - pub fn public_key(&self) -> Result { - match self.0.splitn(4, ':').collect::>()[..] { - ["aero", "cryptoroot", "pass", b64blob] => { - let blob = base64::engine::general_purpose::STANDARD_NO_PAD.decode(b64blob)?; - if blob.len() < 32 { - bail!( - "Decoded data is {} bytes long, expect at least 32 bytes", - blob.len() - ); - } - PublicKey::from_slice(&blob[..32]).context("must be a valid public key") - } - ["aero", "cryptoroot", "cleartext", b64blob] => { - let blob = base64::engine::general_purpose::STANDARD_NO_PAD.decode(b64blob)?; - Ok(CryptoKeys::deserialize(&blob)?.public) - } - ["aero", "cryptoroot", "incoming", b64blob] => { - let blob = base64::engine::general_purpose::STANDARD_NO_PAD.decode(b64blob)?; - if blob.len() < 32 { - bail!( - "Decoded data is {} bytes long, expect at least 32 bytes", - blob.len() - ); - } - PublicKey::from_slice(&blob[..32]).context("must be a valid public key") - } - ["aero", "cryptoroot", "keyring", _] => { - bail!("keyring is not yet implemented!") - } - _ => bail!(format!( - "passed string '{}' is not a valid cryptoroot", - self.0 - )), - } - } - pub fn crypto_keys(&self, password: &str) -> Result { - match self.0.splitn(4, ':').collect::>()[..] 
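// Hedged sketch, not part of the original patch: a crypto root is a
// colon-separated string, "aero:cryptoroot:<kind>:<base64 blob>", and the code
// above dispatches on the first three fields with splitn(4, ':'). The kinds the
// patch handles are pass, cleartext, incoming, and the reserved keyring. A
// std-only classifier using the same trick; the function name is illustrative.
fn cryptoroot_kind(s: &str) -> Option<String> {
    let fields: Vec<&str> = s.splitn(4, ':').collect();
    match fields[..] {
        ["aero", "cryptoroot", kind, _b64blob] => Some(kind.to_string()),
        _ => None,
    }
}

fn main() {
    assert_eq!(cryptoroot_kind("aero:cryptoroot:pass:AAAA").as_deref(), Some("pass"));
    assert_eq!(cryptoroot_kind("not:a:cryptoroot:at-all"), None);
}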
{ - ["aero", "cryptoroot", "pass", b64blob] => { - let blob = base64::engine::general_purpose::STANDARD_NO_PAD.decode(b64blob)?; - CryptoKeys::password_open(password, &blob) - } - ["aero", "cryptoroot", "cleartext", b64blob] => { - let blob = base64::engine::general_purpose::STANDARD_NO_PAD.decode(b64blob)?; - CryptoKeys::deserialize(&blob) - } - ["aero", "cryptoroot", "incoming", _] => { - bail!("incoming cryptoroot does not contain a crypto key!") - } - ["aero", "cryptoroot", "keyring", _] => { - bail!("keyring is not yet implemented!") - } - _ => bail!(format!( - "passed string '{}' is not a valid cryptoroot", - self.0 - )), - } - } -} - -/// The struct CryptoKeys contains the cryptographic keys used to encrypt and decrypt -/// data in a user's mailbox. -#[derive(Clone, Debug)] -pub struct CryptoKeys { - /// Master key for symmetric encryption of mailbox data - pub master: Key, - /// Public/private keypair for encryption of incomming emails (secret part) - pub secret: SecretKey, - /// Public/private keypair for encryption of incomming emails (public part) - pub public: PublicKey, -} - -// ---- - -impl CryptoKeys { - /// Initialize a new cryptography root - pub fn init() -> Self { - let (public, secret) = gen_keypair(); - let master = gen_key(); - CryptoKeys { - master, - secret, - public, - } - } - - // Clear text serialize/deserialize - /// Serialize the root as bytes without encryption - fn serialize(&self) -> [u8; 64] { - let mut res = [0u8; 64]; - res[..32].copy_from_slice(self.master.as_ref()); - res[32..].copy_from_slice(self.secret.as_ref()); - res - } - - /// Deserialize a clear text crypto root without encryption - fn deserialize(bytes: &[u8]) -> Result { - if bytes.len() != 64 { - bail!("Invalid length: {}, expected 64", bytes.len()); - } - let master = Key::from_slice(&bytes[..32]).unwrap(); - let secret = SecretKey::from_slice(&bytes[32..]).unwrap(); - let public = secret.public_key(); - Ok(Self { - master, - secret, - public, - }) - } - - // Password sealed keys serialize/deserialize - pub fn password_open(password: &str, blob: &[u8]) -> Result { - let _pubkey = &blob[0..32]; - let kdf_salt = &blob[32..64]; - let password_openned = try_open_encrypted_keys(kdf_salt, password, &blob[64..])?; - - let keys = Self::deserialize(&password_openned)?; - Ok(keys) - } - - pub fn password_seal(&self, password: &str) -> Result> { - let mut kdf_salt = [0u8; 32]; - thread_rng().fill(&mut kdf_salt); - - // Calculate key for password secret box - let password_key = derive_password_key(&kdf_salt, password)?; - - // Seal a secret box that contains our crypto keys - let password_sealed = seal(&self.serialize(), &password_key)?; - - // Create blob - let password_blob = [&self.public[..], &kdf_salt[..], &password_sealed].concat(); - - Ok(password_blob) - } -} - -fn derive_password_key(kdf_salt: &[u8], password: &str) -> Result { - Ok(Key::from_slice(&argon2_kdf(kdf_salt, password.as_bytes(), 32)?).unwrap()) -} - -fn try_open_encrypted_keys( - kdf_salt: &[u8], - password: &str, - encrypted_keys: &[u8], -) -> Result> { - let password_key = derive_password_key(kdf_salt, password)?; - open(encrypted_keys, &password_key) -} - -// ---- UTIL ---- - -pub fn argon2_kdf(salt: &[u8], password: &[u8], output_len: usize) -> Result> { - use argon2::{password_hash, Algorithm, Argon2, ParamsBuilder, PasswordHasher, Version}; - - let params = ParamsBuilder::new() - .output_len(output_len) - .build() - .map_err(|e| anyhow!("Invalid argon2 params: {}", e))?; - let argon2 = Argon2::new(Algorithm::default(), 
Version::default(), params); - - let b64_salt = base64::engine::general_purpose::STANDARD_NO_PAD.encode(salt); - let valid_salt = password_hash::Salt::from_b64(&b64_salt) - .map_err(|e| anyhow!("Invalid salt, error {}", e))?; - let hash = argon2 - .hash_password(password, valid_salt) - .map_err(|e| anyhow!("Unable to hash: {}", e))?; - - let hash = hash.hash.ok_or(anyhow!("Missing output"))?; - assert!(hash.len() == output_len); - Ok(hash.as_bytes().to_vec()) -} diff --git a/src/login/static_provider.rs b/src/login/static_provider.rs deleted file mode 100644 index 79626df..0000000 --- a/src/login/static_provider.rs +++ /dev/null @@ -1,189 +0,0 @@ -use std::collections::HashMap; -use std::path::PathBuf; -use std::sync::Arc; -use tokio::signal::unix::{signal, SignalKind}; -use tokio::sync::watch; - -use anyhow::{anyhow, bail, Result}; -use async_trait::async_trait; - -use crate::config::*; -use crate::login::*; -use crate::storage; - -pub struct ContextualUserEntry { - pub username: String, - pub config: UserEntry, -} - -#[derive(Default)] -pub struct UserDatabase { - users: HashMap>, - users_by_email: HashMap>, -} - -pub struct StaticLoginProvider { - user_db: watch::Receiver, - in_memory_store: storage::in_memory::MemDb, - garage_store: storage::garage::GarageRoot, -} - -pub async fn update_user_list(config: PathBuf, up: watch::Sender) -> Result<()> { - let mut stream = signal(SignalKind::user_defined1()) - .expect("failed to install SIGUSR1 signal hander for reload"); - - loop { - let ulist: UserList = match read_config(config.clone()) { - Ok(x) => x, - Err(e) => { - tracing::warn!(path=%config.as_path().to_string_lossy(), error=%e, "Unable to load config"); - stream.recv().await; - continue; - } - }; - - let users = ulist - .into_iter() - .map(|(username, config)| { - ( - username.clone(), - Arc::new(ContextualUserEntry { username, config }), - ) - }) - .collect::>(); - - let mut users_by_email = HashMap::new(); - for (_, u) in users.iter() { - for m in u.config.email_addresses.iter() { - if users_by_email.contains_key(m) { - tracing::warn!("Several users have the same email address: {}", m); - stream.recv().await; - continue; - } - users_by_email.insert(m.clone(), u.clone()); - } - } - - tracing::info!("{} users loaded", users.len()); - up.send(UserDatabase { - users, - users_by_email, - }) - .context("update user db config")?; - stream.recv().await; - tracing::info!("Received SIGUSR1, reloading"); - } -} - -impl StaticLoginProvider { - pub async fn new(config: LoginStaticConfig) -> Result { - let (tx, mut rx) = watch::channel(UserDatabase::default()); - - tokio::spawn(update_user_list(config.user_list, tx)); - rx.changed().await?; - - Ok(Self { - user_db: rx, - in_memory_store: storage::in_memory::MemDb::new(), - garage_store: storage::garage::GarageRoot::new()?, - }) - } -} - -#[async_trait] -impl LoginProvider for StaticLoginProvider { - async fn login(&self, username: &str, password: &str) -> Result { - tracing::debug!(user=%username, "login"); - let user = { - let user_db = self.user_db.borrow(); - match user_db.users.get(username) { - None => bail!("User {} does not exist", username), - Some(u) => u.clone(), - } - }; - - tracing::debug!(user=%username, "verify password"); - if !verify_password(password, &user.config.password)? 
{ - bail!("Wrong password"); - } - - tracing::debug!(user=%username, "fetch keys"); - let storage: storage::Builder = match &user.config.storage { - StaticStorage::InMemory => self.in_memory_store.builder(username).await, - StaticStorage::Garage(grgconf) => { - self.garage_store.user(storage::garage::GarageConf { - region: grgconf.aws_region.clone(), - k2v_endpoint: grgconf.k2v_endpoint.clone(), - s3_endpoint: grgconf.s3_endpoint.clone(), - aws_access_key_id: grgconf.aws_access_key_id.clone(), - aws_secret_access_key: grgconf.aws_secret_access_key.clone(), - bucket: grgconf.bucket.clone(), - })? - } - }; - - let cr = CryptoRoot(user.config.crypto_root.clone()); - let keys = cr.crypto_keys(password)?; - - tracing::debug!(user=%username, "logged"); - Ok(Credentials { storage, keys }) - } - - async fn public_login(&self, email: &str) -> Result { - let user = { - let user_db = self.user_db.borrow(); - match user_db.users_by_email.get(email) { - None => bail!("Email {} does not exist", email), - Some(u) => u.clone(), - } - }; - tracing::debug!(user=%user.username, "public_login"); - - let storage: storage::Builder = match &user.config.storage { - StaticStorage::InMemory => self.in_memory_store.builder(&user.username).await, - StaticStorage::Garage(grgconf) => { - self.garage_store.user(storage::garage::GarageConf { - region: grgconf.aws_region.clone(), - k2v_endpoint: grgconf.k2v_endpoint.clone(), - s3_endpoint: grgconf.s3_endpoint.clone(), - aws_access_key_id: grgconf.aws_access_key_id.clone(), - aws_secret_access_key: grgconf.aws_secret_access_key.clone(), - bucket: grgconf.bucket.clone(), - })? - } - }; - - let cr = CryptoRoot(user.config.crypto_root.clone()); - let public_key = cr.public_key()?; - - Ok(PublicCredentials { - storage, - public_key, - }) - } -} - -pub fn hash_password(password: &str) -> Result { - use argon2::{ - password_hash::{rand_core::OsRng, PasswordHasher, SaltString}, - Argon2, - }; - let salt = SaltString::generate(&mut OsRng); - let argon2 = Argon2::default(); - Ok(argon2 - .hash_password(password.as_bytes(), &salt) - .map_err(|e| anyhow!("Argon2 error: {}", e))? 
- .to_string()) -} - -pub fn verify_password(password: &str, hash: &str) -> Result { - use argon2::{ - password_hash::{PasswordHash, PasswordVerifier}, - Argon2, - }; - let parsed_hash = - PasswordHash::new(hash).map_err(|e| anyhow!("Invalid hashed password: {}", e))?; - Ok(Argon2::default() - .verify_password(password.as_bytes(), &parsed_hash) - .is_ok()) -} diff --git a/src/mail/incoming.rs b/src/mail/incoming.rs deleted file mode 100644 index e2ad97d..0000000 --- a/src/mail/incoming.rs +++ /dev/null @@ -1,445 +0,0 @@ -//use std::collections::HashMap; -use std::convert::TryFrom; - -use std::sync::{Arc, Weak}; -use std::time::Duration; - -use anyhow::{anyhow, bail, Result}; -use base64::Engine; -use futures::{future::BoxFuture, FutureExt}; -//use tokio::io::AsyncReadExt; -use tokio::sync::watch; -use tracing::{debug, error, info, warn}; - -use crate::cryptoblob; -use crate::login::{Credentials, PublicCredentials}; -use crate::mail::mailbox::Mailbox; -use crate::mail::uidindex::ImapUidvalidity; -use crate::mail::unique_ident::*; -use crate::user::User; -use crate::mail::IMF; -use crate::storage; -use crate::timestamp::now_msec; - -const INCOMING_PK: &str = "incoming"; -const INCOMING_LOCK_SK: &str = "lock"; -const INCOMING_WATCH_SK: &str = "watch"; - -const MESSAGE_KEY: &str = "message-key"; - -// When a lock is held, it is held for LOCK_DURATION (here 5 minutes) -// It is renewed every LOCK_DURATION/3 -// If we are at 2*LOCK_DURATION/3 and haven't renewed, we assume we -// lost the lock. -const LOCK_DURATION: Duration = Duration::from_secs(300); - -// In addition to checking when notified, also check for new mail every 10 minutes -const MAIL_CHECK_INTERVAL: Duration = Duration::from_secs(600); - -pub async fn incoming_mail_watch_process( - user: Weak, - creds: Credentials, - rx_inbox_id: watch::Receiver>, -) { - if let Err(e) = incoming_mail_watch_process_internal(user, creds, rx_inbox_id).await { - error!("Error in incoming mail watch process: {}", e); - } -} - -async fn incoming_mail_watch_process_internal( - user: Weak, - creds: Credentials, - mut rx_inbox_id: watch::Receiver>, -) -> Result<()> { - let mut lock_held = k2v_lock_loop( - creds.storage.build().await?, - storage::RowRef::new(INCOMING_PK, INCOMING_LOCK_SK), - ); - let storage = creds.storage.build().await?; - - let mut inbox: Option> = None; - let mut incoming_key = storage::RowRef::new(INCOMING_PK, INCOMING_WATCH_SK); - - loop { - let maybe_updated_incoming_key = if *lock_held.borrow() { - debug!("incoming lock held"); - - let wait_new_mail = async { - loop { - match storage.row_poll(&incoming_key).await { - Ok(row_val) => break row_val.row_ref, - Err(e) => { - error!("Error in wait_new_mail: {}", e); - tokio::time::sleep(Duration::from_secs(30)).await; - } - } - } - }; - - tokio::select! { - inc_k = wait_new_mail => Some(inc_k), - _ = tokio::time::sleep(MAIL_CHECK_INTERVAL) => Some(incoming_key.clone()), - _ = lock_held.changed() => None, - _ = rx_inbox_id.changed() => None, - } - } else { - debug!("incoming lock not held"); - tokio::select! 
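// Hedged usage sketch, not part of the original patch: the `password` field of
// the static user list is an argon2 PHC string produced by hash_password(), and
// login() checks candidate passwords against it with verify_password(). This
// assumes it sits next to those two helpers inside the crate.
fn password_roundtrip_example() -> anyhow::Result<()> {
    let stored = hash_password("hunter2")?; // e.g. "$argon2id$v=19$..."
    assert!(verify_password("hunter2", &stored)?); // what login() does
    assert!(!verify_password("wrong password", &stored)?);
    Ok(())
}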
{ - _ = lock_held.changed() => None, - _ = rx_inbox_id.changed() => None, - } - }; - - let user = match Weak::upgrade(&user) { - Some(user) => user, - None => { - debug!("User no longer available, exiting incoming loop."); - break; - } - }; - debug!("User still available"); - - // If INBOX no longer is same mailbox, open new mailbox - let inbox_id = *rx_inbox_id.borrow(); - if let Some((id, uidvalidity)) = inbox_id { - if Some(id) != inbox.as_ref().map(|b| b.id) { - match user.open_mailbox_by_id(id, uidvalidity).await { - Ok(mb) => { - inbox = Some(mb); - } - Err(e) => { - inbox = None; - error!("Error when opening inbox ({}): {}", id, e); - tokio::time::sleep(Duration::from_secs(30)).await; - continue; - } - } - } - } - - // If we were able to open INBOX, and we have mail, - // fetch new mail - if let (Some(inbox), Some(updated_incoming_key)) = (&inbox, maybe_updated_incoming_key) { - match handle_incoming_mail(&user, &storage, inbox, &lock_held).await { - Ok(()) => { - incoming_key = updated_incoming_key; - } - Err(e) => { - error!("Could not fetch incoming mail: {}", e); - tokio::time::sleep(Duration::from_secs(30)).await; - } - } - } - } - drop(rx_inbox_id); - Ok(()) -} - -async fn handle_incoming_mail( - user: &Arc, - storage: &storage::Store, - inbox: &Arc, - lock_held: &watch::Receiver, -) -> Result<()> { - let mails_res = storage.blob_list("incoming/").await?; - - for object in mails_res { - if !*lock_held.borrow() { - break; - } - let key = object.0; - if let Some(mail_id) = key.strip_prefix("incoming/") { - if let Ok(mail_id) = mail_id.parse::() { - move_incoming_message(user, storage, inbox, mail_id).await?; - } - } - } - - Ok(()) -} - -async fn move_incoming_message( - user: &Arc, - storage: &storage::Store, - inbox: &Arc, - id: UniqueIdent, -) -> Result<()> { - info!("Moving incoming message: {}", id); - - let object_key = format!("incoming/{}", id); - - // 1. 
Fetch message from S3 - let object = storage.blob_fetch(&storage::BlobRef(object_key)).await?; - - // 1.a decrypt message key from headers - //info!("Object metadata: {:?}", get_result.metadata); - let key_encrypted_b64 = object - .meta - .get(MESSAGE_KEY) - .ok_or(anyhow!("Missing key in metadata"))?; - let key_encrypted = base64::engine::general_purpose::STANDARD.decode(key_encrypted_b64)?; - let message_key = sodiumoxide::crypto::sealedbox::open( - &key_encrypted, - &user.creds.keys.public, - &user.creds.keys.secret, - ) - .map_err(|_| anyhow!("Cannot decrypt message key"))?; - let message_key = - cryptoblob::Key::from_slice(&message_key).ok_or(anyhow!("Invalid message key"))?; - - // 1.b retrieve message body - let obj_body = object.value; - let plain_mail = cryptoblob::open(&obj_body, &message_key) - .map_err(|_| anyhow!("Cannot decrypt email content"))?; - - // 2 parse mail and add to inbox - let msg = IMF::try_from(&plain_mail[..]).map_err(|_| anyhow!("Invalid email body"))?; - inbox - .append_from_s3(msg, id, object.blob_ref.clone(), message_key) - .await?; - - // 3 delete from incoming - storage.blob_rm(&object.blob_ref).await?; - - Ok(()) -} - -// ---- UTIL: K2V locking loop, use this to try to grab a lock using a K2V entry as a signal ---- - -fn k2v_lock_loop(storage: storage::Store, row_ref: storage::RowRef) -> watch::Receiver { - let (held_tx, held_rx) = watch::channel(false); - - tokio::spawn(k2v_lock_loop_internal(storage, row_ref, held_tx)); - - held_rx -} - -#[derive(Clone, Debug)] -enum LockState { - Unknown, - Empty, - Held(UniqueIdent, u64, storage::RowRef), -} - -async fn k2v_lock_loop_internal( - storage: storage::Store, - row_ref: storage::RowRef, - held_tx: watch::Sender, -) { - let (state_tx, mut state_rx) = watch::channel::(LockState::Unknown); - let mut state_rx_2 = state_rx.clone(); - - let our_pid = gen_ident(); - - // Loop 1: watch state of lock in K2V, save that in corresponding watch channel - let watch_lock_loop: BoxFuture> = async { - let mut ct = row_ref.clone(); - loop { - debug!("k2v watch lock loop iter: ct = {:?}", ct); - match storage.row_poll(&ct).await { - Err(e) => { - error!( - "Error in k2v wait value changed: {} ; assuming we no longer hold lock.", - e - ); - state_tx.send(LockState::Unknown)?; - tokio::time::sleep(Duration::from_secs(30)).await; - } - Ok(cv) => { - let mut lock_state = None; - for v in cv.value.iter() { - if let storage::Alternative::Value(vbytes) = v { - if vbytes.len() == 32 { - let ts = u64::from_be_bytes(vbytes[..8].try_into().unwrap()); - let pid = UniqueIdent(vbytes[8..].try_into().unwrap()); - if lock_state - .map(|(pid2, ts2)| ts > ts2 || (ts == ts2 && pid > pid2)) - .unwrap_or(true) - { - lock_state = Some((pid, ts)); - } - } - } - } - let new_ct = cv.row_ref; - - debug!( - "k2v watch lock loop: changed, old ct = {:?}, new ct = {:?}, v = {:?}", - ct, new_ct, lock_state - ); - state_tx.send( - lock_state - .map(|(pid, ts)| LockState::Held(pid, ts, new_ct.clone())) - .unwrap_or(LockState::Empty), - )?; - ct = new_ct; - } - } - } - } - .boxed(); - - // Loop 2: notify user whether we are holding the lock or not - let lock_notify_loop: BoxFuture> = async { - loop { - let now = now_msec(); - let held_with_expiration_time = match &*state_rx.borrow_and_update() { - LockState::Held(pid, ts, _ct) if *pid == our_pid => { - let expiration_time = *ts - (LOCK_DURATION / 3).as_millis() as u64; - if now < expiration_time { - Some(expiration_time) - } else { - None - } - } - _ => None, - }; - let held = 
held_with_expiration_time.is_some(); - if held != *held_tx.borrow() { - held_tx.send(held)?; - } - - let await_expired = async { - match held_with_expiration_time { - None => futures::future::pending().await, - Some(expiration_time) => { - tokio::time::sleep(Duration::from_millis(expiration_time - now)).await - } - }; - }; - - tokio::select!( - r = state_rx.changed() => { - r?; - } - _ = held_tx.closed() => bail!("held_tx closed, don't need to hold lock anymore"), - _ = await_expired => continue, - ); - } - } - .boxed(); - - // Loop 3: acquire lock when relevant - let take_lock_loop: BoxFuture> = async { - loop { - let now = now_msec(); - let state: LockState = state_rx_2.borrow_and_update().clone(); - let (acquire_at, ct) = match state { - LockState::Unknown => { - // If state of the lock is unknown, don't try to acquire - state_rx_2.changed().await?; - continue; - } - LockState::Empty => (now, None), - LockState::Held(pid, ts, ct) => { - if pid == our_pid { - (ts - (2 * LOCK_DURATION / 3).as_millis() as u64, Some(ct)) - } else { - (ts, Some(ct)) - } - } - }; - - // Wait until it is time to acquire lock - if acquire_at > now { - tokio::select!( - r = state_rx_2.changed() => { - // If lock state changed in the meantime, don't acquire and loop around - r?; - continue; - } - _ = tokio::time::sleep(Duration::from_millis(acquire_at - now)) => () - ); - } - - // Acquire lock - let mut lock = vec![0u8; 32]; - lock[..8].copy_from_slice(&u64::to_be_bytes( - now_msec() + LOCK_DURATION.as_millis() as u64, - )); - lock[8..].copy_from_slice(&our_pid.0); - let row = match ct { - Some(existing) => existing, - None => row_ref.clone(), - }; - if let Err(e) = storage - .row_insert(vec![storage::RowVal::new(row, lock)]) - .await - { - error!("Could not take lock: {}", e); - tokio::time::sleep(Duration::from_secs(30)).await; - } - - // Wait for new information to loop back - state_rx_2.changed().await?; - } - } - .boxed(); - - let _ = futures::try_join!(watch_lock_loop, lock_notify_loop, take_lock_loop); - - debug!("lock loop exited, releasing"); - - if !held_tx.is_closed() { - warn!("weird..."); - let _ = held_tx.send(false); - } - - // If lock is ours, release it - let release = match &*state_rx.borrow() { - LockState::Held(pid, _, ct) if *pid == our_pid => Some(ct.clone()), - _ => None, - }; - if let Some(ct) = release { - match storage.row_rm(&storage::Selector::Single(&ct)).await { - Err(e) => warn!("Unable to release lock {:?}: {}", ct, e), - Ok(_) => (), - }; - } -} - -// ---- LMTP SIDE: storing messages encrypted with user's pubkey ---- - -pub struct EncryptedMessage { - key: cryptoblob::Key, - encrypted_body: Vec, -} - -impl EncryptedMessage { - pub fn new(body: Vec) -> Result { - let key = cryptoblob::gen_key(); - let encrypted_body = cryptoblob::seal(&body, &key)?; - Ok(Self { - key, - encrypted_body, - }) - } - - pub async fn deliver_to(self: Arc, creds: PublicCredentials) -> Result<()> { - let storage = creds.storage.build().await?; - - // Get causality token of previous watch key - let query = storage::RowRef::new(INCOMING_PK, INCOMING_WATCH_SK); - let watch_ct = match storage.row_fetch(&storage::Selector::Single(&query)).await { - Err(_) => query, - Ok(cv) => cv.into_iter().next().map(|v| v.row_ref).unwrap_or(query), - }; - - // Write mail to encrypted storage - let encrypted_key = - sodiumoxide::crypto::sealedbox::seal(self.key.as_ref(), &creds.public_key); - let key_header = base64::engine::general_purpose::STANDARD.encode(&encrypted_key); - - let blob_val = storage::BlobVal::new( - 
storage::BlobRef(format!("incoming/{}", gen_ident())), - self.encrypted_body.clone().into(), - ) - .with_meta(MESSAGE_KEY.to_string(), key_header); - storage.blob_insert(blob_val).await?; - - // Update watch key to signal new mail - let watch_val = storage::RowVal::new(watch_ct.clone(), gen_ident().0.to_vec()); - storage.row_insert(vec![watch_val]).await?; - Ok(()) - } -} diff --git a/src/mail/mailbox.rs b/src/mail/mailbox.rs deleted file mode 100644 index d1a5473..0000000 --- a/src/mail/mailbox.rs +++ /dev/null @@ -1,524 +0,0 @@ -use anyhow::{anyhow, bail, Result}; -use serde::{Deserialize, Serialize}; -use tokio::sync::RwLock; - -use crate::bayou::Bayou; -use crate::cryptoblob::{self, gen_key, open_deserialize, seal_serialize, Key}; -use crate::login::Credentials; -use crate::mail::uidindex::*; -use crate::mail::unique_ident::*; -use crate::mail::IMF; -use crate::storage::{self, BlobRef, BlobVal, RowRef, RowVal, Selector, Store}; -use crate::timestamp::now_msec; - -pub struct Mailbox { - pub(super) id: UniqueIdent, - mbox: RwLock, -} - -impl Mailbox { - pub(crate) async fn open( - creds: &Credentials, - id: UniqueIdent, - min_uidvalidity: ImapUidvalidity, - ) -> Result { - let index_path = format!("index/{}", id); - let mail_path = format!("mail/{}", id); - - let mut uid_index = Bayou::::new(creds, index_path).await?; - uid_index.sync().await?; - - let uidvalidity = uid_index.state().uidvalidity; - if uidvalidity < min_uidvalidity { - uid_index - .push( - uid_index - .state() - .op_bump_uidvalidity(min_uidvalidity.get() - uidvalidity.get()), - ) - .await?; - } - - // @FIXME reporting through opentelemetry or some logs - // info on the "shape" of the mailbox would be welcomed - /* - dump(&uid_index); - */ - - let mbox = RwLock::new(MailboxInternal { - id, - encryption_key: creds.keys.master.clone(), - storage: creds.storage.build().await?, - uid_index, - mail_path, - }); - - Ok(Self { id, mbox }) - } - - /// Sync data with backing store - pub async fn force_sync(&self) -> Result<()> { - self.mbox.write().await.force_sync().await - } - - /// Sync data with backing store only if changes are detected - /// or last sync is too old - pub async fn opportunistic_sync(&self) -> Result<()> { - self.mbox.write().await.opportunistic_sync().await - } - - /// Block until a sync has been done (due to changes in the event log) - pub async fn notify(&self) -> std::sync::Weak { - self.mbox.read().await.notifier() - } - - // ---- Functions for reading the mailbox ---- - - /// Get a clone of the current UID Index of this mailbox - /// (cloning is cheap so don't hesitate to use this) - pub async fn current_uid_index(&self) -> UidIndex { - self.mbox.read().await.uid_index.state().clone() - } - - /// Fetch the metadata (headers + some more info) of the specified - /// mail IDs - pub async fn fetch_meta(&self, ids: &[UniqueIdent]) -> Result> { - self.mbox.read().await.fetch_meta(ids).await - } - - /// Fetch an entire e-mail - pub async fn fetch_full(&self, id: UniqueIdent, message_key: &Key) -> Result> { - self.mbox.read().await.fetch_full(id, message_key).await - } - - pub async fn frozen(self: &std::sync::Arc) -> super::snapshot::FrozenMailbox { - super::snapshot::FrozenMailbox::new(self.clone()).await - } - - // ---- Functions for changing the mailbox ---- - - /// Add flags to message - pub async fn add_flags<'a>(&self, id: UniqueIdent, flags: &[Flag]) -> Result<()> { - self.mbox.write().await.add_flags(id, flags).await - } - - /// Delete flags from message - pub async fn del_flags<'a>(&self, id: 
UniqueIdent, flags: &[Flag]) -> Result<()> { - self.mbox.write().await.del_flags(id, flags).await - } - - /// Define the new flags for this message - pub async fn set_flags<'a>(&self, id: UniqueIdent, flags: &[Flag]) -> Result<()> { - self.mbox.write().await.set_flags(id, flags).await - } - - /// Insert an email into the mailbox - pub async fn append<'a>( - &self, - msg: IMF<'a>, - ident: Option, - flags: &[Flag], - ) -> Result<(ImapUidvalidity, ImapUid, ModSeq)> { - self.mbox.write().await.append(msg, ident, flags).await - } - - /// Insert an email into the mailbox, copying it from an existing S3 object - pub async fn append_from_s3<'a>( - &self, - msg: IMF<'a>, - ident: UniqueIdent, - blob_ref: storage::BlobRef, - message_key: Key, - ) -> Result<()> { - self.mbox - .write() - .await - .append_from_s3(msg, ident, blob_ref, message_key) - .await - } - - /// Delete a message definitively from the mailbox - pub async fn delete<'a>(&self, id: UniqueIdent) -> Result<()> { - self.mbox.write().await.delete(id).await - } - - /// Copy an email from an other Mailbox to this mailbox - /// (use this when possible, as it allows for a certain number of storage optimizations) - pub async fn copy_from(&self, from: &Mailbox, uuid: UniqueIdent) -> Result { - if self.id == from.id { - bail!("Cannot copy into same mailbox"); - } - - let (mut selflock, fromlock); - if self.id < from.id { - selflock = self.mbox.write().await; - fromlock = from.mbox.write().await; - } else { - fromlock = from.mbox.write().await; - selflock = self.mbox.write().await; - }; - selflock.copy_from(&fromlock, uuid).await - } - - /// Move an email from an other Mailbox to this mailbox - /// (use this when possible, as it allows for a certain number of storage optimizations) - pub async fn move_from(&self, from: &Mailbox, uuid: UniqueIdent) -> Result<()> { - if self.id == from.id { - bail!("Cannot copy move same mailbox"); - } - - let (mut selflock, mut fromlock); - if self.id < from.id { - selflock = self.mbox.write().await; - fromlock = from.mbox.write().await; - } else { - fromlock = from.mbox.write().await; - selflock = self.mbox.write().await; - }; - selflock.move_from(&mut fromlock, uuid).await - } -} - -// ---- - -// Non standard but common flags: -// https://www.iana.org/assignments/imap-jmap-keywords/imap-jmap-keywords.xhtml -struct MailboxInternal { - // 2023-05-15 will probably be used later. 
- #[allow(dead_code)] - id: UniqueIdent, - mail_path: String, - encryption_key: Key, - storage: Store, - uid_index: Bayou, -} - -impl MailboxInternal { - async fn force_sync(&mut self) -> Result<()> { - self.uid_index.sync().await?; - Ok(()) - } - - async fn opportunistic_sync(&mut self) -> Result<()> { - self.uid_index.opportunistic_sync().await?; - Ok(()) - } - - fn notifier(&self) -> std::sync::Weak { - self.uid_index.notifier() - } - - // ---- Functions for reading the mailbox ---- - - async fn fetch_meta(&self, ids: &[UniqueIdent]) -> Result> { - let ids = ids.iter().map(|x| x.to_string()).collect::>(); - let ops = ids - .iter() - .map(|id| RowRef::new(self.mail_path.as_str(), id.as_str())) - .collect::>(); - let res_vec = self.storage.row_fetch(&Selector::List(ops)).await?; - - let mut meta_vec = vec![]; - for res in res_vec.into_iter() { - let mut meta_opt = None; - - // Resolve conflicts - for v in res.value.iter() { - match v { - storage::Alternative::Tombstone => (), - storage::Alternative::Value(v) => { - let meta = open_deserialize::(v, &self.encryption_key)?; - match meta_opt.as_mut() { - None => { - meta_opt = Some(meta); - } - Some(prevmeta) => { - prevmeta.try_merge(meta)?; - } - } - } - } - } - if let Some(meta) = meta_opt { - meta_vec.push(meta); - } else { - bail!("No valid meta value in k2v for {:?}", res.row_ref); - } - } - - Ok(meta_vec) - } - - async fn fetch_full(&self, id: UniqueIdent, message_key: &Key) -> Result> { - let obj_res = self - .storage - .blob_fetch(&BlobRef(format!("{}/{}", self.mail_path, id))) - .await?; - let body = obj_res.value; - cryptoblob::open(&body, message_key) - } - - // ---- Functions for changing the mailbox ---- - - async fn add_flags(&mut self, ident: UniqueIdent, flags: &[Flag]) -> Result<()> { - let add_flag_op = self.uid_index.state().op_flag_add(ident, flags.to_vec()); - self.uid_index.push(add_flag_op).await - } - - async fn del_flags(&mut self, ident: UniqueIdent, flags: &[Flag]) -> Result<()> { - let del_flag_op = self.uid_index.state().op_flag_del(ident, flags.to_vec()); - self.uid_index.push(del_flag_op).await - } - - async fn set_flags(&mut self, ident: UniqueIdent, flags: &[Flag]) -> Result<()> { - let set_flag_op = self.uid_index.state().op_flag_set(ident, flags.to_vec()); - self.uid_index.push(set_flag_op).await - } - - async fn append( - &mut self, - mail: IMF<'_>, - ident: Option, - flags: &[Flag], - ) -> Result<(ImapUidvalidity, ImapUid, ModSeq)> { - let ident = ident.unwrap_or_else(gen_ident); - let message_key = gen_key(); - - futures::try_join!( - async { - // Encrypt and save mail body - let message_blob = cryptoblob::seal(mail.raw, &message_key)?; - self.storage - .blob_insert(BlobVal::new( - BlobRef(format!("{}/{}", self.mail_path, ident)), - message_blob, - )) - .await?; - Ok::<_, anyhow::Error>(()) - }, - async { - // Save mail meta - let meta = MailMeta { - internaldate: now_msec(), - headers: mail.parsed.raw_headers.to_vec(), - message_key: message_key.clone(), - rfc822_size: mail.raw.len(), - }; - let meta_blob = seal_serialize(&meta, &self.encryption_key)?; - self.storage - .row_insert(vec![RowVal::new( - RowRef::new(&self.mail_path, &ident.to_string()), - meta_blob, - )]) - .await?; - Ok::<_, anyhow::Error>(()) - }, - self.uid_index.opportunistic_sync() - )?; - - // Add mail to Bayou mail index - let uid_state = self.uid_index.state(); - let add_mail_op = uid_state.op_mail_add(ident, flags.to_vec()); - - let uidvalidity = uid_state.uidvalidity; - let (uid, modseq) = match add_mail_op { - 
UidIndexOp::MailAdd(_, uid, modseq, _) => (uid, modseq), - _ => unreachable!(), - }; - - self.uid_index.push(add_mail_op).await?; - - Ok((uidvalidity, uid, modseq)) - } - - async fn append_from_s3<'a>( - &mut self, - mail: IMF<'a>, - ident: UniqueIdent, - blob_src: storage::BlobRef, - message_key: Key, - ) -> Result<()> { - futures::try_join!( - async { - // Copy mail body from previous location - let blob_dst = BlobRef(format!("{}/{}", self.mail_path, ident)); - self.storage.blob_copy(&blob_src, &blob_dst).await?; - Ok::<_, anyhow::Error>(()) - }, - async { - // Save mail meta - let meta = MailMeta { - internaldate: now_msec(), - headers: mail.parsed.raw_headers.to_vec(), - message_key: message_key.clone(), - rfc822_size: mail.raw.len(), - }; - let meta_blob = seal_serialize(&meta, &self.encryption_key)?; - self.storage - .row_insert(vec![RowVal::new( - RowRef::new(&self.mail_path, &ident.to_string()), - meta_blob, - )]) - .await?; - Ok::<_, anyhow::Error>(()) - }, - self.uid_index.opportunistic_sync() - )?; - - // Add mail to Bayou mail index - let add_mail_op = self.uid_index.state().op_mail_add(ident, vec![]); - self.uid_index.push(add_mail_op).await?; - - Ok(()) - } - - async fn delete(&mut self, ident: UniqueIdent) -> Result<()> { - if !self.uid_index.state().table.contains_key(&ident) { - bail!("Cannot delete mail that doesn't exit"); - } - - let del_mail_op = self.uid_index.state().op_mail_del(ident); - self.uid_index.push(del_mail_op).await?; - - futures::try_join!( - async { - // Delete mail body from S3 - self.storage - .blob_rm(&BlobRef(format!("{}/{}", self.mail_path, ident))) - .await?; - Ok::<_, anyhow::Error>(()) - }, - async { - // Delete mail meta from K2V - let sk = ident.to_string(); - let res = self - .storage - .row_fetch(&storage::Selector::Single(&RowRef::new( - &self.mail_path, - &sk, - ))) - .await?; - if let Some(row_val) = res.into_iter().next() { - self.storage - .row_rm(&storage::Selector::Single(&row_val.row_ref)) - .await?; - } - Ok::<_, anyhow::Error>(()) - } - )?; - Ok(()) - } - - async fn copy_from( - &mut self, - from: &MailboxInternal, - source_id: UniqueIdent, - ) -> Result { - let new_id = gen_ident(); - self.copy_internal(from, source_id, new_id).await?; - Ok(new_id) - } - - async fn move_from(&mut self, from: &mut MailboxInternal, id: UniqueIdent) -> Result<()> { - self.copy_internal(from, id, id).await?; - from.delete(id).await?; - Ok(()) - } - - async fn copy_internal( - &mut self, - from: &MailboxInternal, - source_id: UniqueIdent, - new_id: UniqueIdent, - ) -> Result<()> { - if self.encryption_key != from.encryption_key { - bail!("Message to be copied/moved does not belong to same account."); - } - - let flags = from - .uid_index - .state() - .table - .get(&source_id) - .ok_or(anyhow!("Source mail not found"))? 
-            .2
-            .clone();
-
-        futures::try_join!(
-            async {
-                let dst = BlobRef(format!("{}/{}", self.mail_path, new_id));
-                let src = BlobRef(format!("{}/{}", from.mail_path, source_id));
-                self.storage.blob_copy(&src, &dst).await?;
-                Ok::<_, anyhow::Error>(())
-            },
-            async {
-                // Copy mail meta in K2V
-                let meta = &from.fetch_meta(&[source_id]).await?[0];
-                let meta_blob = seal_serialize(meta, &self.encryption_key)?;
-                self.storage
-                    .row_insert(vec![RowVal::new(
-                        RowRef::new(&self.mail_path, &new_id.to_string()),
-                        meta_blob,
-                    )])
-                    .await?;
-                Ok::<_, anyhow::Error>(())
-            },
-            self.uid_index.opportunistic_sync(),
-        )?;
-
-        // Add mail to Bayou mail index
-        let add_mail_op = self.uid_index.state().op_mail_add(new_id, flags);
-        self.uid_index.push(add_mail_op).await?;
-
-        Ok(())
-    }
-}
-
-// Can be useful to debug so we want this code
-// to be available to developers
-#[allow(dead_code)]
-fn dump(uid_index: &Bayou<UidIndex>) {
-    let s = uid_index.state();
-    println!("---- MAILBOX STATE ----");
-    println!("UIDVALIDITY {}", s.uidvalidity);
-    println!("UIDNEXT {}", s.uidnext);
-    println!("INTERNALSEQ {}", s.internalseq);
-    for (uid, ident) in s.idx_by_uid.iter() {
-        println!(
-            "{} {} {}",
-            uid,
-            hex::encode(ident.0),
-            s.table.get(ident).cloned().unwrap().2.join(", ")
-        );
-    }
-    println!();
-}
-
-// ----
-
-/// The metadata of a message that is stored in K2V
-/// at pk = mail/<mailbox uuid>, sk = <message uuid>
-#[derive(Debug, Clone, Serialize, Deserialize)]
-pub struct MailMeta {
-    /// INTERNALDATE field (milliseconds since epoch)
-    pub internaldate: u64,
-    /// Headers of the message
-    pub headers: Vec<u8>,
-    /// Secret key for decrypting entire message
-    pub message_key: Key,
-    /// RFC822 size
-    pub rfc822_size: usize,
-}
-
-impl MailMeta {
-    fn try_merge(&mut self, other: Self) -> Result<()> {
-        if self.headers != other.headers
-            || self.message_key != other.message_key
-            || self.rfc822_size != other.rfc822_size
-        {
-            bail!("Conflicting MailMeta values.");
-        }
-        self.internaldate = std::cmp::max(self.internaldate, other.internaldate);
-        Ok(())
-    }
-}
diff --git a/src/mail/mod.rs b/src/mail/mod.rs
deleted file mode 100644
index 03e85cd..0000000
--- a/src/mail/mod.rs
+++ /dev/null
@@ -1,27 +0,0 @@
-use std::convert::TryFrom;
-
-pub mod incoming;
-pub mod mailbox;
-pub mod query;
-pub mod snapshot;
-pub mod uidindex;
-pub mod unique_ident;
-pub mod namespace;
-
-// Internet Message Format
-// aka RFC 822 - RFC 2822 - RFC 5322
-// 2023-05-15 don't want to refactor this struct now.
-#[allow(clippy::upper_case_acronyms)]
-pub struct IMF<'a> {
-    raw: &'a [u8],
-    parsed: eml_codec::part::composite::Message<'a>,
-}
-
-impl<'a> TryFrom<&'a [u8]> for IMF<'a> {
-    type Error = ();
-
-    fn try_from(body: &'a [u8]) -> Result<IMF<'a>, ()> {
-        let parsed = eml_codec::parse_message(body).or(Err(()))?.1;
-        Ok(Self { raw: body, parsed })
-    }
-}
diff --git a/src/mail/namespace.rs b/src/mail/namespace.rs
deleted file mode 100644
index 5e67173..0000000
--- a/src/mail/namespace.rs
+++ /dev/null
@@ -1,209 +0,0 @@
-use std::collections::{BTreeMap, HashMap};
-use std::sync::{Arc, Weak};
-
-use anyhow::{anyhow, bail, Result};
-use lazy_static::lazy_static;
-use serde::{Deserialize, Serialize};
-use tokio::sync::watch;
-
-use crate::cryptoblob::{open_deserialize, seal_serialize};
-use crate::login::Credentials;
-use crate::mail::incoming::incoming_mail_watch_process;
-use crate::mail::mailbox::Mailbox;
-use crate::mail::uidindex::ImapUidvalidity;
-use crate::mail::unique_ident::{gen_ident, UniqueIdent};
-use crate::storage;
-use crate::timestamp::now_msec;
-
-pub const MAILBOX_HIERARCHY_DELIMITER: char = '.';
-
-/// INBOX is the only mailbox that must always exist.
-/// It is created automatically when the account is created.
-/// IMAP allows the user to rename INBOX to something else,
-/// in this case all messages from INBOX are moved to a mailbox
-/// with the new name and the INBOX mailbox still exists and is empty.
-/// In our implementation, we indeed move the underlying mailbox
-/// to the new name (i.e. the new name has the same id as the previous
-/// INBOX), and we create a new empty mailbox for INBOX.
-pub const INBOX: &str = "INBOX";
-
-/// For convenience purpose, we also create some special mailbox
-/// that are described in RFC6154 SPECIAL-USE
-/// @FIXME maybe it should be a configuration parameter
-/// @FIXME maybe we should have a per-mailbox flag mechanism, either an enum or a string, so we
-/// track which mailbox is used for what.
-/// @FIXME Junk could be useful but we don't have any antispam solution yet so...
-/// @FIXME IMAP supports virtual mailbox. \All or \Flagged are intended to be virtual mailboxes.
-/// \Trash might be one, or not one. I don't know what we should do there.
-pub const DRAFTS: &str = "Drafts"; -pub const ARCHIVE: &str = "Archive"; -pub const SENT: &str = "Sent"; -pub const TRASH: &str = "Trash"; - -pub(crate) const MAILBOX_LIST_PK: &str = "mailboxes"; -pub(crate) const MAILBOX_LIST_SK: &str = "list"; - -// ---- User's mailbox list (serialized in K2V) ---- - -#[derive(Serialize, Deserialize)] -pub(crate) struct MailboxList(BTreeMap); - -#[derive(Serialize, Deserialize, Clone, Copy, Debug)] -pub(crate) struct MailboxListEntry { - id_lww: (u64, Option), - uidvalidity: ImapUidvalidity, -} - -impl MailboxListEntry { - fn merge(&mut self, other: &Self) { - // Simple CRDT merge rule - if other.id_lww.0 > self.id_lww.0 - || (other.id_lww.0 == self.id_lww.0 && other.id_lww.1 > self.id_lww.1) - { - self.id_lww = other.id_lww; - } - self.uidvalidity = std::cmp::max(self.uidvalidity, other.uidvalidity); - } -} - -impl MailboxList { - pub(crate) fn new() -> Self { - Self(BTreeMap::new()) - } - - pub(crate) fn merge(&mut self, list2: Self) { - for (k, v) in list2.0.into_iter() { - if let Some(e) = self.0.get_mut(&k) { - e.merge(&v); - } else { - self.0.insert(k, v); - } - } - } - - pub(crate) fn existing_mailbox_names(&self) -> Vec { - self.0 - .iter() - .filter(|(_, v)| v.id_lww.1.is_some()) - .map(|(k, _)| k.to_string()) - .collect() - } - - pub(crate) fn has_mailbox(&self, name: &str) -> bool { - matches!( - self.0.get(name), - Some(MailboxListEntry { - id_lww: (_, Some(_)), - .. - }) - ) - } - - pub(crate) fn get_mailbox(&self, name: &str) -> Option<(ImapUidvalidity, Option)> { - self.0.get(name).map( - |MailboxListEntry { - id_lww: (_, mailbox_id), - uidvalidity, - }| (*uidvalidity, *mailbox_id), - ) - } - - /// Ensures mailbox `name` maps to id `id`. - /// If it already mapped to that, returns None. - /// If a change had to be done, returns Some(new uidvalidity in mailbox). - pub(crate) fn set_mailbox(&mut self, name: &str, id: Option) -> Option { - let (ts, id, uidvalidity) = match self.0.get_mut(name) { - None => { - if id.is_none() { - return None; - } else { - (now_msec(), id, ImapUidvalidity::new(1).unwrap()) - } - } - Some(MailboxListEntry { - id_lww, - uidvalidity, - }) => { - if id_lww.1 == id { - return None; - } else { - ( - std::cmp::max(id_lww.0 + 1, now_msec()), - id, - ImapUidvalidity::new(uidvalidity.get() + 1).unwrap(), - ) - } - } - }; - - self.0.insert( - name.into(), - MailboxListEntry { - id_lww: (ts, id), - uidvalidity, - }, - ); - Some(uidvalidity) - } - - pub(crate) fn update_uidvalidity(&mut self, name: &str, new_uidvalidity: ImapUidvalidity) { - match self.0.get_mut(name) { - None => { - self.0.insert( - name.into(), - MailboxListEntry { - id_lww: (now_msec(), None), - uidvalidity: new_uidvalidity, - }, - ); - } - Some(MailboxListEntry { uidvalidity, .. 
}) => { - *uidvalidity = std::cmp::max(*uidvalidity, new_uidvalidity); - } - } - } - - pub(crate) fn create_mailbox(&mut self, name: &str) -> CreatedMailbox { - if let Some(MailboxListEntry { - id_lww: (_, Some(id)), - uidvalidity, - }) = self.0.get(name) - { - return CreatedMailbox::Existed(*id, *uidvalidity); - } - - let id = gen_ident(); - let uidvalidity = self.set_mailbox(name, Some(id)).unwrap(); - CreatedMailbox::Created(id, uidvalidity) - } - - pub(crate) fn rename_mailbox(&mut self, old_name: &str, new_name: &str) -> Result<()> { - if let Some((uidvalidity, Some(mbid))) = self.get_mailbox(old_name) { - if self.has_mailbox(new_name) { - bail!( - "Cannot rename {} into {}: {} already exists", - old_name, - new_name, - new_name - ); - } - - self.set_mailbox(old_name, None); - self.set_mailbox(new_name, Some(mbid)); - self.update_uidvalidity(new_name, uidvalidity); - Ok(()) - } else { - bail!( - "Cannot rename {} into {}: {} doesn't exist", - old_name, - new_name, - old_name - ); - } - } -} - -pub(crate) enum CreatedMailbox { - Created(UniqueIdent, ImapUidvalidity), - Existed(UniqueIdent, ImapUidvalidity), -} diff --git a/src/mail/query.rs b/src/mail/query.rs deleted file mode 100644 index 3e6fe99..0000000 --- a/src/mail/query.rs +++ /dev/null @@ -1,137 +0,0 @@ -use super::mailbox::MailMeta; -use super::snapshot::FrozenMailbox; -use super::unique_ident::UniqueIdent; -use anyhow::Result; -use futures::future::FutureExt; -use futures::stream::{BoxStream, Stream, StreamExt}; - -/// Query is in charge of fetching efficiently -/// requested data for a list of emails -pub struct Query<'a, 'b> { - pub frozen: &'a FrozenMailbox, - pub emails: &'b [UniqueIdent], - pub scope: QueryScope, -} - -#[derive(Debug)] -pub enum QueryScope { - Index, - Partial, - Full, -} -impl QueryScope { - pub fn union(&self, other: &QueryScope) -> QueryScope { - match (self, other) { - (QueryScope::Full, _) | (_, QueryScope::Full) => QueryScope::Full, - (QueryScope::Partial, _) | (_, QueryScope::Partial) => QueryScope::Partial, - (QueryScope::Index, QueryScope::Index) => QueryScope::Index, - } - } -} - -//type QueryResultStream = Box>>; - -impl<'a, 'b> Query<'a, 'b> { - pub fn fetch(&self) -> BoxStream> { - match self.scope { - QueryScope::Index => Box::pin( - futures::stream::iter(self.emails) - .map(|&uuid| Ok(QueryResult::IndexResult { uuid })), - ), - QueryScope::Partial => Box::pin(self.partial()), - QueryScope::Full => Box::pin(self.full()), - } - } - - // --- functions below are private *for reasons* - fn partial<'d>(&'d self) -> impl Stream> + 'd + Send { - async move { - let maybe_meta_list: Result> = - self.frozen.mailbox.fetch_meta(self.emails).await; - let list_res = maybe_meta_list - .map(|meta_list| { - meta_list - .into_iter() - .zip(self.emails) - .map(|(metadata, &uuid)| Ok(QueryResult::PartialResult { uuid, metadata })) - .collect() - }) - .unwrap_or_else(|e| vec![Err(e)]); - - futures::stream::iter(list_res) - } - .flatten_stream() - } - - fn full<'d>(&'d self) -> impl Stream> + 'd + Send { - self.partial().then(move |maybe_meta| async move { - let meta = maybe_meta?; - - let content = self - .frozen - .mailbox - .fetch_full( - *meta.uuid(), - &meta - .metadata() - .expect("meta to be PartialResult") - .message_key, - ) - .await?; - - Ok(meta.into_full(content).expect("meta to be PartialResult")) - }) - } -} - -#[derive(Debug, Clone)] -pub enum QueryResult { - IndexResult { - uuid: UniqueIdent, - }, - PartialResult { - uuid: UniqueIdent, - metadata: MailMeta, - }, - FullResult { - uuid: 
UniqueIdent, - metadata: MailMeta, - content: Vec, - }, -} -impl QueryResult { - pub fn uuid(&self) -> &UniqueIdent { - match self { - Self::IndexResult { uuid, .. } => uuid, - Self::PartialResult { uuid, .. } => uuid, - Self::FullResult { uuid, .. } => uuid, - } - } - - pub fn metadata(&self) -> Option<&MailMeta> { - match self { - Self::IndexResult { .. } => None, - Self::PartialResult { metadata, .. } => Some(metadata), - Self::FullResult { metadata, .. } => Some(metadata), - } - } - - #[allow(dead_code)] - pub fn content(&self) -> Option<&[u8]> { - match self { - Self::FullResult { content, .. } => Some(content), - _ => None, - } - } - - fn into_full(self, content: Vec) -> Option { - match self { - Self::PartialResult { uuid, metadata } => Some(Self::FullResult { - uuid, - metadata, - content, - }), - _ => None, - } - } -} diff --git a/src/mail/snapshot.rs b/src/mail/snapshot.rs deleted file mode 100644 index ed756b5..0000000 --- a/src/mail/snapshot.rs +++ /dev/null @@ -1,60 +0,0 @@ -use std::sync::Arc; - -use anyhow::Result; - -use super::mailbox::Mailbox; -use super::query::{Query, QueryScope}; -use super::uidindex::UidIndex; -use super::unique_ident::UniqueIdent; - -/// A Frozen Mailbox has a snapshot of the current mailbox -/// state that is desynchronized with the real mailbox state. -/// It's up to the user to choose when their snapshot must be updated -/// to give useful information to their clients -pub struct FrozenMailbox { - pub mailbox: Arc, - pub snapshot: UidIndex, -} - -impl FrozenMailbox { - /// Create a snapshot from a mailbox, the mailbox + the snapshot - /// becomes the "Frozen Mailbox". - pub async fn new(mailbox: Arc) -> Self { - let state = mailbox.current_uid_index().await; - - Self { - mailbox, - snapshot: state, - } - } - - /// Force the synchronization of the inner mailbox - /// but do not update the local snapshot - pub async fn sync(&self) -> Result<()> { - self.mailbox.opportunistic_sync().await - } - - /// Peek snapshot without updating the frozen mailbox - /// Can be useful if you want to plan some writes - /// while sending a diff to the client later - pub async fn peek(&self) -> UidIndex { - self.mailbox.current_uid_index().await - } - - /// Update the FrozenMailbox local snapshot. - /// Returns the old snapshot, so you can build a diff - pub async fn update(&mut self) -> UidIndex { - let old_snapshot = self.snapshot.clone(); - self.snapshot = self.mailbox.current_uid_index().await; - - old_snapshot - } - - pub fn query<'a, 'b>(&'a self, uuids: &'b [UniqueIdent], scope: QueryScope) -> Query<'a, 'b> { - Query { - frozen: self, - emails: uuids, - scope, - } - } -} diff --git a/src/mail/uidindex.rs b/src/mail/uidindex.rs deleted file mode 100644 index 5a06670..0000000 --- a/src/mail/uidindex.rs +++ /dev/null @@ -1,474 +0,0 @@ -use std::num::{NonZeroU32, NonZeroU64}; - -use im::{HashMap, OrdMap, OrdSet}; -use serde::{Deserialize, Deserializer, Serialize, Serializer}; - -use crate::bayou::*; -use crate::mail::unique_ident::UniqueIdent; - -pub type ModSeq = NonZeroU64; -pub type ImapUid = NonZeroU32; -pub type ImapUidvalidity = NonZeroU32; -pub type Flag = String; -pub type IndexEntry = (ImapUid, ModSeq, Vec); - -/// A UidIndex handles the mutable part of a mailbox -/// It is built by running the event log on it -/// Each applied log generates a new UidIndex by cloning the previous one -/// and applying the event. This is why we use immutable datastructures: -/// they are cheap to clone. 
-#[derive(Clone)] -pub struct UidIndex { - // Source of trust - pub table: OrdMap, - - // Indexes optimized for queries - pub idx_by_uid: OrdMap, - pub idx_by_modseq: OrdMap, - pub idx_by_flag: FlagIndex, - - // "Public" Counters - pub uidvalidity: ImapUidvalidity, - pub uidnext: ImapUid, - pub highestmodseq: ModSeq, - - // "Internal" Counters - pub internalseq: ImapUid, - pub internalmodseq: ModSeq, -} - -#[derive(Clone, Serialize, Deserialize, Debug)] -pub enum UidIndexOp { - MailAdd(UniqueIdent, ImapUid, ModSeq, Vec), - MailDel(UniqueIdent), - FlagAdd(UniqueIdent, ModSeq, Vec), - FlagDel(UniqueIdent, ModSeq, Vec), - FlagSet(UniqueIdent, ModSeq, Vec), - BumpUidvalidity(u32), -} - -impl UidIndex { - #[must_use] - pub fn op_mail_add(&self, ident: UniqueIdent, flags: Vec) -> UidIndexOp { - UidIndexOp::MailAdd(ident, self.internalseq, self.internalmodseq, flags) - } - - #[must_use] - pub fn op_mail_del(&self, ident: UniqueIdent) -> UidIndexOp { - UidIndexOp::MailDel(ident) - } - - #[must_use] - pub fn op_flag_add(&self, ident: UniqueIdent, flags: Vec) -> UidIndexOp { - UidIndexOp::FlagAdd(ident, self.internalmodseq, flags) - } - - #[must_use] - pub fn op_flag_del(&self, ident: UniqueIdent, flags: Vec) -> UidIndexOp { - UidIndexOp::FlagDel(ident, self.internalmodseq, flags) - } - - #[must_use] - pub fn op_flag_set(&self, ident: UniqueIdent, flags: Vec) -> UidIndexOp { - UidIndexOp::FlagSet(ident, self.internalmodseq, flags) - } - - #[must_use] - pub fn op_bump_uidvalidity(&self, count: u32) -> UidIndexOp { - UidIndexOp::BumpUidvalidity(count) - } - - // INTERNAL functions to keep state consistent - - fn reg_email(&mut self, ident: UniqueIdent, uid: ImapUid, modseq: ModSeq, flags: &[Flag]) { - // Insert the email in our table - self.table.insert(ident, (uid, modseq, flags.to_owned())); - - // Update the indexes/caches - self.idx_by_uid.insert(uid, ident); - self.idx_by_flag.insert(uid, flags); - self.idx_by_modseq.insert(modseq, ident); - } - - fn unreg_email(&mut self, ident: &UniqueIdent) { - // We do nothing if the mail does not exist - let (uid, modseq, flags) = match self.table.get(ident) { - Some(v) => v, - None => return, - }; - - // Delete all cache entries - self.idx_by_uid.remove(uid); - self.idx_by_flag.remove(*uid, flags); - self.idx_by_modseq.remove(modseq); - - // Remove from source of trust - self.table.remove(ident); - } -} - -impl Default for UidIndex { - fn default() -> Self { - Self { - table: OrdMap::new(), - - idx_by_uid: OrdMap::new(), - idx_by_modseq: OrdMap::new(), - idx_by_flag: FlagIndex::new(), - - uidvalidity: NonZeroU32::new(1).unwrap(), - uidnext: NonZeroU32::new(1).unwrap(), - highestmodseq: NonZeroU64::new(1).unwrap(), - - internalseq: NonZeroU32::new(1).unwrap(), - internalmodseq: NonZeroU64::new(1).unwrap(), - } - } -} - -impl BayouState for UidIndex { - type Op = UidIndexOp; - - fn apply(&self, op: &UidIndexOp) -> Self { - let mut new = self.clone(); - match op { - UidIndexOp::MailAdd(ident, uid, modseq, flags) => { - // Change UIDValidity if there is a UID conflict or a MODSEQ conflict - // @FIXME Need to prove that summing work - // The intuition: we increase the UIDValidity by the number of possible conflicts - if *uid < new.internalseq || *modseq < new.internalmodseq { - let bump_uid = new.internalseq.get() - uid.get(); - let bump_modseq = (new.internalmodseq.get() - modseq.get()) as u32; - new.uidvalidity = - NonZeroU32::new(new.uidvalidity.get() + bump_uid + bump_modseq).unwrap(); - } - - // Assign the real uid of the email - let new_uid = 
new.internalseq; - - // Assign the real modseq of the email and its new flags - let new_modseq = new.internalmodseq; - - // Delete the previous entry if any. - // Our proof has no assumption on `ident` uniqueness, - // so we must handle this case even it is very unlikely - // In this case, we overwrite the email. - // Note: assigning a new UID is mandatory. - new.unreg_email(ident); - - // We record our email and update ou caches - new.reg_email(*ident, new_uid, new_modseq, flags); - - // Update counters - new.highestmodseq = new.internalmodseq; - - new.internalseq = NonZeroU32::new(new.internalseq.get() + 1).unwrap(); - new.internalmodseq = NonZeroU64::new(new.internalmodseq.get() + 1).unwrap(); - - new.uidnext = new.internalseq; - } - UidIndexOp::MailDel(ident) => { - // If the email is known locally, we remove its references in all our indexes - new.unreg_email(ident); - - // We update the counter - new.internalseq = NonZeroU32::new(new.internalseq.get() + 1).unwrap(); - } - UidIndexOp::FlagAdd(ident, candidate_modseq, new_flags) => { - if let Some((uid, email_modseq, existing_flags)) = new.table.get_mut(ident) { - // Bump UIDValidity if required - if *candidate_modseq < new.internalmodseq { - let bump_modseq = - (new.internalmodseq.get() - candidate_modseq.get()) as u32; - new.uidvalidity = - NonZeroU32::new(new.uidvalidity.get() + bump_modseq).unwrap(); - } - - // Add flags to the source of trust and the cache - let mut to_add: Vec = new_flags - .iter() - .filter(|f| !existing_flags.contains(f)) - .cloned() - .collect(); - new.idx_by_flag.insert(*uid, &to_add); - *email_modseq = new.internalmodseq; - new.idx_by_modseq.insert(new.internalmodseq, *ident); - existing_flags.append(&mut to_add); - - // Update counters - new.highestmodseq = new.internalmodseq; - new.internalmodseq = NonZeroU64::new(new.internalmodseq.get() + 1).unwrap(); - } - } - UidIndexOp::FlagDel(ident, candidate_modseq, rm_flags) => { - if let Some((uid, email_modseq, existing_flags)) = new.table.get_mut(ident) { - // Bump UIDValidity if required - if *candidate_modseq < new.internalmodseq { - let bump_modseq = - (new.internalmodseq.get() - candidate_modseq.get()) as u32; - new.uidvalidity = - NonZeroU32::new(new.uidvalidity.get() + bump_modseq).unwrap(); - } - - // Remove flags from the source of trust and the cache - existing_flags.retain(|x| !rm_flags.contains(x)); - new.idx_by_flag.remove(*uid, rm_flags); - - // Register that email has been modified - new.idx_by_modseq.insert(new.internalmodseq, *ident); - *email_modseq = new.internalmodseq; - - // Update counters - new.highestmodseq = new.internalmodseq; - new.internalmodseq = NonZeroU64::new(new.internalmodseq.get() + 1).unwrap(); - } - } - UidIndexOp::FlagSet(ident, candidate_modseq, new_flags) => { - if let Some((uid, email_modseq, existing_flags)) = new.table.get_mut(ident) { - // Bump UIDValidity if required - if *candidate_modseq < new.internalmodseq { - let bump_modseq = - (new.internalmodseq.get() - candidate_modseq.get()) as u32; - new.uidvalidity = - NonZeroU32::new(new.uidvalidity.get() + bump_modseq).unwrap(); - } - - // Remove flags from the source of trust and the cache - let (keep_flags, rm_flags): (Vec, Vec) = existing_flags - .iter() - .cloned() - .partition(|x| new_flags.contains(x)); - *existing_flags = keep_flags; - let mut to_add: Vec = new_flags - .iter() - .filter(|f| !existing_flags.contains(f)) - .cloned() - .collect(); - existing_flags.append(&mut to_add); - new.idx_by_flag.remove(*uid, &rm_flags); - new.idx_by_flag.insert(*uid, &to_add); 
- - // Register that email has been modified - new.idx_by_modseq.insert(new.internalmodseq, *ident); - *email_modseq = new.internalmodseq; - - // Update counters - new.highestmodseq = new.internalmodseq; - new.internalmodseq = NonZeroU64::new(new.internalmodseq.get() + 1).unwrap(); - } - } - UidIndexOp::BumpUidvalidity(count) => { - new.uidvalidity = ImapUidvalidity::new(new.uidvalidity.get() + *count) - .unwrap_or(ImapUidvalidity::new(u32::MAX).unwrap()); - } - } - new - } -} - -// ---- FlagIndex implementation ---- - -#[derive(Clone)] -pub struct FlagIndex(HashMap>); -pub type FlagIter<'a> = im::hashmap::Keys<'a, Flag, OrdSet>; - -impl FlagIndex { - fn new() -> Self { - Self(HashMap::new()) - } - fn insert(&mut self, uid: ImapUid, flags: &[Flag]) { - flags.iter().for_each(|flag| { - self.0 - .entry(flag.clone()) - .or_insert(OrdSet::new()) - .insert(uid); - }); - } - fn remove(&mut self, uid: ImapUid, flags: &[Flag]) { - for flag in flags.iter() { - if let Some(set) = self.0.get_mut(flag) { - set.remove(&uid); - if set.is_empty() { - self.0.remove(flag); - } - } - } - } - - pub fn get(&self, f: &Flag) -> Option<&OrdSet> { - self.0.get(f) - } - - pub fn flags(&self) -> FlagIter { - self.0.keys() - } -} - -// ---- CUSTOM SERIALIZATION AND DESERIALIZATION ---- - -#[derive(Serialize, Deserialize)] -struct UidIndexSerializedRepr { - mails: Vec<(ImapUid, ModSeq, UniqueIdent, Vec)>, - - uidvalidity: ImapUidvalidity, - uidnext: ImapUid, - highestmodseq: ModSeq, - - internalseq: ImapUid, - internalmodseq: ModSeq, -} - -impl<'de> Deserialize<'de> for UidIndex { - fn deserialize(d: D) -> Result - where - D: Deserializer<'de>, - { - let val: UidIndexSerializedRepr = UidIndexSerializedRepr::deserialize(d)?; - - let mut uidindex = UidIndex { - table: OrdMap::new(), - - idx_by_uid: OrdMap::new(), - idx_by_modseq: OrdMap::new(), - idx_by_flag: FlagIndex::new(), - - uidvalidity: val.uidvalidity, - uidnext: val.uidnext, - highestmodseq: val.highestmodseq, - - internalseq: val.internalseq, - internalmodseq: val.internalmodseq, - }; - - val.mails - .iter() - .for_each(|(uid, modseq, uuid, flags)| uidindex.reg_email(*uuid, *uid, *modseq, flags)); - - Ok(uidindex) - } -} - -impl Serialize for UidIndex { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - let mut mails = vec![]; - for (ident, (uid, modseq, flags)) in self.table.iter() { - mails.push((*uid, *modseq, *ident, flags.clone())); - } - - let val = UidIndexSerializedRepr { - mails, - uidvalidity: self.uidvalidity, - uidnext: self.uidnext, - highestmodseq: self.highestmodseq, - internalseq: self.internalseq, - internalmodseq: self.internalmodseq, - }; - - val.serialize(serializer) - } -} - -// ---- TESTS ---- - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_uidindex() { - let mut state = UidIndex::default(); - - // Add message 1 - { - let m = UniqueIdent([0x01; 24]); - let f = vec!["\\Recent".to_string(), "\\Archive".to_string()]; - let ev = state.op_mail_add(m, f); - state = state.apply(&ev); - - // Early checks - assert_eq!(state.table.len(), 1); - let (uid, modseq, flags) = state.table.get(&m).unwrap(); - assert_eq!(*uid, NonZeroU32::new(1).unwrap()); - assert_eq!(*modseq, NonZeroU64::new(1).unwrap()); - assert_eq!(flags.len(), 2); - let ident = state.idx_by_uid.get(&NonZeroU32::new(1).unwrap()).unwrap(); - assert_eq!(&m, ident); - let recent = state.idx_by_flag.0.get("\\Recent").unwrap(); - assert_eq!(recent.len(), 1); - assert_eq!(recent.iter().next().unwrap(), &NonZeroU32::new(1).unwrap()); - 
assert_eq!(state.uidnext, NonZeroU32::new(2).unwrap()); - assert_eq!(state.uidvalidity, NonZeroU32::new(1).unwrap()); - } - - // Add message 2 - { - let m = UniqueIdent([0x02; 24]); - let f = vec!["\\Seen".to_string(), "\\Archive".to_string()]; - let ev = state.op_mail_add(m, f); - state = state.apply(&ev); - - let archive = state.idx_by_flag.0.get("\\Archive").unwrap(); - assert_eq!(archive.len(), 2); - } - - // Add flags to message 1 - { - let m = UniqueIdent([0x01; 24]); - let f = vec!["Important".to_string(), "$cl_1".to_string()]; - let ev = state.op_flag_add(m, f); - state = state.apply(&ev); - } - - // Delete flags from message 1 - { - let m = UniqueIdent([0x01; 24]); - let f = vec!["\\Recent".to_string()]; - let ev = state.op_flag_del(m, f); - state = state.apply(&ev); - - let archive = state.idx_by_flag.0.get("\\Archive").unwrap(); - assert_eq!(archive.len(), 2); - } - - // Delete message 2 - { - let m = UniqueIdent([0x02; 24]); - let ev = state.op_mail_del(m); - state = state.apply(&ev); - - let archive = state.idx_by_flag.0.get("\\Archive").unwrap(); - assert_eq!(archive.len(), 1); - } - - // Add a message 3 concurrent to message 1 (trigger a uid validity change) - { - let m = UniqueIdent([0x03; 24]); - let f = vec!["\\Archive".to_string(), "\\Recent".to_string()]; - let ev = UidIndexOp::MailAdd( - m, - NonZeroU32::new(1).unwrap(), - NonZeroU64::new(1).unwrap(), - f, - ); - state = state.apply(&ev); - } - - // Checks - { - assert_eq!(state.table.len(), 2); - assert!(state.uidvalidity > NonZeroU32::new(1).unwrap()); - - let (last_uid, ident) = state.idx_by_uid.get_max().unwrap(); - assert_eq!(ident, &UniqueIdent([0x03; 24])); - - let archive = state.idx_by_flag.0.get("\\Archive").unwrap(); - assert_eq!(archive.len(), 2); - let mut iter = archive.iter(); - assert_eq!(iter.next().unwrap(), &NonZeroU32::new(1).unwrap()); - assert_eq!(iter.next().unwrap(), last_uid); - } - } -} diff --git a/src/mail/unique_ident.rs b/src/mail/unique_ident.rs deleted file mode 100644 index 0e629db..0000000 --- a/src/mail/unique_ident.rs +++ /dev/null @@ -1,101 +0,0 @@ -use std::str::FromStr; -use std::sync::atomic::{AtomicU64, Ordering}; - -use lazy_static::lazy_static; -use rand::prelude::*; -use serde::{de::Error, Deserialize, Deserializer, Serialize, Serializer}; - -use crate::timestamp::now_msec; - -/// An internal Mail Identifier is composed of two components: -/// - a process identifier, 128 bits, itself composed of: -/// - the timestamp of when the process started, 64 bits -/// - a 64-bit random number -/// - a sequence number, 64 bits -/// They are not part of the protocol but an internal representation -/// required by Aerogramme. -/// Their main property is to be unique without having to rely -/// on synchronization between IMAP processes. -#[derive(Clone, Copy, PartialOrd, Ord, PartialEq, Eq, Hash)] -pub struct UniqueIdent(pub [u8; 24]); - -struct IdentGenerator { - pid: u128, - sn: AtomicU64, -} - -impl IdentGenerator { - fn new() -> Self { - let time = now_msec() as u128; - let rand = thread_rng().gen::() as u128; - Self { - pid: (time << 64) | rand, - sn: AtomicU64::new(0), - } - } - - fn gen(&self) -> UniqueIdent { - let sn = self.sn.fetch_add(1, Ordering::Relaxed); - let mut res = [0u8; 24]; - res[0..16].copy_from_slice(&u128::to_be_bytes(self.pid)); - res[16..24].copy_from_slice(&u64::to_be_bytes(sn)); - UniqueIdent(res) - } -} - -lazy_static! 
{ - static ref GENERATOR: IdentGenerator = IdentGenerator::new(); -} - -pub fn gen_ident() -> UniqueIdent { - GENERATOR.gen() -} - -// -- serde -- - -impl<'de> Deserialize<'de> for UniqueIdent { - fn deserialize(d: D) -> Result - where - D: Deserializer<'de>, - { - let v = String::deserialize(d)?; - UniqueIdent::from_str(&v).map_err(D::Error::custom) - } -} - -impl Serialize for UniqueIdent { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - serializer.serialize_str(&self.to_string()) - } -} - -impl std::fmt::Display for UniqueIdent { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", hex::encode(self.0)) - } -} - -impl std::fmt::Debug for UniqueIdent { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", hex::encode(self.0)) - } -} - -impl FromStr for UniqueIdent { - type Err = &'static str; - - fn from_str(s: &str) -> Result { - let bytes = hex::decode(s).map_err(|_| "invalid hex")?; - - if bytes.len() != 24 { - return Err("bad length"); - } - - let mut tmp = [0u8; 24]; - tmp[..].copy_from_slice(&bytes); - Ok(UniqueIdent(tmp)) - } -} diff --git a/src/main.rs b/src/main.rs deleted file mode 100644 index 43b4dca..0000000 --- a/src/main.rs +++ /dev/null @@ -1,407 +0,0 @@ -use std::io::Read; -use std::path::PathBuf; - -use anyhow::{bail, Context, Result}; -use clap::{Parser, Subcommand}; -use nix::{sys::signal, unistd::Pid}; - -use aerogramme::config::*; -use aerogramme::login::{static_provider::*, *}; -use aerogramme::server::Server; - -#[derive(Parser, Debug)] -#[clap(author, version, about, long_about = None)] -struct Args { - #[clap(subcommand)] - command: Command, - - /// A special mode dedicated to developers, NOT INTENDED FOR PRODUCTION - #[clap(long)] - dev: bool, - - #[clap( - short, - long, - env = "AEROGRAMME_CONFIG", - default_value = "aerogramme.toml" - )] - /// Path to the main Aerogramme configuration file - config_file: PathBuf, -} - -#[derive(Subcommand, Debug)] -enum Command { - #[clap(subcommand)] - /// A daemon to be run by the end user, on a personal device - Companion(CompanionCommand), - - #[clap(subcommand)] - /// A daemon to be run by the service provider, on a server - Provider(ProviderCommand), - - #[clap(subcommand)] - /// Specific tooling, should not be part of a normal workflow, for debug & experimentation only - Tools(ToolsCommand), - //Test, -} - -#[derive(Subcommand, Debug)] -enum ToolsCommand { - /// Manage crypto roots - #[clap(subcommand)] - CryptoRoot(CryptoRootCommand), - - PasswordHash { - #[clap(env = "AEROGRAMME_PASSWORD")] - maybe_password: Option, - }, -} - -#[derive(Subcommand, Debug)] -enum CryptoRootCommand { - /// Generate a new crypto-root protected with a password - New { - #[clap(env = "AEROGRAMME_PASSWORD")] - maybe_password: Option, - }, - /// Generate a new clear text crypto-root, store it securely! 
- NewClearText, - /// Change the password of a crypto key - ChangePassword { - #[clap(env = "AEROGRAMME_OLD_PASSWORD")] - maybe_old_password: Option, - - #[clap(env = "AEROGRAMME_NEW_PASSWORD")] - maybe_new_password: Option, - - #[clap(short, long, env = "AEROGRAMME_CRYPTO_ROOT")] - crypto_root: String, - }, - /// From a given crypto-key, derive one containing only the public key - DeriveIncoming { - #[clap(short, long, env = "AEROGRAMME_CRYPTO_ROOT")] - crypto_root: String, - }, -} - -#[derive(Subcommand, Debug)] -enum CompanionCommand { - /// Runs the IMAP proxy - Daemon, - Reload { - #[clap(short, long, env = "AEROGRAMME_PID")] - pid: Option, - }, - Wizard, - #[clap(subcommand)] - Account(AccountManagement), -} - -#[derive(Subcommand, Debug)] -enum ProviderCommand { - /// Runs the IMAP+LMTP server daemon - Daemon, - /// Reload the daemon - Reload { - #[clap(short, long, env = "AEROGRAMME_PID")] - pid: Option, - }, - /// Manage static accounts - #[clap(subcommand)] - Account(AccountManagement), -} - -#[derive(Subcommand, Debug)] -enum AccountManagement { - /// Add an account - Add { - #[clap(short, long)] - login: String, - #[clap(short, long)] - setup: PathBuf, - }, - /// Delete an account - Delete { - #[clap(short, long)] - login: String, - }, - /// Change password for a given account - ChangePassword { - #[clap(env = "AEROGRAMME_OLD_PASSWORD")] - maybe_old_password: Option, - - #[clap(env = "AEROGRAMME_NEW_PASSWORD")] - maybe_new_password: Option, - - #[clap(short, long)] - login: String, - }, -} - -#[cfg(tokio_unstable)] -fn tracer() { - console_subscriber::init(); -} - -#[cfg(not(tokio_unstable))] -fn tracer() { - tracing_subscriber::fmt::init(); -} - -#[tokio::main] -async fn main() -> Result<()> { - if std::env::var("RUST_LOG").is_err() { - std::env::set_var("RUST_LOG", "main=info,aerogramme=info,k2v_client=info") - } - - // Abort on panic (same behavior as in Go) - std::panic::set_hook(Box::new(|panic_info| { - eprintln!("{}", panic_info); - eprintln!("{:?}", backtrace::Backtrace::new()); - std::process::abort(); - })); - - tracer(); - - let args = Args::parse(); - let any_config = if args.dev { - use std::net::*; - AnyConfig::Provider(ProviderConfig { - pid: None, - imap: None, - imap_unsecure: Some(ImapUnsecureConfig { - bind_addr: SocketAddr::new(IpAddr::V6(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1)), 1143), - }), - dav_unsecure: Some(DavUnsecureConfig { - bind_addr: SocketAddr::new(IpAddr::V6(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1)), 8087), - }), - lmtp: Some(LmtpConfig { - bind_addr: SocketAddr::new(IpAddr::V6(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1)), 1025), - hostname: "example.tld".to_string(), - }), - auth: Some(AuthConfig { - bind_addr: SocketAddr::new( - IpAddr::V6(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1)), - 12345, - ), - }), - users: UserManagement::Demo, - }) - } else { - read_config(args.config_file)? 
- }; - - match (&args.command, any_config) { - (Command::Companion(subcommand), AnyConfig::Companion(config)) => match subcommand { - CompanionCommand::Daemon => { - let server = Server::from_companion_config(config).await?; - server.run().await?; - } - CompanionCommand::Reload { pid } => reload(*pid, config.pid)?, - CompanionCommand::Wizard => { - unimplemented!(); - } - CompanionCommand::Account(cmd) => { - let user_file = config.users.user_list; - account_management(&args.command, cmd, user_file)?; - } - }, - (Command::Provider(subcommand), AnyConfig::Provider(config)) => match subcommand { - ProviderCommand::Daemon => { - let server = Server::from_provider_config(config).await?; - server.run().await?; - } - ProviderCommand::Reload { pid } => reload(*pid, config.pid)?, - ProviderCommand::Account(cmd) => { - let user_file = match config.users { - UserManagement::Static(conf) => conf.user_list, - _ => { - panic!("Only static account management is supported from Aerogramme.") - } - }; - account_management(&args.command, cmd, user_file)?; - } - }, - (Command::Provider(_), AnyConfig::Companion(_)) => { - bail!("Your want to run a 'Provider' command but your configuration file has role 'Companion'."); - } - (Command::Companion(_), AnyConfig::Provider(_)) => { - bail!("Your want to run a 'Companion' command but your configuration file has role 'Provider'."); - } - (Command::Tools(subcommand), _) => match subcommand { - ToolsCommand::PasswordHash { maybe_password } => { - let password = match maybe_password { - Some(pwd) => pwd.clone(), - None => rpassword::prompt_password("Enter password: ")?, - }; - println!("{}", hash_password(&password)?); - } - ToolsCommand::CryptoRoot(crcommand) => match crcommand { - CryptoRootCommand::New { maybe_password } => { - let password = match maybe_password { - Some(pwd) => pwd.clone(), - None => { - let password = rpassword::prompt_password("Enter password: ")?; - let password_confirm = - rpassword::prompt_password("Confirm password: ")?; - if password != password_confirm { - bail!("Passwords don't match."); - } - password - } - }; - let crypto_keys = CryptoKeys::init(); - let cr = CryptoRoot::create_pass(&password, &crypto_keys)?; - println!("{}", cr.0); - } - CryptoRootCommand::NewClearText => { - let crypto_keys = CryptoKeys::init(); - let cr = CryptoRoot::create_cleartext(&crypto_keys); - println!("{}", cr.0); - } - CryptoRootCommand::ChangePassword { - maybe_old_password, - maybe_new_password, - crypto_root, - } => { - let old_password = match maybe_old_password { - Some(pwd) => pwd.to_string(), - None => rpassword::prompt_password("Enter old password: ")?, - }; - - let new_password = match maybe_new_password { - Some(pwd) => pwd.to_string(), - None => { - let password = rpassword::prompt_password("Enter new password: ")?; - let password_confirm = - rpassword::prompt_password("Confirm new password: ")?; - if password != password_confirm { - bail!("Passwords don't match."); - } - password - } - }; - - let keys = CryptoRoot(crypto_root.to_string()).crypto_keys(&old_password)?; - let cr = CryptoRoot::create_pass(&new_password, &keys)?; - println!("{}", cr.0); - } - CryptoRootCommand::DeriveIncoming { crypto_root } => { - let pubkey = CryptoRoot(crypto_root.to_string()).public_key()?; - let cr = CryptoRoot::create_incoming(&pubkey); - println!("{}", cr.0); - } - }, - }, - } - - Ok(()) -} - -fn reload(pid: Option, pid_path: Option) -> Result<()> { - let final_pid = match (pid, pid_path) { - (Some(pid), _) => pid, - (_, Some(path)) => { - let mut f = 
std::fs::OpenOptions::new().read(true).open(path)?; - let mut pidstr = String::new(); - f.read_to_string(&mut pidstr)?; - pidstr.parse::()? - } - _ => bail!("Unable to infer your daemon's PID"), - }; - let pid = Pid::from_raw(final_pid); - signal::kill(pid, signal::Signal::SIGUSR1)?; - Ok(()) -} - -fn account_management(root: &Command, cmd: &AccountManagement, users: PathBuf) -> Result<()> { - let mut ulist: UserList = - read_config(users.clone()).context(format!("'{:?}' must be a user database", users))?; - - match cmd { - AccountManagement::Add { login, setup } => { - tracing::debug!(user = login, "will-create"); - let stp: SetupEntry = read_config(setup.clone()) - .context(format!("'{:?}' must be a setup file", setup))?; - tracing::debug!(user = login, "loaded setup entry"); - - let password = match stp.clear_password { - Some(pwd) => pwd, - None => { - let password = rpassword::prompt_password("Enter password: ")?; - let password_confirm = rpassword::prompt_password("Confirm password: ")?; - if password != password_confirm { - bail!("Passwords don't match."); - } - password - } - }; - - let crypto_keys = CryptoKeys::init(); - let crypto_root = match root { - Command::Provider(_) => CryptoRoot::create_pass(&password, &crypto_keys)?, - Command::Companion(_) => CryptoRoot::create_cleartext(&crypto_keys), - _ => unreachable!(), - }; - - let hash = hash_password(password.as_str()).context("unable to hash password")?; - - ulist.insert( - login.clone(), - UserEntry { - email_addresses: stp.email_addresses, - password: hash, - crypto_root: crypto_root.0, - storage: stp.storage, - }, - ); - - write_config(users.clone(), &ulist)?; - } - AccountManagement::Delete { login } => { - tracing::debug!(user = login, "will-delete"); - ulist.remove(login); - write_config(users.clone(), &ulist)?; - } - AccountManagement::ChangePassword { - maybe_old_password, - maybe_new_password, - login, - } => { - let mut user = ulist.remove(login).context("user must exist first")?; - - let old_password = match maybe_old_password { - Some(pwd) => pwd.to_string(), - None => rpassword::prompt_password("Enter old password: ")?, - }; - - if !verify_password(&old_password, &user.password)? 
{ - bail!(format!("invalid password for login {}", login)); - } - - let crypto_keys = CryptoRoot(user.crypto_root).crypto_keys(&old_password)?; - - let new_password = match maybe_new_password { - Some(pwd) => pwd.to_string(), - None => { - let password = rpassword::prompt_password("Enter new password: ")?; - let password_confirm = rpassword::prompt_password("Confirm new password: ")?; - if password != password_confirm { - bail!("Passwords don't match."); - } - password - } - }; - let new_hash = hash_password(&new_password)?; - let new_crypto_root = CryptoRoot::create_pass(&new_password, &crypto_keys)?; - - user.password = new_hash; - user.crypto_root = new_crypto_root.0; - - ulist.insert(login.clone(), user); - write_config(users.clone(), &ulist)?; - } - }; - - Ok(()) -} diff --git a/src/server.rs b/src/server.rs deleted file mode 100644 index 09e91ad..0000000 --- a/src/server.rs +++ /dev/null @@ -1,147 +0,0 @@ -use std::io::Write; -use std::path::PathBuf; -use std::sync::Arc; - -use anyhow::Result; -use futures::try_join; -use log::*; -use tokio::sync::watch; - -use crate::auth; -use crate::config::*; -use crate::dav; -use crate::imap; -use crate::lmtp::*; -use crate::login::ArcLoginProvider; -use crate::login::{demo_provider::*, ldap_provider::*, static_provider::*}; - -pub struct Server { - lmtp_server: Option>, - imap_unsecure_server: Option, - imap_server: Option, - auth_server: Option, - dav_unsecure_server: Option, - pid_file: Option, -} - -impl Server { - pub async fn from_companion_config(config: CompanionConfig) -> Result { - tracing::info!("Init as companion"); - let login = Arc::new(StaticLoginProvider::new(config.users).await?); - - let lmtp_server = None; - let imap_unsecure_server = Some(imap::new_unsecure(config.imap, login.clone())); - Ok(Self { - lmtp_server, - imap_unsecure_server, - imap_server: None, - auth_server: None, - dav_unsecure_server: None, - pid_file: config.pid, - }) - } - - pub async fn from_provider_config(config: ProviderConfig) -> Result { - tracing::info!("Init as provider"); - let login: ArcLoginProvider = match config.users { - UserManagement::Demo => Arc::new(DemoLoginProvider::new()), - UserManagement::Static(x) => Arc::new(StaticLoginProvider::new(x).await?), - UserManagement::Ldap(x) => Arc::new(LdapLoginProvider::new(x)?), - }; - - let lmtp_server = config.lmtp.map(|lmtp| LmtpServer::new(lmtp, login.clone())); - let imap_unsecure_server = config - .imap_unsecure - .map(|imap| imap::new_unsecure(imap, login.clone())); - let imap_server = config - .imap - .map(|imap| imap::new(imap, login.clone())) - .transpose()?; - let auth_server = config - .auth - .map(|auth| auth::AuthServer::new(auth, login.clone())); - let dav_unsecure_server = config - .dav_unsecure - .map(|dav_config| dav::new_unsecure(dav_config, login.clone())); - - Ok(Self { - lmtp_server, - imap_unsecure_server, - imap_server, - dav_unsecure_server, - auth_server, - pid_file: config.pid, - }) - } - - pub async fn run(self) -> Result<()> { - let pid = std::process::id(); - tracing::info!(pid = pid, "Starting main loops"); - - // write the pid file - if let Some(pid_file) = self.pid_file { - let mut file = std::fs::OpenOptions::new() - .write(true) - .create(true) - .truncate(true) - .open(pid_file)?; - file.write_all(pid.to_string().as_bytes())?; - drop(file); - } - - let (exit_signal, provoke_exit) = watch_ctrl_c(); - let _exit_on_err = move |err: anyhow::Error| { - error!("Error: {}", err); - let _ = provoke_exit.send(true); - }; - - try_join!( - async { - match 
self.lmtp_server.as_ref() { - None => Ok(()), - Some(s) => s.run(exit_signal.clone()).await, - } - }, - async { - match self.imap_unsecure_server { - None => Ok(()), - Some(s) => s.run(exit_signal.clone()).await, - } - }, - async { - match self.imap_server { - None => Ok(()), - Some(s) => s.run(exit_signal.clone()).await, - } - }, - async { - match self.auth_server { - None => Ok(()), - Some(a) => a.run(exit_signal.clone()).await, - } - }, - async { - match self.dav_unsecure_server { - None => Ok(()), - Some(s) => s.run(exit_signal.clone()).await, - } - } - )?; - - Ok(()) - } -} - -pub fn watch_ctrl_c() -> (watch::Receiver, Arc>) { - let (send_cancel, watch_cancel) = watch::channel(false); - let send_cancel = Arc::new(send_cancel); - let send_cancel_2 = send_cancel.clone(); - tokio::spawn(async move { - tokio::signal::ctrl_c() - .await - .expect("failed to install CTRL+C signal handler"); - info!("Received CTRL+C, shutting down."); - send_cancel.send(true).unwrap(); - }); - (watch_cancel, send_cancel_2) -} diff --git a/src/storage/garage.rs b/src/storage/garage.rs deleted file mode 100644 index 7152764..0000000 --- a/src/storage/garage.rs +++ /dev/null @@ -1,538 +0,0 @@ -use aws_sdk_s3::{self as s3, error::SdkError, operation::get_object::GetObjectError}; -use aws_smithy_runtime::client::http::hyper_014::HyperClientBuilder; -use aws_smithy_runtime_api::client::http::SharedHttpClient; -use hyper_rustls::HttpsConnector; -use hyper_util::client::legacy::{connect::HttpConnector, Client as HttpClient}; -use hyper_util::rt::TokioExecutor; -use serde::Serialize; - -use crate::storage::*; - -pub struct GarageRoot { - k2v_http: HttpClient, k2v_client::Body>, - aws_http: SharedHttpClient, -} - -impl GarageRoot { - pub fn new() -> anyhow::Result { - let connector = hyper_rustls::HttpsConnectorBuilder::new() - .with_native_roots()? 
- .https_or_http() - .enable_http1() - .enable_http2() - .build(); - let k2v_http = HttpClient::builder(TokioExecutor::new()).build(connector); - let aws_http = HyperClientBuilder::new().build_https(); - Ok(Self { k2v_http, aws_http }) - } - - pub fn user(&self, conf: GarageConf) -> anyhow::Result> { - let mut unicity: Vec = vec![]; - unicity.extend_from_slice(file!().as_bytes()); - unicity.append(&mut rmp_serde::to_vec(&conf)?); - - Ok(Arc::new(GarageUser { - conf, - aws_http: self.aws_http.clone(), - k2v_http: self.k2v_http.clone(), - unicity, - })) - } -} - -#[derive(Clone, Debug, Serialize)] -pub struct GarageConf { - pub region: String, - pub s3_endpoint: String, - pub k2v_endpoint: String, - pub aws_access_key_id: String, - pub aws_secret_access_key: String, - pub bucket: String, -} - -//@FIXME we should get rid of this builder -//and allocate a S3 + K2V client only once per user -//(and using a shared HTTP client) -#[derive(Clone, Debug)] -pub struct GarageUser { - conf: GarageConf, - aws_http: SharedHttpClient, - k2v_http: HttpClient, k2v_client::Body>, - unicity: Vec, -} - -#[async_trait] -impl IBuilder for GarageUser { - async fn build(&self) -> Result { - let s3_creds = s3::config::Credentials::new( - self.conf.aws_access_key_id.clone(), - self.conf.aws_secret_access_key.clone(), - None, - None, - "aerogramme", - ); - - let sdk_config = aws_config::from_env() - .region(aws_config::Region::new(self.conf.region.clone())) - .credentials_provider(s3_creds) - .http_client(self.aws_http.clone()) - .endpoint_url(self.conf.s3_endpoint.clone()) - .load() - .await; - - let s3_config = aws_sdk_s3::config::Builder::from(&sdk_config) - .force_path_style(true) - .build(); - - let s3_client = aws_sdk_s3::Client::from_conf(s3_config); - - let k2v_config = k2v_client::K2vClientConfig { - endpoint: self.conf.k2v_endpoint.clone(), - region: self.conf.region.clone(), - aws_access_key_id: self.conf.aws_access_key_id.clone(), - aws_secret_access_key: self.conf.aws_secret_access_key.clone(), - bucket: self.conf.bucket.clone(), - user_agent: None, - }; - - let k2v_client = - match k2v_client::K2vClient::new_with_client(k2v_config, self.k2v_http.clone()) { - Err(e) => { - tracing::error!("unable to build k2v client: {}", e); - return Err(StorageError::Internal); - } - Ok(v) => v, - }; - - Ok(Box::new(GarageStore { - bucket: self.conf.bucket.clone(), - s3: s3_client, - k2v: k2v_client, - })) - } - fn unique(&self) -> UnicityBuffer { - UnicityBuffer(self.unicity.clone()) - } -} - -pub struct GarageStore { - bucket: String, - s3: s3::Client, - k2v: k2v_client::K2vClient, -} - -fn causal_to_row_val(row_ref: RowRef, causal_value: k2v_client::CausalValue) -> RowVal { - let new_row_ref = row_ref.with_causality(causal_value.causality.into()); - let row_values = causal_value - .value - .into_iter() - .map(|k2v_value| match k2v_value { - k2v_client::K2vValue::Tombstone => Alternative::Tombstone, - k2v_client::K2vValue::Value(v) => Alternative::Value(v), - }) - .collect::>(); - - RowVal { - row_ref: new_row_ref, - value: row_values, - } -} - -#[async_trait] -impl IStore for GarageStore { - async fn row_fetch<'a>(&self, select: &Selector<'a>) -> Result, StorageError> { - tracing::trace!(select=%select, command="row_fetch"); - let (pk_list, batch_op) = match select { - Selector::Range { - shard, - sort_begin, - sort_end, - } => ( - vec![shard.to_string()], - vec![k2v_client::BatchReadOp { - partition_key: shard, - filter: k2v_client::Filter { - start: Some(sort_begin), - end: Some(sort_end), - 
..k2v_client::Filter::default() - }, - ..k2v_client::BatchReadOp::default() - }], - ), - Selector::List(row_ref_list) => ( - row_ref_list - .iter() - .map(|row_ref| row_ref.uid.shard.to_string()) - .collect::>(), - row_ref_list - .iter() - .map(|row_ref| k2v_client::BatchReadOp { - partition_key: &row_ref.uid.shard, - filter: k2v_client::Filter { - start: Some(&row_ref.uid.sort), - ..k2v_client::Filter::default() - }, - single_item: true, - ..k2v_client::BatchReadOp::default() - }) - .collect::>(), - ), - Selector::Prefix { shard, sort_prefix } => ( - vec![shard.to_string()], - vec![k2v_client::BatchReadOp { - partition_key: shard, - filter: k2v_client::Filter { - prefix: Some(sort_prefix), - ..k2v_client::Filter::default() - }, - ..k2v_client::BatchReadOp::default() - }], - ), - Selector::Single(row_ref) => { - let causal_value = match self - .k2v - .read_item(&row_ref.uid.shard, &row_ref.uid.sort) - .await - { - Err(k2v_client::Error::NotFound) => { - tracing::debug!( - "K2V item not found shard={}, sort={}, bucket={}", - row_ref.uid.shard, - row_ref.uid.sort, - self.bucket, - ); - return Err(StorageError::NotFound); - } - Err(e) => { - tracing::error!( - "K2V read item shard={}, sort={}, bucket={} failed: {}", - row_ref.uid.shard, - row_ref.uid.sort, - self.bucket, - e - ); - return Err(StorageError::Internal); - } - Ok(v) => v, - }; - - let row_val = causal_to_row_val((*row_ref).clone(), causal_value); - return Ok(vec![row_val]); - } - }; - - let all_raw_res = match self.k2v.read_batch(&batch_op).await { - Err(e) => { - tracing::error!( - "k2v read batch failed for {:?}, bucket {} with err: {}", - select, - self.bucket, - e - ); - return Err(StorageError::Internal); - } - Ok(v) => v, - }; - //println!("fetch res -> {:?}", all_raw_res); - - let row_vals = - all_raw_res - .into_iter() - .zip(pk_list.into_iter()) - .fold(vec![], |mut acc, (page, pk)| { - page.items - .into_iter() - .map(|(sk, cv)| causal_to_row_val(RowRef::new(&pk, &sk), cv)) - .for_each(|rr| acc.push(rr)); - - acc - }); - tracing::debug!(fetch_count = row_vals.len(), command = "row_fetch"); - - Ok(row_vals) - } - async fn row_rm<'a>(&self, select: &Selector<'a>) -> Result<(), StorageError> { - tracing::trace!(select=%select, command="row_rm"); - let del_op = match select { - Selector::Range { - shard, - sort_begin, - sort_end, - } => vec![k2v_client::BatchDeleteOp { - partition_key: shard, - prefix: None, - start: Some(sort_begin), - end: Some(sort_end), - single_item: false, - }], - Selector::List(row_ref_list) => { - // Insert null values with causality token = delete - let batch_op = row_ref_list - .iter() - .map(|v| k2v_client::BatchInsertOp { - partition_key: &v.uid.shard, - sort_key: &v.uid.sort, - causality: v.causality.clone().map(|ct| ct.into()), - value: k2v_client::K2vValue::Tombstone, - }) - .collect::>(); - - return match self.k2v.insert_batch(&batch_op).await { - Err(e) => { - tracing::error!("Unable to delete the list of values: {}", e); - Err(StorageError::Internal) - } - Ok(_) => Ok(()), - }; - } - Selector::Prefix { shard, sort_prefix } => vec![k2v_client::BatchDeleteOp { - partition_key: shard, - prefix: Some(sort_prefix), - start: None, - end: None, - single_item: false, - }], - Selector::Single(row_ref) => { - // Insert null values with causality token = delete - let batch_op = vec![k2v_client::BatchInsertOp { - partition_key: &row_ref.uid.shard, - sort_key: &row_ref.uid.sort, - causality: row_ref.causality.clone().map(|ct| ct.into()), - value: k2v_client::K2vValue::Tombstone, - }]; - - return match 
self.k2v.insert_batch(&batch_op).await { - Err(e) => { - tracing::error!("Unable to delete the list of values: {}", e); - Err(StorageError::Internal) - } - Ok(_) => Ok(()), - }; - } - }; - - // Finally here we only have prefix & range - match self.k2v.delete_batch(&del_op).await { - Err(e) => { - tracing::error!("delete batch error: {}", e); - Err(StorageError::Internal) - } - Ok(_) => Ok(()), - } - } - - async fn row_insert(&self, values: Vec) -> Result<(), StorageError> { - tracing::trace!(entries=%values.iter().map(|v| v.row_ref.to_string()).collect::>().join(","), command="row_insert"); - let batch_ops = values - .iter() - .map(|v| k2v_client::BatchInsertOp { - partition_key: &v.row_ref.uid.shard, - sort_key: &v.row_ref.uid.sort, - causality: v.row_ref.causality.clone().map(|ct| ct.into()), - value: v - .value - .iter() - .next() - .map(|cv| match cv { - Alternative::Value(buff) => k2v_client::K2vValue::Value(buff.clone()), - Alternative::Tombstone => k2v_client::K2vValue::Tombstone, - }) - .unwrap_or(k2v_client::K2vValue::Tombstone), - }) - .collect::>(); - - match self.k2v.insert_batch(&batch_ops).await { - Err(e) => { - tracing::error!("k2v can't insert some value: {}", e); - Err(StorageError::Internal) - } - Ok(v) => Ok(v), - } - } - async fn row_poll(&self, value: &RowRef) -> Result { - tracing::trace!(entry=%value, command="row_poll"); - loop { - if let Some(ct) = &value.causality { - match self - .k2v - .poll_item(&value.uid.shard, &value.uid.sort, ct.clone().into(), None) - .await - { - Err(e) => { - tracing::error!("Unable to poll item: {}", e); - return Err(StorageError::Internal); - } - Ok(None) => continue, - Ok(Some(cv)) => return Ok(causal_to_row_val(value.clone(), cv)), - } - } else { - match self.k2v.read_item(&value.uid.shard, &value.uid.sort).await { - Err(k2v_client::Error::NotFound) => { - self.k2v - .insert_item(&value.uid.shard, &value.uid.sort, vec![0u8], None) - .await - .map_err(|e| { - tracing::error!("Unable to insert item in polling logic: {}", e); - StorageError::Internal - })?; - } - Err(e) => { - tracing::error!("Unable to read item in polling logic: {}", e); - return Err(StorageError::Internal); - } - Ok(cv) => return Ok(causal_to_row_val(value.clone(), cv)), - } - } - } - } - - async fn blob_fetch(&self, blob_ref: &BlobRef) -> Result { - tracing::trace!(entry=%blob_ref, command="blob_fetch"); - let maybe_out = self - .s3 - .get_object() - .bucket(self.bucket.to_string()) - .key(blob_ref.0.to_string()) - .send() - .await; - - let object_output = match maybe_out { - Ok(output) => output, - Err(SdkError::ServiceError(x)) => match x.err() { - GetObjectError::NoSuchKey(_) => return Err(StorageError::NotFound), - e => { - tracing::warn!("Blob Fetch Error, Service Error: {}", e); - return Err(StorageError::Internal); - } - }, - Err(e) => { - tracing::warn!("Blob Fetch Error, {}", e); - return Err(StorageError::Internal); - } - }; - - let buffer = match object_output.body.collect().await { - Ok(aggreg) => aggreg.to_vec(), - Err(e) => { - tracing::warn!("Fetching body failed with {}", e); - return Err(StorageError::Internal); - } - }; - - let mut bv = BlobVal::new(blob_ref.clone(), buffer); - if let Some(meta) = object_output.metadata { - bv.meta = meta; - } - tracing::debug!("Fetched {}/{}", self.bucket, blob_ref.0); - Ok(bv) - } - async fn blob_insert(&self, blob_val: BlobVal) -> Result<(), StorageError> { - tracing::trace!(entry=%blob_val.blob_ref, command="blob_insert"); - let streamable_value = s3::primitives::ByteStream::from(blob_val.value); - - let 
maybe_send = self - .s3 - .put_object() - .bucket(self.bucket.to_string()) - .key(blob_val.blob_ref.0.to_string()) - .set_metadata(Some(blob_val.meta)) - .body(streamable_value) - .send() - .await; - - match maybe_send { - Err(e) => { - tracing::error!("unable to send object: {}", e); - Err(StorageError::Internal) - } - Ok(_) => { - tracing::debug!("Inserted {}/{}", self.bucket, blob_val.blob_ref.0); - Ok(()) - } - } - } - async fn blob_copy(&self, src: &BlobRef, dst: &BlobRef) -> Result<(), StorageError> { - tracing::trace!(src=%src, dst=%dst, command="blob_copy"); - let maybe_copy = self - .s3 - .copy_object() - .bucket(self.bucket.to_string()) - .key(dst.0.clone()) - .copy_source(format!("/{}/{}", self.bucket.to_string(), src.0.clone())) - .send() - .await; - - match maybe_copy { - Err(e) => { - tracing::error!( - "unable to copy object {} to {} (bucket: {}), error: {}", - src.0, - dst.0, - self.bucket, - e - ); - Err(StorageError::Internal) - } - Ok(_) => { - tracing::debug!("copied {} to {} (bucket: {})", src.0, dst.0, self.bucket); - Ok(()) - } - } - } - async fn blob_list(&self, prefix: &str) -> Result, StorageError> { - tracing::trace!(prefix = prefix, command = "blob_list"); - let maybe_list = self - .s3 - .list_objects_v2() - .bucket(self.bucket.to_string()) - .prefix(prefix) - .into_paginator() - .send() - .try_collect() - .await; - - match maybe_list { - Err(e) => { - tracing::error!( - "listing prefix {} on bucket {} failed: {}", - prefix, - self.bucket, - e - ); - Err(StorageError::Internal) - } - Ok(pagin_list_out) => Ok(pagin_list_out - .into_iter() - .map(|list_out| list_out.contents.unwrap_or(vec![])) - .flatten() - .map(|obj| BlobRef(obj.key.unwrap_or(String::new()))) - .collect::>()), - } - } - async fn blob_rm(&self, blob_ref: &BlobRef) -> Result<(), StorageError> { - tracing::trace!(entry=%blob_ref, command="blob_rm"); - let maybe_delete = self - .s3 - .delete_object() - .bucket(self.bucket.to_string()) - .key(blob_ref.0.clone()) - .send() - .await; - - match maybe_delete { - Err(e) => { - tracing::error!( - "unable to delete {} (bucket: {}), error {}", - blob_ref.0, - self.bucket, - e - ); - Err(StorageError::Internal) - } - Ok(_) => { - tracing::debug!("deleted {} (bucket: {})", blob_ref.0, self.bucket); - Ok(()) - } - } - } -} diff --git a/src/storage/in_memory.rs b/src/storage/in_memory.rs deleted file mode 100644 index 3c3a94c..0000000 --- a/src/storage/in_memory.rs +++ /dev/null @@ -1,334 +0,0 @@ -use crate::storage::*; -use std::collections::{BTreeMap, HashMap}; -use std::ops::Bound::{self, Excluded, Included, Unbounded}; -use std::sync::{Arc, RwLock}; -use tokio::sync::Notify; - -/// This implementation is very inneficient, and not completely correct -/// Indeed, when the connector is dropped, the memory is freed. -/// It means that when a user disconnects, its data are lost. -/// It's intended only for basic debugging, do not use it for advanced tests... 
- -#[derive(Debug, Default)] -pub struct MemDb(tokio::sync::Mutex>>); -impl MemDb { - pub fn new() -> Self { - Self(tokio::sync::Mutex::new(HashMap::new())) - } - - pub async fn builder(&self, username: &str) -> Arc { - let mut global_storage = self.0.lock().await; - global_storage - .entry(username.to_string()) - .or_insert(MemBuilder::new(username)) - .clone() - } -} - -#[derive(Debug, Clone)] -enum InternalData { - Tombstone, - Value(Vec), -} -impl InternalData { - fn to_alternative(&self) -> Alternative { - match self { - Self::Tombstone => Alternative::Tombstone, - Self::Value(x) => Alternative::Value(x.clone()), - } - } -} - -#[derive(Debug)] -struct InternalRowVal { - data: Vec, - version: u64, - change: Arc, -} -impl std::default::Default for InternalRowVal { - fn default() -> Self { - Self { - data: vec![], - version: 1, - change: Arc::new(Notify::new()), - } - } -} -impl InternalRowVal { - fn concurrent_values(&self) -> Vec { - self.data.iter().map(InternalData::to_alternative).collect() - } - - fn to_row_val(&self, row_ref: RowRef) -> RowVal { - RowVal { - row_ref: row_ref.with_causality(self.version.to_string()), - value: self.concurrent_values(), - } - } -} - -#[derive(Debug, Default, Clone)] -struct InternalBlobVal { - data: Vec, - metadata: HashMap, -} -impl InternalBlobVal { - fn to_blob_val(&self, bref: &BlobRef) -> BlobVal { - BlobVal { - blob_ref: bref.clone(), - meta: self.metadata.clone(), - value: self.data.clone(), - } - } -} - -type ArcRow = Arc>>>; -type ArcBlob = Arc>>; - -#[derive(Clone, Debug)] -pub struct MemBuilder { - unicity: Vec, - row: ArcRow, - blob: ArcBlob, -} - -impl MemBuilder { - pub fn new(user: &str) -> Arc { - tracing::debug!("initialize membuilder for {}", user); - let mut unicity: Vec = vec![]; - unicity.extend_from_slice(file!().as_bytes()); - unicity.extend_from_slice(user.as_bytes()); - Arc::new(Self { - unicity, - row: Arc::new(RwLock::new(HashMap::new())), - blob: Arc::new(RwLock::new(BTreeMap::new())), - }) - } -} - -#[async_trait] -impl IBuilder for MemBuilder { - async fn build(&self) -> Result { - Ok(Box::new(MemStore { - row: self.row.clone(), - blob: self.blob.clone(), - })) - } - - fn unique(&self) -> UnicityBuffer { - UnicityBuffer(self.unicity.clone()) - } -} - -pub struct MemStore { - row: ArcRow, - blob: ArcBlob, -} - -fn prefix_last_bound(prefix: &str) -> Bound { - let mut sort_end = prefix.to_string(); - match sort_end.pop() { - None => Unbounded, - Some(ch) => { - let nc = char::from_u32(ch as u32 + 1).unwrap(); - sort_end.push(nc); - Excluded(sort_end) - } - } -} - -impl MemStore { - fn row_rm_single(&self, entry: &RowRef) -> Result<(), StorageError> { - tracing::trace!(entry=%entry, command="row_rm_single"); - let mut store = self.row.write().or(Err(StorageError::Internal))?; - let shard = &entry.uid.shard; - let sort = &entry.uid.sort; - - let cauz = match entry.causality.as_ref().map(|v| v.parse::()) { - Some(Ok(v)) => v, - _ => 0, - }; - - let bt = store.entry(shard.to_string()).or_default(); - let intval = bt.entry(sort.to_string()).or_default(); - - if cauz == intval.version { - intval.data.clear(); - } - intval.data.push(InternalData::Tombstone); - intval.version += 1; - intval.change.notify_waiters(); - - Ok(()) - } -} - -#[async_trait] -impl IStore for MemStore { - async fn row_fetch<'a>(&self, select: &Selector<'a>) -> Result, StorageError> { - tracing::trace!(select=%select, command="row_fetch"); - let store = self.row.read().or(Err(StorageError::Internal))?; - - match select { - Selector::Range { - shard, - 
sort_begin, - sort_end, - } => Ok(store - .get(*shard) - .unwrap_or(&BTreeMap::new()) - .range(( - Included(sort_begin.to_string()), - Excluded(sort_end.to_string()), - )) - .map(|(k, v)| v.to_row_val(RowRef::new(shard, k))) - .collect::>()), - Selector::List(rlist) => { - let mut acc = vec![]; - for row_ref in rlist { - let maybe_intval = store - .get(&row_ref.uid.shard) - .map(|v| v.get(&row_ref.uid.sort)) - .flatten(); - if let Some(intval) = maybe_intval { - acc.push(intval.to_row_val(row_ref.clone())); - } - } - Ok(acc) - } - Selector::Prefix { shard, sort_prefix } => { - let last_bound = prefix_last_bound(sort_prefix); - - Ok(store - .get(*shard) - .unwrap_or(&BTreeMap::new()) - .range((Included(sort_prefix.to_string()), last_bound)) - .map(|(k, v)| v.to_row_val(RowRef::new(shard, k))) - .collect::>()) - } - Selector::Single(row_ref) => { - let intval = store - .get(&row_ref.uid.shard) - .ok_or(StorageError::NotFound)? - .get(&row_ref.uid.sort) - .ok_or(StorageError::NotFound)?; - Ok(vec![intval.to_row_val((*row_ref).clone())]) - } - } - } - - async fn row_rm<'a>(&self, select: &Selector<'a>) -> Result<(), StorageError> { - tracing::trace!(select=%select, command="row_rm"); - - let values = match select { - Selector::Range { .. } | Selector::Prefix { .. } => self - .row_fetch(select) - .await? - .into_iter() - .map(|rv| rv.row_ref) - .collect::>(), - Selector::List(rlist) => rlist.clone(), - Selector::Single(row_ref) => vec![(*row_ref).clone()], - }; - - for v in values.into_iter() { - self.row_rm_single(&v)?; - } - Ok(()) - } - - async fn row_insert(&self, values: Vec) -> Result<(), StorageError> { - tracing::trace!(entries=%values.iter().map(|v| v.row_ref.to_string()).collect::>().join(","), command="row_insert"); - let mut store = self.row.write().or(Err(StorageError::Internal))?; - for v in values.into_iter() { - let shard = v.row_ref.uid.shard; - let sort = v.row_ref.uid.sort; - - let val = match v.value.into_iter().next() { - Some(Alternative::Value(x)) => x, - _ => vec![], - }; - - let cauz = match v.row_ref.causality.map(|v| v.parse::()) { - Some(Ok(v)) => v, - _ => 0, - }; - - let bt = store.entry(shard).or_default(); - let intval = bt.entry(sort).or_default(); - - if cauz == intval.version { - intval.data.clear(); - } - intval.data.push(InternalData::Value(val)); - intval.version += 1; - intval.change.notify_waiters(); - } - Ok(()) - } - async fn row_poll(&self, value: &RowRef) -> Result { - tracing::trace!(entry=%value, command="row_poll"); - let shard = &value.uid.shard; - let sort = &value.uid.sort; - let cauz = match value.causality.as_ref().map(|v| v.parse::()) { - Some(Ok(v)) => v, - _ => 0, - }; - - let notify_me = { - let mut store = self.row.write().or(Err(StorageError::Internal))?; - let bt = store.entry(shard.to_string()).or_default(); - let intval = bt.entry(sort.to_string()).or_default(); - - if intval.version != cauz { - return Ok(intval.to_row_val(value.clone())); - } - intval.change.clone() - }; - - notify_me.notified().await; - - let res = self.row_fetch(&Selector::Single(value)).await?; - res.into_iter().next().ok_or(StorageError::NotFound) - } - - async fn blob_fetch(&self, blob_ref: &BlobRef) -> Result { - tracing::trace!(entry=%blob_ref, command="blob_fetch"); - let store = self.blob.read().or(Err(StorageError::Internal))?; - store - .get(&blob_ref.0) - .ok_or(StorageError::NotFound) - .map(|v| v.to_blob_val(blob_ref)) - } - async fn blob_insert(&self, blob_val: BlobVal) -> Result<(), StorageError> { - tracing::trace!(entry=%blob_val.blob_ref, 
command="blob_insert"); - let mut store = self.blob.write().or(Err(StorageError::Internal))?; - let entry = store.entry(blob_val.blob_ref.0.clone()).or_default(); - entry.data = blob_val.value.clone(); - entry.metadata = blob_val.meta.clone(); - Ok(()) - } - async fn blob_copy(&self, src: &BlobRef, dst: &BlobRef) -> Result<(), StorageError> { - tracing::trace!(src=%src, dst=%dst, command="blob_copy"); - let mut store = self.blob.write().or(Err(StorageError::Internal))?; - let blob_src = store.entry(src.0.clone()).or_default().clone(); - store.insert(dst.0.clone(), blob_src); - Ok(()) - } - async fn blob_list(&self, prefix: &str) -> Result, StorageError> { - tracing::trace!(prefix = prefix, command = "blob_list"); - let store = self.blob.read().or(Err(StorageError::Internal))?; - let last_bound = prefix_last_bound(prefix); - let blist = store - .range((Included(prefix.to_string()), last_bound)) - .map(|(k, _)| BlobRef(k.to_string())) - .collect::>(); - Ok(blist) - } - async fn blob_rm(&self, blob_ref: &BlobRef) -> Result<(), StorageError> { - tracing::trace!(entry=%blob_ref, command="blob_rm"); - let mut store = self.blob.write().or(Err(StorageError::Internal))?; - store.remove(&blob_ref.0); - Ok(()) - } -} diff --git a/src/storage/mod.rs b/src/storage/mod.rs deleted file mode 100644 index 1f86f71..0000000 --- a/src/storage/mod.rs +++ /dev/null @@ -1,179 +0,0 @@ -/* - * - * This abstraction goal is to leverage all the semantic of Garage K2V+S3, - * to be as tailored as possible to it ; it aims to be a zero-cost abstraction - * compared to when we where directly using the K2V+S3 client. - * - * My idea: we can encapsulate the causality token - * into the object system so it is not exposed. - */ - -pub mod garage; -pub mod in_memory; - -use async_trait::async_trait; -use std::collections::HashMap; -use std::hash::Hash; -use std::sync::Arc; - -#[derive(Debug, Clone)] -pub enum Alternative { - Tombstone, - Value(Vec), -} -type ConcurrentValues = Vec; - -#[derive(Debug, Clone)] -pub enum StorageError { - NotFound, - Internal, -} -impl std::fmt::Display for StorageError { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.write_str("Storage Error: ")?; - match self { - Self::NotFound => f.write_str("Item not found"), - Self::Internal => f.write_str("An internal error occured"), - } - } -} -impl std::error::Error for StorageError {} - -#[derive(Debug, Clone, PartialEq)] -pub struct RowUid { - pub shard: String, - pub sort: String, -} - -#[derive(Debug, Clone, PartialEq)] -pub struct RowRef { - pub uid: RowUid, - pub causality: Option, -} -impl std::fmt::Display for RowRef { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!( - f, - "RowRef({}, {}, {:?})", - self.uid.shard, self.uid.sort, self.causality - ) - } -} - -impl RowRef { - pub fn new(shard: &str, sort: &str) -> Self { - Self { - uid: RowUid { - shard: shard.to_string(), - sort: sort.to_string(), - }, - causality: None, - } - } - pub fn with_causality(mut self, causality: String) -> Self { - self.causality = Some(causality); - self - } -} - -#[derive(Debug, Clone)] -pub struct RowVal { - pub row_ref: RowRef, - pub value: ConcurrentValues, -} - -impl RowVal { - pub fn new(row_ref: RowRef, value: Vec) -> Self { - Self { - row_ref, - value: vec![Alternative::Value(value)], - } - } -} - -#[derive(Debug, Clone)] -pub struct BlobRef(pub String); -impl std::fmt::Display for BlobRef { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "BlobRef({})", self.0) - } -} 
- -#[derive(Debug, Clone)] -pub struct BlobVal { - pub blob_ref: BlobRef, - pub meta: HashMap, - pub value: Vec, -} -impl BlobVal { - pub fn new(blob_ref: BlobRef, value: Vec) -> Self { - Self { - blob_ref, - value, - meta: HashMap::new(), - } - } - - pub fn with_meta(mut self, k: String, v: String) -> Self { - self.meta.insert(k, v); - self - } -} - -#[derive(Debug)] -pub enum Selector<'a> { - Range { - shard: &'a str, - sort_begin: &'a str, - sort_end: &'a str, - }, - List(Vec), // list of (shard_key, sort_key) - #[allow(dead_code)] - Prefix { - shard: &'a str, - sort_prefix: &'a str, - }, - Single(&'a RowRef), -} -impl<'a> std::fmt::Display for Selector<'a> { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Self::Range { - shard, - sort_begin, - sort_end, - } => write!(f, "Range({}, [{}, {}[)", shard, sort_begin, sort_end), - Self::List(list) => write!(f, "List({:?})", list), - Self::Prefix { shard, sort_prefix } => write!(f, "Prefix({}, {})", shard, sort_prefix), - Self::Single(row_ref) => write!(f, "Single({})", row_ref), - } - } -} - -#[async_trait] -pub trait IStore { - async fn row_fetch<'a>(&self, select: &Selector<'a>) -> Result, StorageError>; - async fn row_rm<'a>(&self, select: &Selector<'a>) -> Result<(), StorageError>; - async fn row_insert(&self, values: Vec) -> Result<(), StorageError>; - async fn row_poll(&self, value: &RowRef) -> Result; - - async fn blob_fetch(&self, blob_ref: &BlobRef) -> Result; - async fn blob_insert(&self, blob_val: BlobVal) -> Result<(), StorageError>; - async fn blob_copy(&self, src: &BlobRef, dst: &BlobRef) -> Result<(), StorageError>; - async fn blob_list(&self, prefix: &str) -> Result, StorageError>; - async fn blob_rm(&self, blob_ref: &BlobRef) -> Result<(), StorageError>; -} - -#[derive(Clone, Debug, PartialEq, Eq, Hash)] -pub struct UnicityBuffer(Vec); - -#[async_trait] -pub trait IBuilder: std::fmt::Debug { - async fn build(&self) -> Result; - - /// Returns an opaque buffer that uniquely identifies this builder - fn unique(&self) -> UnicityBuffer; -} - -pub type Builder = Arc; -pub type Store = Box; diff --git a/src/timestamp.rs b/src/timestamp.rs deleted file mode 100644 index 76cb74b..0000000 --- a/src/timestamp.rs +++ /dev/null @@ -1,65 +0,0 @@ -use rand::prelude::*; -use std::str::FromStr; -use std::time::{SystemTime, UNIX_EPOCH}; - -/// Returns milliseconds since UNIX Epoch -pub fn now_msec() -> u64 { - SystemTime::now() - .duration_since(UNIX_EPOCH) - .expect("Fix your clock :o") - .as_millis() as u64 -} - -#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)] -pub struct Timestamp { - pub msec: u64, - pub rand: u64, -} - -impl Timestamp { - #[allow(dead_code)] - // 2023-05-15 try to make clippy happy and not sure if this fn will be used in the future. 
- pub fn now() -> Self { - let mut rng = thread_rng(); - Self { - msec: now_msec(), - rand: rng.gen::(), - } - } - - pub fn after(other: &Self) -> Self { - let mut rng = thread_rng(); - Self { - msec: std::cmp::max(now_msec(), other.msec + 1), - rand: rng.gen::(), - } - } - - pub fn zero() -> Self { - Self { msec: 0, rand: 0 } - } -} - -impl ToString for Timestamp { - fn to_string(&self) -> String { - let mut bytes = [0u8; 16]; - bytes[0..8].copy_from_slice(&u64::to_be_bytes(self.msec)); - bytes[8..16].copy_from_slice(&u64::to_be_bytes(self.rand)); - hex::encode(bytes) - } -} - -impl FromStr for Timestamp { - type Err = &'static str; - - fn from_str(s: &str) -> Result { - let bytes = hex::decode(s).map_err(|_| "invalid hex")?; - if bytes.len() != 16 { - return Err("bad length"); - } - Ok(Self { - msec: u64::from_be_bytes(bytes[0..8].try_into().unwrap()), - rand: u64::from_be_bytes(bytes[8..16].try_into().unwrap()), - }) - } -} diff --git a/src/user.rs b/src/user.rs deleted file mode 100644 index a38b9c1..0000000 --- a/src/user.rs +++ /dev/null @@ -1,313 +0,0 @@ -use std::collections::{BTreeMap, HashMap}; -use std::sync::{Arc, Weak}; - -use anyhow::{anyhow, bail, Result}; -use lazy_static::lazy_static; -use serde::{Deserialize, Serialize}; -use tokio::sync::watch; - -use crate::cryptoblob::{open_deserialize, seal_serialize}; -use crate::login::Credentials; -use crate::mail::incoming::incoming_mail_watch_process; -use crate::mail::mailbox::Mailbox; -use crate::mail::uidindex::ImapUidvalidity; -use crate::mail::unique_ident::{gen_ident, UniqueIdent}; -use crate::storage; -use crate::timestamp::now_msec; - -use crate::mail::namespace::{MAILBOX_HIERARCHY_DELIMITER, INBOX, DRAFTS, ARCHIVE, SENT, TRASH, MAILBOX_LIST_PK, MAILBOX_LIST_SK,MailboxList,CreatedMailbox}; - -//@FIXME User should be totally rewriten -//to extract the local mailbox list -//to the mail/namespace.rs file (and mailbox list should be reworded as mail namespace) - -pub struct User { - pub username: String, - pub creds: Credentials, - pub storage: storage::Store, - pub mailboxes: std::sync::Mutex>>, - - tx_inbox_id: watch::Sender>, -} - -impl User { - pub async fn new(username: String, creds: Credentials) -> Result> { - let cache_key = (username.clone(), creds.storage.unique()); - - { - let cache = USER_CACHE.lock().unwrap(); - if let Some(u) = cache.get(&cache_key).and_then(Weak::upgrade) { - return Ok(u); - } - } - - let user = Self::open(username, creds).await?; - - let mut cache = USER_CACHE.lock().unwrap(); - if let Some(concurrent_user) = cache.get(&cache_key).and_then(Weak::upgrade) { - drop(user); - Ok(concurrent_user) - } else { - cache.insert(cache_key, Arc::downgrade(&user)); - Ok(user) - } - } - - /// Lists user's available mailboxes - pub async fn list_mailboxes(&self) -> Result> { - let (list, _ct) = self.load_mailbox_list().await?; - Ok(list.existing_mailbox_names()) - } - - /// Opens an existing mailbox given its IMAP name. - pub async fn open_mailbox(&self, name: &str) -> Result>> { - let (mut list, ct) = self.load_mailbox_list().await?; - - //@FIXME it could be a trace or an opentelemtry trace thing. 
- // Be careful to not leak sensible data - /* - eprintln!("List of mailboxes:"); - for ent in list.0.iter() { - eprintln!(" - {:?}", ent); - } - */ - - if let Some((uidvalidity, Some(mbid))) = list.get_mailbox(name) { - let mb = self.open_mailbox_by_id(mbid, uidvalidity).await?; - let mb_uidvalidity = mb.current_uid_index().await.uidvalidity; - if mb_uidvalidity > uidvalidity { - list.update_uidvalidity(name, mb_uidvalidity); - self.save_mailbox_list(&list, ct).await?; - } - Ok(Some(mb)) - } else { - Ok(None) - } - } - - /// Check whether mailbox exists - pub async fn has_mailbox(&self, name: &str) -> Result { - let (list, _ct) = self.load_mailbox_list().await?; - Ok(list.has_mailbox(name)) - } - - /// Creates a new mailbox in the user's IMAP namespace. - pub async fn create_mailbox(&self, name: &str) -> Result<()> { - if name.ends_with(MAILBOX_HIERARCHY_DELIMITER) { - bail!("Invalid mailbox name: {}", name); - } - - let (mut list, ct) = self.load_mailbox_list().await?; - match list.create_mailbox(name) { - CreatedMailbox::Created(_, _) => { - self.save_mailbox_list(&list, ct).await?; - Ok(()) - } - CreatedMailbox::Existed(_, _) => Err(anyhow!("Mailbox {} already exists", name)), - } - } - - /// Deletes a mailbox in the user's IMAP namespace. - pub async fn delete_mailbox(&self, name: &str) -> Result<()> { - if name == INBOX { - bail!("Cannot delete INBOX"); - } - - let (mut list, ct) = self.load_mailbox_list().await?; - if list.has_mailbox(name) { - //@TODO: actually delete mailbox contents - list.set_mailbox(name, None); - self.save_mailbox_list(&list, ct).await?; - Ok(()) - } else { - bail!("Mailbox {} does not exist", name); - } - } - - /// Renames a mailbox in the user's IMAP namespace. - pub async fn rename_mailbox(&self, old_name: &str, new_name: &str) -> Result<()> { - let (mut list, ct) = self.load_mailbox_list().await?; - - if old_name.ends_with(MAILBOX_HIERARCHY_DELIMITER) { - bail!("Invalid mailbox name: {}", old_name); - } - if new_name.ends_with(MAILBOX_HIERARCHY_DELIMITER) { - bail!("Invalid mailbox name: {}", new_name); - } - - if old_name == INBOX { - list.rename_mailbox(old_name, new_name)?; - if !self.ensure_inbox_exists(&mut list, &ct).await? 
{ - self.save_mailbox_list(&list, ct).await?; - } - } else { - let names = list.existing_mailbox_names(); - - let old_name_w_delim = format!("{}{}", old_name, MAILBOX_HIERARCHY_DELIMITER); - let new_name_w_delim = format!("{}{}", new_name, MAILBOX_HIERARCHY_DELIMITER); - - if names - .iter() - .any(|x| x == new_name || x.starts_with(&new_name_w_delim)) - { - bail!("Mailbox {} already exists", new_name); - } - - for name in names.iter() { - if name == old_name { - list.rename_mailbox(name, new_name)?; - } else if let Some(tail) = name.strip_prefix(&old_name_w_delim) { - let nnew = format!("{}{}", new_name_w_delim, tail); - list.rename_mailbox(name, &nnew)?; - } - } - - self.save_mailbox_list(&list, ct).await?; - } - Ok(()) - } - - // ---- Internal user & mailbox management ---- - - async fn open(username: String, creds: Credentials) -> Result> { - let storage = creds.storage.build().await?; - - let (tx_inbox_id, rx_inbox_id) = watch::channel(None); - - let user = Arc::new(Self { - username, - creds: creds.clone(), - storage, - tx_inbox_id, - mailboxes: std::sync::Mutex::new(HashMap::new()), - }); - - // Ensure INBOX exists (done inside load_mailbox_list) - user.load_mailbox_list().await?; - - tokio::spawn(incoming_mail_watch_process( - Arc::downgrade(&user), - user.creds.clone(), - rx_inbox_id, - )); - - Ok(user) - } - - pub(super) async fn open_mailbox_by_id( - &self, - id: UniqueIdent, - min_uidvalidity: ImapUidvalidity, - ) -> Result> { - { - let cache = self.mailboxes.lock().unwrap(); - if let Some(mb) = cache.get(&id).and_then(Weak::upgrade) { - return Ok(mb); - } - } - - let mb = Arc::new(Mailbox::open(&self.creds, id, min_uidvalidity).await?); - - let mut cache = self.mailboxes.lock().unwrap(); - if let Some(concurrent_mb) = cache.get(&id).and_then(Weak::upgrade) { - drop(mb); // we worked for nothing but at least we didn't starve someone else - Ok(concurrent_mb) - } else { - cache.insert(id, Arc::downgrade(&mb)); - Ok(mb) - } - } - - // ---- Mailbox list management ---- - - async fn load_mailbox_list(&self) -> Result<(MailboxList, Option)> { - let row_ref = storage::RowRef::new(MAILBOX_LIST_PK, MAILBOX_LIST_SK); - let (mut list, row) = match self - .storage - .row_fetch(&storage::Selector::Single(&row_ref)) - .await - { - Err(storage::StorageError::NotFound) => (MailboxList::new(), None), - Err(e) => return Err(e.into()), - Ok(rv) => { - let mut list = MailboxList::new(); - let (row_ref, row_vals) = match rv.into_iter().next() { - Some(row_val) => (row_val.row_ref, row_val.value), - None => (row_ref, vec![]), - }; - - for v in row_vals { - if let storage::Alternative::Value(vbytes) = v { - let list2 = - open_deserialize::(&vbytes, &self.creds.keys.master)?; - list.merge(list2); - } - } - (list, Some(row_ref)) - } - }; - - let is_default_mbx_missing = [DRAFTS, ARCHIVE, SENT, TRASH] - .iter() - .map(|mbx| list.create_mailbox(mbx)) - .fold(false, |acc, r| { - acc || matches!(r, CreatedMailbox::Created(..)) - }); - let is_inbox_missing = self.ensure_inbox_exists(&mut list, &row).await?; - if is_default_mbx_missing && !is_inbox_missing { - // It's the only case where we created some mailboxes and not saved them - // So we save them! - self.save_mailbox_list(&list, row.clone()).await?; - } - - Ok((list, row)) - } - - async fn ensure_inbox_exists( - &self, - list: &mut MailboxList, - ct: &Option, - ) -> Result { - // If INBOX doesn't exist, create a new mailbox with that name - // and save new mailbox list. 
- // Also, ensure that the mpsc::watch that keeps track of the - // inbox id is up-to-date. - let saved; - let (inbox_id, inbox_uidvalidity) = match list.create_mailbox(INBOX) { - CreatedMailbox::Created(i, v) => { - self.save_mailbox_list(list, ct.clone()).await?; - saved = true; - (i, v) - } - CreatedMailbox::Existed(i, v) => { - saved = false; - (i, v) - } - }; - let inbox_id = Some((inbox_id, inbox_uidvalidity)); - if *self.tx_inbox_id.borrow() != inbox_id { - self.tx_inbox_id.send(inbox_id).unwrap(); - } - - Ok(saved) - } - - async fn save_mailbox_list( - &self, - list: &MailboxList, - ct: Option, - ) -> Result<()> { - let list_blob = seal_serialize(list, &self.creds.keys.master)?; - let rref = ct.unwrap_or(storage::RowRef::new(MAILBOX_LIST_PK, MAILBOX_LIST_SK)); - let row_val = storage::RowVal::new(rref, list_blob); - self.storage.row_insert(vec![row_val]).await?; - Ok(()) - } -} - -// ---- User cache ---- - -lazy_static! { - static ref USER_CACHE: std::sync::Mutex>> = - std::sync::Mutex::new(HashMap::new()); -} -- cgit v1.2.3 From 1edf0b15ecaa73d55bb72c6f3c6e25d4f231f322 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Fri, 8 Mar 2024 08:43:28 +0100 Subject: Re-enable collections --- Cargo.lock | 137 ++++++++ Cargo.toml | 4 +- aero-bayou/Cargo.toml | 2 + aero-bayou/src/lib.rs | 8 +- aero-collections/Cargo.toml | 24 ++ aero-collections/mail/incoming.rs | 445 ------------------------- aero-collections/mail/mailbox.rs | 524 ----------------------------- aero-collections/mail/mod.rs | 27 -- aero-collections/mail/namespace.rs | 209 ------------ aero-collections/mail/query.rs | 137 -------- aero-collections/mail/snapshot.rs | 60 ---- aero-collections/mail/uidindex.rs | 474 --------------------------- aero-collections/mail/unique_ident.rs | 101 ------ aero-collections/src/calendar/mod.rs | 1 + aero-collections/src/lib.rs | 3 + aero-collections/src/mail/incoming.rs | 443 +++++++++++++++++++++++++ aero-collections/src/mail/mailbox.rs | 525 ++++++++++++++++++++++++++++++ aero-collections/src/mail/mod.rs | 25 ++ aero-collections/src/mail/namespace.rs | 202 ++++++++++++ aero-collections/src/mail/query.rs | 137 ++++++++ aero-collections/src/mail/snapshot.rs | 60 ++++ aero-collections/src/mail/uidindex.rs | 474 +++++++++++++++++++++++++++ aero-collections/src/mail/unique_ident.rs | 101 ++++++ aero-collections/src/user.rs | 311 ++++++++++++++++++ aero-collections/user.rs | 313 ------------------ 25 files changed, 2451 insertions(+), 2296 deletions(-) create mode 100644 aero-collections/Cargo.toml delete mode 100644 aero-collections/mail/incoming.rs delete mode 100644 aero-collections/mail/mailbox.rs delete mode 100644 aero-collections/mail/mod.rs delete mode 100644 aero-collections/mail/namespace.rs delete mode 100644 aero-collections/mail/query.rs delete mode 100644 aero-collections/mail/snapshot.rs delete mode 100644 aero-collections/mail/uidindex.rs delete mode 100644 aero-collections/mail/unique_ident.rs create mode 100644 aero-collections/src/calendar/mod.rs create mode 100644 aero-collections/src/lib.rs create mode 100644 aero-collections/src/mail/incoming.rs create mode 100644 aero-collections/src/mail/mailbox.rs create mode 100644 aero-collections/src/mail/mod.rs create mode 100644 aero-collections/src/mail/namespace.rs create mode 100644 aero-collections/src/mail/query.rs create mode 100644 aero-collections/src/mail/snapshot.rs create mode 100644 aero-collections/src/mail/uidindex.rs create mode 100644 aero-collections/src/mail/unique_ident.rs create mode 100644 
aero-collections/src/user.rs delete mode 100644 aero-collections/user.rs diff --git a/Cargo.lock b/Cargo.lock index 20b9d95..387615f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -23,10 +23,32 @@ version = "0.3.0" dependencies = [ "aero-user", "anyhow", + "hex", "log", "rand", "serde", "tokio", + "tracing", +] + +[[package]] +name = "aero-collections" +version = "0.3.0" +dependencies = [ + "aero-bayou", + "aero-user", + "anyhow", + "base64 0.21.7", + "eml-codec", + "futures", + "hex", + "im", + "lazy_static", + "rand", + "serde", + "sodiumoxide", + "tokio", + "tracing", ] [[package]] @@ -93,6 +115,21 @@ dependencies = [ "tokio", ] +[[package]] +name = "android-tzdata" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + [[package]] name = "anyhow" version = "1.0.79" @@ -616,6 +653,15 @@ version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" +[[package]] +name = "bitmaps" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "031043d04099746d8db04daf1fa424b2bc8bd69d92b25962dcde24da39ab64a2" +dependencies = [ + "typenum", +] + [[package]] name = "blake2" version = "0.10.6" @@ -684,7 +730,12 @@ version = "0.4.35" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8eaf5903dcbc0a39312feb77df2ff4c76387d591b9fc7b04a238dcf8bb62639a" dependencies = [ + "android-tzdata", + "iana-time-zone", + "js-sys", "num-traits", + "wasm-bindgen", + "windows-targets 0.52.0", ] [[package]] @@ -887,6 +938,27 @@ dependencies = [ "zeroize", ] +[[package]] +name = "eml-codec" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4499124d87abce26a57ef96ece800fa8babc38fbedd81c607c340ae83d46d2e" +dependencies = [ + "base64 0.21.7", + "chrono", + "encoding_rs", + "nom 7.1.3", +] + +[[package]] +name = "encoding_rs" +version = "0.8.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7268b386296a025e474d5140678f75d6de9493ae55a5d709eeb9dd08149945e1" +dependencies = [ + "cfg-if", +] + [[package]] name = "equivalent" version = "1.0.1" @@ -1284,6 +1356,29 @@ dependencies = [ "tracing", ] +[[package]] +name = "iana-time-zone" +version = "0.1.60" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7ffbb5a1b541ea2561f8c41c087286cc091e21e556a4f09a8f6cbf17b69b141" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "wasm-bindgen", + "windows-core", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + [[package]] name = "idna" version = "0.5.0" @@ -1294,6 +1389,20 @@ dependencies = [ "unicode-normalization", ] +[[package]] +name = "im" +version = "15.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0acd33ff0285af998aaf9b57342af478078f53492322fafc47450e09397e0e9" +dependencies = [ + "bitmaps", + "rand_core", + "rand_xoshiro", + 
"sized-chunks", + "typenum", + "version_check", +] + [[package]] name = "indexmap" version = "2.2.5" @@ -1716,6 +1825,15 @@ dependencies = [ "getrandom", ] +[[package]] +name = "rand_xoshiro" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f97cdb2a36ed4183de61b2f824cc45c9f1037f28afe0a322e9fff4c108b5aaa" +dependencies = [ + "rand_core", +] + [[package]] name = "regex-lite" version = "0.1.5" @@ -2066,6 +2184,16 @@ dependencies = [ "rand_core", ] +[[package]] +name = "sized-chunks" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16d69225bde7a69b235da73377861095455d298f2b970996eec25ddbb42b3d1e" +dependencies = [ + "bitmaps", + "typenum", +] + [[package]] name = "slab" version = "0.4.9" @@ -2601,6 +2729,15 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" +[[package]] +name = "windows-core" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" +dependencies = [ + "windows-targets 0.52.0", +] + [[package]] name = "windows-sys" version = "0.48.0" diff --git a/Cargo.toml b/Cargo.toml index 56d5cf3..5654322 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -6,7 +6,7 @@ members = [ "aero-sasl", "aero-dav", "aero-dav/fuzz", -# "aero-collections", + "aero-collections", # "aero-proto", # "aerogramme", ] @@ -19,7 +19,7 @@ aero-user = { version = "0.3.0", path = "aero-user" } aero-bayou = { version = "0.3.0", path = "aero-bayou" } aero-sasl = { version = "0.3.0", path = "aero-sasl" } aero-dav = { version = "0.3.0", path = "aero-dav" } -#aero-collections = { version = "0.3.0", path = "aero-collections" } +aero-collections = { version = "0.3.0", path = "aero-collections" } #aero-proto = { version = "0.3.0", path = "aero-proto" } #aerogramme = { version = "0.3.0", path = "aerogramme" } diff --git a/aero-bayou/Cargo.toml b/aero-bayou/Cargo.toml index d271f4a..cade216 100644 --- a/aero-bayou/Cargo.toml +++ b/aero-bayou/Cargo.toml @@ -10,6 +10,8 @@ description = "A simplified version of Bayou by Terry et al. 
(ACM SIGOPS 1995)" aero-user.workspace = true anyhow.workspace = true +hex.workspace = true +tracing.workspace = true log.workspace = true rand.workspace = true serde.workspace = true diff --git a/aero-bayou/src/lib.rs b/aero-bayou/src/lib.rs index 7756964..159dbbf 100644 --- a/aero-bayou/src/lib.rs +++ b/aero-bayou/src/lib.rs @@ -1,4 +1,4 @@ -mod timestamp +pub mod timestamp; use std::sync::{Arc, Weak}; use std::time::{Duration, Instant}; @@ -9,9 +9,9 @@ use rand::prelude::*; use serde::{Deserialize, Serialize}; use tokio::sync::{watch, Notify}; -use aero_foundations::cryptoblob::*; -use aero_foundations::login::Credentials; -use aero_foundations::storage; +use aero_user::cryptoblob::*; +use aero_user::login::Credentials; +use aero_user::storage; use crate::timestamp::*; diff --git a/aero-collections/Cargo.toml b/aero-collections/Cargo.toml new file mode 100644 index 0000000..90d285e --- /dev/null +++ b/aero-collections/Cargo.toml @@ -0,0 +1,24 @@ +[package] +name = "aero-collections" +version = "0.3.0" +authors = ["Alex Auvolat ", "Quentin Dufour "] +edition = "2021" +license = "EUPL-1.2" +description = "Aerogramme own representation of the different objects it manipulates" + +[dependencies] +aero-user.workspace = true +aero-bayou.workspace = true + +anyhow.workspace = true +base64.workspace = true +futures.workspace = true +lazy_static.workspace = true +serde.workspace = true +hex.workspace = true +tokio.workspace = true +tracing.workspace = true +rand.workspace = true +im.workspace = true +sodiumoxide.workspace = true +eml-codec.workspace = true diff --git a/aero-collections/mail/incoming.rs b/aero-collections/mail/incoming.rs deleted file mode 100644 index e2ad97d..0000000 --- a/aero-collections/mail/incoming.rs +++ /dev/null @@ -1,445 +0,0 @@ -//use std::collections::HashMap; -use std::convert::TryFrom; - -use std::sync::{Arc, Weak}; -use std::time::Duration; - -use anyhow::{anyhow, bail, Result}; -use base64::Engine; -use futures::{future::BoxFuture, FutureExt}; -//use tokio::io::AsyncReadExt; -use tokio::sync::watch; -use tracing::{debug, error, info, warn}; - -use crate::cryptoblob; -use crate::login::{Credentials, PublicCredentials}; -use crate::mail::mailbox::Mailbox; -use crate::mail::uidindex::ImapUidvalidity; -use crate::mail::unique_ident::*; -use crate::user::User; -use crate::mail::IMF; -use crate::storage; -use crate::timestamp::now_msec; - -const INCOMING_PK: &str = "incoming"; -const INCOMING_LOCK_SK: &str = "lock"; -const INCOMING_WATCH_SK: &str = "watch"; - -const MESSAGE_KEY: &str = "message-key"; - -// When a lock is held, it is held for LOCK_DURATION (here 5 minutes) -// It is renewed every LOCK_DURATION/3 -// If we are at 2*LOCK_DURATION/3 and haven't renewed, we assume we -// lost the lock. 
-const LOCK_DURATION: Duration = Duration::from_secs(300); - -// In addition to checking when notified, also check for new mail every 10 minutes -const MAIL_CHECK_INTERVAL: Duration = Duration::from_secs(600); - -pub async fn incoming_mail_watch_process( - user: Weak, - creds: Credentials, - rx_inbox_id: watch::Receiver>, -) { - if let Err(e) = incoming_mail_watch_process_internal(user, creds, rx_inbox_id).await { - error!("Error in incoming mail watch process: {}", e); - } -} - -async fn incoming_mail_watch_process_internal( - user: Weak, - creds: Credentials, - mut rx_inbox_id: watch::Receiver>, -) -> Result<()> { - let mut lock_held = k2v_lock_loop( - creds.storage.build().await?, - storage::RowRef::new(INCOMING_PK, INCOMING_LOCK_SK), - ); - let storage = creds.storage.build().await?; - - let mut inbox: Option> = None; - let mut incoming_key = storage::RowRef::new(INCOMING_PK, INCOMING_WATCH_SK); - - loop { - let maybe_updated_incoming_key = if *lock_held.borrow() { - debug!("incoming lock held"); - - let wait_new_mail = async { - loop { - match storage.row_poll(&incoming_key).await { - Ok(row_val) => break row_val.row_ref, - Err(e) => { - error!("Error in wait_new_mail: {}", e); - tokio::time::sleep(Duration::from_secs(30)).await; - } - } - } - }; - - tokio::select! { - inc_k = wait_new_mail => Some(inc_k), - _ = tokio::time::sleep(MAIL_CHECK_INTERVAL) => Some(incoming_key.clone()), - _ = lock_held.changed() => None, - _ = rx_inbox_id.changed() => None, - } - } else { - debug!("incoming lock not held"); - tokio::select! { - _ = lock_held.changed() => None, - _ = rx_inbox_id.changed() => None, - } - }; - - let user = match Weak::upgrade(&user) { - Some(user) => user, - None => { - debug!("User no longer available, exiting incoming loop."); - break; - } - }; - debug!("User still available"); - - // If INBOX no longer is same mailbox, open new mailbox - let inbox_id = *rx_inbox_id.borrow(); - if let Some((id, uidvalidity)) = inbox_id { - if Some(id) != inbox.as_ref().map(|b| b.id) { - match user.open_mailbox_by_id(id, uidvalidity).await { - Ok(mb) => { - inbox = Some(mb); - } - Err(e) => { - inbox = None; - error!("Error when opening inbox ({}): {}", id, e); - tokio::time::sleep(Duration::from_secs(30)).await; - continue; - } - } - } - } - - // If we were able to open INBOX, and we have mail, - // fetch new mail - if let (Some(inbox), Some(updated_incoming_key)) = (&inbox, maybe_updated_incoming_key) { - match handle_incoming_mail(&user, &storage, inbox, &lock_held).await { - Ok(()) => { - incoming_key = updated_incoming_key; - } - Err(e) => { - error!("Could not fetch incoming mail: {}", e); - tokio::time::sleep(Duration::from_secs(30)).await; - } - } - } - } - drop(rx_inbox_id); - Ok(()) -} - -async fn handle_incoming_mail( - user: &Arc, - storage: &storage::Store, - inbox: &Arc, - lock_held: &watch::Receiver, -) -> Result<()> { - let mails_res = storage.blob_list("incoming/").await?; - - for object in mails_res { - if !*lock_held.borrow() { - break; - } - let key = object.0; - if let Some(mail_id) = key.strip_prefix("incoming/") { - if let Ok(mail_id) = mail_id.parse::() { - move_incoming_message(user, storage, inbox, mail_id).await?; - } - } - } - - Ok(()) -} - -async fn move_incoming_message( - user: &Arc, - storage: &storage::Store, - inbox: &Arc, - id: UniqueIdent, -) -> Result<()> { - info!("Moving incoming message: {}", id); - - let object_key = format!("incoming/{}", id); - - // 1. 
Fetch message from S3 - let object = storage.blob_fetch(&storage::BlobRef(object_key)).await?; - - // 1.a decrypt message key from headers - //info!("Object metadata: {:?}", get_result.metadata); - let key_encrypted_b64 = object - .meta - .get(MESSAGE_KEY) - .ok_or(anyhow!("Missing key in metadata"))?; - let key_encrypted = base64::engine::general_purpose::STANDARD.decode(key_encrypted_b64)?; - let message_key = sodiumoxide::crypto::sealedbox::open( - &key_encrypted, - &user.creds.keys.public, - &user.creds.keys.secret, - ) - .map_err(|_| anyhow!("Cannot decrypt message key"))?; - let message_key = - cryptoblob::Key::from_slice(&message_key).ok_or(anyhow!("Invalid message key"))?; - - // 1.b retrieve message body - let obj_body = object.value; - let plain_mail = cryptoblob::open(&obj_body, &message_key) - .map_err(|_| anyhow!("Cannot decrypt email content"))?; - - // 2 parse mail and add to inbox - let msg = IMF::try_from(&plain_mail[..]).map_err(|_| anyhow!("Invalid email body"))?; - inbox - .append_from_s3(msg, id, object.blob_ref.clone(), message_key) - .await?; - - // 3 delete from incoming - storage.blob_rm(&object.blob_ref).await?; - - Ok(()) -} - -// ---- UTIL: K2V locking loop, use this to try to grab a lock using a K2V entry as a signal ---- - -fn k2v_lock_loop(storage: storage::Store, row_ref: storage::RowRef) -> watch::Receiver { - let (held_tx, held_rx) = watch::channel(false); - - tokio::spawn(k2v_lock_loop_internal(storage, row_ref, held_tx)); - - held_rx -} - -#[derive(Clone, Debug)] -enum LockState { - Unknown, - Empty, - Held(UniqueIdent, u64, storage::RowRef), -} - -async fn k2v_lock_loop_internal( - storage: storage::Store, - row_ref: storage::RowRef, - held_tx: watch::Sender, -) { - let (state_tx, mut state_rx) = watch::channel::(LockState::Unknown); - let mut state_rx_2 = state_rx.clone(); - - let our_pid = gen_ident(); - - // Loop 1: watch state of lock in K2V, save that in corresponding watch channel - let watch_lock_loop: BoxFuture> = async { - let mut ct = row_ref.clone(); - loop { - debug!("k2v watch lock loop iter: ct = {:?}", ct); - match storage.row_poll(&ct).await { - Err(e) => { - error!( - "Error in k2v wait value changed: {} ; assuming we no longer hold lock.", - e - ); - state_tx.send(LockState::Unknown)?; - tokio::time::sleep(Duration::from_secs(30)).await; - } - Ok(cv) => { - let mut lock_state = None; - for v in cv.value.iter() { - if let storage::Alternative::Value(vbytes) = v { - if vbytes.len() == 32 { - let ts = u64::from_be_bytes(vbytes[..8].try_into().unwrap()); - let pid = UniqueIdent(vbytes[8..].try_into().unwrap()); - if lock_state - .map(|(pid2, ts2)| ts > ts2 || (ts == ts2 && pid > pid2)) - .unwrap_or(true) - { - lock_state = Some((pid, ts)); - } - } - } - } - let new_ct = cv.row_ref; - - debug!( - "k2v watch lock loop: changed, old ct = {:?}, new ct = {:?}, v = {:?}", - ct, new_ct, lock_state - ); - state_tx.send( - lock_state - .map(|(pid, ts)| LockState::Held(pid, ts, new_ct.clone())) - .unwrap_or(LockState::Empty), - )?; - ct = new_ct; - } - } - } - } - .boxed(); - - // Loop 2: notify user whether we are holding the lock or not - let lock_notify_loop: BoxFuture> = async { - loop { - let now = now_msec(); - let held_with_expiration_time = match &*state_rx.borrow_and_update() { - LockState::Held(pid, ts, _ct) if *pid == our_pid => { - let expiration_time = *ts - (LOCK_DURATION / 3).as_millis() as u64; - if now < expiration_time { - Some(expiration_time) - } else { - None - } - } - _ => None, - }; - let held = 
held_with_expiration_time.is_some(); - if held != *held_tx.borrow() { - held_tx.send(held)?; - } - - let await_expired = async { - match held_with_expiration_time { - None => futures::future::pending().await, - Some(expiration_time) => { - tokio::time::sleep(Duration::from_millis(expiration_time - now)).await - } - }; - }; - - tokio::select!( - r = state_rx.changed() => { - r?; - } - _ = held_tx.closed() => bail!("held_tx closed, don't need to hold lock anymore"), - _ = await_expired => continue, - ); - } - } - .boxed(); - - // Loop 3: acquire lock when relevant - let take_lock_loop: BoxFuture> = async { - loop { - let now = now_msec(); - let state: LockState = state_rx_2.borrow_and_update().clone(); - let (acquire_at, ct) = match state { - LockState::Unknown => { - // If state of the lock is unknown, don't try to acquire - state_rx_2.changed().await?; - continue; - } - LockState::Empty => (now, None), - LockState::Held(pid, ts, ct) => { - if pid == our_pid { - (ts - (2 * LOCK_DURATION / 3).as_millis() as u64, Some(ct)) - } else { - (ts, Some(ct)) - } - } - }; - - // Wait until it is time to acquire lock - if acquire_at > now { - tokio::select!( - r = state_rx_2.changed() => { - // If lock state changed in the meantime, don't acquire and loop around - r?; - continue; - } - _ = tokio::time::sleep(Duration::from_millis(acquire_at - now)) => () - ); - } - - // Acquire lock - let mut lock = vec![0u8; 32]; - lock[..8].copy_from_slice(&u64::to_be_bytes( - now_msec() + LOCK_DURATION.as_millis() as u64, - )); - lock[8..].copy_from_slice(&our_pid.0); - let row = match ct { - Some(existing) => existing, - None => row_ref.clone(), - }; - if let Err(e) = storage - .row_insert(vec![storage::RowVal::new(row, lock)]) - .await - { - error!("Could not take lock: {}", e); - tokio::time::sleep(Duration::from_secs(30)).await; - } - - // Wait for new information to loop back - state_rx_2.changed().await?; - } - } - .boxed(); - - let _ = futures::try_join!(watch_lock_loop, lock_notify_loop, take_lock_loop); - - debug!("lock loop exited, releasing"); - - if !held_tx.is_closed() { - warn!("weird..."); - let _ = held_tx.send(false); - } - - // If lock is ours, release it - let release = match &*state_rx.borrow() { - LockState::Held(pid, _, ct) if *pid == our_pid => Some(ct.clone()), - _ => None, - }; - if let Some(ct) = release { - match storage.row_rm(&storage::Selector::Single(&ct)).await { - Err(e) => warn!("Unable to release lock {:?}: {}", ct, e), - Ok(_) => (), - }; - } -} - -// ---- LMTP SIDE: storing messages encrypted with user's pubkey ---- - -pub struct EncryptedMessage { - key: cryptoblob::Key, - encrypted_body: Vec, -} - -impl EncryptedMessage { - pub fn new(body: Vec) -> Result { - let key = cryptoblob::gen_key(); - let encrypted_body = cryptoblob::seal(&body, &key)?; - Ok(Self { - key, - encrypted_body, - }) - } - - pub async fn deliver_to(self: Arc, creds: PublicCredentials) -> Result<()> { - let storage = creds.storage.build().await?; - - // Get causality token of previous watch key - let query = storage::RowRef::new(INCOMING_PK, INCOMING_WATCH_SK); - let watch_ct = match storage.row_fetch(&storage::Selector::Single(&query)).await { - Err(_) => query, - Ok(cv) => cv.into_iter().next().map(|v| v.row_ref).unwrap_or(query), - }; - - // Write mail to encrypted storage - let encrypted_key = - sodiumoxide::crypto::sealedbox::seal(self.key.as_ref(), &creds.public_key); - let key_header = base64::engine::general_purpose::STANDARD.encode(&encrypted_key); - - let blob_val = storage::BlobVal::new( - 
storage::BlobRef(format!("incoming/{}", gen_ident())), - self.encrypted_body.clone().into(), - ) - .with_meta(MESSAGE_KEY.to_string(), key_header); - storage.blob_insert(blob_val).await?; - - // Update watch key to signal new mail - let watch_val = storage::RowVal::new(watch_ct.clone(), gen_ident().0.to_vec()); - storage.row_insert(vec![watch_val]).await?; - Ok(()) - } -} diff --git a/aero-collections/mail/mailbox.rs b/aero-collections/mail/mailbox.rs deleted file mode 100644 index d1a5473..0000000 --- a/aero-collections/mail/mailbox.rs +++ /dev/null @@ -1,524 +0,0 @@ -use anyhow::{anyhow, bail, Result}; -use serde::{Deserialize, Serialize}; -use tokio::sync::RwLock; - -use crate::bayou::Bayou; -use crate::cryptoblob::{self, gen_key, open_deserialize, seal_serialize, Key}; -use crate::login::Credentials; -use crate::mail::uidindex::*; -use crate::mail::unique_ident::*; -use crate::mail::IMF; -use crate::storage::{self, BlobRef, BlobVal, RowRef, RowVal, Selector, Store}; -use crate::timestamp::now_msec; - -pub struct Mailbox { - pub(super) id: UniqueIdent, - mbox: RwLock, -} - -impl Mailbox { - pub(crate) async fn open( - creds: &Credentials, - id: UniqueIdent, - min_uidvalidity: ImapUidvalidity, - ) -> Result { - let index_path = format!("index/{}", id); - let mail_path = format!("mail/{}", id); - - let mut uid_index = Bayou::::new(creds, index_path).await?; - uid_index.sync().await?; - - let uidvalidity = uid_index.state().uidvalidity; - if uidvalidity < min_uidvalidity { - uid_index - .push( - uid_index - .state() - .op_bump_uidvalidity(min_uidvalidity.get() - uidvalidity.get()), - ) - .await?; - } - - // @FIXME reporting through opentelemetry or some logs - // info on the "shape" of the mailbox would be welcomed - /* - dump(&uid_index); - */ - - let mbox = RwLock::new(MailboxInternal { - id, - encryption_key: creds.keys.master.clone(), - storage: creds.storage.build().await?, - uid_index, - mail_path, - }); - - Ok(Self { id, mbox }) - } - - /// Sync data with backing store - pub async fn force_sync(&self) -> Result<()> { - self.mbox.write().await.force_sync().await - } - - /// Sync data with backing store only if changes are detected - /// or last sync is too old - pub async fn opportunistic_sync(&self) -> Result<()> { - self.mbox.write().await.opportunistic_sync().await - } - - /// Block until a sync has been done (due to changes in the event log) - pub async fn notify(&self) -> std::sync::Weak { - self.mbox.read().await.notifier() - } - - // ---- Functions for reading the mailbox ---- - - /// Get a clone of the current UID Index of this mailbox - /// (cloning is cheap so don't hesitate to use this) - pub async fn current_uid_index(&self) -> UidIndex { - self.mbox.read().await.uid_index.state().clone() - } - - /// Fetch the metadata (headers + some more info) of the specified - /// mail IDs - pub async fn fetch_meta(&self, ids: &[UniqueIdent]) -> Result> { - self.mbox.read().await.fetch_meta(ids).await - } - - /// Fetch an entire e-mail - pub async fn fetch_full(&self, id: UniqueIdent, message_key: &Key) -> Result> { - self.mbox.read().await.fetch_full(id, message_key).await - } - - pub async fn frozen(self: &std::sync::Arc) -> super::snapshot::FrozenMailbox { - super::snapshot::FrozenMailbox::new(self.clone()).await - } - - // ---- Functions for changing the mailbox ---- - - /// Add flags to message - pub async fn add_flags<'a>(&self, id: UniqueIdent, flags: &[Flag]) -> Result<()> { - self.mbox.write().await.add_flags(id, flags).await - } - - /// Delete flags from message - pub async 
fn del_flags<'a>(&self, id: UniqueIdent, flags: &[Flag]) -> Result<()> { - self.mbox.write().await.del_flags(id, flags).await - } - - /// Define the new flags for this message - pub async fn set_flags<'a>(&self, id: UniqueIdent, flags: &[Flag]) -> Result<()> { - self.mbox.write().await.set_flags(id, flags).await - } - - /// Insert an email into the mailbox - pub async fn append<'a>( - &self, - msg: IMF<'a>, - ident: Option, - flags: &[Flag], - ) -> Result<(ImapUidvalidity, ImapUid, ModSeq)> { - self.mbox.write().await.append(msg, ident, flags).await - } - - /// Insert an email into the mailbox, copying it from an existing S3 object - pub async fn append_from_s3<'a>( - &self, - msg: IMF<'a>, - ident: UniqueIdent, - blob_ref: storage::BlobRef, - message_key: Key, - ) -> Result<()> { - self.mbox - .write() - .await - .append_from_s3(msg, ident, blob_ref, message_key) - .await - } - - /// Delete a message definitively from the mailbox - pub async fn delete<'a>(&self, id: UniqueIdent) -> Result<()> { - self.mbox.write().await.delete(id).await - } - - /// Copy an email from an other Mailbox to this mailbox - /// (use this when possible, as it allows for a certain number of storage optimizations) - pub async fn copy_from(&self, from: &Mailbox, uuid: UniqueIdent) -> Result { - if self.id == from.id { - bail!("Cannot copy into same mailbox"); - } - - let (mut selflock, fromlock); - if self.id < from.id { - selflock = self.mbox.write().await; - fromlock = from.mbox.write().await; - } else { - fromlock = from.mbox.write().await; - selflock = self.mbox.write().await; - }; - selflock.copy_from(&fromlock, uuid).await - } - - /// Move an email from an other Mailbox to this mailbox - /// (use this when possible, as it allows for a certain number of storage optimizations) - pub async fn move_from(&self, from: &Mailbox, uuid: UniqueIdent) -> Result<()> { - if self.id == from.id { - bail!("Cannot copy move same mailbox"); - } - - let (mut selflock, mut fromlock); - if self.id < from.id { - selflock = self.mbox.write().await; - fromlock = from.mbox.write().await; - } else { - fromlock = from.mbox.write().await; - selflock = self.mbox.write().await; - }; - selflock.move_from(&mut fromlock, uuid).await - } -} - -// ---- - -// Non standard but common flags: -// https://www.iana.org/assignments/imap-jmap-keywords/imap-jmap-keywords.xhtml -struct MailboxInternal { - // 2023-05-15 will probably be used later. 
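
The copy_from/move_from methods above always acquire the two mailbox write locks in a fixed global order, lowest id first, so two concurrent transfers running in opposite directions cannot deadlock. A standalone sketch of that ordering rule, with illustrative types and names that are not part of the patch (assumes tokio with the rt and macros features):

```rust
use tokio::sync::RwLock;

struct Mb {
    id: u32,
    inner: RwLock<Vec<String>>,
}

async fn copy_between(dst: &Mb, src: &Mb) {
    // Same rule as copy_from/move_from: always take the lower id first.
    let (_dst_guard, _src_guard) = if dst.id < src.id {
        let d = dst.inner.write().await;
        let s = src.inner.write().await;
        (d, s)
    } else {
        let s = src.inner.write().await;
        let d = dst.inner.write().await;
        (d, s)
    };
    // ... the actual copy happens while both guards are held ...
}

#[tokio::main]
async fn main() {
    let a = Mb { id: 1, inner: RwLock::new(vec![]) };
    let b = Mb { id: 2, inner: RwLock::new(vec![]) };
    // Opposite directions, same lock order, no deadlock.
    tokio::join!(copy_between(&a, &b), copy_between(&b, &a));
}
```
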
- #[allow(dead_code)] - id: UniqueIdent, - mail_path: String, - encryption_key: Key, - storage: Store, - uid_index: Bayou, -} - -impl MailboxInternal { - async fn force_sync(&mut self) -> Result<()> { - self.uid_index.sync().await?; - Ok(()) - } - - async fn opportunistic_sync(&mut self) -> Result<()> { - self.uid_index.opportunistic_sync().await?; - Ok(()) - } - - fn notifier(&self) -> std::sync::Weak { - self.uid_index.notifier() - } - - // ---- Functions for reading the mailbox ---- - - async fn fetch_meta(&self, ids: &[UniqueIdent]) -> Result> { - let ids = ids.iter().map(|x| x.to_string()).collect::>(); - let ops = ids - .iter() - .map(|id| RowRef::new(self.mail_path.as_str(), id.as_str())) - .collect::>(); - let res_vec = self.storage.row_fetch(&Selector::List(ops)).await?; - - let mut meta_vec = vec![]; - for res in res_vec.into_iter() { - let mut meta_opt = None; - - // Resolve conflicts - for v in res.value.iter() { - match v { - storage::Alternative::Tombstone => (), - storage::Alternative::Value(v) => { - let meta = open_deserialize::(v, &self.encryption_key)?; - match meta_opt.as_mut() { - None => { - meta_opt = Some(meta); - } - Some(prevmeta) => { - prevmeta.try_merge(meta)?; - } - } - } - } - } - if let Some(meta) = meta_opt { - meta_vec.push(meta); - } else { - bail!("No valid meta value in k2v for {:?}", res.row_ref); - } - } - - Ok(meta_vec) - } - - async fn fetch_full(&self, id: UniqueIdent, message_key: &Key) -> Result> { - let obj_res = self - .storage - .blob_fetch(&BlobRef(format!("{}/{}", self.mail_path, id))) - .await?; - let body = obj_res.value; - cryptoblob::open(&body, message_key) - } - - // ---- Functions for changing the mailbox ---- - - async fn add_flags(&mut self, ident: UniqueIdent, flags: &[Flag]) -> Result<()> { - let add_flag_op = self.uid_index.state().op_flag_add(ident, flags.to_vec()); - self.uid_index.push(add_flag_op).await - } - - async fn del_flags(&mut self, ident: UniqueIdent, flags: &[Flag]) -> Result<()> { - let del_flag_op = self.uid_index.state().op_flag_del(ident, flags.to_vec()); - self.uid_index.push(del_flag_op).await - } - - async fn set_flags(&mut self, ident: UniqueIdent, flags: &[Flag]) -> Result<()> { - let set_flag_op = self.uid_index.state().op_flag_set(ident, flags.to_vec()); - self.uid_index.push(set_flag_op).await - } - - async fn append( - &mut self, - mail: IMF<'_>, - ident: Option, - flags: &[Flag], - ) -> Result<(ImapUidvalidity, ImapUid, ModSeq)> { - let ident = ident.unwrap_or_else(gen_ident); - let message_key = gen_key(); - - futures::try_join!( - async { - // Encrypt and save mail body - let message_blob = cryptoblob::seal(mail.raw, &message_key)?; - self.storage - .blob_insert(BlobVal::new( - BlobRef(format!("{}/{}", self.mail_path, ident)), - message_blob, - )) - .await?; - Ok::<_, anyhow::Error>(()) - }, - async { - // Save mail meta - let meta = MailMeta { - internaldate: now_msec(), - headers: mail.parsed.raw_headers.to_vec(), - message_key: message_key.clone(), - rfc822_size: mail.raw.len(), - }; - let meta_blob = seal_serialize(&meta, &self.encryption_key)?; - self.storage - .row_insert(vec![RowVal::new( - RowRef::new(&self.mail_path, &ident.to_string()), - meta_blob, - )]) - .await?; - Ok::<_, anyhow::Error>(()) - }, - self.uid_index.opportunistic_sync() - )?; - - // Add mail to Bayou mail index - let uid_state = self.uid_index.state(); - let add_mail_op = uid_state.op_mail_add(ident, flags.to_vec()); - - let uidvalidity = uid_state.uidvalidity; - let (uid, modseq) = match add_mail_op { - 
UidIndexOp::MailAdd(_, uid, modseq, _) => (uid, modseq), - _ => unreachable!(), - }; - - self.uid_index.push(add_mail_op).await?; - - Ok((uidvalidity, uid, modseq)) - } - - async fn append_from_s3<'a>( - &mut self, - mail: IMF<'a>, - ident: UniqueIdent, - blob_src: storage::BlobRef, - message_key: Key, - ) -> Result<()> { - futures::try_join!( - async { - // Copy mail body from previous location - let blob_dst = BlobRef(format!("{}/{}", self.mail_path, ident)); - self.storage.blob_copy(&blob_src, &blob_dst).await?; - Ok::<_, anyhow::Error>(()) - }, - async { - // Save mail meta - let meta = MailMeta { - internaldate: now_msec(), - headers: mail.parsed.raw_headers.to_vec(), - message_key: message_key.clone(), - rfc822_size: mail.raw.len(), - }; - let meta_blob = seal_serialize(&meta, &self.encryption_key)?; - self.storage - .row_insert(vec![RowVal::new( - RowRef::new(&self.mail_path, &ident.to_string()), - meta_blob, - )]) - .await?; - Ok::<_, anyhow::Error>(()) - }, - self.uid_index.opportunistic_sync() - )?; - - // Add mail to Bayou mail index - let add_mail_op = self.uid_index.state().op_mail_add(ident, vec![]); - self.uid_index.push(add_mail_op).await?; - - Ok(()) - } - - async fn delete(&mut self, ident: UniqueIdent) -> Result<()> { - if !self.uid_index.state().table.contains_key(&ident) { - bail!("Cannot delete mail that doesn't exit"); - } - - let del_mail_op = self.uid_index.state().op_mail_del(ident); - self.uid_index.push(del_mail_op).await?; - - futures::try_join!( - async { - // Delete mail body from S3 - self.storage - .blob_rm(&BlobRef(format!("{}/{}", self.mail_path, ident))) - .await?; - Ok::<_, anyhow::Error>(()) - }, - async { - // Delete mail meta from K2V - let sk = ident.to_string(); - let res = self - .storage - .row_fetch(&storage::Selector::Single(&RowRef::new( - &self.mail_path, - &sk, - ))) - .await?; - if let Some(row_val) = res.into_iter().next() { - self.storage - .row_rm(&storage::Selector::Single(&row_val.row_ref)) - .await?; - } - Ok::<_, anyhow::Error>(()) - } - )?; - Ok(()) - } - - async fn copy_from( - &mut self, - from: &MailboxInternal, - source_id: UniqueIdent, - ) -> Result { - let new_id = gen_ident(); - self.copy_internal(from, source_id, new_id).await?; - Ok(new_id) - } - - async fn move_from(&mut self, from: &mut MailboxInternal, id: UniqueIdent) -> Result<()> { - self.copy_internal(from, id, id).await?; - from.delete(id).await?; - Ok(()) - } - - async fn copy_internal( - &mut self, - from: &MailboxInternal, - source_id: UniqueIdent, - new_id: UniqueIdent, - ) -> Result<()> { - if self.encryption_key != from.encryption_key { - bail!("Message to be copied/moved does not belong to same account."); - } - - let flags = from - .uid_index - .state() - .table - .get(&source_id) - .ok_or(anyhow!("Source mail not found"))? 
- .2 - .clone(); - - futures::try_join!( - async { - let dst = BlobRef(format!("{}/{}", self.mail_path, new_id)); - let src = BlobRef(format!("{}/{}", from.mail_path, source_id)); - self.storage.blob_copy(&src, &dst).await?; - Ok::<_, anyhow::Error>(()) - }, - async { - // Copy mail meta in K2V - let meta = &from.fetch_meta(&[source_id]).await?[0]; - let meta_blob = seal_serialize(meta, &self.encryption_key)?; - self.storage - .row_insert(vec![RowVal::new( - RowRef::new(&self.mail_path, &new_id.to_string()), - meta_blob, - )]) - .await?; - Ok::<_, anyhow::Error>(()) - }, - self.uid_index.opportunistic_sync(), - )?; - - // Add mail to Bayou mail index - let add_mail_op = self.uid_index.state().op_mail_add(new_id, flags); - self.uid_index.push(add_mail_op).await?; - - Ok(()) - } -} - -// Can be useful to debug so we want this code -// to be available to developers -#[allow(dead_code)] -fn dump(uid_index: &Bayou) { - let s = uid_index.state(); - println!("---- MAILBOX STATE ----"); - println!("UIDVALIDITY {}", s.uidvalidity); - println!("UIDNEXT {}", s.uidnext); - println!("INTERNALSEQ {}", s.internalseq); - for (uid, ident) in s.idx_by_uid.iter() { - println!( - "{} {} {}", - uid, - hex::encode(ident.0), - s.table.get(ident).cloned().unwrap().2.join(", ") - ); - } - println!(); -} - -// ---- - -/// The metadata of a message that is stored in K2V -/// at pk = mail/, sk = -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct MailMeta { - /// INTERNALDATE field (milliseconds since epoch) - pub internaldate: u64, - /// Headers of the message - pub headers: Vec, - /// Secret key for decrypting entire message - pub message_key: Key, - /// RFC822 size - pub rfc822_size: usize, -} - -impl MailMeta { - fn try_merge(&mut self, other: Self) -> Result<()> { - if self.headers != other.headers - || self.message_key != other.message_key - || self.rfc822_size != other.rfc822_size - { - bail!("Conflicting MailMeta values."); - } - self.internaldate = std::cmp::max(self.internaldate, other.internaldate); - Ok(()) - } -} diff --git a/aero-collections/mail/mod.rs b/aero-collections/mail/mod.rs deleted file mode 100644 index 03e85cd..0000000 --- a/aero-collections/mail/mod.rs +++ /dev/null @@ -1,27 +0,0 @@ -use std::convert::TryFrom; - -pub mod incoming; -pub mod mailbox; -pub mod query; -pub mod snapshot; -pub mod uidindex; -pub mod unique_ident; -pub mod namespace; - -// Internet Message Format -// aka RFC 822 - RFC 2822 - RFC 5322 -// 2023-05-15 don't want to refactor this struct now. 
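
The IMF wrapper declared just below keeps both the raw message bytes (needed for RFC822.SIZE and storage) and the eml_codec parse (needed for headers). A minimal sketch of how it is constructed at the delivery/append boundary, assuming the crate's module context and an arbitrary example message:

```rust
use std::convert::TryFrom;

fn parse_example(raw: &[u8]) {
    // IMF::try_from runs eml_codec::parse_message and keeps the raw bytes alongside.
    match IMF::try_from(raw) {
        Ok(_msg) => println!("parsed OK ({} raw bytes kept)", raw.len()),
        Err(()) => eprintln!("eml_codec rejected the message"),
    }
}
```
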
-#[allow(clippy::upper_case_acronyms)] -pub struct IMF<'a> { - raw: &'a [u8], - parsed: eml_codec::part::composite::Message<'a>, -} - -impl<'a> TryFrom<&'a [u8]> for IMF<'a> { - type Error = (); - - fn try_from(body: &'a [u8]) -> Result, ()> { - let parsed = eml_codec::parse_message(body).or(Err(()))?.1; - Ok(Self { raw: body, parsed }) - } -} diff --git a/aero-collections/mail/namespace.rs b/aero-collections/mail/namespace.rs deleted file mode 100644 index 5e67173..0000000 --- a/aero-collections/mail/namespace.rs +++ /dev/null @@ -1,209 +0,0 @@ -use std::collections::{BTreeMap, HashMap}; -use std::sync::{Arc, Weak}; - -use anyhow::{anyhow, bail, Result}; -use lazy_static::lazy_static; -use serde::{Deserialize, Serialize}; -use tokio::sync::watch; - -use crate::cryptoblob::{open_deserialize, seal_serialize}; -use crate::login::Credentials; -use crate::mail::incoming::incoming_mail_watch_process; -use crate::mail::mailbox::Mailbox; -use crate::mail::uidindex::ImapUidvalidity; -use crate::mail::unique_ident::{gen_ident, UniqueIdent}; -use crate::storage; -use crate::timestamp::now_msec; - -pub const MAILBOX_HIERARCHY_DELIMITER: char = '.'; - -/// INBOX is the only mailbox that must always exist. -/// It is created automatically when the account is created. -/// IMAP allows the user to rename INBOX to something else, -/// in this case all messages from INBOX are moved to a mailbox -/// with the new name and the INBOX mailbox still exists and is empty. -/// In our implementation, we indeed move the underlying mailbox -/// to the new name (i.e. the new name has the same id as the previous -/// INBOX), and we create a new empty mailbox for INBOX. -pub const INBOX: &str = "INBOX"; - -/// For convenience purpose, we also create some special mailbox -/// that are described in RFC6154 SPECIAL-USE -/// @FIXME maybe it should be a configuration parameter -/// @FIXME maybe we should have a per-mailbox flag mechanism, either an enum or a string, so we -/// track which mailbox is used for what. -/// @FIXME Junk could be useful but we don't have any antispam solution yet so... -/// @FIXME IMAP supports virtual mailbox. \All or \Flagged are intended to be virtual mailboxes. -/// \Trash might be one, or not one. I don't know what we should do there. 
-pub const DRAFTS: &str = "Drafts"; -pub const ARCHIVE: &str = "Archive"; -pub const SENT: &str = "Sent"; -pub const TRASH: &str = "Trash"; - -pub(crate) const MAILBOX_LIST_PK: &str = "mailboxes"; -pub(crate) const MAILBOX_LIST_SK: &str = "list"; - -// ---- User's mailbox list (serialized in K2V) ---- - -#[derive(Serialize, Deserialize)] -pub(crate) struct MailboxList(BTreeMap); - -#[derive(Serialize, Deserialize, Clone, Copy, Debug)] -pub(crate) struct MailboxListEntry { - id_lww: (u64, Option), - uidvalidity: ImapUidvalidity, -} - -impl MailboxListEntry { - fn merge(&mut self, other: &Self) { - // Simple CRDT merge rule - if other.id_lww.0 > self.id_lww.0 - || (other.id_lww.0 == self.id_lww.0 && other.id_lww.1 > self.id_lww.1) - { - self.id_lww = other.id_lww; - } - self.uidvalidity = std::cmp::max(self.uidvalidity, other.uidvalidity); - } -} - -impl MailboxList { - pub(crate) fn new() -> Self { - Self(BTreeMap::new()) - } - - pub(crate) fn merge(&mut self, list2: Self) { - for (k, v) in list2.0.into_iter() { - if let Some(e) = self.0.get_mut(&k) { - e.merge(&v); - } else { - self.0.insert(k, v); - } - } - } - - pub(crate) fn existing_mailbox_names(&self) -> Vec { - self.0 - .iter() - .filter(|(_, v)| v.id_lww.1.is_some()) - .map(|(k, _)| k.to_string()) - .collect() - } - - pub(crate) fn has_mailbox(&self, name: &str) -> bool { - matches!( - self.0.get(name), - Some(MailboxListEntry { - id_lww: (_, Some(_)), - .. - }) - ) - } - - pub(crate) fn get_mailbox(&self, name: &str) -> Option<(ImapUidvalidity, Option)> { - self.0.get(name).map( - |MailboxListEntry { - id_lww: (_, mailbox_id), - uidvalidity, - }| (*uidvalidity, *mailbox_id), - ) - } - - /// Ensures mailbox `name` maps to id `id`. - /// If it already mapped to that, returns None. - /// If a change had to be done, returns Some(new uidvalidity in mailbox). - pub(crate) fn set_mailbox(&mut self, name: &str, id: Option) -> Option { - let (ts, id, uidvalidity) = match self.0.get_mut(name) { - None => { - if id.is_none() { - return None; - } else { - (now_msec(), id, ImapUidvalidity::new(1).unwrap()) - } - } - Some(MailboxListEntry { - id_lww, - uidvalidity, - }) => { - if id_lww.1 == id { - return None; - } else { - ( - std::cmp::max(id_lww.0 + 1, now_msec()), - id, - ImapUidvalidity::new(uidvalidity.get() + 1).unwrap(), - ) - } - } - }; - - self.0.insert( - name.into(), - MailboxListEntry { - id_lww: (ts, id), - uidvalidity, - }, - ); - Some(uidvalidity) - } - - pub(crate) fn update_uidvalidity(&mut self, name: &str, new_uidvalidity: ImapUidvalidity) { - match self.0.get_mut(name) { - None => { - self.0.insert( - name.into(), - MailboxListEntry { - id_lww: (now_msec(), None), - uidvalidity: new_uidvalidity, - }, - ); - } - Some(MailboxListEntry { uidvalidity, .. 
}) => { - *uidvalidity = std::cmp::max(*uidvalidity, new_uidvalidity); - } - } - } - - pub(crate) fn create_mailbox(&mut self, name: &str) -> CreatedMailbox { - if let Some(MailboxListEntry { - id_lww: (_, Some(id)), - uidvalidity, - }) = self.0.get(name) - { - return CreatedMailbox::Existed(*id, *uidvalidity); - } - - let id = gen_ident(); - let uidvalidity = self.set_mailbox(name, Some(id)).unwrap(); - CreatedMailbox::Created(id, uidvalidity) - } - - pub(crate) fn rename_mailbox(&mut self, old_name: &str, new_name: &str) -> Result<()> { - if let Some((uidvalidity, Some(mbid))) = self.get_mailbox(old_name) { - if self.has_mailbox(new_name) { - bail!( - "Cannot rename {} into {}: {} already exists", - old_name, - new_name, - new_name - ); - } - - self.set_mailbox(old_name, None); - self.set_mailbox(new_name, Some(mbid)); - self.update_uidvalidity(new_name, uidvalidity); - Ok(()) - } else { - bail!( - "Cannot rename {} into {}: {} doesn't exist", - old_name, - new_name, - old_name - ); - } - } -} - -pub(crate) enum CreatedMailbox { - Created(UniqueIdent, ImapUidvalidity), - Existed(UniqueIdent, ImapUidvalidity), -} diff --git a/aero-collections/mail/query.rs b/aero-collections/mail/query.rs deleted file mode 100644 index 3e6fe99..0000000 --- a/aero-collections/mail/query.rs +++ /dev/null @@ -1,137 +0,0 @@ -use super::mailbox::MailMeta; -use super::snapshot::FrozenMailbox; -use super::unique_ident::UniqueIdent; -use anyhow::Result; -use futures::future::FutureExt; -use futures::stream::{BoxStream, Stream, StreamExt}; - -/// Query is in charge of fetching efficiently -/// requested data for a list of emails -pub struct Query<'a, 'b> { - pub frozen: &'a FrozenMailbox, - pub emails: &'b [UniqueIdent], - pub scope: QueryScope, -} - -#[derive(Debug)] -pub enum QueryScope { - Index, - Partial, - Full, -} -impl QueryScope { - pub fn union(&self, other: &QueryScope) -> QueryScope { - match (self, other) { - (QueryScope::Full, _) | (_, QueryScope::Full) => QueryScope::Full, - (QueryScope::Partial, _) | (_, QueryScope::Partial) => QueryScope::Partial, - (QueryScope::Index, QueryScope::Index) => QueryScope::Index, - } - } -} - -//type QueryResultStream = Box>>; - -impl<'a, 'b> Query<'a, 'b> { - pub fn fetch(&self) -> BoxStream> { - match self.scope { - QueryScope::Index => Box::pin( - futures::stream::iter(self.emails) - .map(|&uuid| Ok(QueryResult::IndexResult { uuid })), - ), - QueryScope::Partial => Box::pin(self.partial()), - QueryScope::Full => Box::pin(self.full()), - } - } - - // --- functions below are private *for reasons* - fn partial<'d>(&'d self) -> impl Stream> + 'd + Send { - async move { - let maybe_meta_list: Result> = - self.frozen.mailbox.fetch_meta(self.emails).await; - let list_res = maybe_meta_list - .map(|meta_list| { - meta_list - .into_iter() - .zip(self.emails) - .map(|(metadata, &uuid)| Ok(QueryResult::PartialResult { uuid, metadata })) - .collect() - }) - .unwrap_or_else(|e| vec![Err(e)]); - - futures::stream::iter(list_res) - } - .flatten_stream() - } - - fn full<'d>(&'d self) -> impl Stream> + 'd + Send { - self.partial().then(move |maybe_meta| async move { - let meta = maybe_meta?; - - let content = self - .frozen - .mailbox - .fetch_full( - *meta.uuid(), - &meta - .metadata() - .expect("meta to be PartialResult") - .message_key, - ) - .await?; - - Ok(meta.into_full(content).expect("meta to be PartialResult")) - }) - } -} - -#[derive(Debug, Clone)] -pub enum QueryResult { - IndexResult { - uuid: UniqueIdent, - }, - PartialResult { - uuid: UniqueIdent, - metadata: MailMeta, 
- }, - FullResult { - uuid: UniqueIdent, - metadata: MailMeta, - content: Vec, - }, -} -impl QueryResult { - pub fn uuid(&self) -> &UniqueIdent { - match self { - Self::IndexResult { uuid, .. } => uuid, - Self::PartialResult { uuid, .. } => uuid, - Self::FullResult { uuid, .. } => uuid, - } - } - - pub fn metadata(&self) -> Option<&MailMeta> { - match self { - Self::IndexResult { .. } => None, - Self::PartialResult { metadata, .. } => Some(metadata), - Self::FullResult { metadata, .. } => Some(metadata), - } - } - - #[allow(dead_code)] - pub fn content(&self) -> Option<&[u8]> { - match self { - Self::FullResult { content, .. } => Some(content), - _ => None, - } - } - - fn into_full(self, content: Vec) -> Option { - match self { - Self::PartialResult { uuid, metadata } => Some(Self::FullResult { - uuid, - metadata, - content, - }), - _ => None, - } - } -} diff --git a/aero-collections/mail/snapshot.rs b/aero-collections/mail/snapshot.rs deleted file mode 100644 index ed756b5..0000000 --- a/aero-collections/mail/snapshot.rs +++ /dev/null @@ -1,60 +0,0 @@ -use std::sync::Arc; - -use anyhow::Result; - -use super::mailbox::Mailbox; -use super::query::{Query, QueryScope}; -use super::uidindex::UidIndex; -use super::unique_ident::UniqueIdent; - -/// A Frozen Mailbox has a snapshot of the current mailbox -/// state that is desynchronized with the real mailbox state. -/// It's up to the user to choose when their snapshot must be updated -/// to give useful information to their clients -pub struct FrozenMailbox { - pub mailbox: Arc, - pub snapshot: UidIndex, -} - -impl FrozenMailbox { - /// Create a snapshot from a mailbox, the mailbox + the snapshot - /// becomes the "Frozen Mailbox". - pub async fn new(mailbox: Arc) -> Self { - let state = mailbox.current_uid_index().await; - - Self { - mailbox, - snapshot: state, - } - } - - /// Force the synchronization of the inner mailbox - /// but do not update the local snapshot - pub async fn sync(&self) -> Result<()> { - self.mailbox.opportunistic_sync().await - } - - /// Peek snapshot without updating the frozen mailbox - /// Can be useful if you want to plan some writes - /// while sending a diff to the client later - pub async fn peek(&self) -> UidIndex { - self.mailbox.current_uid_index().await - } - - /// Update the FrozenMailbox local snapshot. - /// Returns the old snapshot, so you can build a diff - pub async fn update(&mut self) -> UidIndex { - let old_snapshot = self.snapshot.clone(); - self.snapshot = self.mailbox.current_uid_index().await; - - old_snapshot - } - - pub fn query<'a, 'b>(&'a self, uuids: &'b [UniqueIdent], scope: QueryScope) -> Query<'a, 'b> { - Query { - frozen: self, - emails: uuids, - scope, - } - } -} diff --git a/aero-collections/mail/uidindex.rs b/aero-collections/mail/uidindex.rs deleted file mode 100644 index 5a06670..0000000 --- a/aero-collections/mail/uidindex.rs +++ /dev/null @@ -1,474 +0,0 @@ -use std::num::{NonZeroU32, NonZeroU64}; - -use im::{HashMap, OrdMap, OrdSet}; -use serde::{Deserialize, Deserializer, Serialize, Serializer}; - -use crate::bayou::*; -use crate::mail::unique_ident::UniqueIdent; - -pub type ModSeq = NonZeroU64; -pub type ImapUid = NonZeroU32; -pub type ImapUidvalidity = NonZeroU32; -pub type Flag = String; -pub type IndexEntry = (ImapUid, ModSeq, Vec); - -/// A UidIndex handles the mutable part of a mailbox -/// It is built by running the event log on it -/// Each applied log generates a new UidIndex by cloning the previous one -/// and applying the event. 
This is why we use immutable datastructures: -/// they are cheap to clone. -#[derive(Clone)] -pub struct UidIndex { - // Source of trust - pub table: OrdMap, - - // Indexes optimized for queries - pub idx_by_uid: OrdMap, - pub idx_by_modseq: OrdMap, - pub idx_by_flag: FlagIndex, - - // "Public" Counters - pub uidvalidity: ImapUidvalidity, - pub uidnext: ImapUid, - pub highestmodseq: ModSeq, - - // "Internal" Counters - pub internalseq: ImapUid, - pub internalmodseq: ModSeq, -} - -#[derive(Clone, Serialize, Deserialize, Debug)] -pub enum UidIndexOp { - MailAdd(UniqueIdent, ImapUid, ModSeq, Vec), - MailDel(UniqueIdent), - FlagAdd(UniqueIdent, ModSeq, Vec), - FlagDel(UniqueIdent, ModSeq, Vec), - FlagSet(UniqueIdent, ModSeq, Vec), - BumpUidvalidity(u32), -} - -impl UidIndex { - #[must_use] - pub fn op_mail_add(&self, ident: UniqueIdent, flags: Vec) -> UidIndexOp { - UidIndexOp::MailAdd(ident, self.internalseq, self.internalmodseq, flags) - } - - #[must_use] - pub fn op_mail_del(&self, ident: UniqueIdent) -> UidIndexOp { - UidIndexOp::MailDel(ident) - } - - #[must_use] - pub fn op_flag_add(&self, ident: UniqueIdent, flags: Vec) -> UidIndexOp { - UidIndexOp::FlagAdd(ident, self.internalmodseq, flags) - } - - #[must_use] - pub fn op_flag_del(&self, ident: UniqueIdent, flags: Vec) -> UidIndexOp { - UidIndexOp::FlagDel(ident, self.internalmodseq, flags) - } - - #[must_use] - pub fn op_flag_set(&self, ident: UniqueIdent, flags: Vec) -> UidIndexOp { - UidIndexOp::FlagSet(ident, self.internalmodseq, flags) - } - - #[must_use] - pub fn op_bump_uidvalidity(&self, count: u32) -> UidIndexOp { - UidIndexOp::BumpUidvalidity(count) - } - - // INTERNAL functions to keep state consistent - - fn reg_email(&mut self, ident: UniqueIdent, uid: ImapUid, modseq: ModSeq, flags: &[Flag]) { - // Insert the email in our table - self.table.insert(ident, (uid, modseq, flags.to_owned())); - - // Update the indexes/caches - self.idx_by_uid.insert(uid, ident); - self.idx_by_flag.insert(uid, flags); - self.idx_by_modseq.insert(modseq, ident); - } - - fn unreg_email(&mut self, ident: &UniqueIdent) { - // We do nothing if the mail does not exist - let (uid, modseq, flags) = match self.table.get(ident) { - Some(v) => v, - None => return, - }; - - // Delete all cache entries - self.idx_by_uid.remove(uid); - self.idx_by_flag.remove(*uid, flags); - self.idx_by_modseq.remove(modseq); - - // Remove from source of trust - self.table.remove(ident); - } -} - -impl Default for UidIndex { - fn default() -> Self { - Self { - table: OrdMap::new(), - - idx_by_uid: OrdMap::new(), - idx_by_modseq: OrdMap::new(), - idx_by_flag: FlagIndex::new(), - - uidvalidity: NonZeroU32::new(1).unwrap(), - uidnext: NonZeroU32::new(1).unwrap(), - highestmodseq: NonZeroU64::new(1).unwrap(), - - internalseq: NonZeroU32::new(1).unwrap(), - internalmodseq: NonZeroU64::new(1).unwrap(), - } - } -} - -impl BayouState for UidIndex { - type Op = UidIndexOp; - - fn apply(&self, op: &UidIndexOp) -> Self { - let mut new = self.clone(); - match op { - UidIndexOp::MailAdd(ident, uid, modseq, flags) => { - // Change UIDValidity if there is a UID conflict or a MODSEQ conflict - // @FIXME Need to prove that summing work - // The intuition: we increase the UIDValidity by the number of possible conflicts - if *uid < new.internalseq || *modseq < new.internalmodseq { - let bump_uid = new.internalseq.get() - uid.get(); - let bump_modseq = (new.internalmodseq.get() - modseq.get()) as u32; - new.uidvalidity = - NonZeroU32::new(new.uidvalidity.get() + bump_uid + 
bump_modseq).unwrap(); - } - - // Assign the real uid of the email - let new_uid = new.internalseq; - - // Assign the real modseq of the email and its new flags - let new_modseq = new.internalmodseq; - - // Delete the previous entry if any. - // Our proof has no assumption on `ident` uniqueness, - // so we must handle this case even it is very unlikely - // In this case, we overwrite the email. - // Note: assigning a new UID is mandatory. - new.unreg_email(ident); - - // We record our email and update ou caches - new.reg_email(*ident, new_uid, new_modseq, flags); - - // Update counters - new.highestmodseq = new.internalmodseq; - - new.internalseq = NonZeroU32::new(new.internalseq.get() + 1).unwrap(); - new.internalmodseq = NonZeroU64::new(new.internalmodseq.get() + 1).unwrap(); - - new.uidnext = new.internalseq; - } - UidIndexOp::MailDel(ident) => { - // If the email is known locally, we remove its references in all our indexes - new.unreg_email(ident); - - // We update the counter - new.internalseq = NonZeroU32::new(new.internalseq.get() + 1).unwrap(); - } - UidIndexOp::FlagAdd(ident, candidate_modseq, new_flags) => { - if let Some((uid, email_modseq, existing_flags)) = new.table.get_mut(ident) { - // Bump UIDValidity if required - if *candidate_modseq < new.internalmodseq { - let bump_modseq = - (new.internalmodseq.get() - candidate_modseq.get()) as u32; - new.uidvalidity = - NonZeroU32::new(new.uidvalidity.get() + bump_modseq).unwrap(); - } - - // Add flags to the source of trust and the cache - let mut to_add: Vec = new_flags - .iter() - .filter(|f| !existing_flags.contains(f)) - .cloned() - .collect(); - new.idx_by_flag.insert(*uid, &to_add); - *email_modseq = new.internalmodseq; - new.idx_by_modseq.insert(new.internalmodseq, *ident); - existing_flags.append(&mut to_add); - - // Update counters - new.highestmodseq = new.internalmodseq; - new.internalmodseq = NonZeroU64::new(new.internalmodseq.get() + 1).unwrap(); - } - } - UidIndexOp::FlagDel(ident, candidate_modseq, rm_flags) => { - if let Some((uid, email_modseq, existing_flags)) = new.table.get_mut(ident) { - // Bump UIDValidity if required - if *candidate_modseq < new.internalmodseq { - let bump_modseq = - (new.internalmodseq.get() - candidate_modseq.get()) as u32; - new.uidvalidity = - NonZeroU32::new(new.uidvalidity.get() + bump_modseq).unwrap(); - } - - // Remove flags from the source of trust and the cache - existing_flags.retain(|x| !rm_flags.contains(x)); - new.idx_by_flag.remove(*uid, rm_flags); - - // Register that email has been modified - new.idx_by_modseq.insert(new.internalmodseq, *ident); - *email_modseq = new.internalmodseq; - - // Update counters - new.highestmodseq = new.internalmodseq; - new.internalmodseq = NonZeroU64::new(new.internalmodseq.get() + 1).unwrap(); - } - } - UidIndexOp::FlagSet(ident, candidate_modseq, new_flags) => { - if let Some((uid, email_modseq, existing_flags)) = new.table.get_mut(ident) { - // Bump UIDValidity if required - if *candidate_modseq < new.internalmodseq { - let bump_modseq = - (new.internalmodseq.get() - candidate_modseq.get()) as u32; - new.uidvalidity = - NonZeroU32::new(new.uidvalidity.get() + bump_modseq).unwrap(); - } - - // Remove flags from the source of trust and the cache - let (keep_flags, rm_flags): (Vec, Vec) = existing_flags - .iter() - .cloned() - .partition(|x| new_flags.contains(x)); - *existing_flags = keep_flags; - let mut to_add: Vec = new_flags - .iter() - .filter(|f| !existing_flags.contains(f)) - .cloned() - .collect(); - existing_flags.append(&mut to_add); 
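
The UIDVALIDITY bump rule used by MailAdd above (and, for the MODSEQ part only, by the Flag* operations) adds the number of potentially conflicting UID and MODSEQ assignments to UIDVALIDITY, forcing clients to drop their cached UID map. A worked example with illustrative numbers:

```rust
fn uidvalidity_bump_example() {
    let internalseq: u32 = 7;     // next UID this replica would assign
    let internalmodseq: u64 = 12; // next MODSEQ this replica would assign
    let op_uid: u32 = 5;          // UID carried by the concurrent MailAdd
    let op_modseq: u64 = 9;       // MODSEQ carried by the concurrent MailAdd

    // Same arithmetic as the MailAdd branch: one unit per possible conflict.
    let bump_uid = internalseq - op_uid;                   // 2
    let bump_modseq = (internalmodseq - op_modseq) as u32; // 3
    let uidvalidity_before: u32 = 1;
    let uidvalidity_after = uidvalidity_before + bump_uid + bump_modseq;
    assert_eq!(uidvalidity_after, 6);
}
```
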
- new.idx_by_flag.remove(*uid, &rm_flags); - new.idx_by_flag.insert(*uid, &to_add); - - // Register that email has been modified - new.idx_by_modseq.insert(new.internalmodseq, *ident); - *email_modseq = new.internalmodseq; - - // Update counters - new.highestmodseq = new.internalmodseq; - new.internalmodseq = NonZeroU64::new(new.internalmodseq.get() + 1).unwrap(); - } - } - UidIndexOp::BumpUidvalidity(count) => { - new.uidvalidity = ImapUidvalidity::new(new.uidvalidity.get() + *count) - .unwrap_or(ImapUidvalidity::new(u32::MAX).unwrap()); - } - } - new - } -} - -// ---- FlagIndex implementation ---- - -#[derive(Clone)] -pub struct FlagIndex(HashMap>); -pub type FlagIter<'a> = im::hashmap::Keys<'a, Flag, OrdSet>; - -impl FlagIndex { - fn new() -> Self { - Self(HashMap::new()) - } - fn insert(&mut self, uid: ImapUid, flags: &[Flag]) { - flags.iter().for_each(|flag| { - self.0 - .entry(flag.clone()) - .or_insert(OrdSet::new()) - .insert(uid); - }); - } - fn remove(&mut self, uid: ImapUid, flags: &[Flag]) { - for flag in flags.iter() { - if let Some(set) = self.0.get_mut(flag) { - set.remove(&uid); - if set.is_empty() { - self.0.remove(flag); - } - } - } - } - - pub fn get(&self, f: &Flag) -> Option<&OrdSet> { - self.0.get(f) - } - - pub fn flags(&self) -> FlagIter { - self.0.keys() - } -} - -// ---- CUSTOM SERIALIZATION AND DESERIALIZATION ---- - -#[derive(Serialize, Deserialize)] -struct UidIndexSerializedRepr { - mails: Vec<(ImapUid, ModSeq, UniqueIdent, Vec)>, - - uidvalidity: ImapUidvalidity, - uidnext: ImapUid, - highestmodseq: ModSeq, - - internalseq: ImapUid, - internalmodseq: ModSeq, -} - -impl<'de> Deserialize<'de> for UidIndex { - fn deserialize(d: D) -> Result - where - D: Deserializer<'de>, - { - let val: UidIndexSerializedRepr = UidIndexSerializedRepr::deserialize(d)?; - - let mut uidindex = UidIndex { - table: OrdMap::new(), - - idx_by_uid: OrdMap::new(), - idx_by_modseq: OrdMap::new(), - idx_by_flag: FlagIndex::new(), - - uidvalidity: val.uidvalidity, - uidnext: val.uidnext, - highestmodseq: val.highestmodseq, - - internalseq: val.internalseq, - internalmodseq: val.internalmodseq, - }; - - val.mails - .iter() - .for_each(|(uid, modseq, uuid, flags)| uidindex.reg_email(*uuid, *uid, *modseq, flags)); - - Ok(uidindex) - } -} - -impl Serialize for UidIndex { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - let mut mails = vec![]; - for (ident, (uid, modseq, flags)) in self.table.iter() { - mails.push((*uid, *modseq, *ident, flags.clone())); - } - - let val = UidIndexSerializedRepr { - mails, - uidvalidity: self.uidvalidity, - uidnext: self.uidnext, - highestmodseq: self.highestmodseq, - internalseq: self.internalseq, - internalmodseq: self.internalmodseq, - }; - - val.serialize(serializer) - } -} - -// ---- TESTS ---- - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_uidindex() { - let mut state = UidIndex::default(); - - // Add message 1 - { - let m = UniqueIdent([0x01; 24]); - let f = vec!["\\Recent".to_string(), "\\Archive".to_string()]; - let ev = state.op_mail_add(m, f); - state = state.apply(&ev); - - // Early checks - assert_eq!(state.table.len(), 1); - let (uid, modseq, flags) = state.table.get(&m).unwrap(); - assert_eq!(*uid, NonZeroU32::new(1).unwrap()); - assert_eq!(*modseq, NonZeroU64::new(1).unwrap()); - assert_eq!(flags.len(), 2); - let ident = state.idx_by_uid.get(&NonZeroU32::new(1).unwrap()).unwrap(); - assert_eq!(&m, ident); - let recent = state.idx_by_flag.0.get("\\Recent").unwrap(); - assert_eq!(recent.len(), 1); - 
assert_eq!(recent.iter().next().unwrap(), &NonZeroU32::new(1).unwrap()); - assert_eq!(state.uidnext, NonZeroU32::new(2).unwrap()); - assert_eq!(state.uidvalidity, NonZeroU32::new(1).unwrap()); - } - - // Add message 2 - { - let m = UniqueIdent([0x02; 24]); - let f = vec!["\\Seen".to_string(), "\\Archive".to_string()]; - let ev = state.op_mail_add(m, f); - state = state.apply(&ev); - - let archive = state.idx_by_flag.0.get("\\Archive").unwrap(); - assert_eq!(archive.len(), 2); - } - - // Add flags to message 1 - { - let m = UniqueIdent([0x01; 24]); - let f = vec!["Important".to_string(), "$cl_1".to_string()]; - let ev = state.op_flag_add(m, f); - state = state.apply(&ev); - } - - // Delete flags from message 1 - { - let m = UniqueIdent([0x01; 24]); - let f = vec!["\\Recent".to_string()]; - let ev = state.op_flag_del(m, f); - state = state.apply(&ev); - - let archive = state.idx_by_flag.0.get("\\Archive").unwrap(); - assert_eq!(archive.len(), 2); - } - - // Delete message 2 - { - let m = UniqueIdent([0x02; 24]); - let ev = state.op_mail_del(m); - state = state.apply(&ev); - - let archive = state.idx_by_flag.0.get("\\Archive").unwrap(); - assert_eq!(archive.len(), 1); - } - - // Add a message 3 concurrent to message 1 (trigger a uid validity change) - { - let m = UniqueIdent([0x03; 24]); - let f = vec!["\\Archive".to_string(), "\\Recent".to_string()]; - let ev = UidIndexOp::MailAdd( - m, - NonZeroU32::new(1).unwrap(), - NonZeroU64::new(1).unwrap(), - f, - ); - state = state.apply(&ev); - } - - // Checks - { - assert_eq!(state.table.len(), 2); - assert!(state.uidvalidity > NonZeroU32::new(1).unwrap()); - - let (last_uid, ident) = state.idx_by_uid.get_max().unwrap(); - assert_eq!(ident, &UniqueIdent([0x03; 24])); - - let archive = state.idx_by_flag.0.get("\\Archive").unwrap(); - assert_eq!(archive.len(), 2); - let mut iter = archive.iter(); - assert_eq!(iter.next().unwrap(), &NonZeroU32::new(1).unwrap()); - assert_eq!(iter.next().unwrap(), last_uid); - } - } -} diff --git a/aero-collections/mail/unique_ident.rs b/aero-collections/mail/unique_ident.rs deleted file mode 100644 index 0e629db..0000000 --- a/aero-collections/mail/unique_ident.rs +++ /dev/null @@ -1,101 +0,0 @@ -use std::str::FromStr; -use std::sync::atomic::{AtomicU64, Ordering}; - -use lazy_static::lazy_static; -use rand::prelude::*; -use serde::{de::Error, Deserialize, Deserializer, Serialize, Serializer}; - -use crate::timestamp::now_msec; - -/// An internal Mail Identifier is composed of two components: -/// - a process identifier, 128 bits, itself composed of: -/// - the timestamp of when the process started, 64 bits -/// - a 64-bit random number -/// - a sequence number, 64 bits -/// They are not part of the protocol but an internal representation -/// required by Aerogramme. -/// Their main property is to be unique without having to rely -/// on synchronization between IMAP processes. -#[derive(Clone, Copy, PartialOrd, Ord, PartialEq, Eq, Hash)] -pub struct UniqueIdent(pub [u8; 24]); - -struct IdentGenerator { - pid: u128, - sn: AtomicU64, -} - -impl IdentGenerator { - fn new() -> Self { - let time = now_msec() as u128; - let rand = thread_rng().gen::() as u128; - Self { - pid: (time << 64) | rand, - sn: AtomicU64::new(0), - } - } - - fn gen(&self) -> UniqueIdent { - let sn = self.sn.fetch_add(1, Ordering::Relaxed); - let mut res = [0u8; 24]; - res[0..16].copy_from_slice(&u128::to_be_bytes(self.pid)); - res[16..24].copy_from_slice(&u64::to_be_bytes(sn)); - UniqueIdent(res) - } -} - -lazy_static! 
{ - static ref GENERATOR: IdentGenerator = IdentGenerator::new(); -} - -pub fn gen_ident() -> UniqueIdent { - GENERATOR.gen() -} - -// -- serde -- - -impl<'de> Deserialize<'de> for UniqueIdent { - fn deserialize(d: D) -> Result - where - D: Deserializer<'de>, - { - let v = String::deserialize(d)?; - UniqueIdent::from_str(&v).map_err(D::Error::custom) - } -} - -impl Serialize for UniqueIdent { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - serializer.serialize_str(&self.to_string()) - } -} - -impl std::fmt::Display for UniqueIdent { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", hex::encode(self.0)) - } -} - -impl std::fmt::Debug for UniqueIdent { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", hex::encode(self.0)) - } -} - -impl FromStr for UniqueIdent { - type Err = &'static str; - - fn from_str(s: &str) -> Result { - let bytes = hex::decode(s).map_err(|_| "invalid hex")?; - - if bytes.len() != 24 { - return Err("bad length"); - } - - let mut tmp = [0u8; 24]; - tmp[..].copy_from_slice(&bytes); - Ok(UniqueIdent(tmp)) - } -} diff --git a/aero-collections/src/calendar/mod.rs b/aero-collections/src/calendar/mod.rs new file mode 100644 index 0000000..19e3340 --- /dev/null +++ b/aero-collections/src/calendar/mod.rs @@ -0,0 +1 @@ +//@FIXME Event Index diff --git a/aero-collections/src/lib.rs b/aero-collections/src/lib.rs new file mode 100644 index 0000000..adcfc93 --- /dev/null +++ b/aero-collections/src/lib.rs @@ -0,0 +1,3 @@ +pub mod user; +pub mod mail; +pub mod calendar; diff --git a/aero-collections/src/mail/incoming.rs b/aero-collections/src/mail/incoming.rs new file mode 100644 index 0000000..8220461 --- /dev/null +++ b/aero-collections/src/mail/incoming.rs @@ -0,0 +1,443 @@ +use std::sync::{Arc, Weak}; +use std::time::Duration; + +use anyhow::{anyhow, bail, Result}; +use base64::Engine; +use futures::{future::BoxFuture, FutureExt}; +//use tokio::io::AsyncReadExt; +use tokio::sync::watch; +use tracing::{debug, error, info, warn}; + +use aero_user::cryptoblob; +use aero_user::login::{Credentials, PublicCredentials}; +use aero_user::storage; +use aero_bayou::timestamp::now_msec; + +use crate::mail::mailbox::Mailbox; +use crate::mail::uidindex::ImapUidvalidity; +use crate::mail::unique_ident::*; +use crate::user::User; +use crate::mail::IMF; + +const INCOMING_PK: &str = "incoming"; +const INCOMING_LOCK_SK: &str = "lock"; +const INCOMING_WATCH_SK: &str = "watch"; + +const MESSAGE_KEY: &str = "message-key"; + +// When a lock is held, it is held for LOCK_DURATION (here 5 minutes) +// It is renewed every LOCK_DURATION/3 +// If we are at 2*LOCK_DURATION/3 and haven't renewed, we assume we +// lost the lock. 
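
The constants just below and the k2v_lock_loop further down implement a lease on a single K2V row: the row value is exactly 32 bytes, a big-endian u64 expiration timestamp in milliseconds followed by the 24-byte identifier of the holding process, and the candidate with the highest (timestamp, pid) pair is treated as the holder. A minimal round-trip sketch of that encoding (the helper names are illustrative, not part of the patch):

```rust
fn encode_lock(expires_at_ms: u64, pid: [u8; 24]) -> [u8; 32] {
    let mut v = [0u8; 32];
    v[..8].copy_from_slice(&expires_at_ms.to_be_bytes());
    v[8..].copy_from_slice(&pid);
    v
}

fn decode_lock(v: &[u8]) -> Option<(u64, [u8; 24])> {
    if v.len() != 32 {
        return None; // values of any other length are ignored by the watcher
    }
    let ts = u64::from_be_bytes(v[..8].try_into().unwrap());
    let pid: [u8; 24] = v[8..].try_into().unwrap();
    Some((ts, pid))
}

#[test]
fn lock_value_roundtrip() {
    let v = encode_lock(1_700_000_000_000, [0x42; 24]);
    assert_eq!(decode_lock(&v), Some((1_700_000_000_000, [0x42; 24])));
}
```
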
+const LOCK_DURATION: Duration = Duration::from_secs(300); + +// In addition to checking when notified, also check for new mail every 10 minutes +const MAIL_CHECK_INTERVAL: Duration = Duration::from_secs(600); + +pub async fn incoming_mail_watch_process( + user: Weak, + creds: Credentials, + rx_inbox_id: watch::Receiver>, +) { + if let Err(e) = incoming_mail_watch_process_internal(user, creds, rx_inbox_id).await { + error!("Error in incoming mail watch process: {}", e); + } +} + +async fn incoming_mail_watch_process_internal( + user: Weak, + creds: Credentials, + mut rx_inbox_id: watch::Receiver>, +) -> Result<()> { + let mut lock_held = k2v_lock_loop( + creds.storage.build().await?, + storage::RowRef::new(INCOMING_PK, INCOMING_LOCK_SK), + ); + let storage = creds.storage.build().await?; + + let mut inbox: Option> = None; + let mut incoming_key = storage::RowRef::new(INCOMING_PK, INCOMING_WATCH_SK); + + loop { + let maybe_updated_incoming_key = if *lock_held.borrow() { + debug!("incoming lock held"); + + let wait_new_mail = async { + loop { + match storage.row_poll(&incoming_key).await { + Ok(row_val) => break row_val.row_ref, + Err(e) => { + error!("Error in wait_new_mail: {}", e); + tokio::time::sleep(Duration::from_secs(30)).await; + } + } + } + }; + + tokio::select! { + inc_k = wait_new_mail => Some(inc_k), + _ = tokio::time::sleep(MAIL_CHECK_INTERVAL) => Some(incoming_key.clone()), + _ = lock_held.changed() => None, + _ = rx_inbox_id.changed() => None, + } + } else { + debug!("incoming lock not held"); + tokio::select! { + _ = lock_held.changed() => None, + _ = rx_inbox_id.changed() => None, + } + }; + + let user = match Weak::upgrade(&user) { + Some(user) => user, + None => { + debug!("User no longer available, exiting incoming loop."); + break; + } + }; + debug!("User still available"); + + // If INBOX no longer is same mailbox, open new mailbox + let inbox_id = *rx_inbox_id.borrow(); + if let Some((id, uidvalidity)) = inbox_id { + if Some(id) != inbox.as_ref().map(|b| b.id) { + match user.open_mailbox_by_id(id, uidvalidity).await { + Ok(mb) => { + inbox = Some(mb); + } + Err(e) => { + inbox = None; + error!("Error when opening inbox ({}): {}", id, e); + tokio::time::sleep(Duration::from_secs(30)).await; + continue; + } + } + } + } + + // If we were able to open INBOX, and we have mail, + // fetch new mail + if let (Some(inbox), Some(updated_incoming_key)) = (&inbox, maybe_updated_incoming_key) { + match handle_incoming_mail(&user, &storage, inbox, &lock_held).await { + Ok(()) => { + incoming_key = updated_incoming_key; + } + Err(e) => { + error!("Could not fetch incoming mail: {}", e); + tokio::time::sleep(Duration::from_secs(30)).await; + } + } + } + } + drop(rx_inbox_id); + Ok(()) +} + +async fn handle_incoming_mail( + user: &Arc, + storage: &storage::Store, + inbox: &Arc, + lock_held: &watch::Receiver, +) -> Result<()> { + let mails_res = storage.blob_list("incoming/").await?; + + for object in mails_res { + if !*lock_held.borrow() { + break; + } + let key = object.0; + if let Some(mail_id) = key.strip_prefix("incoming/") { + if let Ok(mail_id) = mail_id.parse::() { + move_incoming_message(user, storage, inbox, mail_id).await?; + } + } + } + + Ok(()) +} + +async fn move_incoming_message( + user: &Arc, + storage: &storage::Store, + inbox: &Arc, + id: UniqueIdent, +) -> Result<()> { + info!("Moving incoming message: {}", id); + + let object_key = format!("incoming/{}", id); + + // 1. 
Fetch message from S3 + let object = storage.blob_fetch(&storage::BlobRef(object_key)).await?; + + // 1.a decrypt message key from headers + //info!("Object metadata: {:?}", get_result.metadata); + let key_encrypted_b64 = object + .meta + .get(MESSAGE_KEY) + .ok_or(anyhow!("Missing key in metadata"))?; + let key_encrypted = base64::engine::general_purpose::STANDARD.decode(key_encrypted_b64)?; + let message_key = sodiumoxide::crypto::sealedbox::open( + &key_encrypted, + &user.creds.keys.public, + &user.creds.keys.secret, + ) + .map_err(|_| anyhow!("Cannot decrypt message key"))?; + let message_key = + cryptoblob::Key::from_slice(&message_key).ok_or(anyhow!("Invalid message key"))?; + + // 1.b retrieve message body + let obj_body = object.value; + let plain_mail = cryptoblob::open(&obj_body, &message_key) + .map_err(|_| anyhow!("Cannot decrypt email content"))?; + + // 2 parse mail and add to inbox + let msg = IMF::try_from(&plain_mail[..]).map_err(|_| anyhow!("Invalid email body"))?; + inbox + .append_from_s3(msg, id, object.blob_ref.clone(), message_key) + .await?; + + // 3 delete from incoming + storage.blob_rm(&object.blob_ref).await?; + + Ok(()) +} + +// ---- UTIL: K2V locking loop, use this to try to grab a lock using a K2V entry as a signal ---- + +fn k2v_lock_loop(storage: storage::Store, row_ref: storage::RowRef) -> watch::Receiver { + let (held_tx, held_rx) = watch::channel(false); + + tokio::spawn(k2v_lock_loop_internal(storage, row_ref, held_tx)); + + held_rx +} + +#[derive(Clone, Debug)] +enum LockState { + Unknown, + Empty, + Held(UniqueIdent, u64, storage::RowRef), +} + +async fn k2v_lock_loop_internal( + storage: storage::Store, + row_ref: storage::RowRef, + held_tx: watch::Sender, +) { + let (state_tx, mut state_rx) = watch::channel::(LockState::Unknown); + let mut state_rx_2 = state_rx.clone(); + + let our_pid = gen_ident(); + + // Loop 1: watch state of lock in K2V, save that in corresponding watch channel + let watch_lock_loop: BoxFuture> = async { + let mut ct = row_ref.clone(); + loop { + debug!("k2v watch lock loop iter: ct = {:?}", ct); + match storage.row_poll(&ct).await { + Err(e) => { + error!( + "Error in k2v wait value changed: {} ; assuming we no longer hold lock.", + e + ); + state_tx.send(LockState::Unknown)?; + tokio::time::sleep(Duration::from_secs(30)).await; + } + Ok(cv) => { + let mut lock_state = None; + for v in cv.value.iter() { + if let storage::Alternative::Value(vbytes) = v { + if vbytes.len() == 32 { + let ts = u64::from_be_bytes(vbytes[..8].try_into().unwrap()); + let pid = UniqueIdent(vbytes[8..].try_into().unwrap()); + if lock_state + .map(|(pid2, ts2)| ts > ts2 || (ts == ts2 && pid > pid2)) + .unwrap_or(true) + { + lock_state = Some((pid, ts)); + } + } + } + } + let new_ct = cv.row_ref; + + debug!( + "k2v watch lock loop: changed, old ct = {:?}, new ct = {:?}, v = {:?}", + ct, new_ct, lock_state + ); + state_tx.send( + lock_state + .map(|(pid, ts)| LockState::Held(pid, ts, new_ct.clone())) + .unwrap_or(LockState::Empty), + )?; + ct = new_ct; + } + } + } + } + .boxed(); + + // Loop 2: notify user whether we are holding the lock or not + let lock_notify_loop: BoxFuture> = async { + loop { + let now = now_msec(); + let held_with_expiration_time = match &*state_rx.borrow_and_update() { + LockState::Held(pid, ts, _ct) if *pid == our_pid => { + let expiration_time = *ts - (LOCK_DURATION / 3).as_millis() as u64; + if now < expiration_time { + Some(expiration_time) + } else { + None + } + } + _ => None, + }; + let held = 
held_with_expiration_time.is_some(); + if held != *held_tx.borrow() { + held_tx.send(held)?; + } + + let await_expired = async { + match held_with_expiration_time { + None => futures::future::pending().await, + Some(expiration_time) => { + tokio::time::sleep(Duration::from_millis(expiration_time - now)).await + } + }; + }; + + tokio::select!( + r = state_rx.changed() => { + r?; + } + _ = held_tx.closed() => bail!("held_tx closed, don't need to hold lock anymore"), + _ = await_expired => continue, + ); + } + } + .boxed(); + + // Loop 3: acquire lock when relevant + let take_lock_loop: BoxFuture> = async { + loop { + let now = now_msec(); + let state: LockState = state_rx_2.borrow_and_update().clone(); + let (acquire_at, ct) = match state { + LockState::Unknown => { + // If state of the lock is unknown, don't try to acquire + state_rx_2.changed().await?; + continue; + } + LockState::Empty => (now, None), + LockState::Held(pid, ts, ct) => { + if pid == our_pid { + (ts - (2 * LOCK_DURATION / 3).as_millis() as u64, Some(ct)) + } else { + (ts, Some(ct)) + } + } + }; + + // Wait until it is time to acquire lock + if acquire_at > now { + tokio::select!( + r = state_rx_2.changed() => { + // If lock state changed in the meantime, don't acquire and loop around + r?; + continue; + } + _ = tokio::time::sleep(Duration::from_millis(acquire_at - now)) => () + ); + } + + // Acquire lock + let mut lock = vec![0u8; 32]; + lock[..8].copy_from_slice(&u64::to_be_bytes( + now_msec() + LOCK_DURATION.as_millis() as u64, + )); + lock[8..].copy_from_slice(&our_pid.0); + let row = match ct { + Some(existing) => existing, + None => row_ref.clone(), + }; + if let Err(e) = storage + .row_insert(vec![storage::RowVal::new(row, lock)]) + .await + { + error!("Could not take lock: {}", e); + tokio::time::sleep(Duration::from_secs(30)).await; + } + + // Wait for new information to loop back + state_rx_2.changed().await?; + } + } + .boxed(); + + let _ = futures::try_join!(watch_lock_loop, lock_notify_loop, take_lock_loop); + + debug!("lock loop exited, releasing"); + + if !held_tx.is_closed() { + warn!("weird..."); + let _ = held_tx.send(false); + } + + // If lock is ours, release it + let release = match &*state_rx.borrow() { + LockState::Held(pid, _, ct) if *pid == our_pid => Some(ct.clone()), + _ => None, + }; + if let Some(ct) = release { + match storage.row_rm(&storage::Selector::Single(&ct)).await { + Err(e) => warn!("Unable to release lock {:?}: {}", ct, e), + Ok(_) => (), + }; + } +} + +// ---- LMTP SIDE: storing messages encrypted with user's pubkey ---- + +pub struct EncryptedMessage { + key: cryptoblob::Key, + encrypted_body: Vec, +} + +impl EncryptedMessage { + pub fn new(body: Vec) -> Result { + let key = cryptoblob::gen_key(); + let encrypted_body = cryptoblob::seal(&body, &key)?; + Ok(Self { + key, + encrypted_body, + }) + } + + pub async fn deliver_to(self: Arc, creds: PublicCredentials) -> Result<()> { + let storage = creds.storage.build().await?; + + // Get causality token of previous watch key + let query = storage::RowRef::new(INCOMING_PK, INCOMING_WATCH_SK); + let watch_ct = match storage.row_fetch(&storage::Selector::Single(&query)).await { + Err(_) => query, + Ok(cv) => cv.into_iter().next().map(|v| v.row_ref).unwrap_or(query), + }; + + // Write mail to encrypted storage + let encrypted_key = + sodiumoxide::crypto::sealedbox::seal(self.key.as_ref(), &creds.public_key); + let key_header = base64::engine::general_purpose::STANDARD.encode(&encrypted_key); + + let blob_val = storage::BlobVal::new( + 
storage::BlobRef(format!("incoming/{}", gen_ident())), + self.encrypted_body.clone().into(), + ) + .with_meta(MESSAGE_KEY.to_string(), key_header); + storage.blob_insert(blob_val).await?; + + // Update watch key to signal new mail + let watch_val = storage::RowVal::new(watch_ct.clone(), gen_ident().0.to_vec()); + storage.row_insert(vec![watch_val]).await?; + Ok(()) + } +} diff --git a/aero-collections/src/mail/mailbox.rs b/aero-collections/src/mail/mailbox.rs new file mode 100644 index 0000000..a767678 --- /dev/null +++ b/aero-collections/src/mail/mailbox.rs @@ -0,0 +1,525 @@ +use anyhow::{anyhow, bail, Result}; +use serde::{Deserialize, Serialize}; +use tokio::sync::RwLock; + +use aero_user::cryptoblob::{self, gen_key, open_deserialize, seal_serialize, Key}; +use aero_user::login::Credentials; +use aero_user::storage::{self, BlobRef, BlobVal, RowRef, RowVal, Selector, Store}; +use aero_bayou::Bayou; +use aero_bayou::timestamp::now_msec; + +use crate::mail::uidindex::*; +use crate::mail::unique_ident::*; +use crate::mail::IMF; + +pub struct Mailbox { + pub(super) id: UniqueIdent, + mbox: RwLock, +} + +impl Mailbox { + pub(crate) async fn open( + creds: &Credentials, + id: UniqueIdent, + min_uidvalidity: ImapUidvalidity, + ) -> Result { + let index_path = format!("index/{}", id); + let mail_path = format!("mail/{}", id); + + let mut uid_index = Bayou::::new(creds, index_path).await?; + uid_index.sync().await?; + + let uidvalidity = uid_index.state().uidvalidity; + if uidvalidity < min_uidvalidity { + uid_index + .push( + uid_index + .state() + .op_bump_uidvalidity(min_uidvalidity.get() - uidvalidity.get()), + ) + .await?; + } + + // @FIXME reporting through opentelemetry or some logs + // info on the "shape" of the mailbox would be welcomed + /* + dump(&uid_index); + */ + + let mbox = RwLock::new(MailboxInternal { + id, + encryption_key: creds.keys.master.clone(), + storage: creds.storage.build().await?, + uid_index, + mail_path, + }); + + Ok(Self { id, mbox }) + } + + /// Sync data with backing store + pub async fn force_sync(&self) -> Result<()> { + self.mbox.write().await.force_sync().await + } + + /// Sync data with backing store only if changes are detected + /// or last sync is too old + pub async fn opportunistic_sync(&self) -> Result<()> { + self.mbox.write().await.opportunistic_sync().await + } + + /// Block until a sync has been done (due to changes in the event log) + pub async fn notify(&self) -> std::sync::Weak { + self.mbox.read().await.notifier() + } + + // ---- Functions for reading the mailbox ---- + + /// Get a clone of the current UID Index of this mailbox + /// (cloning is cheap so don't hesitate to use this) + pub async fn current_uid_index(&self) -> UidIndex { + self.mbox.read().await.uid_index.state().clone() + } + + /// Fetch the metadata (headers + some more info) of the specified + /// mail IDs + pub async fn fetch_meta(&self, ids: &[UniqueIdent]) -> Result> { + self.mbox.read().await.fetch_meta(ids).await + } + + /// Fetch an entire e-mail + pub async fn fetch_full(&self, id: UniqueIdent, message_key: &Key) -> Result> { + self.mbox.read().await.fetch_full(id, message_key).await + } + + pub async fn frozen(self: &std::sync::Arc) -> super::snapshot::FrozenMailbox { + super::snapshot::FrozenMailbox::new(self.clone()).await + } + + // ---- Functions for changing the mailbox ---- + + /// Add flags to message + pub async fn add_flags<'a>(&self, id: UniqueIdent, flags: &[Flag]) -> Result<()> { + self.mbox.write().await.add_flags(id, flags).await + } + + /// Delete flags 
from message + pub async fn del_flags<'a>(&self, id: UniqueIdent, flags: &[Flag]) -> Result<()> { + self.mbox.write().await.del_flags(id, flags).await + } + + /// Define the new flags for this message + pub async fn set_flags<'a>(&self, id: UniqueIdent, flags: &[Flag]) -> Result<()> { + self.mbox.write().await.set_flags(id, flags).await + } + + /// Insert an email into the mailbox + pub async fn append<'a>( + &self, + msg: IMF<'a>, + ident: Option, + flags: &[Flag], + ) -> Result<(ImapUidvalidity, ImapUid, ModSeq)> { + self.mbox.write().await.append(msg, ident, flags).await + } + + /// Insert an email into the mailbox, copying it from an existing S3 object + pub async fn append_from_s3<'a>( + &self, + msg: IMF<'a>, + ident: UniqueIdent, + blob_ref: storage::BlobRef, + message_key: Key, + ) -> Result<()> { + self.mbox + .write() + .await + .append_from_s3(msg, ident, blob_ref, message_key) + .await + } + + /// Delete a message definitively from the mailbox + pub async fn delete<'a>(&self, id: UniqueIdent) -> Result<()> { + self.mbox.write().await.delete(id).await + } + + /// Copy an email from an other Mailbox to this mailbox + /// (use this when possible, as it allows for a certain number of storage optimizations) + pub async fn copy_from(&self, from: &Mailbox, uuid: UniqueIdent) -> Result { + if self.id == from.id { + bail!("Cannot copy into same mailbox"); + } + + let (mut selflock, fromlock); + if self.id < from.id { + selflock = self.mbox.write().await; + fromlock = from.mbox.write().await; + } else { + fromlock = from.mbox.write().await; + selflock = self.mbox.write().await; + }; + selflock.copy_from(&fromlock, uuid).await + } + + /// Move an email from an other Mailbox to this mailbox + /// (use this when possible, as it allows for a certain number of storage optimizations) + pub async fn move_from(&self, from: &Mailbox, uuid: UniqueIdent) -> Result<()> { + if self.id == from.id { + bail!("Cannot copy move same mailbox"); + } + + let (mut selflock, mut fromlock); + if self.id < from.id { + selflock = self.mbox.write().await; + fromlock = from.mbox.write().await; + } else { + fromlock = from.mbox.write().await; + selflock = self.mbox.write().await; + }; + selflock.move_from(&mut fromlock, uuid).await + } +} + +// ---- + +// Non standard but common flags: +// https://www.iana.org/assignments/imap-jmap-keywords/imap-jmap-keywords.xhtml +struct MailboxInternal { + // 2023-05-15 will probably be used later. 
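+    // Note (editor's sketch): the `id` field is not read anywhere in this impl yet,
+    // which is why the `allow(dead_code)` attribute below is needed.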
+ #[allow(dead_code)] + id: UniqueIdent, + mail_path: String, + encryption_key: Key, + storage: Store, + uid_index: Bayou, +} + +impl MailboxInternal { + async fn force_sync(&mut self) -> Result<()> { + self.uid_index.sync().await?; + Ok(()) + } + + async fn opportunistic_sync(&mut self) -> Result<()> { + self.uid_index.opportunistic_sync().await?; + Ok(()) + } + + fn notifier(&self) -> std::sync::Weak { + self.uid_index.notifier() + } + + // ---- Functions for reading the mailbox ---- + + async fn fetch_meta(&self, ids: &[UniqueIdent]) -> Result> { + let ids = ids.iter().map(|x| x.to_string()).collect::>(); + let ops = ids + .iter() + .map(|id| RowRef::new(self.mail_path.as_str(), id.as_str())) + .collect::>(); + let res_vec = self.storage.row_fetch(&Selector::List(ops)).await?; + + let mut meta_vec = vec![]; + for res in res_vec.into_iter() { + let mut meta_opt = None; + + // Resolve conflicts + for v in res.value.iter() { + match v { + storage::Alternative::Tombstone => (), + storage::Alternative::Value(v) => { + let meta = open_deserialize::(v, &self.encryption_key)?; + match meta_opt.as_mut() { + None => { + meta_opt = Some(meta); + } + Some(prevmeta) => { + prevmeta.try_merge(meta)?; + } + } + } + } + } + if let Some(meta) = meta_opt { + meta_vec.push(meta); + } else { + bail!("No valid meta value in k2v for {:?}", res.row_ref); + } + } + + Ok(meta_vec) + } + + async fn fetch_full(&self, id: UniqueIdent, message_key: &Key) -> Result> { + let obj_res = self + .storage + .blob_fetch(&BlobRef(format!("{}/{}", self.mail_path, id))) + .await?; + let body = obj_res.value; + cryptoblob::open(&body, message_key) + } + + // ---- Functions for changing the mailbox ---- + + async fn add_flags(&mut self, ident: UniqueIdent, flags: &[Flag]) -> Result<()> { + let add_flag_op = self.uid_index.state().op_flag_add(ident, flags.to_vec()); + self.uid_index.push(add_flag_op).await + } + + async fn del_flags(&mut self, ident: UniqueIdent, flags: &[Flag]) -> Result<()> { + let del_flag_op = self.uid_index.state().op_flag_del(ident, flags.to_vec()); + self.uid_index.push(del_flag_op).await + } + + async fn set_flags(&mut self, ident: UniqueIdent, flags: &[Flag]) -> Result<()> { + let set_flag_op = self.uid_index.state().op_flag_set(ident, flags.to_vec()); + self.uid_index.push(set_flag_op).await + } + + async fn append( + &mut self, + mail: IMF<'_>, + ident: Option, + flags: &[Flag], + ) -> Result<(ImapUidvalidity, ImapUid, ModSeq)> { + let ident = ident.unwrap_or_else(gen_ident); + let message_key = gen_key(); + + futures::try_join!( + async { + // Encrypt and save mail body + let message_blob = cryptoblob::seal(mail.raw, &message_key)?; + self.storage + .blob_insert(BlobVal::new( + BlobRef(format!("{}/{}", self.mail_path, ident)), + message_blob, + )) + .await?; + Ok::<_, anyhow::Error>(()) + }, + async { + // Save mail meta + let meta = MailMeta { + internaldate: now_msec(), + headers: mail.parsed.raw_headers.to_vec(), + message_key: message_key.clone(), + rfc822_size: mail.raw.len(), + }; + let meta_blob = seal_serialize(&meta, &self.encryption_key)?; + self.storage + .row_insert(vec![RowVal::new( + RowRef::new(&self.mail_path, &ident.to_string()), + meta_blob, + )]) + .await?; + Ok::<_, anyhow::Error>(()) + }, + self.uid_index.opportunistic_sync() + )?; + + // Add mail to Bayou mail index + let uid_state = self.uid_index.state(); + let add_mail_op = uid_state.op_mail_add(ident, flags.to_vec()); + + let uidvalidity = uid_state.uidvalidity; + let (uid, modseq) = match add_mail_op { + 
UidIndexOp::MailAdd(_, uid, modseq, _) => (uid, modseq), + _ => unreachable!(), + }; + + self.uid_index.push(add_mail_op).await?; + + Ok((uidvalidity, uid, modseq)) + } + + async fn append_from_s3<'a>( + &mut self, + mail: IMF<'a>, + ident: UniqueIdent, + blob_src: storage::BlobRef, + message_key: Key, + ) -> Result<()> { + futures::try_join!( + async { + // Copy mail body from previous location + let blob_dst = BlobRef(format!("{}/{}", self.mail_path, ident)); + self.storage.blob_copy(&blob_src, &blob_dst).await?; + Ok::<_, anyhow::Error>(()) + }, + async { + // Save mail meta + let meta = MailMeta { + internaldate: now_msec(), + headers: mail.parsed.raw_headers.to_vec(), + message_key: message_key.clone(), + rfc822_size: mail.raw.len(), + }; + let meta_blob = seal_serialize(&meta, &self.encryption_key)?; + self.storage + .row_insert(vec![RowVal::new( + RowRef::new(&self.mail_path, &ident.to_string()), + meta_blob, + )]) + .await?; + Ok::<_, anyhow::Error>(()) + }, + self.uid_index.opportunistic_sync() + )?; + + // Add mail to Bayou mail index + let add_mail_op = self.uid_index.state().op_mail_add(ident, vec![]); + self.uid_index.push(add_mail_op).await?; + + Ok(()) + } + + async fn delete(&mut self, ident: UniqueIdent) -> Result<()> { + if !self.uid_index.state().table.contains_key(&ident) { + bail!("Cannot delete mail that doesn't exit"); + } + + let del_mail_op = self.uid_index.state().op_mail_del(ident); + self.uid_index.push(del_mail_op).await?; + + futures::try_join!( + async { + // Delete mail body from S3 + self.storage + .blob_rm(&BlobRef(format!("{}/{}", self.mail_path, ident))) + .await?; + Ok::<_, anyhow::Error>(()) + }, + async { + // Delete mail meta from K2V + let sk = ident.to_string(); + let res = self + .storage + .row_fetch(&storage::Selector::Single(&RowRef::new( + &self.mail_path, + &sk, + ))) + .await?; + if let Some(row_val) = res.into_iter().next() { + self.storage + .row_rm(&storage::Selector::Single(&row_val.row_ref)) + .await?; + } + Ok::<_, anyhow::Error>(()) + } + )?; + Ok(()) + } + + async fn copy_from( + &mut self, + from: &MailboxInternal, + source_id: UniqueIdent, + ) -> Result { + let new_id = gen_ident(); + self.copy_internal(from, source_id, new_id).await?; + Ok(new_id) + } + + async fn move_from(&mut self, from: &mut MailboxInternal, id: UniqueIdent) -> Result<()> { + self.copy_internal(from, id, id).await?; + from.delete(id).await?; + Ok(()) + } + + async fn copy_internal( + &mut self, + from: &MailboxInternal, + source_id: UniqueIdent, + new_id: UniqueIdent, + ) -> Result<()> { + if self.encryption_key != from.encryption_key { + bail!("Message to be copied/moved does not belong to same account."); + } + + let flags = from + .uid_index + .state() + .table + .get(&source_id) + .ok_or(anyhow!("Source mail not found"))? 
+ .2 + .clone(); + + futures::try_join!( + async { + let dst = BlobRef(format!("{}/{}", self.mail_path, new_id)); + let src = BlobRef(format!("{}/{}", from.mail_path, source_id)); + self.storage.blob_copy(&src, &dst).await?; + Ok::<_, anyhow::Error>(()) + }, + async { + // Copy mail meta in K2V + let meta = &from.fetch_meta(&[source_id]).await?[0]; + let meta_blob = seal_serialize(meta, &self.encryption_key)?; + self.storage + .row_insert(vec![RowVal::new( + RowRef::new(&self.mail_path, &new_id.to_string()), + meta_blob, + )]) + .await?; + Ok::<_, anyhow::Error>(()) + }, + self.uid_index.opportunistic_sync(), + )?; + + // Add mail to Bayou mail index + let add_mail_op = self.uid_index.state().op_mail_add(new_id, flags); + self.uid_index.push(add_mail_op).await?; + + Ok(()) + } +} + +// Can be useful to debug so we want this code +// to be available to developers +#[allow(dead_code)] +fn dump(uid_index: &Bayou) { + let s = uid_index.state(); + println!("---- MAILBOX STATE ----"); + println!("UIDVALIDITY {}", s.uidvalidity); + println!("UIDNEXT {}", s.uidnext); + println!("INTERNALSEQ {}", s.internalseq); + for (uid, ident) in s.idx_by_uid.iter() { + println!( + "{} {} {}", + uid, + hex::encode(ident.0), + s.table.get(ident).cloned().unwrap().2.join(", ") + ); + } + println!(); +} + +// ---- + +/// The metadata of a message that is stored in K2V +/// at pk = mail/, sk = +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct MailMeta { + /// INTERNALDATE field (milliseconds since epoch) + pub internaldate: u64, + /// Headers of the message + pub headers: Vec, + /// Secret key for decrypting entire message + pub message_key: Key, + /// RFC822 size + pub rfc822_size: usize, +} + +impl MailMeta { + fn try_merge(&mut self, other: Self) -> Result<()> { + if self.headers != other.headers + || self.message_key != other.message_key + || self.rfc822_size != other.rfc822_size + { + bail!("Conflicting MailMeta values."); + } + self.internaldate = std::cmp::max(self.internaldate, other.internaldate); + Ok(()) + } +} diff --git a/aero-collections/src/mail/mod.rs b/aero-collections/src/mail/mod.rs new file mode 100644 index 0000000..85361f3 --- /dev/null +++ b/aero-collections/src/mail/mod.rs @@ -0,0 +1,25 @@ +pub mod incoming; +pub mod mailbox; +pub mod query; +pub mod snapshot; +pub mod uidindex; +pub mod unique_ident; +pub mod namespace; + +// Internet Message Format +// aka RFC 822 - RFC 2822 - RFC 5322 +// 2023-05-15 don't want to refactor this struct now. +#[allow(clippy::upper_case_acronyms)] +pub struct IMF<'a> { + raw: &'a [u8], + parsed: eml_codec::part::composite::Message<'a>, +} + +impl<'a> TryFrom<&'a [u8]> for IMF<'a> { + type Error = (); + + fn try_from(body: &'a [u8]) -> Result, ()> { + let parsed = eml_codec::parse_message(body).or(Err(()))?.1; + Ok(Self { raw: body, parsed }) + } +} diff --git a/aero-collections/src/mail/namespace.rs b/aero-collections/src/mail/namespace.rs new file mode 100644 index 0000000..452ac68 --- /dev/null +++ b/aero-collections/src/mail/namespace.rs @@ -0,0 +1,202 @@ +use std::collections::BTreeMap; + +use anyhow::{bail, Result}; +use serde::{Deserialize, Serialize}; + +use aero_bayou::timestamp::now_msec; + +use crate::mail::uidindex::ImapUidvalidity; +use crate::mail::unique_ident::{gen_ident, UniqueIdent}; + +pub const MAILBOX_HIERARCHY_DELIMITER: char = '.'; + +/// INBOX is the only mailbox that must always exist. +/// It is created automatically when the account is created. 
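+/// (In practice it is created when a `User` is opened and the mailbox list is loaded.)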
+/// IMAP allows the user to rename INBOX to something else, +/// in this case all messages from INBOX are moved to a mailbox +/// with the new name and the INBOX mailbox still exists and is empty. +/// In our implementation, we indeed move the underlying mailbox +/// to the new name (i.e. the new name has the same id as the previous +/// INBOX), and we create a new empty mailbox for INBOX. +pub const INBOX: &str = "INBOX"; + +/// For convenience purpose, we also create some special mailbox +/// that are described in RFC6154 SPECIAL-USE +/// @FIXME maybe it should be a configuration parameter +/// @FIXME maybe we should have a per-mailbox flag mechanism, either an enum or a string, so we +/// track which mailbox is used for what. +/// @FIXME Junk could be useful but we don't have any antispam solution yet so... +/// @FIXME IMAP supports virtual mailbox. \All or \Flagged are intended to be virtual mailboxes. +/// \Trash might be one, or not one. I don't know what we should do there. +pub const DRAFTS: &str = "Drafts"; +pub const ARCHIVE: &str = "Archive"; +pub const SENT: &str = "Sent"; +pub const TRASH: &str = "Trash"; + +pub(crate) const MAILBOX_LIST_PK: &str = "mailboxes"; +pub(crate) const MAILBOX_LIST_SK: &str = "list"; + +// ---- User's mailbox list (serialized in K2V) ---- + +#[derive(Serialize, Deserialize)] +pub(crate) struct MailboxList(BTreeMap); + +#[derive(Serialize, Deserialize, Clone, Copy, Debug)] +pub(crate) struct MailboxListEntry { + id_lww: (u64, Option), + uidvalidity: ImapUidvalidity, +} + +impl MailboxListEntry { + fn merge(&mut self, other: &Self) { + // Simple CRDT merge rule + if other.id_lww.0 > self.id_lww.0 + || (other.id_lww.0 == self.id_lww.0 && other.id_lww.1 > self.id_lww.1) + { + self.id_lww = other.id_lww; + } + self.uidvalidity = std::cmp::max(self.uidvalidity, other.uidvalidity); + } +} + +impl MailboxList { + pub(crate) fn new() -> Self { + Self(BTreeMap::new()) + } + + pub(crate) fn merge(&mut self, list2: Self) { + for (k, v) in list2.0.into_iter() { + if let Some(e) = self.0.get_mut(&k) { + e.merge(&v); + } else { + self.0.insert(k, v); + } + } + } + + pub(crate) fn existing_mailbox_names(&self) -> Vec { + self.0 + .iter() + .filter(|(_, v)| v.id_lww.1.is_some()) + .map(|(k, _)| k.to_string()) + .collect() + } + + pub(crate) fn has_mailbox(&self, name: &str) -> bool { + matches!( + self.0.get(name), + Some(MailboxListEntry { + id_lww: (_, Some(_)), + .. + }) + ) + } + + pub(crate) fn get_mailbox(&self, name: &str) -> Option<(ImapUidvalidity, Option)> { + self.0.get(name).map( + |MailboxListEntry { + id_lww: (_, mailbox_id), + uidvalidity, + }| (*uidvalidity, *mailbox_id), + ) + } + + /// Ensures mailbox `name` maps to id `id`. + /// If it already mapped to that, returns None. + /// If a change had to be done, returns Some(new uidvalidity in mailbox). 
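+    ///
+    /// A rough usage sketch (illustrative only, not compiled as a doc-test):
+    ///
+    /// ```ignore
+    /// let mut list = MailboxList::new();
+    /// let id = gen_ident();
+    /// // binding a fresh name returns Some(initial uidvalidity)
+    /// assert!(list.set_mailbox("Sent", Some(id)).is_some());
+    /// // re-binding the same name to the same id is a no-op and returns None
+    /// assert!(list.set_mailbox("Sent", Some(id)).is_none());
+    /// ```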
+ pub(crate) fn set_mailbox(&mut self, name: &str, id: Option) -> Option { + let (ts, id, uidvalidity) = match self.0.get_mut(name) { + None => { + if id.is_none() { + return None; + } else { + (now_msec(), id, ImapUidvalidity::new(1).unwrap()) + } + } + Some(MailboxListEntry { + id_lww, + uidvalidity, + }) => { + if id_lww.1 == id { + return None; + } else { + ( + std::cmp::max(id_lww.0 + 1, now_msec()), + id, + ImapUidvalidity::new(uidvalidity.get() + 1).unwrap(), + ) + } + } + }; + + self.0.insert( + name.into(), + MailboxListEntry { + id_lww: (ts, id), + uidvalidity, + }, + ); + Some(uidvalidity) + } + + pub(crate) fn update_uidvalidity(&mut self, name: &str, new_uidvalidity: ImapUidvalidity) { + match self.0.get_mut(name) { + None => { + self.0.insert( + name.into(), + MailboxListEntry { + id_lww: (now_msec(), None), + uidvalidity: new_uidvalidity, + }, + ); + } + Some(MailboxListEntry { uidvalidity, .. }) => { + *uidvalidity = std::cmp::max(*uidvalidity, new_uidvalidity); + } + } + } + + pub(crate) fn create_mailbox(&mut self, name: &str) -> CreatedMailbox { + if let Some(MailboxListEntry { + id_lww: (_, Some(id)), + uidvalidity, + }) = self.0.get(name) + { + return CreatedMailbox::Existed(*id, *uidvalidity); + } + + let id = gen_ident(); + let uidvalidity = self.set_mailbox(name, Some(id)).unwrap(); + CreatedMailbox::Created(id, uidvalidity) + } + + pub(crate) fn rename_mailbox(&mut self, old_name: &str, new_name: &str) -> Result<()> { + if let Some((uidvalidity, Some(mbid))) = self.get_mailbox(old_name) { + if self.has_mailbox(new_name) { + bail!( + "Cannot rename {} into {}: {} already exists", + old_name, + new_name, + new_name + ); + } + + self.set_mailbox(old_name, None); + self.set_mailbox(new_name, Some(mbid)); + self.update_uidvalidity(new_name, uidvalidity); + Ok(()) + } else { + bail!( + "Cannot rename {} into {}: {} doesn't exist", + old_name, + new_name, + old_name + ); + } + } +} + +pub(crate) enum CreatedMailbox { + Created(UniqueIdent, ImapUidvalidity), + Existed(UniqueIdent, ImapUidvalidity), +} diff --git a/aero-collections/src/mail/query.rs b/aero-collections/src/mail/query.rs new file mode 100644 index 0000000..3e6fe99 --- /dev/null +++ b/aero-collections/src/mail/query.rs @@ -0,0 +1,137 @@ +use super::mailbox::MailMeta; +use super::snapshot::FrozenMailbox; +use super::unique_ident::UniqueIdent; +use anyhow::Result; +use futures::future::FutureExt; +use futures::stream::{BoxStream, Stream, StreamExt}; + +/// Query is in charge of fetching efficiently +/// requested data for a list of emails +pub struct Query<'a, 'b> { + pub frozen: &'a FrozenMailbox, + pub emails: &'b [UniqueIdent], + pub scope: QueryScope, +} + +#[derive(Debug)] +pub enum QueryScope { + Index, + Partial, + Full, +} +impl QueryScope { + pub fn union(&self, other: &QueryScope) -> QueryScope { + match (self, other) { + (QueryScope::Full, _) | (_, QueryScope::Full) => QueryScope::Full, + (QueryScope::Partial, _) | (_, QueryScope::Partial) => QueryScope::Partial, + (QueryScope::Index, QueryScope::Index) => QueryScope::Index, + } + } +} + +//type QueryResultStream = Box>>; + +impl<'a, 'b> Query<'a, 'b> { + pub fn fetch(&self) -> BoxStream> { + match self.scope { + QueryScope::Index => Box::pin( + futures::stream::iter(self.emails) + .map(|&uuid| Ok(QueryResult::IndexResult { uuid })), + ), + QueryScope::Partial => Box::pin(self.partial()), + QueryScope::Full => Box::pin(self.full()), + } + } + + // --- functions below are private *for reasons* + fn partial<'d>(&'d self) -> impl Stream> + 'd + Send { + 
async move { + let maybe_meta_list: Result> = + self.frozen.mailbox.fetch_meta(self.emails).await; + let list_res = maybe_meta_list + .map(|meta_list| { + meta_list + .into_iter() + .zip(self.emails) + .map(|(metadata, &uuid)| Ok(QueryResult::PartialResult { uuid, metadata })) + .collect() + }) + .unwrap_or_else(|e| vec![Err(e)]); + + futures::stream::iter(list_res) + } + .flatten_stream() + } + + fn full<'d>(&'d self) -> impl Stream> + 'd + Send { + self.partial().then(move |maybe_meta| async move { + let meta = maybe_meta?; + + let content = self + .frozen + .mailbox + .fetch_full( + *meta.uuid(), + &meta + .metadata() + .expect("meta to be PartialResult") + .message_key, + ) + .await?; + + Ok(meta.into_full(content).expect("meta to be PartialResult")) + }) + } +} + +#[derive(Debug, Clone)] +pub enum QueryResult { + IndexResult { + uuid: UniqueIdent, + }, + PartialResult { + uuid: UniqueIdent, + metadata: MailMeta, + }, + FullResult { + uuid: UniqueIdent, + metadata: MailMeta, + content: Vec, + }, +} +impl QueryResult { + pub fn uuid(&self) -> &UniqueIdent { + match self { + Self::IndexResult { uuid, .. } => uuid, + Self::PartialResult { uuid, .. } => uuid, + Self::FullResult { uuid, .. } => uuid, + } + } + + pub fn metadata(&self) -> Option<&MailMeta> { + match self { + Self::IndexResult { .. } => None, + Self::PartialResult { metadata, .. } => Some(metadata), + Self::FullResult { metadata, .. } => Some(metadata), + } + } + + #[allow(dead_code)] + pub fn content(&self) -> Option<&[u8]> { + match self { + Self::FullResult { content, .. } => Some(content), + _ => None, + } + } + + fn into_full(self, content: Vec) -> Option { + match self { + Self::PartialResult { uuid, metadata } => Some(Self::FullResult { + uuid, + metadata, + content, + }), + _ => None, + } + } +} diff --git a/aero-collections/src/mail/snapshot.rs b/aero-collections/src/mail/snapshot.rs new file mode 100644 index 0000000..ed756b5 --- /dev/null +++ b/aero-collections/src/mail/snapshot.rs @@ -0,0 +1,60 @@ +use std::sync::Arc; + +use anyhow::Result; + +use super::mailbox::Mailbox; +use super::query::{Query, QueryScope}; +use super::uidindex::UidIndex; +use super::unique_ident::UniqueIdent; + +/// A Frozen Mailbox has a snapshot of the current mailbox +/// state that is desynchronized with the real mailbox state. +/// It's up to the user to choose when their snapshot must be updated +/// to give useful information to their clients +pub struct FrozenMailbox { + pub mailbox: Arc, + pub snapshot: UidIndex, +} + +impl FrozenMailbox { + /// Create a snapshot from a mailbox, the mailbox + the snapshot + /// becomes the "Frozen Mailbox". + pub async fn new(mailbox: Arc) -> Self { + let state = mailbox.current_uid_index().await; + + Self { + mailbox, + snapshot: state, + } + } + + /// Force the synchronization of the inner mailbox + /// but do not update the local snapshot + pub async fn sync(&self) -> Result<()> { + self.mailbox.opportunistic_sync().await + } + + /// Peek snapshot without updating the frozen mailbox + /// Can be useful if you want to plan some writes + /// while sending a diff to the client later + pub async fn peek(&self) -> UidIndex { + self.mailbox.current_uid_index().await + } + + /// Update the FrozenMailbox local snapshot. 
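+    /// The local snapshot is simply replaced by the mailbox's current UID index.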
+ /// Returns the old snapshot, so you can build a diff + pub async fn update(&mut self) -> UidIndex { + let old_snapshot = self.snapshot.clone(); + self.snapshot = self.mailbox.current_uid_index().await; + + old_snapshot + } + + pub fn query<'a, 'b>(&'a self, uuids: &'b [UniqueIdent], scope: QueryScope) -> Query<'a, 'b> { + Query { + frozen: self, + emails: uuids, + scope, + } + } +} diff --git a/aero-collections/src/mail/uidindex.rs b/aero-collections/src/mail/uidindex.rs new file mode 100644 index 0000000..637a1ac --- /dev/null +++ b/aero-collections/src/mail/uidindex.rs @@ -0,0 +1,474 @@ +use std::num::{NonZeroU32, NonZeroU64}; + +use im::{HashMap, OrdMap, OrdSet}; +use serde::{Deserialize, Deserializer, Serialize, Serializer}; + +use aero_bayou::*; +use crate::mail::unique_ident::UniqueIdent; + +pub type ModSeq = NonZeroU64; +pub type ImapUid = NonZeroU32; +pub type ImapUidvalidity = NonZeroU32; +pub type Flag = String; +pub type IndexEntry = (ImapUid, ModSeq, Vec); + +/// A UidIndex handles the mutable part of a mailbox +/// It is built by running the event log on it +/// Each applied log generates a new UidIndex by cloning the previous one +/// and applying the event. This is why we use immutable datastructures: +/// they are cheap to clone. +#[derive(Clone)] +pub struct UidIndex { + // Source of trust + pub table: OrdMap, + + // Indexes optimized for queries + pub idx_by_uid: OrdMap, + pub idx_by_modseq: OrdMap, + pub idx_by_flag: FlagIndex, + + // "Public" Counters + pub uidvalidity: ImapUidvalidity, + pub uidnext: ImapUid, + pub highestmodseq: ModSeq, + + // "Internal" Counters + pub internalseq: ImapUid, + pub internalmodseq: ModSeq, +} + +#[derive(Clone, Serialize, Deserialize, Debug)] +pub enum UidIndexOp { + MailAdd(UniqueIdent, ImapUid, ModSeq, Vec), + MailDel(UniqueIdent), + FlagAdd(UniqueIdent, ModSeq, Vec), + FlagDel(UniqueIdent, ModSeq, Vec), + FlagSet(UniqueIdent, ModSeq, Vec), + BumpUidvalidity(u32), +} + +impl UidIndex { + #[must_use] + pub fn op_mail_add(&self, ident: UniqueIdent, flags: Vec) -> UidIndexOp { + UidIndexOp::MailAdd(ident, self.internalseq, self.internalmodseq, flags) + } + + #[must_use] + pub fn op_mail_del(&self, ident: UniqueIdent) -> UidIndexOp { + UidIndexOp::MailDel(ident) + } + + #[must_use] + pub fn op_flag_add(&self, ident: UniqueIdent, flags: Vec) -> UidIndexOp { + UidIndexOp::FlagAdd(ident, self.internalmodseq, flags) + } + + #[must_use] + pub fn op_flag_del(&self, ident: UniqueIdent, flags: Vec) -> UidIndexOp { + UidIndexOp::FlagDel(ident, self.internalmodseq, flags) + } + + #[must_use] + pub fn op_flag_set(&self, ident: UniqueIdent, flags: Vec) -> UidIndexOp { + UidIndexOp::FlagSet(ident, self.internalmodseq, flags) + } + + #[must_use] + pub fn op_bump_uidvalidity(&self, count: u32) -> UidIndexOp { + UidIndexOp::BumpUidvalidity(count) + } + + // INTERNAL functions to keep state consistent + + fn reg_email(&mut self, ident: UniqueIdent, uid: ImapUid, modseq: ModSeq, flags: &[Flag]) { + // Insert the email in our table + self.table.insert(ident, (uid, modseq, flags.to_owned())); + + // Update the indexes/caches + self.idx_by_uid.insert(uid, ident); + self.idx_by_flag.insert(uid, flags); + self.idx_by_modseq.insert(modseq, ident); + } + + fn unreg_email(&mut self, ident: &UniqueIdent) { + // We do nothing if the mail does not exist + let (uid, modseq, flags) = match self.table.get(ident) { + Some(v) => v, + None => return, + }; + + // Delete all cache entries + self.idx_by_uid.remove(uid); + self.idx_by_flag.remove(*uid, flags); + 
self.idx_by_modseq.remove(modseq); + + // Remove from source of trust + self.table.remove(ident); + } +} + +impl Default for UidIndex { + fn default() -> Self { + Self { + table: OrdMap::new(), + + idx_by_uid: OrdMap::new(), + idx_by_modseq: OrdMap::new(), + idx_by_flag: FlagIndex::new(), + + uidvalidity: NonZeroU32::new(1).unwrap(), + uidnext: NonZeroU32::new(1).unwrap(), + highestmodseq: NonZeroU64::new(1).unwrap(), + + internalseq: NonZeroU32::new(1).unwrap(), + internalmodseq: NonZeroU64::new(1).unwrap(), + } + } +} + +impl BayouState for UidIndex { + type Op = UidIndexOp; + + fn apply(&self, op: &UidIndexOp) -> Self { + let mut new = self.clone(); + match op { + UidIndexOp::MailAdd(ident, uid, modseq, flags) => { + // Change UIDValidity if there is a UID conflict or a MODSEQ conflict + // @FIXME Need to prove that summing work + // The intuition: we increase the UIDValidity by the number of possible conflicts + if *uid < new.internalseq || *modseq < new.internalmodseq { + let bump_uid = new.internalseq.get() - uid.get(); + let bump_modseq = (new.internalmodseq.get() - modseq.get()) as u32; + new.uidvalidity = + NonZeroU32::new(new.uidvalidity.get() + bump_uid + bump_modseq).unwrap(); + } + + // Assign the real uid of the email + let new_uid = new.internalseq; + + // Assign the real modseq of the email and its new flags + let new_modseq = new.internalmodseq; + + // Delete the previous entry if any. + // Our proof has no assumption on `ident` uniqueness, + // so we must handle this case even it is very unlikely + // In this case, we overwrite the email. + // Note: assigning a new UID is mandatory. + new.unreg_email(ident); + + // We record our email and update ou caches + new.reg_email(*ident, new_uid, new_modseq, flags); + + // Update counters + new.highestmodseq = new.internalmodseq; + + new.internalseq = NonZeroU32::new(new.internalseq.get() + 1).unwrap(); + new.internalmodseq = NonZeroU64::new(new.internalmodseq.get() + 1).unwrap(); + + new.uidnext = new.internalseq; + } + UidIndexOp::MailDel(ident) => { + // If the email is known locally, we remove its references in all our indexes + new.unreg_email(ident); + + // We update the counter + new.internalseq = NonZeroU32::new(new.internalseq.get() + 1).unwrap(); + } + UidIndexOp::FlagAdd(ident, candidate_modseq, new_flags) => { + if let Some((uid, email_modseq, existing_flags)) = new.table.get_mut(ident) { + // Bump UIDValidity if required + if *candidate_modseq < new.internalmodseq { + let bump_modseq = + (new.internalmodseq.get() - candidate_modseq.get()) as u32; + new.uidvalidity = + NonZeroU32::new(new.uidvalidity.get() + bump_modseq).unwrap(); + } + + // Add flags to the source of trust and the cache + let mut to_add: Vec = new_flags + .iter() + .filter(|f| !existing_flags.contains(f)) + .cloned() + .collect(); + new.idx_by_flag.insert(*uid, &to_add); + *email_modseq = new.internalmodseq; + new.idx_by_modseq.insert(new.internalmodseq, *ident); + existing_flags.append(&mut to_add); + + // Update counters + new.highestmodseq = new.internalmodseq; + new.internalmodseq = NonZeroU64::new(new.internalmodseq.get() + 1).unwrap(); + } + } + UidIndexOp::FlagDel(ident, candidate_modseq, rm_flags) => { + if let Some((uid, email_modseq, existing_flags)) = new.table.get_mut(ident) { + // Bump UIDValidity if required + if *candidate_modseq < new.internalmodseq { + let bump_modseq = + (new.internalmodseq.get() - candidate_modseq.get()) as u32; + new.uidvalidity = + NonZeroU32::new(new.uidvalidity.get() + bump_modseq).unwrap(); + } + + // Remove 
flags from the source of trust and the cache + existing_flags.retain(|x| !rm_flags.contains(x)); + new.idx_by_flag.remove(*uid, rm_flags); + + // Register that email has been modified + new.idx_by_modseq.insert(new.internalmodseq, *ident); + *email_modseq = new.internalmodseq; + + // Update counters + new.highestmodseq = new.internalmodseq; + new.internalmodseq = NonZeroU64::new(new.internalmodseq.get() + 1).unwrap(); + } + } + UidIndexOp::FlagSet(ident, candidate_modseq, new_flags) => { + if let Some((uid, email_modseq, existing_flags)) = new.table.get_mut(ident) { + // Bump UIDValidity if required + if *candidate_modseq < new.internalmodseq { + let bump_modseq = + (new.internalmodseq.get() - candidate_modseq.get()) as u32; + new.uidvalidity = + NonZeroU32::new(new.uidvalidity.get() + bump_modseq).unwrap(); + } + + // Remove flags from the source of trust and the cache + let (keep_flags, rm_flags): (Vec, Vec) = existing_flags + .iter() + .cloned() + .partition(|x| new_flags.contains(x)); + *existing_flags = keep_flags; + let mut to_add: Vec = new_flags + .iter() + .filter(|f| !existing_flags.contains(f)) + .cloned() + .collect(); + existing_flags.append(&mut to_add); + new.idx_by_flag.remove(*uid, &rm_flags); + new.idx_by_flag.insert(*uid, &to_add); + + // Register that email has been modified + new.idx_by_modseq.insert(new.internalmodseq, *ident); + *email_modseq = new.internalmodseq; + + // Update counters + new.highestmodseq = new.internalmodseq; + new.internalmodseq = NonZeroU64::new(new.internalmodseq.get() + 1).unwrap(); + } + } + UidIndexOp::BumpUidvalidity(count) => { + new.uidvalidity = ImapUidvalidity::new(new.uidvalidity.get() + *count) + .unwrap_or(ImapUidvalidity::new(u32::MAX).unwrap()); + } + } + new + } +} + +// ---- FlagIndex implementation ---- + +#[derive(Clone)] +pub struct FlagIndex(HashMap>); +pub type FlagIter<'a> = im::hashmap::Keys<'a, Flag, OrdSet>; + +impl FlagIndex { + fn new() -> Self { + Self(HashMap::new()) + } + fn insert(&mut self, uid: ImapUid, flags: &[Flag]) { + flags.iter().for_each(|flag| { + self.0 + .entry(flag.clone()) + .or_insert(OrdSet::new()) + .insert(uid); + }); + } + fn remove(&mut self, uid: ImapUid, flags: &[Flag]) { + for flag in flags.iter() { + if let Some(set) = self.0.get_mut(flag) { + set.remove(&uid); + if set.is_empty() { + self.0.remove(flag); + } + } + } + } + + pub fn get(&self, f: &Flag) -> Option<&OrdSet> { + self.0.get(f) + } + + pub fn flags(&self) -> FlagIter { + self.0.keys() + } +} + +// ---- CUSTOM SERIALIZATION AND DESERIALIZATION ---- + +#[derive(Serialize, Deserialize)] +struct UidIndexSerializedRepr { + mails: Vec<(ImapUid, ModSeq, UniqueIdent, Vec)>, + + uidvalidity: ImapUidvalidity, + uidnext: ImapUid, + highestmodseq: ModSeq, + + internalseq: ImapUid, + internalmodseq: ModSeq, +} + +impl<'de> Deserialize<'de> for UidIndex { + fn deserialize(d: D) -> Result + where + D: Deserializer<'de>, + { + let val: UidIndexSerializedRepr = UidIndexSerializedRepr::deserialize(d)?; + + let mut uidindex = UidIndex { + table: OrdMap::new(), + + idx_by_uid: OrdMap::new(), + idx_by_modseq: OrdMap::new(), + idx_by_flag: FlagIndex::new(), + + uidvalidity: val.uidvalidity, + uidnext: val.uidnext, + highestmodseq: val.highestmodseq, + + internalseq: val.internalseq, + internalmodseq: val.internalmodseq, + }; + + val.mails + .iter() + .for_each(|(uid, modseq, uuid, flags)| uidindex.reg_email(*uuid, *uid, *modseq, flags)); + + Ok(uidindex) + } +} + +impl Serialize for UidIndex { + fn serialize(&self, serializer: S) -> Result + where + S: 
Serializer, + { + let mut mails = vec![]; + for (ident, (uid, modseq, flags)) in self.table.iter() { + mails.push((*uid, *modseq, *ident, flags.clone())); + } + + let val = UidIndexSerializedRepr { + mails, + uidvalidity: self.uidvalidity, + uidnext: self.uidnext, + highestmodseq: self.highestmodseq, + internalseq: self.internalseq, + internalmodseq: self.internalmodseq, + }; + + val.serialize(serializer) + } +} + +// ---- TESTS ---- + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_uidindex() { + let mut state = UidIndex::default(); + + // Add message 1 + { + let m = UniqueIdent([0x01; 24]); + let f = vec!["\\Recent".to_string(), "\\Archive".to_string()]; + let ev = state.op_mail_add(m, f); + state = state.apply(&ev); + + // Early checks + assert_eq!(state.table.len(), 1); + let (uid, modseq, flags) = state.table.get(&m).unwrap(); + assert_eq!(*uid, NonZeroU32::new(1).unwrap()); + assert_eq!(*modseq, NonZeroU64::new(1).unwrap()); + assert_eq!(flags.len(), 2); + let ident = state.idx_by_uid.get(&NonZeroU32::new(1).unwrap()).unwrap(); + assert_eq!(&m, ident); + let recent = state.idx_by_flag.0.get("\\Recent").unwrap(); + assert_eq!(recent.len(), 1); + assert_eq!(recent.iter().next().unwrap(), &NonZeroU32::new(1).unwrap()); + assert_eq!(state.uidnext, NonZeroU32::new(2).unwrap()); + assert_eq!(state.uidvalidity, NonZeroU32::new(1).unwrap()); + } + + // Add message 2 + { + let m = UniqueIdent([0x02; 24]); + let f = vec!["\\Seen".to_string(), "\\Archive".to_string()]; + let ev = state.op_mail_add(m, f); + state = state.apply(&ev); + + let archive = state.idx_by_flag.0.get("\\Archive").unwrap(); + assert_eq!(archive.len(), 2); + } + + // Add flags to message 1 + { + let m = UniqueIdent([0x01; 24]); + let f = vec!["Important".to_string(), "$cl_1".to_string()]; + let ev = state.op_flag_add(m, f); + state = state.apply(&ev); + } + + // Delete flags from message 1 + { + let m = UniqueIdent([0x01; 24]); + let f = vec!["\\Recent".to_string()]; + let ev = state.op_flag_del(m, f); + state = state.apply(&ev); + + let archive = state.idx_by_flag.0.get("\\Archive").unwrap(); + assert_eq!(archive.len(), 2); + } + + // Delete message 2 + { + let m = UniqueIdent([0x02; 24]); + let ev = state.op_mail_del(m); + state = state.apply(&ev); + + let archive = state.idx_by_flag.0.get("\\Archive").unwrap(); + assert_eq!(archive.len(), 1); + } + + // Add a message 3 concurrent to message 1 (trigger a uid validity change) + { + let m = UniqueIdent([0x03; 24]); + let f = vec!["\\Archive".to_string(), "\\Recent".to_string()]; + let ev = UidIndexOp::MailAdd( + m, + NonZeroU32::new(1).unwrap(), + NonZeroU64::new(1).unwrap(), + f, + ); + state = state.apply(&ev); + } + + // Checks + { + assert_eq!(state.table.len(), 2); + assert!(state.uidvalidity > NonZeroU32::new(1).unwrap()); + + let (last_uid, ident) = state.idx_by_uid.get_max().unwrap(); + assert_eq!(ident, &UniqueIdent([0x03; 24])); + + let archive = state.idx_by_flag.0.get("\\Archive").unwrap(); + assert_eq!(archive.len(), 2); + let mut iter = archive.iter(); + assert_eq!(iter.next().unwrap(), &NonZeroU32::new(1).unwrap()); + assert_eq!(iter.next().unwrap(), last_uid); + } + } +} diff --git a/aero-collections/src/mail/unique_ident.rs b/aero-collections/src/mail/unique_ident.rs new file mode 100644 index 0000000..0987a2c --- /dev/null +++ b/aero-collections/src/mail/unique_ident.rs @@ -0,0 +1,101 @@ +use std::str::FromStr; +use std::sync::atomic::{AtomicU64, Ordering}; + +use lazy_static::lazy_static; +use rand::prelude::*; +use serde::{de::Error, 
Deserialize, Deserializer, Serialize, Serializer}; + +use aero_bayou::timestamp::now_msec; + +/// An internal Mail Identifier is composed of two components: +/// - a process identifier, 128 bits, itself composed of: +/// - the timestamp of when the process started, 64 bits +/// - a 64-bit random number +/// - a sequence number, 64 bits +/// They are not part of the protocol but an internal representation +/// required by Aerogramme. +/// Their main property is to be unique without having to rely +/// on synchronization between IMAP processes. +#[derive(Clone, Copy, PartialOrd, Ord, PartialEq, Eq, Hash)] +pub struct UniqueIdent(pub [u8; 24]); + +struct IdentGenerator { + pid: u128, + sn: AtomicU64, +} + +impl IdentGenerator { + fn new() -> Self { + let time = now_msec() as u128; + let rand = thread_rng().gen::() as u128; + Self { + pid: (time << 64) | rand, + sn: AtomicU64::new(0), + } + } + + fn gen(&self) -> UniqueIdent { + let sn = self.sn.fetch_add(1, Ordering::Relaxed); + let mut res = [0u8; 24]; + res[0..16].copy_from_slice(&u128::to_be_bytes(self.pid)); + res[16..24].copy_from_slice(&u64::to_be_bytes(sn)); + UniqueIdent(res) + } +} + +lazy_static! { + static ref GENERATOR: IdentGenerator = IdentGenerator::new(); +} + +pub fn gen_ident() -> UniqueIdent { + GENERATOR.gen() +} + +// -- serde -- + +impl<'de> Deserialize<'de> for UniqueIdent { + fn deserialize(d: D) -> Result + where + D: Deserializer<'de>, + { + let v = String::deserialize(d)?; + UniqueIdent::from_str(&v).map_err(D::Error::custom) + } +} + +impl Serialize for UniqueIdent { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + serializer.serialize_str(&self.to_string()) + } +} + +impl std::fmt::Display for UniqueIdent { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", hex::encode(self.0)) + } +} + +impl std::fmt::Debug for UniqueIdent { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", hex::encode(self.0)) + } +} + +impl FromStr for UniqueIdent { + type Err = &'static str; + + fn from_str(s: &str) -> Result { + let bytes = hex::decode(s).map_err(|_| "invalid hex")?; + + if bytes.len() != 24 { + return Err("bad length"); + } + + let mut tmp = [0u8; 24]; + tmp[..].copy_from_slice(&bytes); + Ok(UniqueIdent(tmp)) + } +} diff --git a/aero-collections/src/user.rs b/aero-collections/src/user.rs new file mode 100644 index 0000000..193ce90 --- /dev/null +++ b/aero-collections/src/user.rs @@ -0,0 +1,311 @@ +use std::collections::HashMap; +use std::sync::{Arc, Weak}; + +use anyhow::{anyhow, bail, Result}; +use lazy_static::lazy_static; +use tokio::sync::watch; + +use aero_user::cryptoblob::{open_deserialize, seal_serialize}; +use aero_user::login::Credentials; +use aero_user::storage; + +use crate::mail::incoming::incoming_mail_watch_process; +use crate::mail::mailbox::Mailbox; +use crate::mail::uidindex::ImapUidvalidity; +use crate::mail::unique_ident::UniqueIdent; +use crate::mail::namespace::{MAILBOX_HIERARCHY_DELIMITER, INBOX, DRAFTS, ARCHIVE, SENT, TRASH, MAILBOX_LIST_PK, MAILBOX_LIST_SK,MailboxList,CreatedMailbox}; + +//@FIXME User should be totally rewriten +//to extract the local mailbox list +//to the mail/namespace.rs file (and mailbox list should be reworded as mail namespace) + +pub struct User { + pub username: String, + pub creds: Credentials, + pub storage: storage::Store, + pub mailboxes: std::sync::Mutex>>, + + tx_inbox_id: watch::Sender>, +} + +impl User { + pub async fn new(username: String, creds: Credentials) 
-> Result> { + let cache_key = (username.clone(), creds.storage.unique()); + + { + let cache = USER_CACHE.lock().unwrap(); + if let Some(u) = cache.get(&cache_key).and_then(Weak::upgrade) { + return Ok(u); + } + } + + let user = Self::open(username, creds).await?; + + let mut cache = USER_CACHE.lock().unwrap(); + if let Some(concurrent_user) = cache.get(&cache_key).and_then(Weak::upgrade) { + drop(user); + Ok(concurrent_user) + } else { + cache.insert(cache_key, Arc::downgrade(&user)); + Ok(user) + } + } + + /// Lists user's available mailboxes + pub async fn list_mailboxes(&self) -> Result> { + let (list, _ct) = self.load_mailbox_list().await?; + Ok(list.existing_mailbox_names()) + } + + /// Opens an existing mailbox given its IMAP name. + pub async fn open_mailbox(&self, name: &str) -> Result>> { + let (mut list, ct) = self.load_mailbox_list().await?; + + //@FIXME it could be a trace or an opentelemtry trace thing. + // Be careful to not leak sensible data + /* + eprintln!("List of mailboxes:"); + for ent in list.0.iter() { + eprintln!(" - {:?}", ent); + } + */ + + if let Some((uidvalidity, Some(mbid))) = list.get_mailbox(name) { + let mb = self.open_mailbox_by_id(mbid, uidvalidity).await?; + let mb_uidvalidity = mb.current_uid_index().await.uidvalidity; + if mb_uidvalidity > uidvalidity { + list.update_uidvalidity(name, mb_uidvalidity); + self.save_mailbox_list(&list, ct).await?; + } + Ok(Some(mb)) + } else { + Ok(None) + } + } + + /// Check whether mailbox exists + pub async fn has_mailbox(&self, name: &str) -> Result { + let (list, _ct) = self.load_mailbox_list().await?; + Ok(list.has_mailbox(name)) + } + + /// Creates a new mailbox in the user's IMAP namespace. + pub async fn create_mailbox(&self, name: &str) -> Result<()> { + if name.ends_with(MAILBOX_HIERARCHY_DELIMITER) { + bail!("Invalid mailbox name: {}", name); + } + + let (mut list, ct) = self.load_mailbox_list().await?; + match list.create_mailbox(name) { + CreatedMailbox::Created(_, _) => { + self.save_mailbox_list(&list, ct).await?; + Ok(()) + } + CreatedMailbox::Existed(_, _) => Err(anyhow!("Mailbox {} already exists", name)), + } + } + + /// Deletes a mailbox in the user's IMAP namespace. + pub async fn delete_mailbox(&self, name: &str) -> Result<()> { + if name == INBOX { + bail!("Cannot delete INBOX"); + } + + let (mut list, ct) = self.load_mailbox_list().await?; + if list.has_mailbox(name) { + //@TODO: actually delete mailbox contents + list.set_mailbox(name, None); + self.save_mailbox_list(&list, ct).await?; + Ok(()) + } else { + bail!("Mailbox {} does not exist", name); + } + } + + /// Renames a mailbox in the user's IMAP namespace. + pub async fn rename_mailbox(&self, old_name: &str, new_name: &str) -> Result<()> { + let (mut list, ct) = self.load_mailbox_list().await?; + + if old_name.ends_with(MAILBOX_HIERARCHY_DELIMITER) { + bail!("Invalid mailbox name: {}", old_name); + } + if new_name.ends_with(MAILBOX_HIERARCHY_DELIMITER) { + bail!("Invalid mailbox name: {}", new_name); + } + + if old_name == INBOX { + list.rename_mailbox(old_name, new_name)?; + if !self.ensure_inbox_exists(&mut list, &ct).await? 
{ + self.save_mailbox_list(&list, ct).await?; + } + } else { + let names = list.existing_mailbox_names(); + + let old_name_w_delim = format!("{}{}", old_name, MAILBOX_HIERARCHY_DELIMITER); + let new_name_w_delim = format!("{}{}", new_name, MAILBOX_HIERARCHY_DELIMITER); + + if names + .iter() + .any(|x| x == new_name || x.starts_with(&new_name_w_delim)) + { + bail!("Mailbox {} already exists", new_name); + } + + for name in names.iter() { + if name == old_name { + list.rename_mailbox(name, new_name)?; + } else if let Some(tail) = name.strip_prefix(&old_name_w_delim) { + let nnew = format!("{}{}", new_name_w_delim, tail); + list.rename_mailbox(name, &nnew)?; + } + } + + self.save_mailbox_list(&list, ct).await?; + } + Ok(()) + } + + // ---- Internal user & mailbox management ---- + + async fn open(username: String, creds: Credentials) -> Result> { + let storage = creds.storage.build().await?; + + let (tx_inbox_id, rx_inbox_id) = watch::channel(None); + + let user = Arc::new(Self { + username, + creds: creds.clone(), + storage, + tx_inbox_id, + mailboxes: std::sync::Mutex::new(HashMap::new()), + }); + + // Ensure INBOX exists (done inside load_mailbox_list) + user.load_mailbox_list().await?; + + tokio::spawn(incoming_mail_watch_process( + Arc::downgrade(&user), + user.creds.clone(), + rx_inbox_id, + )); + + Ok(user) + } + + pub(super) async fn open_mailbox_by_id( + &self, + id: UniqueIdent, + min_uidvalidity: ImapUidvalidity, + ) -> Result> { + { + let cache = self.mailboxes.lock().unwrap(); + if let Some(mb) = cache.get(&id).and_then(Weak::upgrade) { + return Ok(mb); + } + } + + let mb = Arc::new(Mailbox::open(&self.creds, id, min_uidvalidity).await?); + + let mut cache = self.mailboxes.lock().unwrap(); + if let Some(concurrent_mb) = cache.get(&id).and_then(Weak::upgrade) { + drop(mb); // we worked for nothing but at least we didn't starve someone else + Ok(concurrent_mb) + } else { + cache.insert(id, Arc::downgrade(&mb)); + Ok(mb) + } + } + + // ---- Mailbox list management ---- + + async fn load_mailbox_list(&self) -> Result<(MailboxList, Option)> { + let row_ref = storage::RowRef::new(MAILBOX_LIST_PK, MAILBOX_LIST_SK); + let (mut list, row) = match self + .storage + .row_fetch(&storage::Selector::Single(&row_ref)) + .await + { + Err(storage::StorageError::NotFound) => (MailboxList::new(), None), + Err(e) => return Err(e.into()), + Ok(rv) => { + let mut list = MailboxList::new(); + let (row_ref, row_vals) = match rv.into_iter().next() { + Some(row_val) => (row_val.row_ref, row_val.value), + None => (row_ref, vec![]), + }; + + for v in row_vals { + if let storage::Alternative::Value(vbytes) = v { + let list2 = + open_deserialize::(&vbytes, &self.creds.keys.master)?; + list.merge(list2); + } + } + (list, Some(row_ref)) + } + }; + + let is_default_mbx_missing = [DRAFTS, ARCHIVE, SENT, TRASH] + .iter() + .map(|mbx| list.create_mailbox(mbx)) + .fold(false, |acc, r| { + acc || matches!(r, CreatedMailbox::Created(..)) + }); + let is_inbox_missing = self.ensure_inbox_exists(&mut list, &row).await?; + if is_default_mbx_missing && !is_inbox_missing { + // It's the only case where we created some mailboxes and not saved them + // So we save them! + self.save_mailbox_list(&list, row.clone()).await?; + } + + Ok((list, row)) + } + + async fn ensure_inbox_exists( + &self, + list: &mut MailboxList, + ct: &Option, + ) -> Result { + // If INBOX doesn't exist, create a new mailbox with that name + // and save new mailbox list. 
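+        // (The returned boolean tells the caller whether the list was saved here.)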
+ // Also, ensure that the mpsc::watch that keeps track of the + // inbox id is up-to-date. + let saved; + let (inbox_id, inbox_uidvalidity) = match list.create_mailbox(INBOX) { + CreatedMailbox::Created(i, v) => { + self.save_mailbox_list(list, ct.clone()).await?; + saved = true; + (i, v) + } + CreatedMailbox::Existed(i, v) => { + saved = false; + (i, v) + } + }; + let inbox_id = Some((inbox_id, inbox_uidvalidity)); + if *self.tx_inbox_id.borrow() != inbox_id { + self.tx_inbox_id.send(inbox_id).unwrap(); + } + + Ok(saved) + } + + async fn save_mailbox_list( + &self, + list: &MailboxList, + ct: Option, + ) -> Result<()> { + let list_blob = seal_serialize(list, &self.creds.keys.master)?; + let rref = ct.unwrap_or(storage::RowRef::new(MAILBOX_LIST_PK, MAILBOX_LIST_SK)); + let row_val = storage::RowVal::new(rref, list_blob); + self.storage.row_insert(vec![row_val]).await?; + Ok(()) + } +} + +// ---- User cache ---- + +lazy_static! { + static ref USER_CACHE: std::sync::Mutex>> = + std::sync::Mutex::new(HashMap::new()); +} diff --git a/aero-collections/user.rs b/aero-collections/user.rs deleted file mode 100644 index a38b9c1..0000000 --- a/aero-collections/user.rs +++ /dev/null @@ -1,313 +0,0 @@ -use std::collections::{BTreeMap, HashMap}; -use std::sync::{Arc, Weak}; - -use anyhow::{anyhow, bail, Result}; -use lazy_static::lazy_static; -use serde::{Deserialize, Serialize}; -use tokio::sync::watch; - -use crate::cryptoblob::{open_deserialize, seal_serialize}; -use crate::login::Credentials; -use crate::mail::incoming::incoming_mail_watch_process; -use crate::mail::mailbox::Mailbox; -use crate::mail::uidindex::ImapUidvalidity; -use crate::mail::unique_ident::{gen_ident, UniqueIdent}; -use crate::storage; -use crate::timestamp::now_msec; - -use crate::mail::namespace::{MAILBOX_HIERARCHY_DELIMITER, INBOX, DRAFTS, ARCHIVE, SENT, TRASH, MAILBOX_LIST_PK, MAILBOX_LIST_SK,MailboxList,CreatedMailbox}; - -//@FIXME User should be totally rewriten -//to extract the local mailbox list -//to the mail/namespace.rs file (and mailbox list should be reworded as mail namespace) - -pub struct User { - pub username: String, - pub creds: Credentials, - pub storage: storage::Store, - pub mailboxes: std::sync::Mutex>>, - - tx_inbox_id: watch::Sender>, -} - -impl User { - pub async fn new(username: String, creds: Credentials) -> Result> { - let cache_key = (username.clone(), creds.storage.unique()); - - { - let cache = USER_CACHE.lock().unwrap(); - if let Some(u) = cache.get(&cache_key).and_then(Weak::upgrade) { - return Ok(u); - } - } - - let user = Self::open(username, creds).await?; - - let mut cache = USER_CACHE.lock().unwrap(); - if let Some(concurrent_user) = cache.get(&cache_key).and_then(Weak::upgrade) { - drop(user); - Ok(concurrent_user) - } else { - cache.insert(cache_key, Arc::downgrade(&user)); - Ok(user) - } - } - - /// Lists user's available mailboxes - pub async fn list_mailboxes(&self) -> Result> { - let (list, _ct) = self.load_mailbox_list().await?; - Ok(list.existing_mailbox_names()) - } - - /// Opens an existing mailbox given its IMAP name. - pub async fn open_mailbox(&self, name: &str) -> Result>> { - let (mut list, ct) = self.load_mailbox_list().await?; - - //@FIXME it could be a trace or an opentelemtry trace thing. 
- // Be careful to not leak sensible data - /* - eprintln!("List of mailboxes:"); - for ent in list.0.iter() { - eprintln!(" - {:?}", ent); - } - */ - - if let Some((uidvalidity, Some(mbid))) = list.get_mailbox(name) { - let mb = self.open_mailbox_by_id(mbid, uidvalidity).await?; - let mb_uidvalidity = mb.current_uid_index().await.uidvalidity; - if mb_uidvalidity > uidvalidity { - list.update_uidvalidity(name, mb_uidvalidity); - self.save_mailbox_list(&list, ct).await?; - } - Ok(Some(mb)) - } else { - Ok(None) - } - } - - /// Check whether mailbox exists - pub async fn has_mailbox(&self, name: &str) -> Result { - let (list, _ct) = self.load_mailbox_list().await?; - Ok(list.has_mailbox(name)) - } - - /// Creates a new mailbox in the user's IMAP namespace. - pub async fn create_mailbox(&self, name: &str) -> Result<()> { - if name.ends_with(MAILBOX_HIERARCHY_DELIMITER) { - bail!("Invalid mailbox name: {}", name); - } - - let (mut list, ct) = self.load_mailbox_list().await?; - match list.create_mailbox(name) { - CreatedMailbox::Created(_, _) => { - self.save_mailbox_list(&list, ct).await?; - Ok(()) - } - CreatedMailbox::Existed(_, _) => Err(anyhow!("Mailbox {} already exists", name)), - } - } - - /// Deletes a mailbox in the user's IMAP namespace. - pub async fn delete_mailbox(&self, name: &str) -> Result<()> { - if name == INBOX { - bail!("Cannot delete INBOX"); - } - - let (mut list, ct) = self.load_mailbox_list().await?; - if list.has_mailbox(name) { - //@TODO: actually delete mailbox contents - list.set_mailbox(name, None); - self.save_mailbox_list(&list, ct).await?; - Ok(()) - } else { - bail!("Mailbox {} does not exist", name); - } - } - - /// Renames a mailbox in the user's IMAP namespace. - pub async fn rename_mailbox(&self, old_name: &str, new_name: &str) -> Result<()> { - let (mut list, ct) = self.load_mailbox_list().await?; - - if old_name.ends_with(MAILBOX_HIERARCHY_DELIMITER) { - bail!("Invalid mailbox name: {}", old_name); - } - if new_name.ends_with(MAILBOX_HIERARCHY_DELIMITER) { - bail!("Invalid mailbox name: {}", new_name); - } - - if old_name == INBOX { - list.rename_mailbox(old_name, new_name)?; - if !self.ensure_inbox_exists(&mut list, &ct).await? 
{ - self.save_mailbox_list(&list, ct).await?; - } - } else { - let names = list.existing_mailbox_names(); - - let old_name_w_delim = format!("{}{}", old_name, MAILBOX_HIERARCHY_DELIMITER); - let new_name_w_delim = format!("{}{}", new_name, MAILBOX_HIERARCHY_DELIMITER); - - if names - .iter() - .any(|x| x == new_name || x.starts_with(&new_name_w_delim)) - { - bail!("Mailbox {} already exists", new_name); - } - - for name in names.iter() { - if name == old_name { - list.rename_mailbox(name, new_name)?; - } else if let Some(tail) = name.strip_prefix(&old_name_w_delim) { - let nnew = format!("{}{}", new_name_w_delim, tail); - list.rename_mailbox(name, &nnew)?; - } - } - - self.save_mailbox_list(&list, ct).await?; - } - Ok(()) - } - - // ---- Internal user & mailbox management ---- - - async fn open(username: String, creds: Credentials) -> Result> { - let storage = creds.storage.build().await?; - - let (tx_inbox_id, rx_inbox_id) = watch::channel(None); - - let user = Arc::new(Self { - username, - creds: creds.clone(), - storage, - tx_inbox_id, - mailboxes: std::sync::Mutex::new(HashMap::new()), - }); - - // Ensure INBOX exists (done inside load_mailbox_list) - user.load_mailbox_list().await?; - - tokio::spawn(incoming_mail_watch_process( - Arc::downgrade(&user), - user.creds.clone(), - rx_inbox_id, - )); - - Ok(user) - } - - pub(super) async fn open_mailbox_by_id( - &self, - id: UniqueIdent, - min_uidvalidity: ImapUidvalidity, - ) -> Result> { - { - let cache = self.mailboxes.lock().unwrap(); - if let Some(mb) = cache.get(&id).and_then(Weak::upgrade) { - return Ok(mb); - } - } - - let mb = Arc::new(Mailbox::open(&self.creds, id, min_uidvalidity).await?); - - let mut cache = self.mailboxes.lock().unwrap(); - if let Some(concurrent_mb) = cache.get(&id).and_then(Weak::upgrade) { - drop(mb); // we worked for nothing but at least we didn't starve someone else - Ok(concurrent_mb) - } else { - cache.insert(id, Arc::downgrade(&mb)); - Ok(mb) - } - } - - // ---- Mailbox list management ---- - - async fn load_mailbox_list(&self) -> Result<(MailboxList, Option)> { - let row_ref = storage::RowRef::new(MAILBOX_LIST_PK, MAILBOX_LIST_SK); - let (mut list, row) = match self - .storage - .row_fetch(&storage::Selector::Single(&row_ref)) - .await - { - Err(storage::StorageError::NotFound) => (MailboxList::new(), None), - Err(e) => return Err(e.into()), - Ok(rv) => { - let mut list = MailboxList::new(); - let (row_ref, row_vals) = match rv.into_iter().next() { - Some(row_val) => (row_val.row_ref, row_val.value), - None => (row_ref, vec![]), - }; - - for v in row_vals { - if let storage::Alternative::Value(vbytes) = v { - let list2 = - open_deserialize::(&vbytes, &self.creds.keys.master)?; - list.merge(list2); - } - } - (list, Some(row_ref)) - } - }; - - let is_default_mbx_missing = [DRAFTS, ARCHIVE, SENT, TRASH] - .iter() - .map(|mbx| list.create_mailbox(mbx)) - .fold(false, |acc, r| { - acc || matches!(r, CreatedMailbox::Created(..)) - }); - let is_inbox_missing = self.ensure_inbox_exists(&mut list, &row).await?; - if is_default_mbx_missing && !is_inbox_missing { - // It's the only case where we created some mailboxes and not saved them - // So we save them! - self.save_mailbox_list(&list, row.clone()).await?; - } - - Ok((list, row)) - } - - async fn ensure_inbox_exists( - &self, - list: &mut MailboxList, - ct: &Option, - ) -> Result { - // If INBOX doesn't exist, create a new mailbox with that name - // and save new mailbox list. 
- // Also, ensure that the mpsc::watch that keeps track of the - // inbox id is up-to-date. - let saved; - let (inbox_id, inbox_uidvalidity) = match list.create_mailbox(INBOX) { - CreatedMailbox::Created(i, v) => { - self.save_mailbox_list(list, ct.clone()).await?; - saved = true; - (i, v) - } - CreatedMailbox::Existed(i, v) => { - saved = false; - (i, v) - } - }; - let inbox_id = Some((inbox_id, inbox_uidvalidity)); - if *self.tx_inbox_id.borrow() != inbox_id { - self.tx_inbox_id.send(inbox_id).unwrap(); - } - - Ok(saved) - } - - async fn save_mailbox_list( - &self, - list: &MailboxList, - ct: Option, - ) -> Result<()> { - let list_blob = seal_serialize(list, &self.creds.keys.master)?; - let rref = ct.unwrap_or(storage::RowRef::new(MAILBOX_LIST_PK, MAILBOX_LIST_SK)); - let row_val = storage::RowVal::new(rref, list_blob); - self.storage.row_insert(vec![row_val]).await?; - Ok(()) - } -} - -// ---- User cache ---- - -lazy_static! { - static ref USER_CACHE: std::sync::Mutex>> = - std::sync::Mutex::new(HashMap::new()); -} -- cgit v1.2.3 From 11462f80c4ae25696c7436ed7aacb92074d7e911 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Fri, 8 Mar 2024 09:55:33 +0100 Subject: Re-enable proto --- Cargo.lock | 927 +++++++++++++++++++++++++-- Cargo.toml | 4 +- aero-dav/src/caldecoder.rs | 10 +- aero-dav/src/calencoder.rs | 8 +- aero-dav/src/decoder.rs | 15 +- aero-dav/src/lib.rs | 1 - aero-dav/src/realization.rs | 4 +- aero-proto/Cargo.toml | 35 + aero-proto/dav.rs | 145 ----- aero-proto/imap/attributes.rs | 77 --- aero-proto/imap/capability.rs | 159 ----- aero-proto/imap/command/anonymous.rs | 83 --- aero-proto/imap/command/anystate.rs | 54 -- aero-proto/imap/command/authenticated.rs | 683 -------------------- aero-proto/imap/command/mod.rs | 20 - aero-proto/imap/command/selected.rs | 424 ------------ aero-proto/imap/flags.rs | 30 - aero-proto/imap/flow.rs | 114 ---- aero-proto/imap/imf_view.rs | 109 ---- aero-proto/imap/index.rs | 211 ------ aero-proto/imap/mail_view.rs | 306 --------- aero-proto/imap/mailbox_view.rs | 772 ---------------------- aero-proto/imap/mime_view.rs | 580 ----------------- aero-proto/imap/mod.rs | 421 ------------ aero-proto/imap/request.rs | 9 - aero-proto/imap/response.rs | 124 ---- aero-proto/imap/search.rs | 477 -------------- aero-proto/imap/session.rs | 173 ----- aero-proto/lmtp.rs | 221 ------- aero-proto/sasl.rs | 140 ---- aero-proto/src/dav.rs | 146 +++++ aero-proto/src/imap/attributes.rs | 77 +++ aero-proto/src/imap/capability.rs | 159 +++++ aero-proto/src/imap/command/anonymous.rs | 84 +++ aero-proto/src/imap/command/anystate.rs | 54 ++ aero-proto/src/imap/command/authenticated.rs | 682 ++++++++++++++++++++ aero-proto/src/imap/command/mod.rs | 20 + aero-proto/src/imap/command/selected.rs | 425 ++++++++++++ aero-proto/src/imap/flags.rs | 30 + aero-proto/src/imap/flow.rs | 115 ++++ aero-proto/src/imap/imf_view.rs | 109 ++++ aero-proto/src/imap/index.rs | 211 ++++++ aero-proto/src/imap/mail_view.rs | 306 +++++++++ aero-proto/src/imap/mailbox_view.rs | 772 ++++++++++++++++++++++ aero-proto/src/imap/mime_view.rs | 582 +++++++++++++++++ aero-proto/src/imap/mod.rs | 417 ++++++++++++ aero-proto/src/imap/request.rs | 9 + aero-proto/src/imap/response.rs | 124 ++++ aero-proto/src/imap/search.rs | 478 ++++++++++++++ aero-proto/src/imap/session.rs | 175 +++++ aero-proto/src/lib.rs | 6 + aero-proto/src/lmtp.rs | 219 +++++++ aero-proto/src/sasl.rs | 142 ++++ aero-sasl/src/flow.rs | 8 +- 54 files changed, 6273 insertions(+), 5413 deletions(-) create mode 100644 
aero-proto/Cargo.toml delete mode 100644 aero-proto/dav.rs delete mode 100644 aero-proto/imap/attributes.rs delete mode 100644 aero-proto/imap/capability.rs delete mode 100644 aero-proto/imap/command/anonymous.rs delete mode 100644 aero-proto/imap/command/anystate.rs delete mode 100644 aero-proto/imap/command/authenticated.rs delete mode 100644 aero-proto/imap/command/mod.rs delete mode 100644 aero-proto/imap/command/selected.rs delete mode 100644 aero-proto/imap/flags.rs delete mode 100644 aero-proto/imap/flow.rs delete mode 100644 aero-proto/imap/imf_view.rs delete mode 100644 aero-proto/imap/index.rs delete mode 100644 aero-proto/imap/mail_view.rs delete mode 100644 aero-proto/imap/mailbox_view.rs delete mode 100644 aero-proto/imap/mime_view.rs delete mode 100644 aero-proto/imap/mod.rs delete mode 100644 aero-proto/imap/request.rs delete mode 100644 aero-proto/imap/response.rs delete mode 100644 aero-proto/imap/search.rs delete mode 100644 aero-proto/imap/session.rs delete mode 100644 aero-proto/lmtp.rs delete mode 100644 aero-proto/sasl.rs create mode 100644 aero-proto/src/dav.rs create mode 100644 aero-proto/src/imap/attributes.rs create mode 100644 aero-proto/src/imap/capability.rs create mode 100644 aero-proto/src/imap/command/anonymous.rs create mode 100644 aero-proto/src/imap/command/anystate.rs create mode 100644 aero-proto/src/imap/command/authenticated.rs create mode 100644 aero-proto/src/imap/command/mod.rs create mode 100644 aero-proto/src/imap/command/selected.rs create mode 100644 aero-proto/src/imap/flags.rs create mode 100644 aero-proto/src/imap/flow.rs create mode 100644 aero-proto/src/imap/imf_view.rs create mode 100644 aero-proto/src/imap/index.rs create mode 100644 aero-proto/src/imap/mail_view.rs create mode 100644 aero-proto/src/imap/mailbox_view.rs create mode 100644 aero-proto/src/imap/mime_view.rs create mode 100644 aero-proto/src/imap/mod.rs create mode 100644 aero-proto/src/imap/request.rs create mode 100644 aero-proto/src/imap/response.rs create mode 100644 aero-proto/src/imap/search.rs create mode 100644 aero-proto/src/imap/session.rs create mode 100644 aero-proto/src/lib.rs create mode 100644 aero-proto/src/lmtp.rs create mode 100644 aero-proto/src/sasl.rs diff --git a/Cargo.lock b/Cargo.lock index 387615f..32b798c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2,11 +2,20 @@ # It is not intended for manual editing. 
version = 3 +[[package]] +name = "abnf-core" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec182d1f071b906a9f59269c89af101515a5cbe58f723eb6717e7fe7445c0dea" +dependencies = [ + "nom 7.1.3", +] + [[package]] name = "addr2line" -version = "0.21.0" +version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" +checksum = "e7a2e47a1fbe209ee101dd6d61285226744c6c8d3c21c8dc878ba6cb9f467f3a" dependencies = [ "gimli", ] @@ -62,6 +71,37 @@ dependencies = [ "tokio", ] +[[package]] +name = "aero-proto" +version = "0.3.0" +dependencies = [ + "aero-collections", + "aero-dav", + "aero-sasl", + "aero-user", + "anyhow", + "async-trait", + "base64 0.21.7", + "chrono", + "duplexify", + "eml-codec", + "futures", + "http-body-util", + "hyper 1.2.0", + "hyper-util", + "imap-codec", + "imap-flow", + "rustls 0.22.2", + "rustls-pemfile 2.1.1", + "smtp-message", + "smtp-server", + "thiserror", + "tokio", + "tokio-rustls 0.25.0", + "tokio-util", + "tracing", +] + [[package]] name = "aero-sasl" version = "0.3.0" @@ -157,6 +197,12 @@ dependencies = [ "password-hash", ] +[[package]] +name = "arrayvec" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b" + [[package]] name = "asn1-rs" version = "0.3.1" @@ -196,6 +242,208 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "async-channel" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35" +dependencies = [ + "concurrent-queue", + "event-listener 2.5.3", + "futures-core", +] + +[[package]] +name = "async-channel" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f28243a43d821d11341ab73c80bed182dc015c514b951616cf79bd4af39af0c3" +dependencies = [ + "concurrent-queue", + "event-listener 5.2.0", + "event-listener-strategy 0.5.0", + "futures-core", + "pin-project-lite 0.2.13", +] + +[[package]] +name = "async-executor" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17ae5ebefcc48e7452b4987947920dac9450be1110cadf34d1b8c116bdbaf97c" +dependencies = [ + "async-lock 3.3.0", + "async-task", + "concurrent-queue", + "fastrand 2.0.1", + "futures-lite 2.2.0", + "slab", +] + +[[package]] +name = "async-fs" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "279cf904654eeebfa37ac9bb1598880884924aab82e290aa65c9e77a0e142e06" +dependencies = [ + "async-lock 2.8.0", + "autocfg", + "blocking", + "futures-lite 1.13.0", +] + +[[package]] +name = "async-global-executor" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05b1b633a2115cd122d73b955eadd9916c18c8f510ec9cd1686404c60ad1c29c" +dependencies = [ + "async-channel 2.2.0", + "async-executor", + "async-io 2.3.1", + "async-lock 3.3.0", + "blocking", + "futures-lite 2.2.0", + "once_cell", +] + +[[package]] +name = "async-io" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fc5b45d93ef0529756f812ca52e44c221b35341892d3dcc34132ac02f3dd2af" +dependencies = [ + "async-lock 2.8.0", + "autocfg", + "cfg-if", + "concurrent-queue", + "futures-lite 1.13.0", + "log", + "parking", + "polling 2.8.0", + "rustix 0.37.27", + "slab", + "socket2 
0.4.10", + "waker-fn", +] + +[[package]] +name = "async-io" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f97ab0c5b00a7cdbe5a371b9a782ee7be1316095885c8a4ea1daf490eb0ef65" +dependencies = [ + "async-lock 3.3.0", + "cfg-if", + "concurrent-queue", + "futures-io", + "futures-lite 2.2.0", + "parking", + "polling 3.5.0", + "rustix 0.38.31", + "slab", + "tracing", + "windows-sys 0.52.0", +] + +[[package]] +name = "async-lock" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "287272293e9d8c41773cec55e365490fe034813a2f172f502d6ddcf75b2f582b" +dependencies = [ + "event-listener 2.5.3", +] + +[[package]] +name = "async-lock" +version = "3.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d034b430882f8381900d3fe6f0aaa3ad94f2cb4ac519b429692a1bc2dda4ae7b" +dependencies = [ + "event-listener 4.0.3", + "event-listener-strategy 0.4.0", + "pin-project-lite 0.2.13", +] + +[[package]] +name = "async-net" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0434b1ed18ce1cf5769b8ac540e33f01fa9471058b5e89da9e06f3c882a8c12f" +dependencies = [ + "async-io 1.13.0", + "blocking", + "futures-lite 1.13.0", +] + +[[package]] +name = "async-process" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea6438ba0a08d81529c69b36700fa2f95837bfe3e776ab39cde9c14d9149da88" +dependencies = [ + "async-io 1.13.0", + "async-lock 2.8.0", + "async-signal", + "blocking", + "cfg-if", + "event-listener 3.1.0", + "futures-lite 1.13.0", + "rustix 0.38.31", + "windows-sys 0.48.0", +] + +[[package]] +name = "async-signal" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e47d90f65a225c4527103a8d747001fc56e375203592b25ad103e1ca13124c5" +dependencies = [ + "async-io 2.3.1", + "async-lock 2.8.0", + "atomic-waker", + "cfg-if", + "futures-core", + "futures-io", + "rustix 0.38.31", + "signal-hook-registry", + "slab", + "windows-sys 0.48.0", +] + +[[package]] +name = "async-std" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62565bb4402e926b29953c785397c6dc0391b7b446e45008b0049eb43cec6f5d" +dependencies = [ + "async-channel 1.9.0", + "async-global-executor", + "async-io 1.13.0", + "async-lock 2.8.0", + "crossbeam-utils", + "futures-channel", + "futures-core", + "futures-io", + "futures-lite 1.13.0", + "gloo-timers", + "kv-log-macro", + "log", + "memchr", + "once_cell", + "pin-project-lite 0.2.13", + "pin-utils", + "slab", + "wasm-bindgen-futures", +] + +[[package]] +name = "async-task" +version = "4.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbb36e985947064623dbd357f727af08ffd077f93d696782f3c56365fa2e2799" + [[package]] name = "async-trait" version = "0.1.77" @@ -207,6 +455,46 @@ dependencies = [ "syn 2.0.48", ] +[[package]] +name = "atomic-waker" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" + +[[package]] +name = "auto_enums" +version = "0.7.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe0dfe45d75158751e195799f47ea02e81f570aa24bc5ef999cdd9e888c4b5c3" +dependencies = [ + "auto_enums_core", + "auto_enums_derive", +] + +[[package]] +name = "auto_enums_core" +version = "0.7.12" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "da47c46001293a2c4b744d731958be22cff408a2ab76e2279328f9713b1267b4" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "auto_enums_derive" +version = "0.7.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41aed1da83ecdc799503b7cb94da1b45a34d72b49caf40a61d9cf5b88ec07cfd" +dependencies = [ + "autocfg", + "derive_utils", + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "autocfg" version = "1.1.0" @@ -232,7 +520,7 @@ dependencies = [ "aws-smithy-types", "aws-types", "bytes", - "fastrand", + "fastrand 2.0.1", "hex", "http 0.2.12", "hyper 0.14.28", @@ -270,11 +558,11 @@ dependencies = [ "aws-smithy-types", "aws-types", "bytes", - "fastrand", + "fastrand 2.0.1", "http 0.2.12", "http-body 0.4.6", "percent-encoding", - "pin-project-lite", + "pin-project-lite 0.2.13", "tracing", "uuid", ] @@ -433,7 +721,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fcf7f09a27286d84315dfb9346208abb3b0973a692454ae6d0bc8d803fcce3b4" dependencies = [ "futures-util", - "pin-project-lite", + "pin-project-lite 0.2.13", "tokio", ] @@ -452,7 +740,7 @@ dependencies = [ "http 0.2.12", "http-body 0.4.6", "md-5", - "pin-project-lite", + "pin-project-lite 0.2.13", "sha1", "sha2", "tracing", @@ -485,7 +773,7 @@ dependencies = [ "http-body 0.4.6", "once_cell", "percent-encoding", - "pin-project-lite", + "pin-project-lite 0.2.13", "pin-utils", "tracing", ] @@ -520,14 +808,14 @@ dependencies = [ "aws-smithy-runtime-api", "aws-smithy-types", "bytes", - "fastrand", + "fastrand 2.0.1", "h2 0.3.24", "http 0.2.12", "http-body 0.4.6", "hyper 0.14.28", "hyper-rustls 0.24.2", "once_cell", - "pin-project-lite", + "pin-project-lite 0.2.13", "pin-utils", "rustls 0.21.10", "tokio", @@ -545,7 +833,7 @@ dependencies = [ "bytes", "http 0.2.12", "http 1.1.0", - "pin-project-lite", + "pin-project-lite 0.2.13", "tokio", "tracing", "zeroize", @@ -565,7 +853,7 @@ dependencies = [ "http-body 0.4.6", "itoa", "num-integer", - "pin-project-lite", + "pin-project-lite 0.2.13", "pin-utils", "ryu", "serde", @@ -600,9 +888,9 @@ dependencies = [ [[package]] name = "backtrace" -version = "0.3.69" +version = "0.3.59" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2089b7e3f35b9dd2d0ed921ead4f6d318c27680d4a5bd167b3ee120edb105837" +checksum = "4717cfcbfaa661a0fd48f8453951837ae7e8f81e481fbb136e3202d72805a744" dependencies = [ "addr2line", "cc", @@ -653,6 +941,12 @@ version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" +[[package]] +name = "bitflags" +version = "2.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed570934406eb16438a4e976b1b4500774099c13b8cb96eec99f620f05090ddf" + [[package]] name = "bitmaps" version = "2.1.0" @@ -662,6 +956,18 @@ dependencies = [ "typenum", ] +[[package]] +name = "bitvec" +version = "0.19.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55f93d0ef3363c364d5976646a38f04cf67cfe1d4c8d160cdea02cab2c116b33" +dependencies = [ + "funty", + "radium", + "tap", + "wyz", +] + [[package]] name = "blake2" version = "0.10.6" @@ -680,6 +986,42 @@ dependencies = [ "generic-array", ] +[[package]] +name = "blocking" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"6a37913e8dc4ddcc604f0c6d3bf2887c995153af3611de9e23c352b44c1b9118" +dependencies = [ + "async-channel 2.2.0", + "async-lock 3.3.0", + "async-task", + "fastrand 2.0.1", + "futures-io", + "futures-lite 2.2.0", + "piper", + "tracing", +] + +[[package]] +name = "bounded-static" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2325bd33fa7e3018e7e37f5b0591ba009124963b5a3f8b7cae6d0a8c1028ed4" +dependencies = [ + "bounded-static-derive", +] + +[[package]] +name = "bounded-static-derive" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f10dd247355bf631d98d2753d87ae62c84c8dcb996ad9b24a4168e0aec29bd6b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.48", +] + [[package]] name = "bumpalo" version = "3.14.0" @@ -738,6 +1080,15 @@ dependencies = [ "windows-targets 0.52.0", ] +[[package]] +name = "concurrent-queue" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d16048cd947b08fa32c24458a22f5dc5e835264f689f4f5653210c69fd107363" +dependencies = [ + "crossbeam-utils", +] + [[package]] name = "const-oid" version = "0.9.6" @@ -787,6 +1138,12 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "crossbeam-utils" +version = "0.8.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345" + [[package]] name = "crypto-bigint" version = "0.4.9" @@ -869,6 +1226,17 @@ dependencies = [ "syn 2.0.48", ] +[[package]] +name = "derive_utils" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "532b4c15dccee12c7044f1fcad956e98410860b22231e44a3b827464797ca7bf" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "digest" version = "0.10.7" @@ -891,6 +1259,16 @@ dependencies = [ "syn 2.0.48", ] +[[package]] +name = "duplexify" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1cc346cd6db38ceab2d33f59b26024c3ddb8e75f047c6cafbcbc016ea8065d5" +dependencies = [ + "async-std", + "pin-project-lite 0.1.12", +] + [[package]] name = "ecdsa" version = "0.14.8" @@ -965,6 +1343,84 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" +[[package]] +name = "errno" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "event-listener" +version = "2.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" + +[[package]] +name = "event-listener" +version = "3.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d93877bcde0eb80ca09131a08d23f0a5c18a620b01db137dba666d18cd9b30c2" +dependencies = [ + "concurrent-queue", + "parking", + "pin-project-lite 0.2.13", +] + +[[package]] +name = "event-listener" +version = "4.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67b215c49b2b248c855fb73579eb1f4f26c38ffdc12973e20e07b91d78d5646e" +dependencies = [ + "concurrent-queue", + "parking", + "pin-project-lite 0.2.13", +] + +[[package]] +name = "event-listener" +version = "5.2.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b5fb89194fa3cad959b833185b3063ba881dbfc7030680b314250779fb4cc91" +dependencies = [ + "concurrent-queue", + "parking", + "pin-project-lite 0.2.13", +] + +[[package]] +name = "event-listener-strategy" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "958e4d70b6d5e81971bebec42271ec641e7ff4e170a6fa605f2b8a8b65cb97d3" +dependencies = [ + "event-listener 4.0.3", + "pin-project-lite 0.2.13", +] + +[[package]] +name = "event-listener-strategy" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "feedafcaa9b749175d5ac357452a9d41ea2911da598fde46ce1fe02c37751291" +dependencies = [ + "event-listener 5.2.0", + "pin-project-lite 0.2.13", +] + +[[package]] +name = "fastrand" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be" +dependencies = [ + "instant", +] + [[package]] name = "fastrand" version = "2.0.1" @@ -996,6 +1452,12 @@ dependencies = [ "percent-encoding", ] +[[package]] +name = "funty" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fed34cd105917e91daa4da6b3728c47b068749d6a62c59811f06ed2ac71d9da7" + [[package]] name = "futures" version = "0.3.30" @@ -1044,6 +1506,34 @@ version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" +[[package]] +name = "futures-lite" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49a9d51ce47660b1e808d3c990b4709f2f415d928835a17dfd16991515c46bce" +dependencies = [ + "fastrand 1.9.0", + "futures-core", + "futures-io", + "memchr", + "parking", + "pin-project-lite 0.2.13", + "waker-fn", +] + +[[package]] +name = "futures-lite" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "445ba825b27408685aaecefd65178908c36c6e96aaf6d8599419d46e624192ba" +dependencies = [ + "fastrand 2.0.1", + "futures-core", + "futures-io", + "parking", + "pin-project-lite 0.2.13", +] + [[package]] name = "futures-macro" version = "0.3.30" @@ -1080,7 +1570,7 @@ dependencies = [ "futures-sink", "futures-task", "memchr", - "pin-project-lite", + "pin-project-lite 0.2.13", "pin-utils", "slab", ] @@ -1108,9 +1598,21 @@ dependencies = [ [[package]] name = "gimli" -version = "0.28.1" +version = "0.24.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e4075386626662786ddb0ec9081e7c7eeb1ba31951f447ca780ef9f5d568189" + +[[package]] +name = "gloo-timers" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" +checksum = "9b995a66bb87bebce9a0f4a95aed01daca4872c050bfcb21653361c03bc35e5c" +dependencies = [ + "futures-channel", + "futures-core", + "js-sys", + "wasm-bindgen", +] [[package]] name = "group" @@ -1218,7 +1720,7 @@ checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" dependencies = [ "bytes", "http 0.2.12", - "pin-project-lite", + "pin-project-lite 0.2.13", ] [[package]] @@ -1241,7 +1743,7 @@ dependencies = [ "futures-util", "http 1.1.0", "http-body 1.0.0", - "pin-project-lite", + "pin-project-lite 0.2.13", ] [[package]] @@ -1272,8 +1774,8 @@ dependencies = [ "httparse", "httpdate", "itoa", - "pin-project-lite", - 
"socket2", + "pin-project-lite 0.2.13", + "socket2 0.5.5", "tokio", "tower-service", "tracing", @@ -1295,7 +1797,7 @@ dependencies = [ "httparse", "httpdate", "itoa", - "pin-project-lite", + "pin-project-lite 0.2.13", "smallvec", "tokio", "want", @@ -1348,8 +1850,8 @@ dependencies = [ "http 1.1.0", "http-body 1.0.0", "hyper 1.2.0", - "pin-project-lite", - "socket2", + "pin-project-lite 0.2.13", + "socket2 0.5.5", "tokio", "tower", "tower-service", @@ -1379,6 +1881,17 @@ dependencies = [ "cc", ] +[[package]] +name = "idna" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "418a0a6fab821475f634efe3ccc45c013f742efe03d853e8d3355d5cb850ecf8" +dependencies = [ + "matches", + "unicode-bidi", + "unicode-normalization", +] + [[package]] name = "idna" version = "0.5.0" @@ -1403,6 +1916,46 @@ dependencies = [ "version_check", ] +[[package]] +name = "imap-codec" +version = "2.0.0" +source = "git+https://github.com/superboum/imap-codec?branch=custom/aerogramme#d8a5afc03fb771232e94c73af6a05e79dc80bbed" +dependencies = [ + "abnf-core", + "base64 0.21.7", + "bounded-static", + "chrono", + "imap-types", + "log", + "nom 7.1.3", + "thiserror", +] + +[[package]] +name = "imap-flow" +version = "0.1.0" +source = "git+https://github.com/duesee/imap-flow.git?branch=main#dce759a8531f317e8d7311fb032b366db6698e38" +dependencies = [ + "bounded-static", + "bytes", + "imap-codec", + "imap-types", + "thiserror", + "tokio", + "tracing", +] + +[[package]] +name = "imap-types" +version = "2.0.0" +source = "git+https://github.com/superboum/imap-codec?branch=custom/aerogramme#d8a5afc03fb771232e94c73af6a05e79dc80bbed" +dependencies = [ + "base64 0.21.7", + "bounded-static", + "chrono", + "thiserror", +] + [[package]] name = "indexmap" version = "2.2.5" @@ -1413,6 +1966,26 @@ dependencies = [ "hashbrown", ] +[[package]] +name = "instant" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "io-lifetimes" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" +dependencies = [ + "hermit-abi", + "libc", + "windows-sys 0.48.0", +] + [[package]] name = "itoa" version = "1.0.10" @@ -1460,6 +2033,15 @@ dependencies = [ "tokio", ] +[[package]] +name = "kv-log-macro" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f" +dependencies = [ + "log", +] + [[package]] name = "lazy_static" version = "1.4.0" @@ -1504,6 +2086,19 @@ dependencies = [ "x509-parser", ] +[[package]] +name = "lexical-core" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6607c62aa161d23d17a9072cc5da0be67cdfc89d3afb1e8d9c842bebc2525ffe" +dependencies = [ + "arrayvec", + "bitflags 1.3.2", + "cfg-if", + "ryu", + "static_assertions", +] + [[package]] name = "libc" version = "0.2.152" @@ -1533,11 +2128,32 @@ dependencies = [ "walkdir", ] +[[package]] +name = "linux-raw-sys" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" + +[[package]] +name = "linux-raw-sys" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c" + [[package]] name = "log" version = "0.4.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" +dependencies = [ + "value-bag", +] + +[[package]] +name = "matches" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" [[package]] name = "md-5" @@ -1551,9 +2167,9 @@ dependencies = [ [[package]] name = "memchr" -version = "2.7.1" +version = "2.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149" +checksum = "0ee1c47aaa256ecabcaea351eae4a9b01ef39ed810004e298d2511ed284b1525" [[package]] name = "minimal-lexical" @@ -1563,11 +2179,12 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" -version = "0.7.1" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7" +checksum = "a92518e98c078586bc6c934028adcca4c92a53d6a958196de835170a01d84e4b" dependencies = [ "adler", + "autocfg", ] [[package]] @@ -1587,6 +2204,19 @@ version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf51a729ecf40266a2368ad335a5fdde43471f545a967109cd62146ecf8b66ff" +[[package]] +name = "nom" +version = "6.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c6a7a9657c84d5814c6196b68bb4429df09c18b1573806259fba397ea4ad0d44" +dependencies = [ + "bitvec", + "funty", + "lexical-core", + "memchr", + "version_check", +] + [[package]] name = "nom" version = "7.1.3" @@ -1639,12 +2269,9 @@ dependencies = [ [[package]] name = "object" -version = "0.32.2" +version = "0.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441" -dependencies = [ - "memchr", -] +checksum = "1a5b3dd1c072ee7963717671d1ca129f1048fda25edea6b752bfc71ac8854170" [[package]] name = "oid-registry" @@ -1684,6 +2311,12 @@ dependencies = [ "sha2", ] +[[package]] +name = "parking" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb813b8af86854136c6922af0598d719255ecb2179515e6e7730d468f05c9cae" + [[package]] name = "password-hash" version = "0.5.0" @@ -1727,6 +2360,12 @@ dependencies = [ "syn 2.0.48", ] +[[package]] +name = "pin-project-lite" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "257b64915a082f7811703966789728173279bdebb956b143dbcd23f6f970a777" + [[package]] name = "pin-project-lite" version = "0.2.13" @@ -1739,6 +2378,17 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" +[[package]] +name = "piper" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "668d31b1c4eba19242f2088b2bf3316b82ca31082a8335764db4e083db7485d4" +dependencies = [ + "atomic-waker", + "fastrand 2.0.1", + "futures-io", +] + [[package]] name = "pkcs8" version = "0.9.0" @@ -1755,6 +2405,36 @@ version = "0.3.29" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2900ede94e305130c13ddd391e0ab7cbaeb783945ae07a279c268cb05109c6cb" 
+[[package]] +name = "polling" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b2d323e8ca7996b3e23126511a523f7e62924d93ecd5ae73b333815b0eb3dce" +dependencies = [ + "autocfg", + "bitflags 1.3.2", + "cfg-if", + "concurrent-queue", + "libc", + "log", + "pin-project-lite 0.2.13", + "windows-sys 0.48.0", +] + +[[package]] +name = "polling" +version = "3.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24f040dee2588b4963afb4e420540439d126f73fdacf4a9c486a96d840bac3c9" +dependencies = [ + "cfg-if", + "concurrent-queue", + "pin-project-lite 0.2.13", + "rustix 0.38.31", + "tracing", + "windows-sys 0.52.0", +] + [[package]] name = "powerfmt" version = "0.2.0" @@ -1795,6 +2475,12 @@ dependencies = [ "proc-macro2", ] +[[package]] +name = "radium" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "941ba9d78d8e2f7ce474c015eea4d9c6d25b6a3327f9832ee29a4de27f91bbb8" + [[package]] name = "rand" version = "0.8.5" @@ -1834,12 +2520,27 @@ dependencies = [ "rand_core", ] +[[package]] +name = "regex-automata" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" +dependencies = [ + "regex-syntax", +] + [[package]] name = "regex-lite" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "30b661b2f27137bdbc16f00eda72866a92bb28af1753ffbd56744fb6e2e9cd8e" +[[package]] +name = "regex-syntax" +version = "0.6.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" + [[package]] name = "rfc6979" version = "0.3.1" @@ -1926,6 +2627,33 @@ dependencies = [ "nom 7.1.3", ] +[[package]] +name = "rustix" +version = "0.37.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fea8ca367a3a01fe35e6943c400addf443c0f57670e6ec51196f71a4b8762dd2" +dependencies = [ + "bitflags 1.3.2", + "errno", + "io-lifetimes", + "libc", + "linux-raw-sys 0.3.8", + "windows-sys 0.48.0", +] + +[[package]] +name = "rustix" +version = "0.38.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ea3e1a662af26cd7a3ba09c0297a31af215563ecf42817c98df621387f4e949" +dependencies = [ + "bitflags 2.4.2", + "errno", + "libc", + "linux-raw-sys 0.4.13", + "windows-sys 0.52.0", +] + [[package]] name = "rustls" version = "0.20.9" @@ -2089,7 +2817,7 @@ version = "2.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "05b64fb303737d99b81884b2c63433e9ae28abebe5eb5045dcdd175dc2ecf4de" dependencies = [ - "bitflags", + "bitflags 1.3.2", "core-foundation", "core-foundation-sys", "libc", @@ -2209,6 +2937,71 @@ version = "1.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6ecd384b10a64542d77071bd64bd7b231f4ed5940fba55e98c3de13824cf3d7" +[[package]] +name = "smol" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13f2b548cd8447f8de0fdf1c592929f70f4fc7039a05e47404b0d096ec6987a1" +dependencies = [ + "async-channel 1.9.0", + "async-executor", + "async-fs", + "async-io 1.13.0", + "async-lock 2.8.0", + "async-net", + "async-process", + "blocking", + "futures-lite 1.13.0", +] + +[[package]] +name = "smtp-message" +version = "0.1.0" +source = "git+http://github.com/Alexis211/kannader?branch=feature/lmtp#0560e7c46af752344a3095add5f84b02400b1111" 
+dependencies = [ + "auto_enums", + "futures", + "idna 0.2.3", + "lazy_static", + "nom 6.2.2", + "pin-project", + "regex-automata", + "serde", +] + +[[package]] +name = "smtp-server" +version = "0.1.0" +source = "git+http://github.com/Alexis211/kannader?branch=feature/lmtp#0560e7c46af752344a3095add5f84b02400b1111" +dependencies = [ + "async-trait", + "chrono", + "duplexify", + "futures", + "smol", + "smtp-message", + "smtp-server-types", +] + +[[package]] +name = "smtp-server-types" +version = "0.1.0" +source = "git+http://github.com/Alexis211/kannader?branch=feature/lmtp#0560e7c46af752344a3095add5f84b02400b1111" +dependencies = [ + "serde", + "smtp-message", +] + +[[package]] +name = "socket2" +version = "0.4.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d" +dependencies = [ + "libc", + "winapi", +] + [[package]] name = "socket2" version = "0.5.5" @@ -2253,6 +3046,12 @@ dependencies = [ "der", ] +[[package]] +name = "static_assertions" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" + [[package]] name = "subtle" version = "2.5.0" @@ -2293,6 +3092,12 @@ dependencies = [ "unicode-xid", ] +[[package]] +name = "tap" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" + [[package]] name = "thiserror" version = "1.0.56" @@ -2368,9 +3173,9 @@ dependencies = [ "libc", "mio", "num_cpus", - "pin-project-lite", + "pin-project-lite 0.2.13", "signal-hook-registry", - "socket2", + "socket2 0.5.5", "tokio-macros", "windows-sys 0.48.0", ] @@ -2425,7 +3230,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "397c988d37662c7dda6d2208364a706264bf3d6138b11d436cbac0ad38832842" dependencies = [ "futures-core", - "pin-project-lite", + "pin-project-lite 0.2.13", "tokio", ] @@ -2439,7 +3244,7 @@ dependencies = [ "futures-core", "futures-io", "futures-sink", - "pin-project-lite", + "pin-project-lite 0.2.13", "tokio", "tracing", ] @@ -2462,7 +3267,7 @@ dependencies = [ "futures-core", "futures-util", "pin-project", - "pin-project-lite", + "pin-project-lite 0.2.13", "tokio", "tower-layer", "tower-service", @@ -2488,7 +3293,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" dependencies = [ "log", - "pin-project-lite", + "pin-project-lite 0.2.13", "tracing-attributes", "tracing-core", ] @@ -2571,7 +3376,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633" dependencies = [ "form_urlencoded", - "idna", + "idna 0.5.0", "percent-encoding", ] @@ -2587,6 +3392,12 @@ version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f00cc9702ca12d3c81455259621e676d0f7251cec66a21e98fe2e9a37db93b2a" +[[package]] +name = "value-bag" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "126e423afe2dd9ac52142e7e9d5ce4135d7e13776c529d27fd6bc49f19e3280b" + [[package]] name = "version_check" version = "0.9.4" @@ -2599,6 +3410,12 @@ version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c3082ca00d5a5ef149bb8b555a72ae84c9c59f7250f013ac822ac2e49b19c64" +[[package]] +name = "waker-fn" 
+version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3c4517f54858c779bbcbf228f4fca63d121bf85fbecb2dc578cdf4a39395690" + [[package]] name = "walkdir" version = "2.4.0" @@ -2649,6 +3466,18 @@ dependencies = [ "wasm-bindgen-shared", ] +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bde2032aeb86bdfaecc8b261eef3cba735cc426c1f3a3416d1e0791be95fc461" +dependencies = [ + "cfg-if", + "js-sys", + "wasm-bindgen", + "web-sys", +] + [[package]] name = "wasm-bindgen-macro" version = "0.2.90" @@ -2870,6 +3699,12 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04" +[[package]] +name = "wyz" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85e60b0d1b5f99db2556934e21937020776a5d31520bf169e851ac44e6420214" + [[package]] name = "x509-parser" version = "0.13.2" @@ -2928,13 +3763,3 @@ dependencies = [ "cc", "libc", ] - -[[patch.unused]] -name = "imap-codec" -version = "2.0.0" -source = "git+https://github.com/superboum/imap-codec?branch=custom/aerogramme#d8a5afc03fb771232e94c73af6a05e79dc80bbed" - -[[patch.unused]] -name = "imap-types" -version = "2.0.0" -source = "git+https://github.com/superboum/imap-codec?branch=custom/aerogramme#d8a5afc03fb771232e94c73af6a05e79dc80bbed" diff --git a/Cargo.toml b/Cargo.toml index 5654322..406d5bd 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -7,7 +7,7 @@ members = [ "aero-dav", "aero-dav/fuzz", "aero-collections", -# "aero-proto", + "aero-proto", # "aerogramme", ] @@ -20,7 +20,7 @@ aero-bayou = { version = "0.3.0", path = "aero-bayou" } aero-sasl = { version = "0.3.0", path = "aero-sasl" } aero-dav = { version = "0.3.0", path = "aero-dav" } aero-collections = { version = "0.3.0", path = "aero-collections" } -#aero-proto = { version = "0.3.0", path = "aero-proto" } +aero-proto = { version = "0.3.0", path = "aero-proto" } #aerogramme = { version = "0.3.0", path = "aerogramme" } # async runtime diff --git a/aero-dav/src/caldecoder.rs b/aero-dav/src/caldecoder.rs index 5f40c4b..fb840d6 100644 --- a/aero-dav/src/caldecoder.rs +++ b/aero-dav/src/caldecoder.rs @@ -1,4 +1,4 @@ -use super::types as dav; +//use super::types as dav; use super::caltypes::*; use super::xml; use super::error; @@ -7,25 +7,25 @@ use super::error; // ---- EXTENSIONS --- impl xml::QRead for Violation { - async fn qread(xml: &mut xml::Reader) -> Result { + async fn qread(_xml: &mut xml::Reader) -> Result { unreachable!(); } } impl xml::QRead for Property { - async fn qread(xml: &mut xml::Reader) -> Result { + async fn qread(_xml: &mut xml::Reader) -> Result { unreachable!(); } } impl xml::QRead for PropertyRequest { - async fn qread(xml: &mut xml::Reader) -> Result { + async fn qread(_xml: &mut xml::Reader) -> Result { unreachable!(); } } impl xml::QRead for ResourceType { - async fn qread(xml: &mut xml::Reader) -> Result { + async fn qread(_xml: &mut xml::Reader) -> Result { unreachable!(); } } diff --git a/aero-dav/src/calencoder.rs b/aero-dav/src/calencoder.rs index ff6eb24..67892ed 100644 --- a/aero-dav/src/calencoder.rs +++ b/aero-dav/src/calencoder.rs @@ -1,7 +1,5 @@ use quick_xml::Error as QError; -use quick_xml::events::{Event, BytesEnd, BytesStart, BytesText}; -use quick_xml::name::PrefixDeclaration; -use tokio::io::AsyncWrite; +use quick_xml::events::{Event, BytesText}; use super::caltypes::*; use 
super::xml::{Node, QWrite, IWrite, Writer}; @@ -627,7 +625,7 @@ impl QWrite for ParamFilterMatch { impl QWrite for TimeZone { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let mut start = xml.create_cal_element("timezone"); + let start = xml.create_cal_element("timezone"); let end = start.to_end(); xml.q.write_event_async(Event::Start(start.clone())).await?; @@ -638,7 +636,7 @@ impl QWrite for TimeZone { impl QWrite for Filter { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - let mut start = xml.create_cal_element("filter"); + let start = xml.create_cal_element("filter"); let end = start.to_end(); xml.q.write_event_async(Event::Start(start.clone())).await?; diff --git a/aero-dav/src/decoder.rs b/aero-dav/src/decoder.rs index 65cb712..02bc376 100644 --- a/aero-dav/src/decoder.rs +++ b/aero-dav/src/decoder.rs @@ -1,14 +1,9 @@ -use std::future::Future; - use quick_xml::events::Event; -use quick_xml::events::attributes::AttrError; -use quick_xml::name::{Namespace, QName, PrefixDeclaration, ResolveResult, ResolveResult::*}; -use quick_xml::reader::NsReader; -use tokio::io::AsyncBufRead; +use chrono::DateTime; use super::types::*; use super::error::ParsingError; -use super::xml::{Node, QRead, Reader, IRead, DAV_URN, CAL_URN}; +use super::xml::{Node, QRead, Reader, IRead, DAV_URN}; //@TODO (1) Rewrite all objects as Href, // where we return Ok(None) instead of trying to find the object at any cost. @@ -119,7 +114,7 @@ impl QRead for LockInfo { impl QRead> for PropValue { async fn qread(xml: &mut Reader) -> Result { xml.open(DAV_URN, "prop").await?; - let mut acc = xml.collect::>().await?; + let acc = xml.collect::>().await?; xml.close().await?; Ok(PropValue(acc)) } @@ -352,8 +347,6 @@ impl QRead> for PropertyRequest { impl QRead> for Property { async fn qread(xml: &mut Reader) -> Result { - use chrono::{DateTime, FixedOffset, TimeZone}; - // Core WebDAV properties if xml.maybe_open(DAV_URN, "creationdate").await?.is_some() { let datestr = xml.tag_string().await?; @@ -592,7 +585,7 @@ impl QRead for LockType { impl QRead for Href { async fn qread(xml: &mut Reader) -> Result { xml.open(DAV_URN, "href").await?; - let mut url = xml.tag_string().await?; + let url = xml.tag_string().await?; xml.close().await?; Ok(Href(url)) } diff --git a/aero-dav/src/lib.rs b/aero-dav/src/lib.rs index 6bfbf62..0ca8243 100644 --- a/aero-dav/src/lib.rs +++ b/aero-dav/src/lib.rs @@ -1,5 +1,4 @@ #![feature(type_alias_impl_trait)] -#![feature(async_fn_in_trait)] #![feature(async_closure)] #![feature(trait_alias)] diff --git a/aero-dav/src/realization.rs b/aero-dav/src/realization.rs index 33a556e..5781637 100644 --- a/aero-dav/src/realization.rs +++ b/aero-dav/src/realization.rs @@ -6,12 +6,12 @@ use super::error; #[derive(Debug, PartialEq)] pub struct Disabled(()); impl xml::QRead for Disabled { - async fn qread(xml: &mut xml::Reader) -> Result { + async fn qread(_xml: &mut xml::Reader) -> Result { Err(error::ParsingError::Recoverable) } } impl xml::QWrite for Disabled { - async fn qwrite(&self, xml: &mut xml::Writer) -> Result<(), quick_xml::Error> { + async fn qwrite(&self, _xml: &mut xml::Writer) -> Result<(), quick_xml::Error> { unreachable!(); } } diff --git a/aero-proto/Cargo.toml b/aero-proto/Cargo.toml new file mode 100644 index 0000000..df8c696 --- /dev/null +++ b/aero-proto/Cargo.toml @@ -0,0 +1,35 @@ +[package] +name = "aero-proto" +version = "0.3.0" +authors = ["Alex Auvolat ", "Quentin Dufour "] +edition = "2021" +license = "EUPL-1.2" +description = "Binding between 
Aerogramme's internal components and well-known protocols" + +[dependencies] +aero-sasl.workspace = true +aero-dav.workspace = true +aero-user.workspace = true +aero-collections.workspace = true + +async-trait.workspace = true +anyhow.workspace = true +hyper.workspace = true +base64.workspace = true +hyper-util.workspace = true +http-body-util.workspace = true +futures.workspace = true +tokio.workspace = true +tokio-util.workspace = true +tokio-rustls.workspace = true +rustls.workspace = true +rustls-pemfile.workspace = true +imap-codec.workspace = true +imap-flow.workspace = true +chrono.workspace = true +eml-codec.workspace = true +thiserror.workspace = true +duplexify.workspace = true +smtp-message.workspace = true +smtp-server.workspace = true +tracing.workspace = true diff --git a/aero-proto/dav.rs b/aero-proto/dav.rs deleted file mode 100644 index fa2023a..0000000 --- a/aero-proto/dav.rs +++ /dev/null @@ -1,145 +0,0 @@ -use std::net::SocketAddr; - -use anyhow::{anyhow, Result}; -use base64::Engine; -use hyper::service::service_fn; -use hyper::{Request, Response, body::Bytes}; -use hyper::server::conn::http1 as http; -use hyper_util::rt::TokioIo; -use http_body_util::Full; -use futures::stream::{FuturesUnordered, StreamExt}; -use tokio::net::TcpListener; -use tokio::sync::watch; - -use crate::config::DavUnsecureConfig; -use crate::login::ArcLoginProvider; -use crate::user::User; - -pub struct Server { - bind_addr: SocketAddr, - login_provider: ArcLoginProvider, -} - -pub fn new_unsecure(config: DavUnsecureConfig, login: ArcLoginProvider) -> Server { - Server { - bind_addr: config.bind_addr, - login_provider: login, - } -} - -impl Server { - pub async fn run(self: Self, mut must_exit: watch::Receiver) -> Result<()> { - let tcp = TcpListener::bind(self.bind_addr).await?; - tracing::info!("DAV server listening on {:#}", self.bind_addr); - - let mut connections = FuturesUnordered::new(); - while !*must_exit.borrow() { - let wait_conn_finished = async { - if connections.is_empty() { - futures::future::pending().await - } else { - connections.next().await - } - }; - let (socket, remote_addr) = tokio::select! 
{ - a = tcp.accept() => a?, - _ = wait_conn_finished => continue, - _ = must_exit.changed() => continue, - }; - tracing::info!("Accepted connection from {}", remote_addr); - let stream = TokioIo::new(socket); - let login = self.login_provider.clone(); - let conn = tokio::spawn(async move { - //@FIXME should create a generic "public web" server on which "routers" could be - //abitrarily bound - //@FIXME replace with a handler supporting http2 and TLS - match http::Builder::new().serve_connection(stream, service_fn(|req: Request| { - let login = login.clone(); - async move { - auth(login, req).await - } - })).await { - Err(e) => tracing::warn!(err=?e, "connection failed"), - Ok(()) => tracing::trace!("connection terminated with success"), - } - }); - connections.push(conn); - } - drop(tcp); - - tracing::info!("Server shutting down, draining remaining connections..."); - while connections.next().await.is_some() {} - - Ok(()) - } -} - -//@FIXME We should not support only BasicAuth -async fn auth( - login: ArcLoginProvider, - req: Request, -) -> Result>> { - - let auth_val = match req.headers().get("Authorization") { - Some(hv) => hv.to_str()?, - None => return Ok(Response::builder() - .status(401) - .body(Full::new(Bytes::from("Missing Authorization field")))?), - }; - - let b64_creds_maybe_padded = match auth_val.split_once(" ") { - Some(("Basic", b64)) => b64, - _ => return Ok(Response::builder() - .status(400) - .body(Full::new(Bytes::from("Unsupported Authorization field")))?), - }; - - // base64urlencoded may have trailing equals, base64urlsafe has not - // theoretically authorization is padded but "be liberal in what you accept" - let b64_creds_clean = b64_creds_maybe_padded.trim_end_matches('='); - - // Decode base64 - let creds = base64::engine::general_purpose::STANDARD_NO_PAD.decode(b64_creds_clean)?; - let str_creds = std::str::from_utf8(&creds)?; - - // Split username and password - let (username, password) = str_creds - .split_once(':') - .ok_or(anyhow!("Missing colon in Authorization, can't split decoded value into a username/password pair"))?; - - // Call login provider - let creds = match login.login(username, password).await { - Ok(c) => c, - Err(e) => return Ok(Response::builder() - .status(401) - .body(Full::new(Bytes::from("Wrong credentials")))?), - }; - - // Build a user - let user = User::new(username.into(), creds).await?; - - // Call router with user - router(user, req).await -} - -async fn router(user: std::sync::Arc, req: Request) -> Result>> { - let path_segments: Vec<_> = req.uri().path().split("/").filter(|s| *s != "").collect(); - match path_segments.as_slice() { - [] => tracing::info!("root"), - [ username, ..] 
if *username != user.username => return Ok(Response::builder() - .status(403) - .body(Full::new(Bytes::from("Accessing other user ressources is not allowed")))?), - [ _ ] => tracing::info!("user home"), - [ _, "calendar" ] => tracing::info!("user calendars"), - [ _, "calendar", colname ] => tracing::info!(name=colname, "selected calendar"), - [ _, "calendar", colname, member ] => tracing::info!(name=colname, obj=member, "selected event"), - _ => return Ok(Response::builder() - .status(404) - .body(Full::new(Bytes::from("Resource not found")))?), - } - Ok(Response::new(Full::new(Bytes::from("Hello World!")))) -} - -async fn collections(user: std::sync::Arc, req: Request) -> Result>> { - unimplemented!(); -} diff --git a/aero-proto/imap/attributes.rs b/aero-proto/imap/attributes.rs deleted file mode 100644 index 89446a8..0000000 --- a/aero-proto/imap/attributes.rs +++ /dev/null @@ -1,77 +0,0 @@ -use imap_codec::imap_types::command::FetchModifier; -use imap_codec::imap_types::fetch::{MacroOrMessageDataItemNames, MessageDataItemName, Section}; - -/// Internal decisions based on fetched attributes -/// passed by the client - -pub struct AttributesProxy { - pub attrs: Vec>, -} -impl AttributesProxy { - pub fn new( - attrs: &MacroOrMessageDataItemNames<'static>, - modifiers: &[FetchModifier], - is_uid_fetch: bool, - ) -> Self { - // Expand macros - let mut fetch_attrs = match attrs { - MacroOrMessageDataItemNames::Macro(m) => { - use imap_codec::imap_types::fetch::Macro; - use MessageDataItemName::*; - match m { - Macro::All => vec![Flags, InternalDate, Rfc822Size, Envelope], - Macro::Fast => vec![Flags, InternalDate, Rfc822Size], - Macro::Full => vec![Flags, InternalDate, Rfc822Size, Envelope, Body], - _ => { - tracing::error!("unimplemented macro"); - vec![] - } - } - } - MacroOrMessageDataItemNames::MessageDataItemNames(a) => a.clone(), - }; - - // Handle uids - if is_uid_fetch && !fetch_attrs.contains(&MessageDataItemName::Uid) { - fetch_attrs.push(MessageDataItemName::Uid); - } - - // Handle inferred MODSEQ tag - let is_changed_since = modifiers - .iter() - .any(|m| matches!(m, FetchModifier::ChangedSince(..))); - if is_changed_since && !fetch_attrs.contains(&MessageDataItemName::ModSeq) { - fetch_attrs.push(MessageDataItemName::ModSeq); - } - - Self { attrs: fetch_attrs } - } - - pub fn is_enabling_condstore(&self) -> bool { - self.attrs - .iter() - .any(|x| matches!(x, MessageDataItemName::ModSeq)) - } - - pub fn need_body(&self) -> bool { - self.attrs.iter().any(|x| match x { - MessageDataItemName::Body - | MessageDataItemName::Rfc822 - | MessageDataItemName::Rfc822Text - | MessageDataItemName::BodyStructure => true, - - MessageDataItemName::BodyExt { - section: Some(section), - partial: _, - peek: _, - } => match section { - Section::Header(None) - | Section::HeaderFields(None, _) - | Section::HeaderFieldsNot(None, _) => false, - _ => true, - }, - MessageDataItemName::BodyExt { .. 
} => true, - _ => false, - }) - } -} diff --git a/aero-proto/imap/capability.rs b/aero-proto/imap/capability.rs deleted file mode 100644 index c76b51c..0000000 --- a/aero-proto/imap/capability.rs +++ /dev/null @@ -1,159 +0,0 @@ -use imap_codec::imap_types::command::{FetchModifier, SelectExamineModifier, StoreModifier}; -use imap_codec::imap_types::core::Vec1; -use imap_codec::imap_types::extensions::enable::{CapabilityEnable, Utf8Kind}; -use imap_codec::imap_types::response::Capability; -use std::collections::HashSet; - -use crate::imap::attributes::AttributesProxy; - -fn capability_unselect() -> Capability<'static> { - Capability::try_from("UNSELECT").unwrap() -} - -fn capability_condstore() -> Capability<'static> { - Capability::try_from("CONDSTORE").unwrap() -} - -fn capability_uidplus() -> Capability<'static> { - Capability::try_from("UIDPLUS").unwrap() -} - -fn capability_liststatus() -> Capability<'static> { - Capability::try_from("LIST-STATUS").unwrap() -} - -/* -fn capability_qresync() -> Capability<'static> { - Capability::try_from("QRESYNC").unwrap() -} -*/ - -#[derive(Debug, Clone)] -pub struct ServerCapability(HashSet>); - -impl Default for ServerCapability { - fn default() -> Self { - Self(HashSet::from([ - Capability::Imap4Rev1, - Capability::Enable, - Capability::Move, - Capability::LiteralPlus, - Capability::Idle, - capability_unselect(), - capability_condstore(), - capability_uidplus(), - capability_liststatus(), - //capability_qresync(), - ])) - } -} - -impl ServerCapability { - pub fn to_vec(&self) -> Vec1> { - self.0 - .iter() - .map(|v| v.clone()) - .collect::>() - .try_into() - .unwrap() - } - - #[allow(dead_code)] - pub fn support(&self, cap: &Capability<'static>) -> bool { - self.0.contains(cap) - } -} - -#[derive(Clone)] -pub enum ClientStatus { - NotSupportedByServer, - Disabled, - Enabled, -} -impl ClientStatus { - pub fn is_enabled(&self) -> bool { - matches!(self, Self::Enabled) - } - - pub fn enable(&self) -> Self { - match self { - Self::Disabled => Self::Enabled, - other => other.clone(), - } - } -} - -pub struct ClientCapability { - pub condstore: ClientStatus, - pub utf8kind: Option, -} - -impl ClientCapability { - pub fn new(sc: &ServerCapability) -> Self { - Self { - condstore: match sc.0.contains(&capability_condstore()) { - true => ClientStatus::Disabled, - _ => ClientStatus::NotSupportedByServer, - }, - utf8kind: None, - } - } - - pub fn enable_condstore(&mut self) { - self.condstore = self.condstore.enable(); - } - - pub fn attributes_enable(&mut self, ap: &AttributesProxy) { - if ap.is_enabling_condstore() { - self.enable_condstore() - } - } - - pub fn fetch_modifiers_enable(&mut self, mods: &[FetchModifier]) { - if mods - .iter() - .any(|x| matches!(x, FetchModifier::ChangedSince(..))) - { - self.enable_condstore() - } - } - - pub fn store_modifiers_enable(&mut self, mods: &[StoreModifier]) { - if mods - .iter() - .any(|x| matches!(x, StoreModifier::UnchangedSince(..))) - { - self.enable_condstore() - } - } - - pub fn select_enable(&mut self, mods: &[SelectExamineModifier]) { - for m in mods.iter() { - match m { - SelectExamineModifier::Condstore => self.enable_condstore(), - } - } - } - - pub fn try_enable( - &mut self, - caps: &[CapabilityEnable<'static>], - ) -> Vec> { - let mut enabled = vec![]; - for cap in caps { - match cap { - CapabilityEnable::CondStore if matches!(self.condstore, ClientStatus::Disabled) => { - self.condstore = ClientStatus::Enabled; - enabled.push(cap.clone()); - } - CapabilityEnable::Utf8(kind) if Some(kind) != 
self.utf8kind.as_ref() => { - self.utf8kind = Some(kind.clone()); - enabled.push(cap.clone()); - } - _ => (), - } - } - - enabled - } -} diff --git a/aero-proto/imap/command/anonymous.rs b/aero-proto/imap/command/anonymous.rs deleted file mode 100644 index 811d1e4..0000000 --- a/aero-proto/imap/command/anonymous.rs +++ /dev/null @@ -1,83 +0,0 @@ -use anyhow::Result; -use imap_codec::imap_types::command::{Command, CommandBody}; -use imap_codec::imap_types::core::AString; -use imap_codec::imap_types::response::Code; -use imap_codec::imap_types::secret::Secret; - -use crate::imap::capability::ServerCapability; -use crate::imap::command::anystate; -use crate::imap::flow; -use crate::imap::response::Response; -use crate::login::ArcLoginProvider; -use crate::user::User; - -//--- dispatching - -pub struct AnonymousContext<'a> { - pub req: &'a Command<'static>, - pub server_capabilities: &'a ServerCapability, - pub login_provider: &'a ArcLoginProvider, -} - -pub async fn dispatch(ctx: AnonymousContext<'_>) -> Result<(Response<'static>, flow::Transition)> { - match &ctx.req.body { - // Any State - CommandBody::Noop => anystate::noop_nothing(ctx.req.tag.clone()), - CommandBody::Capability => { - anystate::capability(ctx.req.tag.clone(), ctx.server_capabilities) - } - CommandBody::Logout => anystate::logout(), - - // Specific to anonymous context (3 commands) - CommandBody::Login { username, password } => ctx.login(username, password).await, - CommandBody::Authenticate { .. } => { - anystate::not_implemented(ctx.req.tag.clone(), "authenticate") - } - //StartTLS is not implemented for now, we will probably go full TLS. - - // Collect other commands - _ => anystate::wrong_state(ctx.req.tag.clone()), - } -} - -//--- Command controllers, private - -impl<'a> AnonymousContext<'a> { - async fn login( - self, - username: &AString<'a>, - password: &Secret>, - ) -> Result<(Response<'static>, flow::Transition)> { - let (u, p) = ( - std::str::from_utf8(username.as_ref())?, - std::str::from_utf8(password.declassify().as_ref())?, - ); - tracing::info!(user = %u, "command.login"); - - let creds = match self.login_provider.login(&u, &p).await { - Err(e) => { - tracing::debug!(error=%e, "authentication failed"); - return Ok(( - Response::build() - .to_req(self.req) - .message("Authentication failed") - .no()?, - flow::Transition::None, - )); - } - Ok(c) => c, - }; - - let user = User::new(u.to_string(), creds).await?; - - tracing::info!(username=%u, "connected"); - Ok(( - Response::build() - .to_req(self.req) - .code(Code::Capability(self.server_capabilities.to_vec())) - .message("Completed") - .ok()?, - flow::Transition::Authenticate(user), - )) - } -} diff --git a/aero-proto/imap/command/anystate.rs b/aero-proto/imap/command/anystate.rs deleted file mode 100644 index 718ba3f..0000000 --- a/aero-proto/imap/command/anystate.rs +++ /dev/null @@ -1,54 +0,0 @@ -use anyhow::Result; -use imap_codec::imap_types::core::Tag; -use imap_codec::imap_types::response::Data; - -use crate::imap::capability::ServerCapability; -use crate::imap::flow; -use crate::imap::response::Response; - -pub(crate) fn capability( - tag: Tag<'static>, - cap: &ServerCapability, -) -> Result<(Response<'static>, flow::Transition)> { - let res = Response::build() - .tag(tag) - .message("Server capabilities") - .data(Data::Capability(cap.to_vec())) - .ok()?; - - Ok((res, flow::Transition::None)) -} - -pub(crate) fn noop_nothing(tag: Tag<'static>) -> Result<(Response<'static>, flow::Transition)> { - Ok(( - Response::build().tag(tag).message("Noop 
completed.").ok()?, - flow::Transition::None, - )) -} - -pub(crate) fn logout() -> Result<(Response<'static>, flow::Transition)> { - Ok((Response::bye()?, flow::Transition::Logout)) -} - -pub(crate) fn not_implemented<'a>( - tag: Tag<'a>, - what: &str, -) -> Result<(Response<'a>, flow::Transition)> { - Ok(( - Response::build() - .tag(tag) - .message(format!("Command not implemented {}", what)) - .bad()?, - flow::Transition::None, - )) -} - -pub(crate) fn wrong_state(tag: Tag<'static>) -> Result<(Response<'static>, flow::Transition)> { - Ok(( - Response::build() - .tag(tag) - .message("Command not authorized in this state") - .bad()?, - flow::Transition::None, - )) -} diff --git a/aero-proto/imap/command/authenticated.rs b/aero-proto/imap/command/authenticated.rs deleted file mode 100644 index 3d332ec..0000000 --- a/aero-proto/imap/command/authenticated.rs +++ /dev/null @@ -1,683 +0,0 @@ -use std::collections::BTreeMap; -use std::sync::Arc; -use thiserror::Error; - -use anyhow::{anyhow, bail, Result}; -use imap_codec::imap_types::command::{ - Command, CommandBody, ListReturnItem, SelectExamineModifier, -}; -use imap_codec::imap_types::core::{Atom, Literal, QuotedChar, Vec1}; -use imap_codec::imap_types::datetime::DateTime; -use imap_codec::imap_types::extensions::enable::CapabilityEnable; -use imap_codec::imap_types::flag::{Flag, FlagNameAttribute}; -use imap_codec::imap_types::mailbox::{ListMailbox, Mailbox as MailboxCodec}; -use imap_codec::imap_types::response::{Code, CodeOther, Data}; -use imap_codec::imap_types::status::{StatusDataItem, StatusDataItemName}; - -use crate::imap::capability::{ClientCapability, ServerCapability}; -use crate::imap::command::{anystate, MailboxName}; -use crate::imap::flow; -use crate::imap::mailbox_view::{MailboxView, UpdateParameters}; -use crate::imap::response::Response; -use crate::imap::Body; - -use crate::mail::uidindex::*; -use crate::user::User; -use crate::mail::IMF; -use crate::mail::namespace::MAILBOX_HIERARCHY_DELIMITER as MBX_HIER_DELIM_RAW; - -pub struct AuthenticatedContext<'a> { - pub req: &'a Command<'static>, - pub server_capabilities: &'a ServerCapability, - pub client_capabilities: &'a mut ClientCapability, - pub user: &'a Arc, -} - -pub async fn dispatch<'a>( - mut ctx: AuthenticatedContext<'a>, -) -> Result<(Response<'static>, flow::Transition)> { - match &ctx.req.body { - // Any state - CommandBody::Noop => anystate::noop_nothing(ctx.req.tag.clone()), - CommandBody::Capability => { - anystate::capability(ctx.req.tag.clone(), ctx.server_capabilities) - } - CommandBody::Logout => anystate::logout(), - - // Specific to this state (11 commands) - CommandBody::Create { mailbox } => ctx.create(mailbox).await, - CommandBody::Delete { mailbox } => ctx.delete(mailbox).await, - CommandBody::Rename { from, to } => ctx.rename(from, to).await, - CommandBody::Lsub { - reference, - mailbox_wildcard, - } => ctx.list(reference, mailbox_wildcard, &[], true).await, - CommandBody::List { - reference, - mailbox_wildcard, - r#return, - } => ctx.list(reference, mailbox_wildcard, r#return, false).await, - CommandBody::Status { - mailbox, - item_names, - } => ctx.status(mailbox, item_names).await, - CommandBody::Subscribe { mailbox } => ctx.subscribe(mailbox).await, - CommandBody::Unsubscribe { mailbox } => ctx.unsubscribe(mailbox).await, - CommandBody::Select { mailbox, modifiers } => ctx.select(mailbox, modifiers).await, - CommandBody::Examine { mailbox, modifiers } => ctx.examine(mailbox, modifiers).await, - CommandBody::Append { - mailbox, - flags, - date, 
- message, - } => ctx.append(mailbox, flags, date, message).await, - - // rfc5161 ENABLE - CommandBody::Enable { capabilities } => ctx.enable(capabilities), - - // Collect other commands - _ => anystate::wrong_state(ctx.req.tag.clone()), - } -} - -// --- PRIVATE --- -impl<'a> AuthenticatedContext<'a> { - async fn create( - self, - mailbox: &MailboxCodec<'a>, - ) -> Result<(Response<'static>, flow::Transition)> { - let name = match mailbox { - MailboxCodec::Inbox => { - return Ok(( - Response::build() - .to_req(self.req) - .message("Cannot create INBOX") - .bad()?, - flow::Transition::None, - )); - } - MailboxCodec::Other(aname) => std::str::from_utf8(aname.as_ref())?, - }; - - match self.user.create_mailbox(&name).await { - Ok(()) => Ok(( - Response::build() - .to_req(self.req) - .message("CREATE complete") - .ok()?, - flow::Transition::None, - )), - Err(e) => Ok(( - Response::build() - .to_req(self.req) - .message(&e.to_string()) - .no()?, - flow::Transition::None, - )), - } - } - - async fn delete( - self, - mailbox: &MailboxCodec<'a>, - ) -> Result<(Response<'static>, flow::Transition)> { - let name: &str = MailboxName(mailbox).try_into()?; - - match self.user.delete_mailbox(&name).await { - Ok(()) => Ok(( - Response::build() - .to_req(self.req) - .message("DELETE complete") - .ok()?, - flow::Transition::None, - )), - Err(e) => Ok(( - Response::build() - .to_req(self.req) - .message(e.to_string()) - .no()?, - flow::Transition::None, - )), - } - } - - async fn rename( - self, - from: &MailboxCodec<'a>, - to: &MailboxCodec<'a>, - ) -> Result<(Response<'static>, flow::Transition)> { - let name: &str = MailboxName(from).try_into()?; - let new_name: &str = MailboxName(to).try_into()?; - - match self.user.rename_mailbox(&name, &new_name).await { - Ok(()) => Ok(( - Response::build() - .to_req(self.req) - .message("RENAME complete") - .ok()?, - flow::Transition::None, - )), - Err(e) => Ok(( - Response::build() - .to_req(self.req) - .message(e.to_string()) - .no()?, - flow::Transition::None, - )), - } - } - - async fn list( - &mut self, - reference: &MailboxCodec<'a>, - mailbox_wildcard: &ListMailbox<'a>, - must_return: &[ListReturnItem], - is_lsub: bool, - ) -> Result<(Response<'static>, flow::Transition)> { - let mbx_hier_delim: QuotedChar = QuotedChar::unvalidated(MBX_HIER_DELIM_RAW); - - let reference: &str = MailboxName(reference).try_into()?; - if !reference.is_empty() { - return Ok(( - Response::build() - .to_req(self.req) - .message("References not supported") - .bad()?, - flow::Transition::None, - )); - } - - let status_item_names = must_return.iter().find_map(|m| match m { - ListReturnItem::Status(v) => Some(v), - _ => None, - }); - - // @FIXME would probably need a rewrite to better use the imap_codec library - let wildcard = match mailbox_wildcard { - ListMailbox::Token(v) => std::str::from_utf8(v.as_ref())?, - ListMailbox::String(v) => std::str::from_utf8(v.as_ref())?, - }; - if wildcard.is_empty() { - if is_lsub { - return Ok(( - Response::build() - .to_req(self.req) - .message("LSUB complete") - .data(Data::Lsub { - items: vec![], - delimiter: Some(mbx_hier_delim), - mailbox: "".try_into().unwrap(), - }) - .ok()?, - flow::Transition::None, - )); - } else { - return Ok(( - Response::build() - .to_req(self.req) - .message("LIST complete") - .data(Data::List { - items: vec![], - delimiter: Some(mbx_hier_delim), - mailbox: "".try_into().unwrap(), - }) - .ok()?, - flow::Transition::None, - )); - } - } - - let mailboxes = self.user.list_mailboxes().await?; - let mut vmailboxes = 
BTreeMap::new(); - for mb in mailboxes.iter() { - for (i, _) in mb.match_indices(MBX_HIER_DELIM_RAW) { - if i > 0 { - let smb = &mb[..i]; - vmailboxes.entry(smb).or_insert(false); - } - } - vmailboxes.insert(mb, true); - } - - let mut ret = vec![]; - for (mb, is_real) in vmailboxes.iter() { - if matches_wildcard(&wildcard, mb) { - let mailbox: MailboxCodec = mb - .to_string() - .try_into() - .map_err(|_| anyhow!("invalid mailbox name"))?; - let mut items = vec![FlagNameAttribute::from(Atom::unvalidated("Subscribed"))]; - - // Decoration - if !*is_real { - items.push(FlagNameAttribute::Noselect); - } else { - match *mb { - "Drafts" => items.push(Atom::unvalidated("Drafts").into()), - "Archive" => items.push(Atom::unvalidated("Archive").into()), - "Sent" => items.push(Atom::unvalidated("Sent").into()), - "Trash" => items.push(Atom::unvalidated("Trash").into()), - _ => (), - }; - } - - // Result type - if is_lsub { - ret.push(Data::Lsub { - items, - delimiter: Some(mbx_hier_delim), - mailbox: mailbox.clone(), - }); - } else { - ret.push(Data::List { - items, - delimiter: Some(mbx_hier_delim), - mailbox: mailbox.clone(), - }); - } - - // Also collect status - if let Some(sin) = status_item_names { - let ret_attrs = match self.status_items(mb, sin).await { - Ok(a) => a, - Err(e) => { - tracing::error!(err=?e, mailbox=%mb, "Unable to fetch status for mailbox"); - continue; - } - }; - - let data = Data::Status { - mailbox, - items: ret_attrs.into(), - }; - - ret.push(data); - } - } - } - - let msg = if is_lsub { - "LSUB completed" - } else { - "LIST completed" - }; - Ok(( - Response::build() - .to_req(self.req) - .message(msg) - .many_data(ret) - .ok()?, - flow::Transition::None, - )) - } - - async fn status( - &mut self, - mailbox: &MailboxCodec<'static>, - attributes: &[StatusDataItemName], - ) -> Result<(Response<'static>, flow::Transition)> { - let name: &str = MailboxName(mailbox).try_into()?; - - let ret_attrs = match self.status_items(name, attributes).await { - Ok(v) => v, - Err(e) => match e.downcast_ref::() { - Some(CommandError::MailboxNotFound) => { - return Ok(( - Response::build() - .to_req(self.req) - .message("Mailbox does not exist") - .no()?, - flow::Transition::None, - )) - } - _ => return Err(e.into()), - }, - }; - - let data = Data::Status { - mailbox: mailbox.clone(), - items: ret_attrs.into(), - }; - - Ok(( - Response::build() - .to_req(self.req) - .message("STATUS completed") - .data(data) - .ok()?, - flow::Transition::None, - )) - } - - async fn status_items( - &mut self, - name: &str, - attributes: &[StatusDataItemName], - ) -> Result> { - let mb_opt = self.user.open_mailbox(name).await?; - let mb = match mb_opt { - Some(mb) => mb, - None => return Err(CommandError::MailboxNotFound.into()), - }; - - let view = MailboxView::new(mb, self.client_capabilities.condstore.is_enabled()).await; - - let mut ret_attrs = vec![]; - for attr in attributes.iter() { - ret_attrs.push(match attr { - StatusDataItemName::Messages => StatusDataItem::Messages(view.exists()?), - StatusDataItemName::Unseen => StatusDataItem::Unseen(view.unseen_count() as u32), - StatusDataItemName::Recent => StatusDataItem::Recent(view.recent()?), - StatusDataItemName::UidNext => StatusDataItem::UidNext(view.uidnext()), - StatusDataItemName::UidValidity => { - StatusDataItem::UidValidity(view.uidvalidity()) - } - StatusDataItemName::Deleted => { - bail!("quota not implemented, can't return deleted elements waiting for EXPUNGE"); - }, - StatusDataItemName::DeletedStorage => { - bail!("quota not implemented, 
can't return freed storage after EXPUNGE will be run"); - }, - StatusDataItemName::HighestModSeq => { - self.client_capabilities.enable_condstore(); - StatusDataItem::HighestModSeq(view.highestmodseq().get()) - }, - }); - } - Ok(ret_attrs) - } - - async fn subscribe( - self, - mailbox: &MailboxCodec<'a>, - ) -> Result<(Response<'static>, flow::Transition)> { - let name: &str = MailboxName(mailbox).try_into()?; - - if self.user.has_mailbox(&name).await? { - Ok(( - Response::build() - .to_req(self.req) - .message("SUBSCRIBE complete") - .ok()?, - flow::Transition::None, - )) - } else { - Ok(( - Response::build() - .to_req(self.req) - .message(format!("Mailbox {} does not exist", name)) - .bad()?, - flow::Transition::None, - )) - } - } - - async fn unsubscribe( - self, - mailbox: &MailboxCodec<'a>, - ) -> Result<(Response<'static>, flow::Transition)> { - let name: &str = MailboxName(mailbox).try_into()?; - - if self.user.has_mailbox(&name).await? { - Ok(( - Response::build() - .to_req(self.req) - .message(format!( - "Cannot unsubscribe from mailbox {}: not supported by Aerogramme", - name - )) - .bad()?, - flow::Transition::None, - )) - } else { - Ok(( - Response::build() - .to_req(self.req) - .message(format!("Mailbox {} does not exist", name)) - .no()?, - flow::Transition::None, - )) - } - } - - /* - * TRACE BEGIN --- - - - Example: C: A142 SELECT INBOX - S: * 172 EXISTS - S: * 1 RECENT - S: * OK [UNSEEN 12] Message 12 is first unseen - S: * OK [UIDVALIDITY 3857529045] UIDs valid - S: * OK [UIDNEXT 4392] Predicted next UID - S: * FLAGS (\Answered \Flagged \Deleted \Seen \Draft) - S: * OK [PERMANENTFLAGS (\Deleted \Seen \*)] Limited - S: A142 OK [READ-WRITE] SELECT completed - - --- a mailbox with no unseen message -> no unseen entry - NOTES: - RFC3501 (imap4rev1) says if there is no OK [UNSEEN] response, client must make no assumption, - it is therefore correct to not return it even if there are unseen messages - RFC9051 (imap4rev2) says that OK [UNSEEN] responses are deprecated after SELECT and EXAMINE - For Aerogramme, we just don't send the OK [UNSEEN], it's correct to do in both specifications. - - - 20 select "INBOX.achats" - * FLAGS (\Answered \Flagged \Deleted \Seen \Draft $Forwarded JUNK $label1) - * OK [PERMANENTFLAGS (\Answered \Flagged \Deleted \Seen \Draft $Forwarded JUNK $label1 \*)] Flags permitted. - * 88 EXISTS - * 0 RECENT - * OK [UIDVALIDITY 1347986788] UIDs valid - * OK [UIDNEXT 91] Predicted next UID - * OK [HIGHESTMODSEQ 72] Highest - 20 OK [READ-WRITE] Select completed (0.001 + 0.000 secs). 
- - * TRACE END --- - */ - async fn select( - self, - mailbox: &MailboxCodec<'a>, - modifiers: &[SelectExamineModifier], - ) -> Result<(Response<'static>, flow::Transition)> { - self.client_capabilities.select_enable(modifiers); - - let name: &str = MailboxName(mailbox).try_into()?; - - let mb_opt = self.user.open_mailbox(&name).await?; - let mb = match mb_opt { - Some(mb) => mb, - None => { - return Ok(( - Response::build() - .to_req(self.req) - .message("Mailbox does not exist") - .no()?, - flow::Transition::None, - )) - } - }; - tracing::info!(username=%self.user.username, mailbox=%name, "mailbox.selected"); - - let mb = MailboxView::new(mb, self.client_capabilities.condstore.is_enabled()).await; - let data = mb.summary()?; - - Ok(( - Response::build() - .message("Select completed") - .to_req(self.req) - .code(Code::ReadWrite) - .set_body(data) - .ok()?, - flow::Transition::Select(mb, flow::MailboxPerm::ReadWrite), - )) - } - - async fn examine( - self, - mailbox: &MailboxCodec<'a>, - modifiers: &[SelectExamineModifier], - ) -> Result<(Response<'static>, flow::Transition)> { - self.client_capabilities.select_enable(modifiers); - - let name: &str = MailboxName(mailbox).try_into()?; - - let mb_opt = self.user.open_mailbox(&name).await?; - let mb = match mb_opt { - Some(mb) => mb, - None => { - return Ok(( - Response::build() - .to_req(self.req) - .message("Mailbox does not exist") - .no()?, - flow::Transition::None, - )) - } - }; - tracing::info!(username=%self.user.username, mailbox=%name, "mailbox.examined"); - - let mb = MailboxView::new(mb, self.client_capabilities.condstore.is_enabled()).await; - let data = mb.summary()?; - - Ok(( - Response::build() - .to_req(self.req) - .message("Examine completed") - .code(Code::ReadOnly) - .set_body(data) - .ok()?, - flow::Transition::Select(mb, flow::MailboxPerm::ReadOnly), - )) - } - - //@FIXME we should write a specific version for the "selected" state - //that returns some unsollicited responses - async fn append( - self, - mailbox: &MailboxCodec<'a>, - flags: &[Flag<'a>], - date: &Option, - message: &Literal<'a>, - ) -> Result<(Response<'static>, flow::Transition)> { - let append_tag = self.req.tag.clone(); - match self.append_internal(mailbox, flags, date, message).await { - Ok((_mb_view, uidvalidity, uid, _modseq)) => Ok(( - Response::build() - .tag(append_tag) - .message("APPEND completed") - .code(Code::Other(CodeOther::unvalidated( - format!("APPENDUID {} {}", uidvalidity, uid).into_bytes(), - ))) - .ok()?, - flow::Transition::None, - )), - Err(e) => Ok(( - Response::build() - .tag(append_tag) - .message(e.to_string()) - .no()?, - flow::Transition::None, - )), - } - } - - fn enable( - self, - cap_enable: &Vec1>, - ) -> Result<(Response<'static>, flow::Transition)> { - let mut response_builder = Response::build().to_req(self.req); - let capabilities = self.client_capabilities.try_enable(cap_enable.as_ref()); - if capabilities.len() > 0 { - response_builder = response_builder.data(Data::Enabled { capabilities }); - } - Ok(( - response_builder.message("ENABLE completed").ok()?, - flow::Transition::None, - )) - } - - //@FIXME should be refactored and integrated to the mailbox view - pub(crate) async fn append_internal( - self, - mailbox: &MailboxCodec<'a>, - flags: &[Flag<'a>], - date: &Option, - message: &Literal<'a>, - ) -> Result<(MailboxView, ImapUidvalidity, ImapUid, ModSeq)> { - let name: &str = MailboxName(mailbox).try_into()?; - - let mb_opt = self.user.open_mailbox(&name).await?; - let mb = match mb_opt { - Some(mb) => mb, - None 
=> bail!("Mailbox does not exist"), - }; - let mut view = MailboxView::new(mb, self.client_capabilities.condstore.is_enabled()).await; - - if date.is_some() { - tracing::warn!("Cannot set date when appending message"); - } - - let msg = - IMF::try_from(message.data()).map_err(|_| anyhow!("Could not parse e-mail message"))?; - let flags = flags.iter().map(|x| x.to_string()).collect::>(); - // TODO: filter allowed flags? ping @Quentin - - let (uidvalidity, uid, modseq) = - view.internal.mailbox.append(msg, None, &flags[..]).await?; - //let unsollicited = view.update(UpdateParameters::default()).await?; - - Ok((view, uidvalidity, uid, modseq)) - } -} - -fn matches_wildcard(wildcard: &str, name: &str) -> bool { - let wildcard = wildcard.chars().collect::>(); - let name = name.chars().collect::>(); - - let mut matches = vec![vec![false; wildcard.len() + 1]; name.len() + 1]; - - for i in 0..=name.len() { - for j in 0..=wildcard.len() { - matches[i][j] = (i == 0 && j == 0) - || (j > 0 - && matches[i][j - 1] - && (wildcard[j - 1] == '%' || wildcard[j - 1] == '*')) - || (i > 0 - && j > 0 - && matches[i - 1][j - 1] - && wildcard[j - 1] == name[i - 1] - && wildcard[j - 1] != '%' - && wildcard[j - 1] != '*') - || (i > 0 - && j > 0 - && matches[i - 1][j] - && (wildcard[j - 1] == '*' - || (wildcard[j - 1] == '%' && name[i - 1] != MBX_HIER_DELIM_RAW))); - } - } - - matches[name.len()][wildcard.len()] -} - -#[derive(Error, Debug)] -pub enum CommandError { - #[error("Mailbox not found")] - MailboxNotFound, -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_wildcard_matches() { - assert!(matches_wildcard("INBOX", "INBOX")); - assert!(matches_wildcard("*", "INBOX")); - assert!(matches_wildcard("%", "INBOX")); - assert!(!matches_wildcard("%", "Test.Azerty")); - assert!(!matches_wildcard("INBOX.*", "INBOX")); - assert!(matches_wildcard("Sent.*", "Sent.A")); - assert!(matches_wildcard("Sent.*", "Sent.A.B")); - assert!(!matches_wildcard("Sent.%", "Sent.A.B")); - } -} diff --git a/aero-proto/imap/command/mod.rs b/aero-proto/imap/command/mod.rs deleted file mode 100644 index f201eb6..0000000 --- a/aero-proto/imap/command/mod.rs +++ /dev/null @@ -1,20 +0,0 @@ -pub mod anonymous; -pub mod anystate; -pub mod authenticated; -pub mod selected; - -use crate::mail::namespace::INBOX; -use imap_codec::imap_types::mailbox::Mailbox as MailboxCodec; - -/// Convert an IMAP mailbox name/identifier representation -/// to an utf-8 string that is used internally in Aerogramme -struct MailboxName<'a>(&'a MailboxCodec<'a>); -impl<'a> TryInto<&'a str> for MailboxName<'a> { - type Error = std::str::Utf8Error; - fn try_into(self) -> Result<&'a str, Self::Error> { - match self.0 { - MailboxCodec::Inbox => Ok(INBOX), - MailboxCodec::Other(aname) => Ok(std::str::from_utf8(aname.as_ref())?), - } - } -} diff --git a/aero-proto/imap/command/selected.rs b/aero-proto/imap/command/selected.rs deleted file mode 100644 index eedfbd6..0000000 --- a/aero-proto/imap/command/selected.rs +++ /dev/null @@ -1,424 +0,0 @@ -use std::num::NonZeroU64; -use std::sync::Arc; - -use anyhow::Result; -use imap_codec::imap_types::command::{Command, CommandBody, FetchModifier, StoreModifier}; -use imap_codec::imap_types::core::{Charset, Vec1}; -use imap_codec::imap_types::fetch::MacroOrMessageDataItemNames; -use imap_codec::imap_types::flag::{Flag, StoreResponse, StoreType}; -use imap_codec::imap_types::mailbox::Mailbox as MailboxCodec; -use imap_codec::imap_types::response::{Code, CodeOther}; -use imap_codec::imap_types::search::SearchKey; -use 
imap_codec::imap_types::sequence::SequenceSet; - -use crate::imap::attributes::AttributesProxy; -use crate::imap::capability::{ClientCapability, ServerCapability}; -use crate::imap::command::{anystate, authenticated, MailboxName}; -use crate::imap::flow; -use crate::imap::mailbox_view::{MailboxView, UpdateParameters}; -use crate::imap::response::Response; -use crate::user::User; - -pub struct SelectedContext<'a> { - pub req: &'a Command<'static>, - pub user: &'a Arc, - pub mailbox: &'a mut MailboxView, - pub server_capabilities: &'a ServerCapability, - pub client_capabilities: &'a mut ClientCapability, - pub perm: &'a flow::MailboxPerm, -} - -pub async fn dispatch<'a>( - ctx: SelectedContext<'a>, -) -> Result<(Response<'static>, flow::Transition)> { - match &ctx.req.body { - // Any State - // noop is specific to this state - CommandBody::Capability => { - anystate::capability(ctx.req.tag.clone(), ctx.server_capabilities) - } - CommandBody::Logout => anystate::logout(), - - // Specific to this state (7 commands + NOOP) - CommandBody::Close => match ctx.perm { - flow::MailboxPerm::ReadWrite => ctx.close().await, - flow::MailboxPerm::ReadOnly => ctx.examine_close().await, - }, - CommandBody::Noop | CommandBody::Check => ctx.noop().await, - CommandBody::Fetch { - sequence_set, - macro_or_item_names, - modifiers, - uid, - } => { - ctx.fetch(sequence_set, macro_or_item_names, modifiers, uid) - .await - } - //@FIXME SearchKey::And is a legacy hack, should be refactored - CommandBody::Search { - charset, - criteria, - uid, - } => { - ctx.search(charset, &SearchKey::And(criteria.clone()), uid) - .await - } - CommandBody::Expunge { - // UIDPLUS (rfc4315) - uid_sequence_set, - } => ctx.expunge(uid_sequence_set).await, - CommandBody::Store { - sequence_set, - kind, - response, - flags, - modifiers, - uid, - } => { - ctx.store(sequence_set, kind, response, flags, modifiers, uid) - .await - } - CommandBody::Copy { - sequence_set, - mailbox, - uid, - } => ctx.copy(sequence_set, mailbox, uid).await, - CommandBody::Move { - sequence_set, - mailbox, - uid, - } => ctx.r#move(sequence_set, mailbox, uid).await, - - // UNSELECT extension (rfc3691) - CommandBody::Unselect => ctx.unselect().await, - - // In selected mode, we fallback to authenticated when needed - _ => { - authenticated::dispatch(authenticated::AuthenticatedContext { - req: ctx.req, - server_capabilities: ctx.server_capabilities, - client_capabilities: ctx.client_capabilities, - user: ctx.user, - }) - .await - } - } -} - -// --- PRIVATE --- - -impl<'a> SelectedContext<'a> { - async fn close(self) -> Result<(Response<'static>, flow::Transition)> { - // We expunge messages, - // but we don't send the untagged EXPUNGE responses - let tag = self.req.tag.clone(); - self.expunge(&None).await?; - Ok(( - Response::build().tag(tag).message("CLOSE completed").ok()?, - flow::Transition::Unselect, - )) - } - - /// CLOSE in examined state is not the same as in selected state - /// (in selected state it also does an EXPUNGE, here it doesn't) - async fn examine_close(self) -> Result<(Response<'static>, flow::Transition)> { - Ok(( - Response::build() - .to_req(self.req) - .message("CLOSE completed") - .ok()?, - flow::Transition::Unselect, - )) - } - - async fn unselect(self) -> Result<(Response<'static>, flow::Transition)> { - Ok(( - Response::build() - .to_req(self.req) - .message("UNSELECT completed") - .ok()?, - flow::Transition::Unselect, - )) - } - - pub async fn fetch( - self, - sequence_set: &SequenceSet, - attributes: &'a 
MacroOrMessageDataItemNames<'static>, - modifiers: &[FetchModifier], - uid: &bool, - ) -> Result<(Response<'static>, flow::Transition)> { - let ap = AttributesProxy::new(attributes, modifiers, *uid); - let mut changed_since: Option = None; - modifiers.iter().for_each(|m| match m { - FetchModifier::ChangedSince(val) => { - changed_since = Some(*val); - } - }); - - match self - .mailbox - .fetch(sequence_set, &ap, changed_since, uid) - .await - { - Ok(resp) => { - // Capabilities enabling logic only on successful command - // (according to my understanding of the spec) - self.client_capabilities.attributes_enable(&ap); - self.client_capabilities.fetch_modifiers_enable(modifiers); - - // Response to the client - Ok(( - Response::build() - .to_req(self.req) - .message("FETCH completed") - .set_body(resp) - .ok()?, - flow::Transition::None, - )) - } - Err(e) => Ok(( - Response::build() - .to_req(self.req) - .message(e.to_string()) - .no()?, - flow::Transition::None, - )), - } - } - - pub async fn search( - self, - charset: &Option>, - criteria: &SearchKey<'a>, - uid: &bool, - ) -> Result<(Response<'static>, flow::Transition)> { - let (found, enable_condstore) = self.mailbox.search(charset, criteria, *uid).await?; - if enable_condstore { - self.client_capabilities.enable_condstore(); - } - Ok(( - Response::build() - .to_req(self.req) - .set_body(found) - .message("SEARCH completed") - .ok()?, - flow::Transition::None, - )) - } - - pub async fn noop(self) -> Result<(Response<'static>, flow::Transition)> { - self.mailbox.internal.mailbox.force_sync().await?; - - let updates = self.mailbox.update(UpdateParameters::default()).await?; - Ok(( - Response::build() - .to_req(self.req) - .message("NOOP completed.") - .set_body(updates) - .ok()?, - flow::Transition::None, - )) - } - - async fn expunge( - self, - uid_sequence_set: &Option, - ) -> Result<(Response<'static>, flow::Transition)> { - if let Some(failed) = self.fail_read_only() { - return Ok((failed, flow::Transition::None)); - } - - let tag = self.req.tag.clone(); - let data = self.mailbox.expunge(uid_sequence_set).await?; - - Ok(( - Response::build() - .tag(tag) - .message("EXPUNGE completed") - .set_body(data) - .ok()?, - flow::Transition::None, - )) - } - - async fn store( - self, - sequence_set: &SequenceSet, - kind: &StoreType, - response: &StoreResponse, - flags: &[Flag<'a>], - modifiers: &[StoreModifier], - uid: &bool, - ) -> Result<(Response<'static>, flow::Transition)> { - if let Some(failed) = self.fail_read_only() { - return Ok((failed, flow::Transition::None)); - } - - let mut unchanged_since: Option = None; - modifiers.iter().for_each(|m| match m { - StoreModifier::UnchangedSince(val) => { - unchanged_since = Some(*val); - } - }); - - let (data, modified) = self - .mailbox - .store(sequence_set, kind, response, flags, unchanged_since, uid) - .await?; - - let mut ok_resp = Response::build() - .to_req(self.req) - .message("STORE completed") - .set_body(data); - - match modified[..] { - [] => (), - [_head, ..] 
=> { - let modified_str = format!( - "MODIFIED {}", - modified - .into_iter() - .map(|x| x.to_string()) - .collect::>() - .join(",") - ); - ok_resp = ok_resp.code(Code::Other(CodeOther::unvalidated( - modified_str.into_bytes(), - ))); - } - }; - - self.client_capabilities.store_modifiers_enable(modifiers); - - Ok((ok_resp.ok()?, flow::Transition::None)) - } - - async fn copy( - self, - sequence_set: &SequenceSet, - mailbox: &MailboxCodec<'a>, - uid: &bool, - ) -> Result<(Response<'static>, flow::Transition)> { - //@FIXME Could copy be valid in EXAMINE mode? - if let Some(failed) = self.fail_read_only() { - return Ok((failed, flow::Transition::None)); - } - - let name: &str = MailboxName(mailbox).try_into()?; - - let mb_opt = self.user.open_mailbox(&name).await?; - let mb = match mb_opt { - Some(mb) => mb, - None => { - return Ok(( - Response::build() - .to_req(self.req) - .message("Destination mailbox does not exist") - .code(Code::TryCreate) - .no()?, - flow::Transition::None, - )) - } - }; - - let (uidval, uid_map) = self.mailbox.copy(sequence_set, mb, uid).await?; - - let copyuid_str = format!( - "{} {} {}", - uidval, - uid_map - .iter() - .map(|(sid, _)| format!("{}", sid)) - .collect::>() - .join(","), - uid_map - .iter() - .map(|(_, tuid)| format!("{}", tuid)) - .collect::>() - .join(",") - ); - - Ok(( - Response::build() - .to_req(self.req) - .message("COPY completed") - .code(Code::Other(CodeOther::unvalidated( - format!("COPYUID {}", copyuid_str).into_bytes(), - ))) - .ok()?, - flow::Transition::None, - )) - } - - async fn r#move( - self, - sequence_set: &SequenceSet, - mailbox: &MailboxCodec<'a>, - uid: &bool, - ) -> Result<(Response<'static>, flow::Transition)> { - if let Some(failed) = self.fail_read_only() { - return Ok((failed, flow::Transition::None)); - } - - let name: &str = MailboxName(mailbox).try_into()?; - - let mb_opt = self.user.open_mailbox(&name).await?; - let mb = match mb_opt { - Some(mb) => mb, - None => { - return Ok(( - Response::build() - .to_req(self.req) - .message("Destination mailbox does not exist") - .code(Code::TryCreate) - .no()?, - flow::Transition::None, - )) - } - }; - - let (uidval, uid_map, data) = self.mailbox.r#move(sequence_set, mb, uid).await?; - - // compute code - let copyuid_str = format!( - "{} {} {}", - uidval, - uid_map - .iter() - .map(|(sid, _)| format!("{}", sid)) - .collect::>() - .join(","), - uid_map - .iter() - .map(|(_, tuid)| format!("{}", tuid)) - .collect::>() - .join(",") - ); - - Ok(( - Response::build() - .to_req(self.req) - .message("COPY completed") - .code(Code::Other(CodeOther::unvalidated( - format!("COPYUID {}", copyuid_str).into_bytes(), - ))) - .set_body(data) - .ok()?, - flow::Transition::None, - )) - } - - fn fail_read_only(&self) -> Option> { - match self.perm { - flow::MailboxPerm::ReadWrite => None, - flow::MailboxPerm::ReadOnly => Some( - Response::build() - .to_req(self.req) - .message("Write command are forbidden while exmining mailbox") - .no() - .unwrap(), - ), - } - } -} diff --git a/aero-proto/imap/flags.rs b/aero-proto/imap/flags.rs deleted file mode 100644 index 0f6ec64..0000000 --- a/aero-proto/imap/flags.rs +++ /dev/null @@ -1,30 +0,0 @@ -use imap_codec::imap_types::core::Atom; -use imap_codec::imap_types::flag::{Flag, FlagFetch}; - -pub fn from_str(f: &str) -> Option> { - match f.chars().next() { - Some('\\') => match f { - "\\Seen" => Some(FlagFetch::Flag(Flag::Seen)), - "\\Answered" => Some(FlagFetch::Flag(Flag::Answered)), - "\\Flagged" => Some(FlagFetch::Flag(Flag::Flagged)), - "\\Deleted" => 
Some(FlagFetch::Flag(Flag::Deleted)), - "\\Draft" => Some(FlagFetch::Flag(Flag::Draft)), - "\\Recent" => Some(FlagFetch::Recent), - _ => match Atom::try_from(f.strip_prefix('\\').unwrap().to_string()) { - Err(_) => { - tracing::error!(flag=%f, "Unable to encode flag as IMAP atom"); - None - } - Ok(a) => Some(FlagFetch::Flag(Flag::system(a))), - }, - }, - Some(_) => match Atom::try_from(f.to_string()) { - Err(_) => { - tracing::error!(flag=%f, "Unable to encode flag as IMAP atom"); - None - } - Ok(a) => Some(FlagFetch::Flag(Flag::keyword(a))), - }, - None => None, - } -} diff --git a/aero-proto/imap/flow.rs b/aero-proto/imap/flow.rs deleted file mode 100644 index 86eb12e..0000000 --- a/aero-proto/imap/flow.rs +++ /dev/null @@ -1,114 +0,0 @@ -use std::error::Error as StdError; -use std::fmt; -use std::sync::Arc; - -use imap_codec::imap_types::core::Tag; -use tokio::sync::Notify; - -use crate::imap::mailbox_view::MailboxView; -use crate::user::User; - -#[derive(Debug)] -pub enum Error { - ForbiddenTransition, -} -impl fmt::Display for Error { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "Forbidden Transition") - } -} -impl StdError for Error {} - -pub enum State { - NotAuthenticated, - Authenticated(Arc), - Selected(Arc, MailboxView, MailboxPerm), - Idle( - Arc, - MailboxView, - MailboxPerm, - Tag<'static>, - Arc, - ), - Logout, -} -impl State { - pub fn notify(&self) -> Option> { - match self { - Self::Idle(_, _, _, _, anotif) => Some(anotif.clone()), - _ => None, - } - } -} -impl fmt::Display for State { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - use State::*; - match self { - NotAuthenticated => write!(f, "NotAuthenticated"), - Authenticated(..) => write!(f, "Authenticated"), - Selected(..) => write!(f, "Selected"), - Idle(..) => write!(f, "Idle"), - Logout => write!(f, "Logout"), - } - } -} - -#[derive(Clone)] -pub enum MailboxPerm { - ReadOnly, - ReadWrite, -} - -pub enum Transition { - None, - Authenticate(Arc), - Select(MailboxView, MailboxPerm), - Idle(Tag<'static>, Notify), - UnIdle, - Unselect, - Logout, -} -impl fmt::Display for Transition { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - use Transition::*; - match self { - None => write!(f, "None"), - Authenticate(..) => write!(f, "Authenticated"), - Select(..) => write!(f, "Selected"), - Idle(..) => write!(f, "Idle"), - UnIdle => write!(f, "UnIdle"), - Unselect => write!(f, "Unselect"), - Logout => write!(f, "Logout"), - } - } -} - -// See RFC3501 section 3. 
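The State and Transition enums above encode the IMAP connection state machine of RFC 3501, section 3; the apply() method that follows is its transition table. As a rough, self-contained sketch of the same rules (simplified stand-in types, not the project's State/Transition API, which also carries users, mailbox views and IDLE tags):

#[derive(Debug, PartialEq)]
enum MiniState { NotAuthenticated, Authenticated, Selected, Idle, Logout }

enum MiniTransition { None, Authenticate, Select, Unselect, Idle, UnIdle, Logout }

// Same shape as the transition table below: consume the old state,
// return the new one, refuse anything else.
fn apply(state: MiniState, tr: MiniTransition) -> Result<MiniState, &'static str> {
    use MiniState as S;
    use MiniTransition as T;
    Ok(match (state, tr) {
        (s, T::None) => s,
        (S::NotAuthenticated, T::Authenticate) => S::Authenticated,
        (S::Authenticated | S::Selected, T::Select) => S::Selected,
        (S::Selected, T::Unselect) => S::Authenticated,
        (S::Selected, T::Idle) => S::Idle,
        (S::Idle, T::UnIdle) => S::Selected,
        (_, T::Logout) => S::Logout,
        _ => return Err("forbidden transition"),
    })
}

fn main() {
    // A typical session: login, select a mailbox, idle, resume, log out.
    let mut state = MiniState::NotAuthenticated;
    for tr in [
        MiniTransition::Authenticate,
        MiniTransition::Select,
        MiniTransition::Idle,
        MiniTransition::UnIdle,
        MiniTransition::Logout,
    ] {
        state = apply(state, tr).expect("legal transition");
    }
    assert_eq!(state, MiniState::Logout);
}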
-// https://datatracker.ietf.org/doc/html/rfc3501#page-13 -impl State { - pub fn apply(&mut self, tr: Transition) -> Result<(), Error> { - tracing::debug!(state=%self, transition=%tr, "try change state"); - - let new_state = match (std::mem::replace(self, State::Logout), tr) { - (s, Transition::None) => s, - (State::NotAuthenticated, Transition::Authenticate(u)) => State::Authenticated(u), - (State::Authenticated(u) | State::Selected(u, _, _), Transition::Select(m, p)) => { - State::Selected(u, m, p) - } - (State::Selected(u, _, _), Transition::Unselect) => State::Authenticated(u.clone()), - (State::Selected(u, m, p), Transition::Idle(t, s)) => { - State::Idle(u, m, p, t, Arc::new(s)) - } - (State::Idle(u, m, p, _, _), Transition::UnIdle) => State::Selected(u, m, p), - (_, Transition::Logout) => State::Logout, - (s, t) => { - tracing::error!(state=%s, transition=%t, "forbidden transition"); - return Err(Error::ForbiddenTransition); - } - }; - *self = new_state; - tracing::debug!(state=%self, "transition succeeded"); - - Ok(()) - } -} diff --git a/aero-proto/imap/imf_view.rs b/aero-proto/imap/imf_view.rs deleted file mode 100644 index a4ca2e8..0000000 --- a/aero-proto/imap/imf_view.rs +++ /dev/null @@ -1,109 +0,0 @@ -use anyhow::{anyhow, Result}; -use chrono::naive::NaiveDate; - -use imap_codec::imap_types::core::{IString, NString}; -use imap_codec::imap_types::envelope::{Address, Envelope}; - -use eml_codec::imf; - -pub struct ImfView<'a>(pub &'a imf::Imf<'a>); - -impl<'a> ImfView<'a> { - pub fn naive_date(&self) -> Result { - Ok(self.0.date.ok_or(anyhow!("date is not set"))?.date_naive()) - } - - /// Envelope rules are defined in RFC 3501, section 7.4.2 - /// https://datatracker.ietf.org/doc/html/rfc3501#section-7.4.2 - /// - /// Some important notes: - /// - /// If the Sender or Reply-To lines are absent in the [RFC-2822] - /// header, or are present but empty, the server sets the - /// corresponding member of the envelope to be the same value as - /// the from member (the client is not expected to know to do - /// this). Note: [RFC-2822] requires that all messages have a valid - /// From header. Therefore, the from, sender, and reply-to - /// members in the envelope can not be NIL. - /// - /// If the Date, Subject, In-Reply-To, and Message-ID header lines - /// are absent in the [RFC-2822] header, the corresponding member - /// of the envelope is NIL; if these header lines are present but - /// empty the corresponding member of the envelope is the empty - /// string. - - //@FIXME return an error if the envelope is invalid instead of panicking - //@FIXME some fields must be defaulted if there are not set. 
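The doc comment above restates the ENVELOPE defaulting rules of RFC 3501, section 7.4.2: an absent or empty Sender or Reply-To falls back to From, while an absent Date, Subject, In-Reply-To or Message-ID becomes NIL. A minimal standalone illustration of that rule, using plain strings rather than the imap_codec NString/Address types:

fn main() {
    // Simplified stand-ins for the parsed headers: an absent or empty
    // Sender/Reply-To falls back to From; NIL-able headers just stay None.
    let from = vec!["alice@example.com".to_string()];
    let sender: Vec<String> = vec![];   // no Sender header in the message
    let reply_to: Vec<String> = vec![]; // no Reply-To header either
    let subject: Option<String> = None; // absent Subject => NIL

    let env_sender = if sender.is_empty() { from.clone() } else { sender };
    let env_reply_to = if reply_to.is_empty() { from.clone() } else { reply_to };

    assert_eq!(env_sender, from);
    assert_eq!(env_reply_to, from);
    assert!(subject.is_none()); // rendered as NIL in the ENVELOPE response
}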
- pub fn message_envelope(&self) -> Envelope<'static> { - let msg = self.0; - let from = msg.from.iter().map(convert_mbx).collect::>(); - - Envelope { - date: NString( - msg.date - .as_ref() - .map(|d| IString::try_from(d.to_rfc3339()).unwrap()), - ), - subject: NString( - msg.subject - .as_ref() - .map(|d| IString::try_from(d.to_string()).unwrap()), - ), - sender: msg - .sender - .as_ref() - .map(|v| vec![convert_mbx(v)]) - .unwrap_or(from.clone()), - reply_to: if msg.reply_to.is_empty() { - from.clone() - } else { - convert_addresses(&msg.reply_to) - }, - from, - to: convert_addresses(&msg.to), - cc: convert_addresses(&msg.cc), - bcc: convert_addresses(&msg.bcc), - in_reply_to: NString( - msg.in_reply_to - .iter() - .next() - .map(|d| IString::try_from(d.to_string()).unwrap()), - ), - message_id: NString( - msg.msg_id - .as_ref() - .map(|d| IString::try_from(d.to_string()).unwrap()), - ), - } - } -} - -pub fn convert_addresses(addrlist: &Vec) -> Vec> { - let mut acc = vec![]; - for item in addrlist { - match item { - imf::address::AddressRef::Single(a) => acc.push(convert_mbx(a)), - imf::address::AddressRef::Many(l) => acc.extend(l.participants.iter().map(convert_mbx)), - } - } - return acc; -} - -pub fn convert_mbx(addr: &imf::mailbox::MailboxRef) -> Address<'static> { - Address { - name: NString( - addr.name - .as_ref() - .map(|x| IString::try_from(x.to_string()).unwrap()), - ), - // SMTP at-domain-list (source route) seems obsolete since at least 1991 - // https://www.mhonarc.org/archive/html/ietf-822/1991-06/msg00060.html - adl: NString(None), - mailbox: NString(Some( - IString::try_from(addr.addrspec.local_part.to_string()).unwrap(), - )), - host: NString(Some( - IString::try_from(addr.addrspec.domain.to_string()).unwrap(), - )), - } -} diff --git a/aero-proto/imap/index.rs b/aero-proto/imap/index.rs deleted file mode 100644 index 9b794b8..0000000 --- a/aero-proto/imap/index.rs +++ /dev/null @@ -1,211 +0,0 @@ -use std::num::{NonZeroU32, NonZeroU64}; - -use anyhow::{anyhow, Result}; -use imap_codec::imap_types::sequence::{SeqOrUid, Sequence, SequenceSet}; - -use crate::mail::uidindex::{ImapUid, ModSeq, UidIndex}; -use crate::mail::unique_ident::UniqueIdent; - -pub struct Index<'a> { - pub imap_index: Vec>, - pub internal: &'a UidIndex, -} -impl<'a> Index<'a> { - pub fn new(internal: &'a UidIndex) -> Result { - let imap_index = internal - .idx_by_uid - .iter() - .enumerate() - .map(|(i_enum, (&uid, &uuid))| { - let (_, modseq, flags) = internal - .table - .get(&uuid) - .ok_or(anyhow!("mail is missing from index"))?; - let i_int: u32 = (i_enum + 1).try_into()?; - let i: NonZeroU32 = i_int.try_into()?; - - Ok(MailIndex { - i, - uid, - uuid, - modseq: *modseq, - flags, - }) - }) - .collect::>>()?; - - Ok(Self { - imap_index, - internal, - }) - } - - pub fn last(&'a self) -> Option<&'a MailIndex<'a>> { - self.imap_index.last() - } - - /// Fetch mail descriptors based on a sequence of UID - /// - /// Complexity analysis: - /// - Sort is O(n * log n) where n is the number of uid generated by the sequence - /// - Finding the starting point in the index O(log m) where m is the size of the mailbox - /// While n =< m, it's not clear if the difference is big or not. - /// - /// For now, the algorithm tries to be fast for small values of n, - /// as it is what is expected by clients. - /// - /// So we assume for our implementation that : n << m. - /// It's not true for full mailbox searches for example... 
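The complexity note above describes the strategy used by fetch_on_uid below: sort the requested UIDs, do one binary search (partition_point) to find the starting offset in the UID-sorted index, then only slide a window forward for each subsequent UID. A self-contained sketch of that windowing over bare u32 UIDs (not the real MailIndex type):

fn select_uids(index: &[u32], mut wanted: Vec<u32>) -> Vec<u32> {
    wanted.sort();
    let first = match wanted.first() {
        Some(f) => f,
        None => return vec![],
    };
    // One binary search skips everything below the first wanted UID: O(log m).
    let mut window = &index[index.partition_point(|uid| uid < first)..];
    let mut out = vec![];
    for w in wanted {
        // Then the window only ever moves forward, because both the index
        // and the wanted UIDs are sorted.
        let skip = match window.iter().position(|uid| *uid >= w) {
            Some(i) => i,
            None => break,
        };
        window = &window[skip..];
        if window[0] == w {
            out.push(w);
        }
    }
    out
}

fn main() {
    // Index sorted by UID, with holes left by expunged messages.
    let index = [1, 2, 5, 8, 13, 21];
    assert_eq!(select_uids(&index, vec![5, 3, 21, 22]), vec![5, 21]);
}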
- pub fn fetch_on_uid(&'a self, sequence_set: &SequenceSet) -> Vec<&'a MailIndex<'a>> { - if self.imap_index.is_empty() { - return vec![]; - } - let largest = self.last().expect("The mailbox is not empty").uid; - let mut unroll_seq = sequence_set.iter(largest).collect::>(); - unroll_seq.sort(); - - let start_seq = match unroll_seq.iter().next() { - Some(elem) => elem, - None => return vec![], - }; - - // Quickly jump to the right point in the mailbox vector O(log m) instead - // of iterating one by one O(m). Works only because both unroll_seq & imap_index are sorted per uid. - let mut imap_idx = { - let start_idx = self - .imap_index - .partition_point(|mail_idx| &mail_idx.uid < start_seq); - &self.imap_index[start_idx..] - }; - - let mut acc = vec![]; - for wanted_uid in unroll_seq.iter() { - // Slide the window forward as long as its first element is lower than our wanted uid. - let start_idx = match imap_idx.iter().position(|midx| &midx.uid >= wanted_uid) { - Some(v) => v, - None => break, - }; - imap_idx = &imap_idx[start_idx..]; - - // If the beginning of our new window is the uid we want, we collect it - if &imap_idx[0].uid == wanted_uid { - acc.push(&imap_idx[0]); - } - } - - acc - } - - pub fn fetch_on_id(&'a self, sequence_set: &SequenceSet) -> Result>> { - if self.imap_index.is_empty() { - return Ok(vec![]); - } - let largest = NonZeroU32::try_from(self.imap_index.len() as u32)?; - let mut acc = sequence_set - .iter(largest) - .map(|wanted_id| { - self.imap_index - .get((wanted_id.get() as usize) - 1) - .ok_or(anyhow!("Mail not found")) - }) - .collect::>>()?; - - // Sort the result to be consistent with UID - acc.sort_by(|a, b| a.i.cmp(&b.i)); - - Ok(acc) - } - - pub fn fetch( - self: &'a Index<'a>, - sequence_set: &SequenceSet, - by_uid: bool, - ) -> Result>> { - match by_uid { - true => Ok(self.fetch_on_uid(sequence_set)), - _ => self.fetch_on_id(sequence_set), - } - } - - pub fn fetch_changed_since( - self: &'a Index<'a>, - sequence_set: &SequenceSet, - maybe_modseq: Option, - by_uid: bool, - ) -> Result>> { - let raw = self.fetch(sequence_set, by_uid)?; - let res = match maybe_modseq { - Some(pit) => raw.into_iter().filter(|midx| midx.modseq > pit).collect(), - None => raw, - }; - - Ok(res) - } - - pub fn fetch_unchanged_since( - self: &'a Index<'a>, - sequence_set: &SequenceSet, - maybe_modseq: Option, - by_uid: bool, - ) -> Result<(Vec<&'a MailIndex<'a>>, Vec<&'a MailIndex<'a>>)> { - let raw = self.fetch(sequence_set, by_uid)?; - let res = match maybe_modseq { - Some(pit) => raw.into_iter().partition(|midx| midx.modseq <= pit), - None => (raw, vec![]), - }; - - Ok(res) - } -} - -#[derive(Clone, Debug)] -pub struct MailIndex<'a> { - pub i: NonZeroU32, - pub uid: ImapUid, - pub uuid: UniqueIdent, - pub modseq: ModSeq, - pub flags: &'a Vec, -} - -impl<'a> MailIndex<'a> { - // The following functions are used to implement the SEARCH command - pub fn is_in_sequence_i(&self, seq: &Sequence) -> bool { - match seq { - Sequence::Single(SeqOrUid::Asterisk) => true, - Sequence::Single(SeqOrUid::Value(target)) => target == &self.i, - Sequence::Range(SeqOrUid::Asterisk, SeqOrUid::Value(x)) - | Sequence::Range(SeqOrUid::Value(x), SeqOrUid::Asterisk) => x <= &self.i, - Sequence::Range(SeqOrUid::Value(x1), SeqOrUid::Value(x2)) => { - if x1 < x2 { - x1 <= &self.i && &self.i <= x2 - } else { - x1 >= &self.i && &self.i >= x2 - } - } - Sequence::Range(SeqOrUid::Asterisk, SeqOrUid::Asterisk) => true, - } - } - - pub fn is_in_sequence_uid(&self, seq: &Sequence) -> bool { - match seq { - 
Sequence::Single(SeqOrUid::Asterisk) => true, - Sequence::Single(SeqOrUid::Value(target)) => target == &self.uid, - Sequence::Range(SeqOrUid::Asterisk, SeqOrUid::Value(x)) - | Sequence::Range(SeqOrUid::Value(x), SeqOrUid::Asterisk) => x <= &self.uid, - Sequence::Range(SeqOrUid::Value(x1), SeqOrUid::Value(x2)) => { - if x1 < x2 { - x1 <= &self.uid && &self.uid <= x2 - } else { - x1 >= &self.uid && &self.uid >= x2 - } - } - Sequence::Range(SeqOrUid::Asterisk, SeqOrUid::Asterisk) => true, - } - } - - pub fn is_flag_set(&self, flag: &str) -> bool { - self.flags - .iter() - .any(|candidate| candidate.as_str() == flag) - } -} diff --git a/aero-proto/imap/mail_view.rs b/aero-proto/imap/mail_view.rs deleted file mode 100644 index a8db733..0000000 --- a/aero-proto/imap/mail_view.rs +++ /dev/null @@ -1,306 +0,0 @@ -use std::num::NonZeroU32; - -use anyhow::{anyhow, bail, Result}; -use chrono::{naive::NaiveDate, DateTime as ChronoDateTime, Local, Offset, TimeZone, Utc}; - -use imap_codec::imap_types::core::NString; -use imap_codec::imap_types::datetime::DateTime; -use imap_codec::imap_types::fetch::{ - MessageDataItem, MessageDataItemName, Section as FetchSection, -}; -use imap_codec::imap_types::flag::Flag; -use imap_codec::imap_types::response::Data; - -use eml_codec::{ - imf, - part::{composite::Message, AnyPart}, -}; - -use crate::mail::query::QueryResult; - -use crate::imap::attributes::AttributesProxy; -use crate::imap::flags; -use crate::imap::imf_view::ImfView; -use crate::imap::index::MailIndex; -use crate::imap::mime_view; -use crate::imap::response::Body; - -pub struct MailView<'a> { - pub in_idx: &'a MailIndex<'a>, - pub query_result: &'a QueryResult, - pub content: FetchedMail<'a>, -} - -impl<'a> MailView<'a> { - pub fn new(query_result: &'a QueryResult, in_idx: &'a MailIndex<'a>) -> Result> { - Ok(Self { - in_idx, - query_result, - content: match query_result { - QueryResult::FullResult { content, .. } => { - let (_, parsed) = - eml_codec::parse_message(&content).or(Err(anyhow!("Invalid mail body")))?; - FetchedMail::full_from_message(parsed) - } - QueryResult::PartialResult { metadata, .. } => { - let (_, parsed) = eml_codec::parse_message(&metadata.headers) - .or(Err(anyhow!("unable to parse email headers")))?; - FetchedMail::partial_from_message(parsed) - } - QueryResult::IndexResult { .. 
} => FetchedMail::IndexOnly, - }, - }) - } - - pub fn imf(&self) -> Option { - self.content.as_imf().map(ImfView) - } - - pub fn selected_mime(&'a self) -> Option> { - self.content.as_anypart().ok().map(mime_view::SelectedMime) - } - - pub fn filter(&self, ap: &AttributesProxy) -> Result<(Body<'static>, SeenFlag)> { - let mut seen = SeenFlag::DoNothing; - let res_attrs = ap - .attrs - .iter() - .map(|attr| match attr { - MessageDataItemName::Uid => Ok(self.uid()), - MessageDataItemName::Flags => Ok(self.flags()), - MessageDataItemName::Rfc822Size => self.rfc_822_size(), - MessageDataItemName::Rfc822Header => self.rfc_822_header(), - MessageDataItemName::Rfc822Text => self.rfc_822_text(), - MessageDataItemName::Rfc822 => { - if self.is_not_yet_seen() { - seen = SeenFlag::MustAdd; - } - self.rfc822() - } - MessageDataItemName::Envelope => Ok(self.envelope()), - MessageDataItemName::Body => self.body(), - MessageDataItemName::BodyStructure => self.body_structure(), - MessageDataItemName::BodyExt { - section, - partial, - peek, - } => { - let (body, has_seen) = self.body_ext(section, partial, peek)?; - seen = has_seen; - Ok(body) - } - MessageDataItemName::InternalDate => self.internal_date(), - MessageDataItemName::ModSeq => Ok(self.modseq()), - }) - .collect::, _>>()?; - - Ok(( - Body::Data(Data::Fetch { - seq: self.in_idx.i, - items: res_attrs.try_into()?, - }), - seen, - )) - } - - pub fn stored_naive_date(&self) -> Result { - let mail_meta = self.query_result.metadata().expect("metadata were fetched"); - let mail_ts: i64 = mail_meta.internaldate.try_into()?; - let msg_date: ChronoDateTime = ChronoDateTime::from_timestamp(mail_ts, 0) - .ok_or(anyhow!("unable to parse timestamp"))? - .with_timezone(&Local); - - Ok(msg_date.date_naive()) - } - - pub fn is_header_contains_pattern(&self, hdr: &[u8], pattern: &[u8]) -> bool { - let mime = match self.selected_mime() { - None => return false, - Some(x) => x, - }; - - let val = match mime.header_value(hdr) { - None => return false, - Some(x) => x, - }; - - val.windows(pattern.len()).any(|win| win == pattern) - } - - // Private function, mainly for filter! - fn uid(&self) -> MessageDataItem<'static> { - MessageDataItem::Uid(self.in_idx.uid.clone()) - } - - fn flags(&self) -> MessageDataItem<'static> { - MessageDataItem::Flags( - self.in_idx - .flags - .iter() - .filter_map(|f| flags::from_str(f)) - .collect(), - ) - } - - fn rfc_822_size(&self) -> Result> { - let sz = self - .query_result - .metadata() - .ok_or(anyhow!("mail metadata are required"))? - .rfc822_size; - Ok(MessageDataItem::Rfc822Size(sz as u32)) - } - - fn rfc_822_header(&self) -> Result> { - let hdrs: NString = self - .query_result - .metadata() - .ok_or(anyhow!("mail metadata are required"))? 
- .headers - .to_vec() - .try_into()?; - Ok(MessageDataItem::Rfc822Header(hdrs)) - } - - fn rfc_822_text(&self) -> Result> { - let txt: NString = self.content.as_msg()?.raw_body.to_vec().try_into()?; - Ok(MessageDataItem::Rfc822Text(txt)) - } - - fn rfc822(&self) -> Result> { - let full: NString = self.content.as_msg()?.raw_part.to_vec().try_into()?; - Ok(MessageDataItem::Rfc822(full)) - } - - fn envelope(&self) -> MessageDataItem<'static> { - MessageDataItem::Envelope( - self.imf() - .expect("an imf object is derivable from fetchedmail") - .message_envelope(), - ) - } - - fn body(&self) -> Result> { - Ok(MessageDataItem::Body(mime_view::bodystructure( - self.content.as_msg()?.child.as_ref(), - false, - )?)) - } - - fn body_structure(&self) -> Result> { - Ok(MessageDataItem::BodyStructure(mime_view::bodystructure( - self.content.as_msg()?.child.as_ref(), - true, - )?)) - } - - fn is_not_yet_seen(&self) -> bool { - let seen_flag = Flag::Seen.to_string(); - !self.in_idx.flags.iter().any(|x| *x == seen_flag) - } - - /// maps to BODY[
<section>]<<partial>> and BODY.PEEK[<section>
<section>]<<partial>> and BODY.PEEK[<section>
]<> - /// peek does not implicitly set the \Seen flag - /// eg. BODY[HEADER.FIELDS (DATE FROM)] - /// eg. BODY[]<0.2048> - fn body_ext( - &self, - section: &Option>, - partial: &Option<(u32, NonZeroU32)>, - peek: &bool, - ) -> Result<(MessageDataItem<'static>, SeenFlag)> { - // Manage Seen flag - let mut seen = SeenFlag::DoNothing; - if !peek && self.is_not_yet_seen() { - // Add \Seen flag - //self.mailbox.add_flags(uuid, &[seen_flag]).await?; - seen = SeenFlag::MustAdd; - } - - // Process message - let (text, origin) = - match mime_view::body_ext(self.content.as_anypart()?, section, partial)? { - mime_view::BodySection::Full(body) => (body, None), - mime_view::BodySection::Slice { body, origin_octet } => (body, Some(origin_octet)), - }; - - let data: NString = text.to_vec().try_into()?; - - return Ok(( - MessageDataItem::BodyExt { - section: section.as_ref().map(|fs| fs.clone()), - origin, - data, - }, - seen, - )); - } - - fn internal_date(&self) -> Result> { - let dt = Utc - .fix() - .timestamp_opt( - i64::try_from( - self.query_result - .metadata() - .ok_or(anyhow!("mail metadata were not fetched"))? - .internaldate - / 1000, - )?, - 0, - ) - .earliest() - .ok_or(anyhow!("Unable to parse internal date"))?; - Ok(MessageDataItem::InternalDate(DateTime::unvalidated(dt))) - } - - fn modseq(&self) -> MessageDataItem<'static> { - MessageDataItem::ModSeq(self.in_idx.modseq) - } -} - -pub enum SeenFlag { - DoNothing, - MustAdd, -} - -// ------------------- - -pub enum FetchedMail<'a> { - IndexOnly, - Partial(AnyPart<'a>), - Full(AnyPart<'a>), -} -impl<'a> FetchedMail<'a> { - pub fn full_from_message(msg: Message<'a>) -> Self { - Self::Full(AnyPart::Msg(msg)) - } - - pub fn partial_from_message(msg: Message<'a>) -> Self { - Self::Partial(AnyPart::Msg(msg)) - } - - pub fn as_anypart(&self) -> Result<&AnyPart<'a>> { - match self { - FetchedMail::Full(x) => Ok(&x), - FetchedMail::Partial(x) => Ok(&x), - _ => bail!("The full message must be fetched, not only its headers"), - } - } - - pub fn as_msg(&self) -> Result<&Message<'a>> { - match self { - FetchedMail::Full(AnyPart::Msg(x)) => Ok(&x), - FetchedMail::Partial(AnyPart::Msg(x)) => Ok(&x), - _ => bail!("The full message must be fetched, not only its headers AND it must be an AnyPart::Msg."), - } - } - - pub fn as_imf(&self) -> Option<&imf::Imf<'a>> { - match self { - FetchedMail::Full(AnyPart::Msg(x)) => Some(&x.imf), - FetchedMail::Partial(AnyPart::Msg(x)) => Some(&x.imf), - _ => None, - } - } -} diff --git a/aero-proto/imap/mailbox_view.rs b/aero-proto/imap/mailbox_view.rs deleted file mode 100644 index 1c53b93..0000000 --- a/aero-proto/imap/mailbox_view.rs +++ /dev/null @@ -1,772 +0,0 @@ -use std::collections::HashSet; -use std::num::{NonZeroU32, NonZeroU64}; -use std::sync::Arc; - -use anyhow::{anyhow, Error, Result}; - -use futures::stream::{StreamExt, TryStreamExt}; - -use imap_codec::imap_types::core::{Charset, Vec1}; -use imap_codec::imap_types::fetch::MessageDataItem; -use imap_codec::imap_types::flag::{Flag, FlagFetch, FlagPerm, StoreResponse, StoreType}; -use imap_codec::imap_types::response::{Code, CodeOther, Data, Status}; -use imap_codec::imap_types::search::SearchKey; -use imap_codec::imap_types::sequence::SequenceSet; - -use crate::mail::mailbox::Mailbox; -use crate::mail::query::QueryScope; -use crate::mail::snapshot::FrozenMailbox; -use crate::mail::uidindex::{ImapUid, ImapUidvalidity, ModSeq}; -use crate::mail::unique_ident::UniqueIdent; - -use crate::imap::attributes::AttributesProxy; -use crate::imap::flags; -use 
crate::imap::index::Index; -use crate::imap::mail_view::{MailView, SeenFlag}; -use crate::imap::response::Body; -use crate::imap::search; - -const DEFAULT_FLAGS: [Flag; 5] = [ - Flag::Seen, - Flag::Answered, - Flag::Flagged, - Flag::Deleted, - Flag::Draft, -]; - -pub struct UpdateParameters { - pub silence: HashSet, - pub with_modseq: bool, - pub with_uid: bool, -} -impl Default for UpdateParameters { - fn default() -> Self { - Self { - silence: HashSet::new(), - with_modseq: false, - with_uid: false, - } - } -} - -/// A MailboxView is responsible for giving the client the information -/// it needs about a mailbox, such as an initial summary of the mailbox's -/// content and continuous updates indicating when the content -/// of the mailbox has been changed. -/// To do this, it keeps a variable `known_state` that corresponds to -/// what the client knows, and produces IMAP messages to be sent to the -/// client that go along updates to `known_state`. -pub struct MailboxView { - pub internal: FrozenMailbox, - pub is_condstore: bool, -} - -impl MailboxView { - /// Creates a new IMAP view into a mailbox. - pub async fn new(mailbox: Arc, is_cond: bool) -> Self { - Self { - internal: mailbox.frozen().await, - is_condstore: is_cond, - } - } - - /// Create an updated view, useful to make a diff - /// between what the client knows and new stuff - /// Produces a set of IMAP responses describing the change between - /// what the client knows and what is actually in the mailbox. - /// This does NOT trigger a sync, it bases itself on what is currently - /// loaded in RAM by Bayou. - pub async fn update(&mut self, params: UpdateParameters) -> Result>> { - let old_snapshot = self.internal.update().await; - let new_snapshot = &self.internal.snapshot; - - let mut data = Vec::::new(); - - // Calculate diff between two mailbox states - // See example in IMAP RFC in section on NOOP command: - // we want to produce something like this: - // C: a047 NOOP - // S: * 22 EXPUNGE - // S: * 23 EXISTS - // S: * 14 FETCH (UID 1305 FLAGS (\Seen \Deleted)) - // S: a047 OK Noop completed - // In other words: - // - notify client of expunged mails - // - if new mails arrived, notify client of number of existing mails - // - if flags changed for existing mails, tell client - // (for this last step: if uidvalidity changed, do nothing, - // just notify of new uidvalidity and they will resync) - - // - notify client of expunged mails - let mut n_expunge = 0; - for (i, (_uid, uuid)) in old_snapshot.idx_by_uid.iter().enumerate() { - if !new_snapshot.table.contains_key(uuid) { - data.push(Body::Data(Data::Expunge( - NonZeroU32::try_from((i + 1 - n_expunge) as u32).unwrap(), - ))); - n_expunge += 1; - } - } - - // - if new mails arrived, notify client of number of existing mails - if new_snapshot.table.len() != old_snapshot.table.len() - n_expunge - || new_snapshot.uidvalidity != old_snapshot.uidvalidity - { - data.push(self.exists_status()?); - } - - if new_snapshot.uidvalidity != old_snapshot.uidvalidity { - // TODO: do we want to push less/more info than this? 
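A side note on the expunge diff computed at the top of update() above: the `(i + 1 - n_expunge)` arithmetic accounts for the fact that every untagged EXPUNGE response already emitted shifts the sequence numbers of the remaining messages down by one. A small standalone check of that renumbering (hypothetical UIDs, not the project's types):

fn main() {
    // Messages the client knows about, in UID order (seq numbers 1..=5).
    // Suppose UIDs 11 and 14 are missing from the new snapshot.
    let old_uids = [10u32, 11, 12, 13, 14];
    let gone = [11u32, 14];

    let mut n_expunge = 0;
    let mut responses = vec![];
    for (i, uid) in old_uids.iter().enumerate() {
        if gone.contains(uid) {
            // Each EXPUNGE already sent renumbers everything after it,
            // hence the subtraction.
            responses.push(format!("* {} EXPUNGE", i + 1 - n_expunge));
            n_expunge += 1;
        }
    }
    // UID 11 was seq 2; UID 14 was seq 5 but becomes seq 4 once seq 2 is gone.
    assert_eq!(responses, vec!["* 2 EXPUNGE", "* 4 EXPUNGE"]);
}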
- data.push(self.uidvalidity_status()?); - data.push(self.uidnext_status()?); - } else { - // - if flags changed for existing mails, tell client - for (i, (_uid, uuid)) in new_snapshot.idx_by_uid.iter().enumerate() { - if params.silence.contains(uuid) { - continue; - } - - let old_mail = old_snapshot.table.get(uuid); - let new_mail = new_snapshot.table.get(uuid); - if old_mail.is_some() && old_mail != new_mail { - if let Some((uid, modseq, flags)) = new_mail { - let mut items = vec![MessageDataItem::Flags( - flags.iter().filter_map(|f| flags::from_str(f)).collect(), - )]; - - if params.with_uid { - items.push(MessageDataItem::Uid(*uid)); - } - - if params.with_modseq { - items.push(MessageDataItem::ModSeq(*modseq)); - } - - data.push(Body::Data(Data::Fetch { - seq: NonZeroU32::try_from((i + 1) as u32).unwrap(), - items: items.try_into()?, - })); - } - } - } - } - Ok(data) - } - - /// Generates the necessary IMAP messages so that the client - /// has a satisfactory summary of the current mailbox's state. - /// These are the messages that are sent in response to a SELECT command. - pub fn summary(&self) -> Result>> { - let mut data = Vec::::new(); - data.push(self.exists_status()?); - data.push(self.recent_status()?); - data.extend(self.flags_status()?.into_iter()); - data.push(self.uidvalidity_status()?); - data.push(self.uidnext_status()?); - if self.is_condstore { - data.push(self.highestmodseq_status()?); - } - /*self.unseen_first_status()? - .map(|unseen_status| data.push(unseen_status));*/ - - Ok(data) - } - - pub async fn store<'a>( - &mut self, - sequence_set: &SequenceSet, - kind: &StoreType, - response: &StoreResponse, - flags: &[Flag<'a>], - unchanged_since: Option, - is_uid_store: &bool, - ) -> Result<(Vec>, Vec)> { - self.internal.sync().await?; - - let flags = flags.iter().map(|x| x.to_string()).collect::>(); - - let idx = self.index()?; - let (editable, in_conflict) = - idx.fetch_unchanged_since(sequence_set, unchanged_since, *is_uid_store)?; - - for mi in editable.iter() { - match kind { - StoreType::Add => { - self.internal.mailbox.add_flags(mi.uuid, &flags[..]).await?; - } - StoreType::Remove => { - self.internal.mailbox.del_flags(mi.uuid, &flags[..]).await?; - } - StoreType::Replace => { - self.internal.mailbox.set_flags(mi.uuid, &flags[..]).await?; - } - } - } - - let silence = match response { - StoreResponse::Answer => HashSet::new(), - StoreResponse::Silent => editable.iter().map(|midx| midx.uuid).collect(), - }; - - let conflict_id_or_uid = match is_uid_store { - true => in_conflict.into_iter().map(|midx| midx.uid).collect(), - _ => in_conflict.into_iter().map(|midx| midx.i).collect(), - }; - - let summary = self - .update(UpdateParameters { - with_uid: *is_uid_store, - with_modseq: unchanged_since.is_some(), - silence, - }) - .await?; - - Ok((summary, conflict_id_or_uid)) - } - - pub async fn idle_sync(&mut self) -> Result>> { - self.internal - .mailbox - .notify() - .await - .upgrade() - .ok_or(anyhow!("test"))? 
- .notified() - .await; - self.internal.mailbox.opportunistic_sync().await?; - self.update(UpdateParameters::default()).await - } - - pub async fn expunge( - &mut self, - maybe_seq_set: &Option, - ) -> Result>> { - // Get a recent view to apply our change - self.internal.sync().await?; - let state = self.internal.peek().await; - let idx = Index::new(&state)?; - - // Build a default sequence set for the default case - use imap_codec::imap_types::sequence::{SeqOrUid, Sequence}; - let seq = match maybe_seq_set { - Some(s) => s.clone(), - None => SequenceSet( - vec![Sequence::Range( - SeqOrUid::Value(NonZeroU32::MIN), - SeqOrUid::Asterisk, - )] - .try_into() - .unwrap(), - ), - }; - - let deleted_flag = Flag::Deleted.to_string(); - let msgs = idx - .fetch_on_uid(&seq) - .into_iter() - .filter(|midx| midx.flags.iter().any(|x| *x == deleted_flag)) - .map(|midx| midx.uuid); - - for msg in msgs { - self.internal.mailbox.delete(msg).await?; - } - - self.update(UpdateParameters::default()).await - } - - pub async fn copy( - &self, - sequence_set: &SequenceSet, - to: Arc, - is_uid_copy: &bool, - ) -> Result<(ImapUidvalidity, Vec<(ImapUid, ImapUid)>)> { - let idx = self.index()?; - let mails = idx.fetch(sequence_set, *is_uid_copy)?; - - let mut new_uuids = vec![]; - for mi in mails.iter() { - new_uuids.push(to.copy_from(&self.internal.mailbox, mi.uuid).await?); - } - - let mut ret = vec![]; - let to_state = to.current_uid_index().await; - for (mi, new_uuid) in mails.iter().zip(new_uuids.iter()) { - let dest_uid = to_state - .table - .get(new_uuid) - .ok_or(anyhow!("copied mail not in destination mailbox"))? - .0; - ret.push((mi.uid, dest_uid)); - } - - Ok((to_state.uidvalidity, ret)) - } - - pub async fn r#move( - &mut self, - sequence_set: &SequenceSet, - to: Arc, - is_uid_copy: &bool, - ) -> Result<(ImapUidvalidity, Vec<(ImapUid, ImapUid)>, Vec>)> { - let idx = self.index()?; - let mails = idx.fetch(sequence_set, *is_uid_copy)?; - - for mi in mails.iter() { - to.move_from(&self.internal.mailbox, mi.uuid).await?; - } - - let mut ret = vec![]; - let to_state = to.current_uid_index().await; - for mi in mails.iter() { - let dest_uid = to_state - .table - .get(&mi.uuid) - .ok_or(anyhow!("moved mail not in destination mailbox"))? - .0; - ret.push((mi.uid, dest_uid)); - } - - let update = self - .update(UpdateParameters { - with_uid: *is_uid_copy, - ..UpdateParameters::default() - }) - .await?; - - Ok((to_state.uidvalidity, ret, update)) - } - - /// Looks up state changes in the mailbox and produces a set of IMAP - /// responses describing the new state. - pub async fn fetch<'b>( - &self, - sequence_set: &SequenceSet, - ap: &AttributesProxy, - changed_since: Option, - is_uid_fetch: &bool, - ) -> Result>> { - // [1/6] Pre-compute data - // a. what are the uuids of the emails we want? - // b. do we need to fetch the full body? 
- //let ap = AttributesProxy::new(attributes, *is_uid_fetch); - let query_scope = match ap.need_body() { - true => QueryScope::Full, - _ => QueryScope::Partial, - }; - tracing::debug!("Query scope {:?}", query_scope); - let idx = self.index()?; - let mail_idx_list = idx.fetch_changed_since(sequence_set, changed_since, *is_uid_fetch)?; - - // [2/6] Fetch the emails - let uuids = mail_idx_list - .iter() - .map(|midx| midx.uuid) - .collect::>(); - - let query = self.internal.query(&uuids, query_scope); - //let query_result = self.internal.query(&uuids, query_scope).fetch().await?; - - let query_stream = query - .fetch() - .zip(futures::stream::iter(mail_idx_list)) - // [3/6] Derive an IMAP-specific view from the results, apply the filters - .map(|(maybe_qr, midx)| match maybe_qr { - Ok(qr) => Ok((MailView::new(&qr, midx)?.filter(&ap)?, midx)), - Err(e) => Err(e), - }) - // [4/6] Apply the IMAP transformation - .then(|maybe_ret| async move { - let ((body, seen), midx) = maybe_ret?; - - // [5/6] Register the \Seen flags - if matches!(seen, SeenFlag::MustAdd) { - let seen_flag = Flag::Seen.to_string(); - self.internal - .mailbox - .add_flags(midx.uuid, &[seen_flag]) - .await?; - } - - Ok::<_, anyhow::Error>(body) - }); - - // [6/6] Build the final result that will be sent to the client. - query_stream.try_collect().await - } - - /// A naive search implementation... - pub async fn search<'a>( - &self, - _charset: &Option>, - search_key: &SearchKey<'a>, - uid: bool, - ) -> Result<(Vec>, bool)> { - // 1. Compute the subset of sequence identifiers we need to fetch - // based on the search query - let crit = search::Criteria(search_key); - let (seq_set, seq_type) = crit.to_sequence_set(); - - // 2. Get the selection - let idx = self.index()?; - let selection = idx.fetch(&seq_set, seq_type.is_uid())?; - - // 3. Filter the selection based on the ID / UID / Flags - let (kept_idx, to_fetch) = crit.filter_on_idx(&selection); - - // 4.a Fetch additional info about the emails - let query_scope = crit.query_scope(); - let uuids = to_fetch.iter().map(|midx| midx.uuid).collect::>(); - let query = self.internal.query(&uuids, query_scope); - - // 4.b We don't want to keep all data in memory, so we do the computing in a stream - let query_stream = query - .fetch() - .zip(futures::stream::iter(&to_fetch)) - // 5.a Build a mailview with the body, might fail with an error - // 5.b If needed, filter the selection based on the body, but keep the errors - // 6. Drop the query+mailbox, keep only the mail index - // Here we release a lot of memory, this is the most important part ^^ - .filter_map(|(maybe_qr, midx)| { - let r = match maybe_qr { - Ok(qr) => match MailView::new(&qr, midx).map(|mv| crit.is_keep_on_query(&mv)) { - Ok(true) => Some(Ok(*midx)), - Ok(_) => None, - Err(e) => Some(Err(e)), - }, - Err(e) => Some(Err(e)), - }; - futures::future::ready(r) - }); - - // 7. Chain both streams (part resolved from index, part resolved from metadata+body) - let main_stream = futures::stream::iter(kept_idx) - .map(Ok) - .chain(query_stream) - .map_ok(|idx| match uid { - true => (idx.uid, idx.modseq), - _ => (idx.i, idx.modseq), - }); - - // 8. Do the actual computation - let internal_result: Vec<_> = main_stream.try_collect().await?; - let (selection, modseqs): (Vec<_>, Vec<_>) = internal_result.into_iter().unzip(); - - // 9. Aggregate the maximum modseq value - let maybe_modseq = match crit.is_modseq() { - true => modseqs.into_iter().max(), - _ => None, - }; - - // 10. 
Return the final result - Ok(( - vec![Body::Data(Data::Search(selection, maybe_modseq))], - maybe_modseq.is_some(), - )) - } - - // ---- - /// @FIXME index should be stored for longer than a single request - /// Instead they should be tied to the FrozenMailbox refresh - /// It's not trivial to refactor the code to do that, so we are doing - /// some useless computation for now... - fn index<'a>(&'a self) -> Result> { - Index::new(&self.internal.snapshot) - } - - /// Produce an OK [UIDVALIDITY _] message corresponding to `known_state` - fn uidvalidity_status(&self) -> Result> { - let uid_validity = Status::ok( - None, - Some(Code::UidValidity(self.uidvalidity())), - "UIDs valid", - ) - .map_err(Error::msg)?; - Ok(Body::Status(uid_validity)) - } - - pub(crate) fn uidvalidity(&self) -> ImapUidvalidity { - self.internal.snapshot.uidvalidity - } - - /// Produce an OK [UIDNEXT _] message corresponding to `known_state` - fn uidnext_status(&self) -> Result> { - let next_uid = Status::ok( - None, - Some(Code::UidNext(self.uidnext())), - "Predict next UID", - ) - .map_err(Error::msg)?; - Ok(Body::Status(next_uid)) - } - - pub(crate) fn uidnext(&self) -> ImapUid { - self.internal.snapshot.uidnext - } - - pub(crate) fn highestmodseq_status(&self) -> Result> { - Ok(Body::Status(Status::ok( - None, - Some(Code::Other(CodeOther::unvalidated( - format!("HIGHESTMODSEQ {}", self.highestmodseq()).into_bytes(), - ))), - "Highest", - )?)) - } - - pub(crate) fn highestmodseq(&self) -> ModSeq { - self.internal.snapshot.highestmodseq - } - - /// Produce an EXISTS message corresponding to the number of mails - /// in `known_state` - fn exists_status(&self) -> Result> { - Ok(Body::Data(Data::Exists(self.exists()?))) - } - - pub(crate) fn exists(&self) -> Result { - Ok(u32::try_from(self.internal.snapshot.idx_by_uid.len())?) - } - - /// Produce a RECENT message corresponding to the number of - /// recent mails in `known_state` - fn recent_status(&self) -> Result> { - Ok(Body::Data(Data::Recent(self.recent()?))) - } - - #[allow(dead_code)] - fn unseen_first_status(&self) -> Result>> { - Ok(self - .unseen_first()? - .map(|unseen_id| { - Status::ok(None, Some(Code::Unseen(unseen_id)), "First unseen.").map(Body::Status) - }) - .transpose()?) - } - - #[allow(dead_code)] - fn unseen_first(&self) -> Result> { - Ok(self - .internal - .snapshot - .table - .values() - .enumerate() - .find(|(_i, (_imap_uid, _modseq, flags))| !flags.contains(&"\\Seen".to_string())) - .map(|(i, _)| NonZeroU32::try_from(i as u32 + 1)) - .transpose()?) - } - - pub(crate) fn recent(&self) -> Result { - let recent = self - .internal - .snapshot - .idx_by_flag - .get(&"\\Recent".to_string()) - .map(|os| os.len()) - .unwrap_or(0); - Ok(u32::try_from(recent)?) - } - - /// Produce a FLAGS and a PERMANENTFLAGS message that indicates - /// the flags that are in `known_state` + default flags - fn flags_status(&self) -> Result>> { - let mut body = vec![]; - - // 1. Collecting all the possible flags in the mailbox - // 1.a Fetch them from our index - let mut known_flags: Vec = self - .internal - .snapshot - .idx_by_flag - .flags() - .filter_map(|f| match flags::from_str(f) { - Some(FlagFetch::Flag(fl)) => Some(fl), - _ => None, - }) - .collect(); - // 1.b Merge it with our default flags list - for f in DEFAULT_FLAGS.iter() { - if !known_flags.contains(f) { - known_flags.push(f.clone()); - } - } - // 1.c Create the IMAP message - body.push(Body::Data(Data::Flags(known_flags.clone()))); - - // 2. 
Returning flags that are persisted - // 2.a Always advertise our default flags - let mut permanent = DEFAULT_FLAGS - .iter() - .map(|f| FlagPerm::Flag(f.clone())) - .collect::>(); - // 2.b Say that we support any keyword flag - permanent.push(FlagPerm::Asterisk); - // 2.c Create the IMAP message - let permanent_flags = Status::ok( - None, - Some(Code::PermanentFlags(permanent)), - "Flags permitted", - ) - .map_err(Error::msg)?; - body.push(Body::Status(permanent_flags)); - - // Done! - Ok(body) - } - - pub(crate) fn unseen_count(&self) -> usize { - let total = self.internal.snapshot.table.len(); - let seen = self - .internal - .snapshot - .idx_by_flag - .get(&Flag::Seen.to_string()) - .map(|x| x.len()) - .unwrap_or(0); - total - seen - } -} - -#[cfg(test)] -mod tests { - use super::*; - use imap_codec::encode::Encoder; - use imap_codec::imap_types::core::Vec1; - use imap_codec::imap_types::fetch::Section; - use imap_codec::imap_types::fetch::{MacroOrMessageDataItemNames, MessageDataItemName}; - use imap_codec::imap_types::response::Response; - use imap_codec::ResponseCodec; - use std::fs; - - use crate::cryptoblob; - use crate::imap::index::MailIndex; - use crate::imap::mail_view::MailView; - use crate::imap::mime_view; - use crate::mail::mailbox::MailMeta; - use crate::mail::query::QueryResult; - use crate::mail::unique_ident; - - #[test] - fn mailview_body_ext() -> Result<()> { - let ap = AttributesProxy::new( - &MacroOrMessageDataItemNames::MessageDataItemNames(vec![ - MessageDataItemName::BodyExt { - section: Some(Section::Header(None)), - partial: None, - peek: false, - }, - ]), - &[], - false, - ); - - let key = cryptoblob::gen_key(); - let meta = MailMeta { - internaldate: 0u64, - headers: vec![], - message_key: key, - rfc822_size: 8usize, - }; - - let index_entry = (NonZeroU32::MIN, NonZeroU64::MIN, vec![]); - let mail_in_idx = MailIndex { - i: NonZeroU32::MIN, - uid: index_entry.0, - modseq: index_entry.1, - uuid: unique_ident::gen_ident(), - flags: &index_entry.2, - }; - let rfc822 = b"Subject: hello\r\nFrom: a@a.a\r\nTo: b@b.b\r\nDate: Thu, 12 Oct 2023 08:45:28 +0000\r\n\r\nhello world"; - let qr = QueryResult::FullResult { - uuid: mail_in_idx.uuid.clone(), - metadata: meta, - content: rfc822.to_vec(), - }; - - let mv = MailView::new(&qr, &mail_in_idx)?; - let (res_body, _seen) = mv.filter(&ap)?; - - let fattr = match res_body { - Body::Data(Data::Fetch { - seq: _seq, - items: attr, - }) => Ok(attr), - _ => Err(anyhow!("Not a fetch body")), - }?; - - assert_eq!(fattr.as_ref().len(), 1); - - let (sec, _orig, _data) = match &fattr.as_ref()[0] { - MessageDataItem::BodyExt { - section, - origin, - data, - } => Ok((section, origin, data)), - _ => Err(anyhow!("not a body ext message attribute")), - }?; - - assert_eq!(sec.as_ref().unwrap(), &Section::Header(None)); - - Ok(()) - } - - /// Future automated test. We use lossy utf8 conversion + lowercase everything, - /// so this test might allow invalid results. But at least it allows us to quickly test a - /// large variety of emails. - /// Keep in mind that special cases must still be tested manually! 
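// Editor's illustrative sketch, not part of the patch: the test below compares
// Aerogramme's BODYSTRUCTURE output against a Dovecot "oracle" after lossily
// decoding and lowercasing both sides. Factored out, the normalisation is just
// the following (helper name is hypothetical):
fn normalize_for_oracle(bytes: &[u8]) -> String {
    String::from_utf8_lossy(bytes).to_lowercase()
}
// usage: assert_eq!(normalize_for_oracle(&test_bytes), normalize_for_oracle(&oracle_line));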
- #[test] - fn fetch_body() -> Result<()> { - let prefixes = [ - /* *** MY OWN DATASET *** */ - "tests/emails/dxflrs/0001_simple", - "tests/emails/dxflrs/0002_mime", - "tests/emails/dxflrs/0003_mime-in-mime", - "tests/emails/dxflrs/0004_msg-in-msg", - // eml_codec do not support continuation for the moment - //"tests/emails/dxflrs/0005_mail-parser-readme", - "tests/emails/dxflrs/0006_single-mime", - "tests/emails/dxflrs/0007_raw_msg_in_rfc822", - /* *** (STRANGE) RFC *** */ - //"tests/emails/rfc/000", // must return text/enriched, we return text/plain - //"tests/emails/rfc/001", // does not recognize the multipart/external-body, breaks the - // whole parsing - //"tests/emails/rfc/002", // wrong date in email - - //"tests/emails/rfc/003", // dovecot fixes \r\r: the bytes number is wrong + text/enriched - - /* *** THIRD PARTY *** */ - //"tests/emails/thirdparty/000", // dovecot fixes \r\r: the bytes number is wrong - //"tests/emails/thirdparty/001", // same - "tests/emails/thirdparty/002", // same - - /* *** LEGACY *** */ - //"tests/emails/legacy/000", // same issue with \r\r - ]; - - for pref in prefixes.iter() { - println!("{}", pref); - let txt = fs::read(format!("{}.eml", pref))?; - let oracle = fs::read(format!("{}.dovecot.body", pref))?; - let message = eml_codec::parse_message(&txt).unwrap().1; - - let test_repr = Response::Data(Data::Fetch { - seq: NonZeroU32::new(1).unwrap(), - items: Vec1::from(MessageDataItem::Body(mime_view::bodystructure( - &message.child, - false, - )?)), - }); - let test_bytes = ResponseCodec::new().encode(&test_repr).dump(); - let test_str = String::from_utf8_lossy(&test_bytes).to_lowercase(); - - let oracle_str = - format!("* 1 FETCH {}\r\n", String::from_utf8_lossy(&oracle)).to_lowercase(); - - println!("aerogramme: {}\n\ndovecot: {}\n\n", test_str, oracle_str); - //println!("\n\n {} \n\n", String::from_utf8_lossy(&resp)); - assert_eq!(test_str, oracle_str); - } - - Ok(()) - } -} diff --git a/aero-proto/imap/mime_view.rs b/aero-proto/imap/mime_view.rs deleted file mode 100644 index 8bbbd2d..0000000 --- a/aero-proto/imap/mime_view.rs +++ /dev/null @@ -1,580 +0,0 @@ -use std::borrow::Cow; -use std::collections::HashSet; -use std::num::NonZeroU32; - -use anyhow::{anyhow, bail, Result}; - -use imap_codec::imap_types::body::{ - BasicFields, Body as FetchBody, BodyStructure, MultiPartExtensionData, SinglePartExtensionData, - SpecificFields, -}; -use imap_codec::imap_types::core::{AString, IString, NString, Vec1}; -use imap_codec::imap_types::fetch::{Part as FetchPart, Section as FetchSection}; - -use eml_codec::{ - header, mime, mime::r#type::Deductible, part::composite, part::discrete, part::AnyPart, -}; - -use crate::imap::imf_view::ImfView; - -pub enum BodySection<'a> { - Full(Cow<'a, [u8]>), - Slice { - body: Cow<'a, [u8]>, - origin_octet: u32, - }, -} - -/// Logic for BODY[
]<> -/// Works in 3 times: -/// 1. Find the section (RootMime::subset) -/// 2. Apply the extraction logic (SelectedMime::extract), like TEXT, HEADERS, etc. -/// 3. Keep only the given subset provided by partial -/// -/// Example of message sections: -/// -/// ``` -/// HEADER ([RFC-2822] header of the message) -/// TEXT ([RFC-2822] text body of the message) MULTIPART/MIXED -/// 1 TEXT/PLAIN -/// 2 APPLICATION/OCTET-STREAM -/// 3 MESSAGE/RFC822 -/// 3.HEADER ([RFC-2822] header of the message) -/// 3.TEXT ([RFC-2822] text body of the message) MULTIPART/MIXED -/// 3.1 TEXT/PLAIN -/// 3.2 APPLICATION/OCTET-STREAM -/// 4 MULTIPART/MIXED -/// 4.1 IMAGE/GIF -/// 4.1.MIME ([MIME-IMB] header for the IMAGE/GIF) -/// 4.2 MESSAGE/RFC822 -/// 4.2.HEADER ([RFC-2822] header of the message) -/// 4.2.TEXT ([RFC-2822] text body of the message) MULTIPART/MIXED -/// 4.2.1 TEXT/PLAIN -/// 4.2.2 MULTIPART/ALTERNATIVE -/// 4.2.2.1 TEXT/PLAIN -/// 4.2.2.2 TEXT/RICHTEXT -/// ``` -pub fn body_ext<'a>( - part: &'a AnyPart<'a>, - section: &'a Option>, - partial: &'a Option<(u32, NonZeroU32)>, -) -> Result> { - let root_mime = NodeMime(part); - let (extractor, path) = SubsettedSection::from(section); - let selected_mime = root_mime.subset(path)?; - let extracted_full = selected_mime.extract(&extractor)?; - Ok(extracted_full.to_body_section(partial)) -} - -/// Logic for BODY and BODYSTRUCTURE -/// -/// ```raw -/// b fetch 29878:29879 (BODY) -/// * 29878 FETCH (BODY (("text" "plain" ("charset" "utf-8") NIL NIL "quoted-printable" 3264 82)("text" "html" ("charset" "utf-8") NIL NIL "quoted-printable" 31834 643) "alternative")) -/// * 29879 FETCH (BODY ("text" "html" ("charset" "us-ascii") NIL NIL "7bit" 4107 131)) -/// ^^^^^^^^^^^^^^^^^^^^^^ ^^^ ^^^ ^^^^^^ ^^^^ ^^^ -/// | | | | | | number of lines -/// | | | | | size -/// | | | | content transfer encoding -/// | | | description -/// | | id -/// | parameter list -/// b OK Fetch completed (0.001 + 0.000 secs). -/// ``` -pub fn bodystructure(part: &AnyPart, is_ext: bool) -> Result> { - NodeMime(part).structure(is_ext) -} - -/// NodeMime -/// -/// Used for recursive logic on MIME. -/// See SelectedMime for inspection. -struct NodeMime<'a>(&'a AnyPart<'a>); -impl<'a> NodeMime<'a> { - /// A MIME object is a tree of elements. - /// The path indicates which element must be picked. - /// This function returns the picked element as the new view - fn subset(self, path: Option<&'a FetchPart>) -> Result> { - match path { - None => Ok(SelectedMime(self.0)), - Some(v) => self.rec_subset(v.0.as_ref()), - } - } - - fn rec_subset(self, path: &'a [NonZeroU32]) -> Result { - if path.is_empty() { - Ok(SelectedMime(self.0)) - } else { - match self.0 { - AnyPart::Mult(x) => { - let next = Self(x.children - .get(path[0].get() as usize - 1) - .ok_or(anyhow!("Unable to resolve subpath {:?}, current multipart has only {} elements", path, x.children.len()))?); - next.rec_subset(&path[1..]) - }, - AnyPart::Msg(x) => { - let next = Self(x.child.as_ref()); - next.rec_subset(path) - }, - _ => bail!("You tried to access a subpart on an atomic part (text or binary). 
Unresolved subpath {:?}", path), - } - } - } - - fn structure(&self, is_ext: bool) -> Result> { - match self.0 { - AnyPart::Txt(x) => NodeTxt(self, x).structure(is_ext), - AnyPart::Bin(x) => NodeBin(self, x).structure(is_ext), - AnyPart::Mult(x) => NodeMult(self, x).structure(is_ext), - AnyPart::Msg(x) => NodeMsg(self, x).structure(is_ext), - } - } -} - -//---------------------------------------------------------- - -/// A FetchSection must be handled in 2 times: -/// - First we must extract the MIME part -/// - Then we must process it as desired -/// The given struct mixes both work, so -/// we separate this work here. -enum SubsettedSection<'a> { - Part, - Header, - HeaderFields(&'a Vec1>), - HeaderFieldsNot(&'a Vec1>), - Text, - Mime, -} -impl<'a> SubsettedSection<'a> { - fn from(section: &'a Option) -> (Self, Option<&'a FetchPart>) { - match section { - Some(FetchSection::Text(maybe_part)) => (Self::Text, maybe_part.as_ref()), - Some(FetchSection::Header(maybe_part)) => (Self::Header, maybe_part.as_ref()), - Some(FetchSection::HeaderFields(maybe_part, fields)) => { - (Self::HeaderFields(fields), maybe_part.as_ref()) - } - Some(FetchSection::HeaderFieldsNot(maybe_part, fields)) => { - (Self::HeaderFieldsNot(fields), maybe_part.as_ref()) - } - Some(FetchSection::Mime(part)) => (Self::Mime, Some(part)), - Some(FetchSection::Part(part)) => (Self::Part, Some(part)), - None => (Self::Part, None), - } - } -} - -/// Used for current MIME inspection -/// -/// See NodeMime for recursive logic -pub struct SelectedMime<'a>(pub &'a AnyPart<'a>); -impl<'a> SelectedMime<'a> { - pub fn header_value(&'a self, to_match_ext: &[u8]) -> Option<&'a [u8]> { - let to_match = to_match_ext.to_ascii_lowercase(); - - self.eml_mime() - .kv - .iter() - .filter_map(|field| match field { - header::Field::Good(header::Kv2(k, v)) => Some((k, v)), - _ => None, - }) - .find(|(k, _)| k.to_ascii_lowercase() == to_match) - .map(|(_, v)| v) - .copied() - } - - /// The subsetted fetch section basically tells us the - /// extraction logic to apply on our selected MIME. - /// This function acts as a router for these logic. - fn extract(&self, extractor: &SubsettedSection<'a>) -> Result> { - match extractor { - SubsettedSection::Text => self.text(), - SubsettedSection::Header => self.header(), - SubsettedSection::HeaderFields(fields) => self.header_fields(fields, false), - SubsettedSection::HeaderFieldsNot(fields) => self.header_fields(fields, true), - SubsettedSection::Part => self.part(), - SubsettedSection::Mime => self.mime(), - } - } - - fn mime(&self) -> Result> { - let bytes = match &self.0 { - AnyPart::Txt(p) => p.mime.fields.raw, - AnyPart::Bin(p) => p.mime.fields.raw, - AnyPart::Msg(p) => p.child.mime().raw, - AnyPart::Mult(p) => p.mime.fields.raw, - }; - Ok(ExtractedFull(bytes.into())) - } - - fn part(&self) -> Result> { - let bytes = match &self.0 { - AnyPart::Txt(p) => p.body, - AnyPart::Bin(p) => p.body, - AnyPart::Msg(p) => p.raw_part, - AnyPart::Mult(_) => bail!("Multipart part has no body"), - }; - Ok(ExtractedFull(bytes.to_vec().into())) - } - - fn eml_mime(&self) -> &eml_codec::mime::NaiveMIME<'_> { - match &self.0 { - AnyPart::Msg(msg) => msg.child.mime(), - other => other.mime(), - } - } - - /// The [...] HEADER.FIELDS, and HEADER.FIELDS.NOT part - /// specifiers refer to the [RFC-2822] header of the message or of - /// an encapsulated [MIME-IMT] MESSAGE/RFC822 message. 
- /// HEADER.FIELDS and HEADER.FIELDS.NOT are followed by a list of - /// field-name (as defined in [RFC-2822]) names, and return a - /// subset of the header. The subset returned by HEADER.FIELDS - /// contains only those header fields with a field-name that - /// matches one of the names in the list; similarly, the subset - /// returned by HEADER.FIELDS.NOT contains only the header fields - /// with a non-matching field-name. The field-matching is - /// case-insensitive but otherwise exact. - fn header_fields( - &self, - fields: &'a Vec1>, - invert: bool, - ) -> Result> { - // Build a lowercase ascii hashset with the fields to fetch - let index = fields - .as_ref() - .iter() - .map(|x| { - match x { - AString::Atom(a) => a.inner().as_bytes(), - AString::String(IString::Literal(l)) => l.as_ref(), - AString::String(IString::Quoted(q)) => q.inner().as_bytes(), - } - .to_ascii_lowercase() - }) - .collect::>(); - - // Extract MIME headers - let mime = self.eml_mime(); - - // Filter our MIME headers based on the field index - // 1. Keep only the correctly formatted headers - // 2. Keep only based on the index presence or absence - // 3. Reduce as a byte vector - let buffer = mime - .kv - .iter() - .filter_map(|field| match field { - header::Field::Good(header::Kv2(k, v)) => Some((k, v)), - _ => None, - }) - .filter(|(k, _)| index.contains(&k.to_ascii_lowercase()) ^ invert) - .fold(vec![], |mut acc, (k, v)| { - acc.extend(*k); - acc.extend(b": "); - acc.extend(*v); - acc.extend(b"\r\n"); - acc - }); - - Ok(ExtractedFull(buffer.into())) - } - - /// The HEADER [...] part specifiers refer to the [RFC-2822] header of the message or of - /// an encapsulated [MIME-IMT] MESSAGE/RFC822 message. - /// ```raw - /// HEADER ([RFC-2822] header of the message) - /// ``` - fn header(&self) -> Result> { - let msg = self - .0 - .as_message() - .ok_or(anyhow!("Selected part must be a message/rfc822"))?; - Ok(ExtractedFull(msg.raw_headers.into())) - } - - /// The TEXT part specifier refers to the text body of the message, omitting the [RFC-2822] header. 
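// Editor's illustrative sketch, not part of the patch: the field filtering
// performed by header_fields() above, reduced to plain byte slices. With
// invert = false it models HEADER.FIELDS, with invert = true it models
// HEADER.FIELDS.NOT; matching is ASCII case-insensitive. Names are hypothetical.
fn filter_header_fields(kv: &[(&[u8], &[u8])], wanted: &[&[u8]], invert: bool) -> Vec<u8> {
    let index: Vec<Vec<u8>> = wanted.iter().map(|w| w.to_ascii_lowercase()).collect();
    kv.iter()
        .filter(|(k, _)| index.contains(&k.to_ascii_lowercase()) ^ invert)
        .fold(Vec::new(), |mut acc, (k, v)| {
            acc.extend_from_slice(k);
            acc.extend_from_slice(b": ");
            acc.extend_from_slice(v);
            acc.extend_from_slice(b"\r\n");
            acc
        })
}
// e.g. for kv = [(b"Subject", b"hello"), (b"From", b"a@a.a")]:
//   wanted = [b"subject"], invert = false  =>  "Subject: hello\r\n"
//   wanted = [b"subject"], invert = true   =>  "From: a@a.a\r\n"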
- fn text(&self) -> Result> { - let msg = self - .0 - .as_message() - .ok_or(anyhow!("Selected part must be a message/rfc822"))?; - Ok(ExtractedFull(msg.raw_body.into())) - } - - // ------------ - - /// Basic field of a MIME part that is - /// common to all parts - fn basic_fields(&self) -> Result> { - let sz = match self.0 { - AnyPart::Txt(x) => x.body.len(), - AnyPart::Bin(x) => x.body.len(), - AnyPart::Msg(x) => x.raw_part.len(), - AnyPart::Mult(_) => 0, - }; - let m = self.0.mime(); - let parameter_list = m - .ctype - .as_ref() - .map(|x| { - x.params - .iter() - .map(|p| { - ( - IString::try_from(String::from_utf8_lossy(p.name).to_string()), - IString::try_from(p.value.to_string()), - ) - }) - .filter(|(k, v)| k.is_ok() && v.is_ok()) - .map(|(k, v)| (k.unwrap(), v.unwrap())) - .collect() - }) - .unwrap_or(vec![]); - - Ok(BasicFields { - parameter_list, - id: NString( - m.id.as_ref() - .and_then(|ci| IString::try_from(ci.to_string()).ok()), - ), - description: NString( - m.description - .as_ref() - .and_then(|cd| IString::try_from(cd.to_string()).ok()), - ), - content_transfer_encoding: match m.transfer_encoding { - mime::mechanism::Mechanism::_8Bit => unchecked_istring("8bit"), - mime::mechanism::Mechanism::Binary => unchecked_istring("binary"), - mime::mechanism::Mechanism::QuotedPrintable => { - unchecked_istring("quoted-printable") - } - mime::mechanism::Mechanism::Base64 => unchecked_istring("base64"), - _ => unchecked_istring("7bit"), - }, - // @FIXME we can't compute the size of the message currently... - size: u32::try_from(sz)?, - }) - } -} - -// --------------------------- -struct NodeMsg<'a>(&'a NodeMime<'a>, &'a composite::Message<'a>); -impl<'a> NodeMsg<'a> { - fn structure(&self, is_ext: bool) -> Result> { - let basic = SelectedMime(self.0 .0).basic_fields()?; - - Ok(BodyStructure::Single { - body: FetchBody { - basic, - specific: SpecificFields::Message { - envelope: Box::new(ImfView(&self.1.imf).message_envelope()), - body_structure: Box::new(NodeMime(&self.1.child).structure(is_ext)?), - number_of_lines: nol(self.1.raw_part), - }, - }, - extension_data: match is_ext { - true => Some(SinglePartExtensionData { - md5: NString(None), - tail: None, - }), - _ => None, - }, - }) - } -} -struct NodeMult<'a>(&'a NodeMime<'a>, &'a composite::Multipart<'a>); -impl<'a> NodeMult<'a> { - fn structure(&self, is_ext: bool) -> Result> { - let itype = &self.1.mime.interpreted_type; - let subtype = IString::try_from(itype.subtype.to_string()) - .unwrap_or(unchecked_istring("alternative")); - - let inner_bodies = self - .1 - .children - .iter() - .filter_map(|inner| NodeMime(&inner).structure(is_ext).ok()) - .collect::>(); - - Vec1::validate(&inner_bodies)?; - let bodies = Vec1::unvalidated(inner_bodies); - - Ok(BodyStructure::Multi { - bodies, - subtype, - extension_data: match is_ext { - true => Some(MultiPartExtensionData { - parameter_list: vec![( - IString::try_from("boundary").unwrap(), - IString::try_from(self.1.mime.interpreted_type.boundary.to_string())?, - )], - tail: None, - }), - _ => None, - }, - }) - } -} -struct NodeTxt<'a>(&'a NodeMime<'a>, &'a discrete::Text<'a>); -impl<'a> NodeTxt<'a> { - fn structure(&self, is_ext: bool) -> Result> { - let mut basic = SelectedMime(self.0 .0).basic_fields()?; - - // Get the interpreted content type, set it - let itype = match &self.1.mime.interpreted_type { - Deductible::Inferred(v) | Deductible::Explicit(v) => v, - }; - let subtype = - IString::try_from(itype.subtype.to_string()).unwrap_or(unchecked_istring("plain")); - - // Add charset 
to the list of parameters if we know it has been inferred as it will be - // missing from the parsed content. - if let Deductible::Inferred(charset) = &itype.charset { - basic.parameter_list.push(( - unchecked_istring("charset"), - IString::try_from(charset.to_string()).unwrap_or(unchecked_istring("us-ascii")), - )); - } - - Ok(BodyStructure::Single { - body: FetchBody { - basic, - specific: SpecificFields::Text { - subtype, - number_of_lines: nol(self.1.body), - }, - }, - extension_data: match is_ext { - true => Some(SinglePartExtensionData { - md5: NString(None), - tail: None, - }), - _ => None, - }, - }) - } -} - -struct NodeBin<'a>(&'a NodeMime<'a>, &'a discrete::Binary<'a>); -impl<'a> NodeBin<'a> { - fn structure(&self, is_ext: bool) -> Result> { - let basic = SelectedMime(self.0 .0).basic_fields()?; - - let default = mime::r#type::NaiveType { - main: &b"application"[..], - sub: &b"octet-stream"[..], - params: vec![], - }; - let ct = self.1.mime.fields.ctype.as_ref().unwrap_or(&default); - - let r#type = IString::try_from(String::from_utf8_lossy(ct.main).to_string()).or(Err( - anyhow!("Unable to build IString from given Content-Type type given"), - ))?; - - let subtype = IString::try_from(String::from_utf8_lossy(ct.sub).to_string()).or(Err( - anyhow!("Unable to build IString from given Content-Type subtype given"), - ))?; - - Ok(BodyStructure::Single { - body: FetchBody { - basic, - specific: SpecificFields::Basic { r#type, subtype }, - }, - extension_data: match is_ext { - true => Some(SinglePartExtensionData { - md5: NString(None), - tail: None, - }), - _ => None, - }, - }) - } -} - -// --------------------------- - -struct ExtractedFull<'a>(Cow<'a, [u8]>); -impl<'a> ExtractedFull<'a> { - /// It is possible to fetch a substring of the designated text. - /// This is done by appending an open angle bracket ("<"), the - /// octet position of the first desired octet, a period, the - /// maximum number of octets desired, and a close angle bracket - /// (">") to the part specifier. If the starting octet is beyond - /// the end of the text, an empty string is returned. - /// - /// Any partial fetch that attempts to read beyond the end of the - /// text is truncated as appropriate. A partial fetch that starts - /// at octet 0 is returned as a partial fetch, even if this - /// truncation happened. - /// - /// Note: This means that BODY[]<0.2048> of a 1500-octet message - /// will return BODY[]<0> with a literal of size 1500, not - /// BODY[]. - /// - /// Note: A substring fetch of a HEADER.FIELDS or - /// HEADER.FIELDS.NOT part specifier is calculated after - /// subsetting the header. 
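// Editor's illustrative sketch, not part of the patch: the <begin.len>
// partial-fetch arithmetic described above, on a plain byte slice.
// partialize() below implements the same three cases on Cow<[u8]> and also
// records the origin octet. The helper name is hypothetical.
fn partial_slice(body: &[u8], begin: u32, len: std::num::NonZeroU32) -> &[u8] {
    let begin = begin as usize;
    if begin > body.len() {
        return &[]; // start beyond the end of the text: empty string
    }
    // silently truncate if the requested range runs past the end
    let end = body.len().min(begin.saturating_add(len.get() as usize));
    &body[begin..end]
}
// e.g. BODY[]<0.2048> of a 1500-octet body yields all 1500 octets, still
// reported as the partial response BODY[]<0>.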
- fn to_body_section(self, partial: &'_ Option<(u32, NonZeroU32)>) -> BodySection<'a> { - match partial { - Some((begin, len)) => self.partialize(*begin, *len), - None => BodySection::Full(self.0), - } - } - - fn partialize(self, begin: u32, len: NonZeroU32) -> BodySection<'a> { - // Asked range is starting after the end of the content, - // returning an empty buffer - if begin as usize > self.0.len() { - return BodySection::Slice { - body: Cow::Borrowed(&[][..]), - origin_octet: begin, - }; - } - - // Asked range is ending after the end of the content, - // slice only the beginning of the buffer - if (begin + len.get()) as usize >= self.0.len() { - return BodySection::Slice { - body: match self.0 { - Cow::Borrowed(body) => Cow::Borrowed(&body[begin as usize..]), - Cow::Owned(body) => Cow::Owned(body[begin as usize..].to_vec()), - }, - origin_octet: begin, - }; - } - - // Range is included inside the considered content, - // this is the "happy case" - BodySection::Slice { - body: match self.0 { - Cow::Borrowed(body) => { - Cow::Borrowed(&body[begin as usize..(begin + len.get()) as usize]) - } - Cow::Owned(body) => { - Cow::Owned(body[begin as usize..(begin + len.get()) as usize].to_vec()) - } - }, - origin_octet: begin, - } - } -} - -/// ---- LEGACY - -/// s is set to static to ensure that only compile time values -/// checked by developpers are passed. -fn unchecked_istring(s: &'static str) -> IString { - IString::try_from(s).expect("this value is expected to be a valid imap-codec::IString") -} - -// Number Of Lines -fn nol(input: &[u8]) -> u32 { - input - .iter() - .filter(|x| **x == b'\n') - .count() - .try_into() - .unwrap_or(0) -} diff --git a/aero-proto/imap/mod.rs b/aero-proto/imap/mod.rs deleted file mode 100644 index 02ab9ce..0000000 --- a/aero-proto/imap/mod.rs +++ /dev/null @@ -1,421 +0,0 @@ -mod attributes; -mod capability; -mod command; -mod flags; -mod flow; -mod imf_view; -mod index; -mod mail_view; -mod mailbox_view; -mod mime_view; -mod request; -mod response; -mod search; -mod session; - -use std::net::SocketAddr; - -use anyhow::{anyhow, bail, Context, Result}; -use futures::stream::{FuturesUnordered, StreamExt}; - -use tokio::net::TcpListener; -use tokio::sync::mpsc; -use tokio::sync::watch; - -use imap_codec::imap_types::response::{Code, CommandContinuationRequest, Response, Status}; -use imap_codec::imap_types::{core::Text, response::Greeting}; -use imap_flow::server::{ServerFlow, ServerFlowEvent, ServerFlowOptions}; -use imap_flow::stream::AnyStream; -use rustls_pemfile::{certs, private_key}; -use tokio_rustls::TlsAcceptor; - -use crate::config::{ImapConfig, ImapUnsecureConfig}; -use crate::imap::capability::ServerCapability; -use crate::imap::request::Request; -use crate::imap::response::{Body, ResponseOrIdle}; -use crate::imap::session::Instance; -use crate::login::ArcLoginProvider; - -/// Server is a thin wrapper to register our Services in BàL -pub struct Server { - bind_addr: SocketAddr, - login_provider: ArcLoginProvider, - capabilities: ServerCapability, - tls: Option, -} - -#[derive(Clone)] -struct ClientContext { - addr: SocketAddr, - login_provider: ArcLoginProvider, - must_exit: watch::Receiver, - server_capabilities: ServerCapability, -} - -pub fn new(config: ImapConfig, login: ArcLoginProvider) -> Result { - let loaded_certs = certs(&mut std::io::BufReader::new(std::fs::File::open( - config.certs, - )?)) - .collect::, _>>()?; - let loaded_key = private_key(&mut std::io::BufReader::new(std::fs::File::open( - config.key, - )?))? 
- .unwrap(); - - let tls_config = rustls::ServerConfig::builder() - .with_no_client_auth() - .with_single_cert(loaded_certs, loaded_key)?; - let acceptor = TlsAcceptor::from(Arc::new(tls_config)); - - Ok(Server { - bind_addr: config.bind_addr, - login_provider: login, - capabilities: ServerCapability::default(), - tls: Some(acceptor), - }) -} - -pub fn new_unsecure(config: ImapUnsecureConfig, login: ArcLoginProvider) -> Server { - Server { - bind_addr: config.bind_addr, - login_provider: login, - capabilities: ServerCapability::default(), - tls: None, - } -} - -impl Server { - pub async fn run(self: Self, mut must_exit: watch::Receiver) -> Result<()> { - let tcp = TcpListener::bind(self.bind_addr).await?; - tracing::info!("IMAP server listening on {:#}", self.bind_addr); - - let mut connections = FuturesUnordered::new(); - - while !*must_exit.borrow() { - let wait_conn_finished = async { - if connections.is_empty() { - futures::future::pending().await - } else { - connections.next().await - } - }; - let (socket, remote_addr) = tokio::select! { - a = tcp.accept() => a?, - _ = wait_conn_finished => continue, - _ = must_exit.changed() => continue, - }; - tracing::info!("IMAP: accepted connection from {}", remote_addr); - let stream = match self.tls.clone() { - Some(acceptor) => { - let stream = match acceptor.accept(socket).await { - Ok(v) => v, - Err(e) => { - tracing::error!(err=?e, "TLS negociation failed"); - continue; - } - }; - AnyStream::new(stream) - } - None => AnyStream::new(socket), - }; - - let client = ClientContext { - addr: remote_addr.clone(), - login_provider: self.login_provider.clone(), - must_exit: must_exit.clone(), - server_capabilities: self.capabilities.clone(), - }; - let conn = tokio::spawn(NetLoop::handler(client, stream)); - connections.push(conn); - } - drop(tcp); - - tracing::info!("IMAP server shutting down, draining remaining connections..."); - while connections.next().await.is_some() {} - - Ok(()) - } -} - -use std::sync::Arc; -use tokio::sync::mpsc::*; -use tokio::sync::Notify; -use tokio_util::bytes::BytesMut; - -const PIPELINABLE_COMMANDS: usize = 64; - -// @FIXME a full refactor of this part of the code will be needed sooner or later -struct NetLoop { - ctx: ClientContext, - server: ServerFlow, - cmd_tx: Sender, - resp_rx: UnboundedReceiver, -} - -impl NetLoop { - async fn handler(ctx: ClientContext, sock: AnyStream) { - let addr = ctx.addr.clone(); - - let mut nl = match Self::new(ctx, sock).await { - Ok(nl) => { - tracing::debug!(addr=?addr, "netloop successfully initialized"); - nl - } - Err(e) => { - tracing::error!(addr=?addr, err=?e, "netloop can not be initialized, closing session"); - return; - } - }; - - match nl.core().await { - Ok(()) => { - tracing::debug!("closing successful netloop core for {:?}", addr); - } - Err(e) => { - tracing::error!("closing errored netloop core for {:?}: {}", addr, e); - } - } - } - - async fn new(ctx: ClientContext, sock: AnyStream) -> Result { - let mut opts = ServerFlowOptions::default(); - opts.crlf_relaxed = false; - opts.literal_accept_text = Text::unvalidated("OK"); - opts.literal_reject_text = Text::unvalidated("Literal rejected"); - - // Send greeting - let (server, _) = ServerFlow::send_greeting( - sock, - opts, - Greeting::ok( - Some(Code::Capability(ctx.server_capabilities.to_vec())), - "Aerogramme", - ) - .unwrap(), - ) - .await?; - - // Start a mailbox session in background - let (cmd_tx, cmd_rx) = mpsc::channel::(PIPELINABLE_COMMANDS); - let (resp_tx, resp_rx) = mpsc::unbounded_channel::(); - 
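// Editor's note, not part of the patch: the command channel created above is
// bounded (PIPELINABLE_COMMANDS = 64) so that a client pipelining commands
// faster than the session task can process them gets back-pressure; core()
// below reacts to TrySendError::Full by enqueueing a BYE ("Too fast") and
// closing the connection. The response channel is unbounded so the session
// task never has to block on a slow client when publishing its responses.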
tokio::spawn(Self::session(ctx.clone(), cmd_rx, resp_tx)); - - // Return the object - Ok(NetLoop { - ctx, - server, - cmd_tx, - resp_rx, - }) - } - - /// Coms with the background session - async fn session( - ctx: ClientContext, - mut cmd_rx: Receiver, - resp_tx: UnboundedSender, - ) -> () { - let mut session = Instance::new(ctx.login_provider, ctx.server_capabilities); - loop { - let cmd = match cmd_rx.recv().await { - None => break, - Some(cmd_recv) => cmd_recv, - }; - - tracing::debug!(cmd=?cmd, sock=%ctx.addr, "command"); - let maybe_response = session.request(cmd).await; - tracing::debug!(cmd=?maybe_response, sock=%ctx.addr, "response"); - - match resp_tx.send(maybe_response) { - Err(_) => break, - Ok(_) => (), - }; - } - tracing::info!("runner is quitting"); - } - - async fn core(&mut self) -> Result<()> { - let mut maybe_idle: Option> = None; - loop { - tokio::select! { - // Managing imap_flow stuff - srv_evt = self.server.progress() => match srv_evt? { - ServerFlowEvent::ResponseSent { handle: _handle, response } => { - match response { - Response::Status(Status::Bye(_)) => return Ok(()), - _ => tracing::trace!("sent to {} content {:?}", self.ctx.addr, response), - } - }, - ServerFlowEvent::CommandReceived { command } => { - match self.cmd_tx.try_send(Request::ImapCommand(command)) { - Ok(_) => (), - Err(mpsc::error::TrySendError::Full(_)) => { - self.server.enqueue_status(Status::bye(None, "Too fast").unwrap()); - tracing::error!("client {:?} is sending commands too fast, closing.", self.ctx.addr); - } - _ => { - self.server.enqueue_status(Status::bye(None, "Internal session exited").unwrap()); - tracing::error!("session task exited for {:?}, quitting", self.ctx.addr); - } - } - }, - ServerFlowEvent::IdleCommandReceived { tag } => { - match self.cmd_tx.try_send(Request::IdleStart(tag)) { - Ok(_) => (), - Err(mpsc::error::TrySendError::Full(_)) => { - self.server.enqueue_status(Status::bye(None, "Too fast").unwrap()); - tracing::error!("client {:?} is sending commands too fast, closing.", self.ctx.addr); - } - _ => { - self.server.enqueue_status(Status::bye(None, "Internal session exited").unwrap()); - tracing::error!("session task exited for {:?}, quitting", self.ctx.addr); - } - } - } - ServerFlowEvent::IdleDoneReceived => { - tracing::trace!("client sent DONE and want to stop IDLE"); - maybe_idle.ok_or(anyhow!("Received IDLE done but not idling currently"))?.notify_one(); - maybe_idle = None; - } - flow => { - self.server.enqueue_status(Status::bye(None, "Unsupported server flow event").unwrap()); - tracing::error!("session task exited for {:?} due to unsupported flow {:?}", self.ctx.addr, flow); - } - }, - - // Managing response generated by Aerogramme - maybe_msg = self.resp_rx.recv() => match maybe_msg { - Some(ResponseOrIdle::Response(response)) => { - tracing::trace!("Interactive, server has a response for the client"); - for body_elem in response.body.into_iter() { - let _handle = match body_elem { - Body::Data(d) => self.server.enqueue_data(d), - Body::Status(s) => self.server.enqueue_status(s), - }; - } - self.server.enqueue_status(response.completion); - }, - Some(ResponseOrIdle::IdleAccept(stop)) => { - tracing::trace!("Interactive, server agreed to switch in idle mode"); - let cr = CommandContinuationRequest::basic(None, "Idling")?; - self.server.idle_accept(cr).or(Err(anyhow!("refused continuation for idle accept")))?; - self.cmd_tx.try_send(Request::IdlePoll)?; - if maybe_idle.is_some() { - bail!("Can't start IDLE if already idling"); - } - maybe_idle = Some(stop); 
- }, - Some(ResponseOrIdle::IdleEvent(elems)) => { - tracing::trace!("server imap session has some change to communicate to the client"); - for body_elem in elems.into_iter() { - let _handle = match body_elem { - Body::Data(d) => self.server.enqueue_data(d), - Body::Status(s) => self.server.enqueue_status(s), - }; - } - self.cmd_tx.try_send(Request::IdlePoll)?; - }, - Some(ResponseOrIdle::IdleReject(response)) => { - tracing::trace!("inform client that session rejected idle"); - self.server - .idle_reject(response.completion) - .or(Err(anyhow!("wrong reject command")))?; - }, - None => { - self.server.enqueue_status(Status::bye(None, "Internal session exited").unwrap()); - tracing::error!("session task exited for {:?}, quitting", self.ctx.addr); - }, - Some(_) => unreachable!(), - - }, - - // When receiving a CTRL+C - _ = self.ctx.must_exit.changed() => { - tracing::trace!("Interactive, CTRL+C, exiting"); - self.server.enqueue_status(Status::bye(None, "Server is being shutdown").unwrap()); - }, - }; - } - } - - /* - async fn idle_mode(&mut self, mut buff: BytesMut, stop: Arc) -> Result { - // Flush send - loop { - tracing::trace!("flush server send"); - match self.server.progress_send().await? { - Some(..) => continue, - None => break, - } - } - - tokio::select! { - // Receiving IDLE event from background - maybe_msg = self.resp_rx.recv() => match maybe_msg { - // Session decided idle is terminated - Some(ResponseOrIdle::Response(response)) => { - tracing::trace!("server imap session said idle is done, sending response done, switching to interactive"); - for body_elem in response.body.into_iter() { - let _handle = match body_elem { - Body::Data(d) => self.server.enqueue_data(d), - Body::Status(s) => self.server.enqueue_status(s), - }; - } - self.server.enqueue_status(response.completion); - return Ok(LoopMode::Interactive) - }, - // Session has some information for user - Some(ResponseOrIdle::IdleEvent(elems)) => { - tracing::trace!("server imap session has some change to communicate to the client"); - for body_elem in elems.into_iter() { - let _handle = match body_elem { - Body::Data(d) => self.server.enqueue_data(d), - Body::Status(s) => self.server.enqueue_status(s), - }; - } - self.cmd_tx.try_send(Request::Idle)?; - return Ok(LoopMode::Idle(buff, stop)) - }, - - // Session crashed - None => { - self.server.enqueue_status(Status::bye(None, "Internal session exited").unwrap()); - tracing::error!("session task exited for {:?}, quitting", self.ctx.addr); - return Ok(LoopMode::Interactive) - }, - - // Session can't start idling while already idling, it's a logic error! - Some(ResponseOrIdle::StartIdle(..)) => bail!("can't start idling while already idling!"), - }, - - // User is trying to interact with us - read_client_result = self.server.stream.read(&mut buff) => { - let _bytes_read = read_client_result?; - use imap_codec::decode::Decoder; - let codec = imap_codec::IdleDoneCodec::new(); - tracing::trace!("client sent some data for the server IMAP session"); - match codec.decode(&buff) { - Ok(([], imap_codec::imap_types::extensions::idle::IdleDone)) => { - // Session will be informed that it must stop idle - // It will generate the "done" message and change the loop mode - tracing::trace!("client sent DONE and want to stop IDLE"); - stop.notify_one() - }, - Err(_) => { - tracing::trace!("Unable to decode DONE, maybe not enough data were sent?"); - }, - _ => bail!("Client sent data after terminating the continuation without waiting for the server. 
This is an unsupported behavior and bug in Aerogramme, quitting."), - }; - - return Ok(LoopMode::Idle(buff, stop)) - }, - - // When receiving a CTRL+C - _ = self.ctx.must_exit.changed() => { - tracing::trace!("CTRL+C sent, aborting IDLE for this session"); - self.server.enqueue_status(Status::bye(None, "Server is being shutdown").unwrap()); - return Ok(LoopMode::Interactive) - }, - }; - }*/ -} diff --git a/aero-proto/imap/request.rs b/aero-proto/imap/request.rs deleted file mode 100644 index cff18a3..0000000 --- a/aero-proto/imap/request.rs +++ /dev/null @@ -1,9 +0,0 @@ -use imap_codec::imap_types::command::Command; -use imap_codec::imap_types::core::Tag; - -#[derive(Debug)] -pub enum Request { - ImapCommand(Command<'static>), - IdleStart(Tag<'static>), - IdlePoll, -} diff --git a/aero-proto/imap/response.rs b/aero-proto/imap/response.rs deleted file mode 100644 index b6a0e98..0000000 --- a/aero-proto/imap/response.rs +++ /dev/null @@ -1,124 +0,0 @@ -use anyhow::Result; -use imap_codec::imap_types::command::Command; -use imap_codec::imap_types::core::Tag; -use imap_codec::imap_types::response::{Code, Data, Status}; -use std::sync::Arc; -use tokio::sync::Notify; - -#[derive(Debug)] -pub enum Body<'a> { - Data(Data<'a>), - Status(Status<'a>), -} - -pub struct ResponseBuilder<'a> { - tag: Option>, - code: Option>, - text: String, - body: Vec>, -} - -impl<'a> ResponseBuilder<'a> { - pub fn to_req(mut self, cmd: &Command<'a>) -> Self { - self.tag = Some(cmd.tag.clone()); - self - } - pub fn tag(mut self, tag: Tag<'a>) -> Self { - self.tag = Some(tag); - self - } - - pub fn message(mut self, txt: impl Into) -> Self { - self.text = txt.into(); - self - } - - pub fn code(mut self, code: Code<'a>) -> Self { - self.code = Some(code); - self - } - - pub fn data(mut self, data: Data<'a>) -> Self { - self.body.push(Body::Data(data)); - self - } - - pub fn many_data(mut self, data: Vec>) -> Self { - for d in data.into_iter() { - self = self.data(d); - } - self - } - - #[allow(dead_code)] - pub fn info(mut self, status: Status<'a>) -> Self { - self.body.push(Body::Status(status)); - self - } - - #[allow(dead_code)] - pub fn many_info(mut self, status: Vec>) -> Self { - for d in status.into_iter() { - self = self.info(d); - } - self - } - - pub fn set_body(mut self, body: Vec>) -> Self { - self.body = body; - self - } - - pub fn ok(self) -> Result> { - Ok(Response { - completion: Status::ok(self.tag, self.code, self.text)?, - body: self.body, - }) - } - - pub fn no(self) -> Result> { - Ok(Response { - completion: Status::no(self.tag, self.code, self.text)?, - body: self.body, - }) - } - - pub fn bad(self) -> Result> { - Ok(Response { - completion: Status::bad(self.tag, self.code, self.text)?, - body: self.body, - }) - } -} - -#[derive(Debug)] -pub struct Response<'a> { - pub body: Vec>, - pub completion: Status<'a>, -} - -impl<'a> Response<'a> { - pub fn build() -> ResponseBuilder<'a> { - ResponseBuilder { - tag: None, - code: None, - text: "".to_string(), - body: vec![], - } - } - - pub fn bye() -> Result> { - Ok(Response { - completion: Status::bye(None, "bye")?, - body: vec![], - }) - } -} - -#[derive(Debug)] -pub enum ResponseOrIdle { - Response(Response<'static>), - IdleAccept(Arc), - IdleReject(Response<'static>), - IdleEvent(Vec>), -} diff --git a/aero-proto/imap/search.rs b/aero-proto/imap/search.rs deleted file mode 100644 index 37a7e9e..0000000 --- a/aero-proto/imap/search.rs +++ /dev/null @@ -1,477 +0,0 @@ -use std::num::{NonZeroU32, NonZeroU64}; - -use imap_codec::imap_types::core::Vec1; -use 
imap_codec::imap_types::search::{MetadataItemSearch, SearchKey}; -use imap_codec::imap_types::sequence::{SeqOrUid, Sequence, SequenceSet}; - -use crate::imap::index::MailIndex; -use crate::imap::mail_view::MailView; -use crate::mail::query::QueryScope; - -pub enum SeqType { - Undefined, - NonUid, - Uid, -} -impl SeqType { - pub fn is_uid(&self) -> bool { - matches!(self, Self::Uid) - } -} - -pub struct Criteria<'a>(pub &'a SearchKey<'a>); -impl<'a> Criteria<'a> { - /// Returns a set of email identifiers that is greater or equal - /// to the set of emails to return - pub fn to_sequence_set(&self) -> (SequenceSet, SeqType) { - match self.0 { - SearchKey::All => (sequence_set_all(), SeqType::Undefined), - SearchKey::SequenceSet(seq_set) => (seq_set.clone(), SeqType::NonUid), - SearchKey::Uid(seq_set) => (seq_set.clone(), SeqType::Uid), - SearchKey::Not(_inner) => { - tracing::debug!( - "using NOT in a search request is slow: it selects all identifiers" - ); - (sequence_set_all(), SeqType::Undefined) - } - SearchKey::Or(left, right) => { - tracing::debug!("using OR in a search request is slow: no deduplication is done"); - let (base, base_seqtype) = Self(&left).to_sequence_set(); - let (ext, ext_seqtype) = Self(&right).to_sequence_set(); - - // Check if we have a UID/ID conflict in fetching: now we don't know how to handle them - match (base_seqtype, ext_seqtype) { - (SeqType::Uid, SeqType::NonUid) | (SeqType::NonUid, SeqType::Uid) => { - (sequence_set_all(), SeqType::Undefined) - } - (SeqType::Undefined, x) | (x, _) => { - let mut new_vec = base.0.into_inner(); - new_vec.extend_from_slice(ext.0.as_ref()); - let seq = SequenceSet( - Vec1::try_from(new_vec) - .expect("merging non empty vec lead to non empty vec"), - ); - (seq, x) - } - } - } - SearchKey::And(search_list) => { - tracing::debug!( - "using AND in a search request is slow: no intersection is performed" - ); - // As we perform no intersection, we don't care if we mix uid or id. - // We only keep the smallest range, being it ID or UID, depending of - // which one has the less items. This is an approximation as UID ranges - // can have holes while ID ones can't. - search_list - .as_ref() - .iter() - .map(|crit| Self(&crit).to_sequence_set()) - .min_by(|(x, _), (y, _)| { - let x_size = approx_sequence_set_size(x); - let y_size = approx_sequence_set_size(y); - x_size.cmp(&y_size) - }) - .unwrap_or((sequence_set_all(), SeqType::Undefined)) - } - _ => (sequence_set_all(), SeqType::Undefined), - } - } - - /// Not really clever as we can have cases where we filter out - /// the email before needing to inspect its meta. - /// But for now we are seeking the most basic/stupid algorithm. - pub fn query_scope(&self) -> QueryScope { - use SearchKey::*; - match self.0 { - // Combinators - And(and_list) => and_list - .as_ref() - .iter() - .fold(QueryScope::Index, |prev, sk| { - prev.union(&Criteria(sk).query_scope()) - }), - Not(inner) => Criteria(inner).query_scope(), - Or(left, right) => Criteria(left) - .query_scope() - .union(&Criteria(right).query_scope()), - All => QueryScope::Index, - - // IMF Headers - Bcc(_) | Cc(_) | From(_) | Header(..) | SentBefore(_) | SentOn(_) | SentSince(_) - | Subject(_) | To(_) => QueryScope::Partial, - // Internal Date is also stored in MailMeta - Before(_) | On(_) | Since(_) => QueryScope::Partial, - // Message size is also stored in MailMeta - Larger(_) | Smaller(_) => QueryScope::Partial, - // Text and Body require that we fetch the full content! 
-            Text(_) | Body(_) => QueryScope::Full,
-
-            _ => QueryScope::Index,
-        }
-    }
-
-    pub fn is_modseq(&self) -> bool {
-        use SearchKey::*;
-        match self.0 {
-            And(and_list) => and_list
-                .as_ref()
-                .iter()
-                .any(|child| Criteria(child).is_modseq()),
-            Or(left, right) => Criteria(left).is_modseq() || Criteria(right).is_modseq(),
-            Not(child) => Criteria(child).is_modseq(),
-            ModSeq { .. } => true,
-            _ => false,
-        }
-    }
-
-    /// Returns emails that we know for sure we want to keep
-    /// but also a second list of emails we need to investigate further by
-    /// fetching some remote data
-    pub fn filter_on_idx<'b>(
-        &self,
-        midx_list: &[&'b MailIndex<'b>],
-    ) -> (Vec<&'b MailIndex<'b>>, Vec<&'b MailIndex<'b>>) {
-        let (p1, p2): (Vec<_>, Vec<_>) = midx_list
-            .iter()
-            .map(|x| (x, self.is_keep_on_idx(x)))
-            .filter(|(_midx, decision)| decision.is_keep())
-            .map(|(midx, decision)| (*midx, decision))
-            .partition(|(_midx, decision)| matches!(decision, PartialDecision::Keep));
-
-        let to_keep = p1.into_iter().map(|(v, _)| v).collect();
-        let to_fetch = p2.into_iter().map(|(v, _)| v).collect();
-        (to_keep, to_fetch)
-    }
-
-    // ----
-
-    /// Here we are doing a partial filtering: we do not have access
-    /// to the headers or to the body, so every time we encounter a rule
-    /// based on them, we need to keep it.
-    ///
-    /// @TODO Could be optimized on a per-email basis by also returning the QueryScope
-    /// when more information is needed!
-    fn is_keep_on_idx(&self, midx: &MailIndex) -> PartialDecision {
-        use SearchKey::*;
-        match self.0 {
-            // Combinator logic
-            And(expr_list) => expr_list
-                .as_ref()
-                .iter()
-                .fold(PartialDecision::Keep, |acc, cur| {
-                    acc.and(&Criteria(cur).is_keep_on_idx(midx))
-                }),
-            Or(left, right) => {
-                let left_decision = Criteria(left).is_keep_on_idx(midx);
-                let right_decision = Criteria(right).is_keep_on_idx(midx);
-                left_decision.or(&right_decision)
-            }
-            Not(expr) => Criteria(expr).is_keep_on_idx(midx).not(),
-            All => PartialDecision::Keep,
-
-            // Sequence logic
-            maybe_seq if is_sk_seq(maybe_seq) => is_keep_seq(maybe_seq, midx).into(),
-            maybe_flag if is_sk_flag(maybe_flag) => is_keep_flag(maybe_flag, midx).into(),
-            ModSeq {
-                metadata_item,
-                modseq,
-            } => is_keep_modseq(metadata_item, modseq, midx).into(),
-
-            // All the stuff we can't evaluate yet
-            Bcc(_) | Cc(_) | From(_) | Header(..) | SentBefore(_) | SentOn(_) | SentSince(_)
-            | Subject(_) | To(_) | Before(_) | On(_) | Since(_) | Larger(_) | Smaller(_)
-            | Text(_) | Body(_) => PartialDecision::Postpone,
-
-            unknown => {
-                tracing::error!("Unknown filter {:?}", unknown);
-                PartialDecision::Discard
-            }
-        }
-    }
-
-    /// @TODO we re-evaluate the same logic twice. The correct way would be, on each pass,
-    /// to simplify the search query by removing the elements that were already checked.
-    /// For example if we have AND(OR(seqid(X), body(Y)), body(X)), we can't keep the email
-    /// for sure, as body(x) might be false. So we need to check it. But as seqid(x) is true,
-    /// we could simplify the request to just body(x) and truncate the first OR. Today, we are
-    /// not doing that, and thus we re-evaluate everything.
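// Editor's illustrative walk-through, not part of the patch: how the two
// passes compose for `SEARCH FLAGGED BODY "invoice"` (an implicit AND).
// Index pass, is_keep_on_idx():
//   FLAGGED        -> Keep or Discard, since flags live in the index
//   BODY "invoice" -> Postpone, since the content is not fetched yet
//   AND            -> Keep.and(Postpone) = Postpone, so flagged mails land in
//                     the `to_fetch` list while unflagged ones are discarded.
// Query pass, is_keep_on_query() below: the body is now available, both
// criteria reduce to plain booleans, and the whole AND is checked again,
// including FLAGGED, which is the duplicated work the comment above points out.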
- pub fn is_keep_on_query(&self, mail_view: &MailView) -> bool { - use SearchKey::*; - match self.0 { - // Combinator logic - And(expr_list) => expr_list - .as_ref() - .iter() - .all(|cur| Criteria(cur).is_keep_on_query(mail_view)), - Or(left, right) => { - Criteria(left).is_keep_on_query(mail_view) - || Criteria(right).is_keep_on_query(mail_view) - } - Not(expr) => !Criteria(expr).is_keep_on_query(mail_view), - All => true, - - //@FIXME Reevaluating our previous logic... - maybe_seq if is_sk_seq(maybe_seq) => is_keep_seq(maybe_seq, &mail_view.in_idx), - maybe_flag if is_sk_flag(maybe_flag) => is_keep_flag(maybe_flag, &mail_view.in_idx), - ModSeq { - metadata_item, - modseq, - } => is_keep_modseq(metadata_item, modseq, &mail_view.in_idx).into(), - - // Filter on mail meta - Before(search_naive) => match mail_view.stored_naive_date() { - Ok(msg_naive) => &msg_naive < search_naive.as_ref(), - _ => false, - }, - On(search_naive) => match mail_view.stored_naive_date() { - Ok(msg_naive) => &msg_naive == search_naive.as_ref(), - _ => false, - }, - Since(search_naive) => match mail_view.stored_naive_date() { - Ok(msg_naive) => &msg_naive > search_naive.as_ref(), - _ => false, - }, - - // Message size is also stored in MailMeta - Larger(size_ref) => { - mail_view - .query_result - .metadata() - .expect("metadata were fetched") - .rfc822_size - > *size_ref as usize - } - Smaller(size_ref) => { - mail_view - .query_result - .metadata() - .expect("metadata were fetched") - .rfc822_size - < *size_ref as usize - } - - // Filter on well-known headers - Bcc(txt) => mail_view.is_header_contains_pattern(&b"bcc"[..], txt.as_ref()), - Cc(txt) => mail_view.is_header_contains_pattern(&b"cc"[..], txt.as_ref()), - From(txt) => mail_view.is_header_contains_pattern(&b"from"[..], txt.as_ref()), - Subject(txt) => mail_view.is_header_contains_pattern(&b"subject"[..], txt.as_ref()), - To(txt) => mail_view.is_header_contains_pattern(&b"to"[..], txt.as_ref()), - Header(hdr, txt) => mail_view.is_header_contains_pattern(hdr.as_ref(), txt.as_ref()), - - // Filter on Date header - SentBefore(search_naive) => mail_view - .imf() - .map(|imf| imf.naive_date().ok()) - .flatten() - .map(|msg_naive| &msg_naive < search_naive.as_ref()) - .unwrap_or(false), - SentOn(search_naive) => mail_view - .imf() - .map(|imf| imf.naive_date().ok()) - .flatten() - .map(|msg_naive| &msg_naive == search_naive.as_ref()) - .unwrap_or(false), - SentSince(search_naive) => mail_view - .imf() - .map(|imf| imf.naive_date().ok()) - .flatten() - .map(|msg_naive| &msg_naive > search_naive.as_ref()) - .unwrap_or(false), - - // Filter on the full content of the email - Text(txt) => mail_view - .content - .as_msg() - .map(|msg| { - msg.raw_part - .windows(txt.as_ref().len()) - .any(|win| win == txt.as_ref()) - }) - .unwrap_or(false), - Body(txt) => mail_view - .content - .as_msg() - .map(|msg| { - msg.raw_body - .windows(txt.as_ref().len()) - .any(|win| win == txt.as_ref()) - }) - .unwrap_or(false), - - unknown => { - tracing::error!("Unknown filter {:?}", unknown); - false - } - } - } -} - -// ---- Sequence things ---- -fn sequence_set_all() -> SequenceSet { - SequenceSet::from(Sequence::Range( - SeqOrUid::Value(NonZeroU32::MIN), - SeqOrUid::Asterisk, - )) -} - -// This is wrong as sequences can overlap -fn approx_sequence_set_size(seq_set: &SequenceSet) -> u64 { - seq_set.0.as_ref().iter().fold(0u64, |acc, seq| { - acc.saturating_add(approx_sequence_size(seq)) - }) -} - -// This is wrong as sequence UID can have holes, -// as we don't know the number of 
messages in the mailbox also -// we gave to guess -fn approx_sequence_size(seq: &Sequence) -> u64 { - match seq { - Sequence::Single(_) => 1, - Sequence::Range(SeqOrUid::Asterisk, _) | Sequence::Range(_, SeqOrUid::Asterisk) => u64::MAX, - Sequence::Range(SeqOrUid::Value(x1), SeqOrUid::Value(x2)) => { - let x2 = x2.get() as i64; - let x1 = x1.get() as i64; - (x2 - x1).abs().try_into().unwrap_or(1) - } - } -} - -// --- Partial decision things ---- - -enum PartialDecision { - Keep, - Discard, - Postpone, -} -impl From for PartialDecision { - fn from(x: bool) -> Self { - match x { - true => PartialDecision::Keep, - _ => PartialDecision::Discard, - } - } -} -impl PartialDecision { - fn not(&self) -> Self { - match self { - Self::Keep => Self::Discard, - Self::Discard => Self::Keep, - Self::Postpone => Self::Postpone, - } - } - - fn or(&self, other: &Self) -> Self { - match (self, other) { - (Self::Keep, _) | (_, Self::Keep) => Self::Keep, - (Self::Postpone, _) | (_, Self::Postpone) => Self::Postpone, - (Self::Discard, Self::Discard) => Self::Discard, - } - } - - fn and(&self, other: &Self) -> Self { - match (self, other) { - (Self::Discard, _) | (_, Self::Discard) => Self::Discard, - (Self::Postpone, _) | (_, Self::Postpone) => Self::Postpone, - (Self::Keep, Self::Keep) => Self::Keep, - } - } - - fn is_keep(&self) -> bool { - !matches!(self, Self::Discard) - } -} - -// ----- Search Key things --- -fn is_sk_flag(sk: &SearchKey) -> bool { - use SearchKey::*; - match sk { - Answered | Deleted | Draft | Flagged | Keyword(..) | New | Old | Recent | Seen - | Unanswered | Undeleted | Undraft | Unflagged | Unkeyword(..) | Unseen => true, - _ => false, - } -} - -fn is_keep_flag(sk: &SearchKey, midx: &MailIndex) -> bool { - use SearchKey::*; - match sk { - Answered => midx.is_flag_set("\\Answered"), - Deleted => midx.is_flag_set("\\Deleted"), - Draft => midx.is_flag_set("\\Draft"), - Flagged => midx.is_flag_set("\\Flagged"), - Keyword(kw) => midx.is_flag_set(kw.inner()), - New => { - let is_recent = midx.is_flag_set("\\Recent"); - let is_seen = midx.is_flag_set("\\Seen"); - is_recent && !is_seen - } - Old => { - let is_recent = midx.is_flag_set("\\Recent"); - !is_recent - } - Recent => midx.is_flag_set("\\Recent"), - Seen => midx.is_flag_set("\\Seen"), - Unanswered => { - let is_answered = midx.is_flag_set("\\Recent"); - !is_answered - } - Undeleted => { - let is_deleted = midx.is_flag_set("\\Deleted"); - !is_deleted - } - Undraft => { - let is_draft = midx.is_flag_set("\\Draft"); - !is_draft - } - Unflagged => { - let is_flagged = midx.is_flag_set("\\Flagged"); - !is_flagged - } - Unkeyword(kw) => { - let is_keyword_set = midx.is_flag_set(kw.inner()); - !is_keyword_set - } - Unseen => { - let is_seen = midx.is_flag_set("\\Seen"); - !is_seen - } - - // Not flag logic - _ => unreachable!(), - } -} - -fn is_sk_seq(sk: &SearchKey) -> bool { - use SearchKey::*; - match sk { - SequenceSet(..) | Uid(..) 
=> true, - _ => false, - } -} -fn is_keep_seq(sk: &SearchKey, midx: &MailIndex) -> bool { - use SearchKey::*; - match sk { - SequenceSet(seq_set) => seq_set - .0 - .as_ref() - .iter() - .any(|seq| midx.is_in_sequence_i(seq)), - Uid(seq_set) => seq_set - .0 - .as_ref() - .iter() - .any(|seq| midx.is_in_sequence_uid(seq)), - _ => unreachable!(), - } -} - -fn is_keep_modseq( - filter: &Option, - modseq: &NonZeroU64, - midx: &MailIndex, -) -> bool { - if filter.is_some() { - tracing::warn!(filter=?filter, "Ignoring search metadata filter as it's not supported yet"); - } - modseq <= &midx.modseq -} diff --git a/aero-proto/imap/session.rs b/aero-proto/imap/session.rs deleted file mode 100644 index fa3232a..0000000 --- a/aero-proto/imap/session.rs +++ /dev/null @@ -1,173 +0,0 @@ -use crate::imap::capability::{ClientCapability, ServerCapability}; -use crate::imap::command::{anonymous, authenticated, selected}; -use crate::imap::flow; -use crate::imap::request::Request; -use crate::imap::response::{Response, ResponseOrIdle}; -use crate::login::ArcLoginProvider; -use anyhow::{anyhow, bail, Context, Result}; -use imap_codec::imap_types::{command::Command, core::Tag}; - -//----- -pub struct Instance { - pub login_provider: ArcLoginProvider, - pub server_capabilities: ServerCapability, - pub client_capabilities: ClientCapability, - pub state: flow::State, -} -impl Instance { - pub fn new(login_provider: ArcLoginProvider, cap: ServerCapability) -> Self { - let client_cap = ClientCapability::new(&cap); - Self { - login_provider, - state: flow::State::NotAuthenticated, - server_capabilities: cap, - client_capabilities: client_cap, - } - } - - pub async fn request(&mut self, req: Request) -> ResponseOrIdle { - match req { - Request::IdleStart(tag) => self.idle_init(tag), - Request::IdlePoll => self.idle_poll().await, - Request::ImapCommand(cmd) => self.command(cmd).await, - } - } - - pub fn idle_init(&mut self, tag: Tag<'static>) -> ResponseOrIdle { - // Build transition - //@FIXME the notifier should be hidden inside the state and thus not part of the transition! - let transition = flow::Transition::Idle(tag.clone(), tokio::sync::Notify::new()); - - // Try to apply the transition and get the stop notifier - let maybe_stop = self - .state - .apply(transition) - .context("IDLE transition failed") - .and_then(|_| { - self.state - .notify() - .ok_or(anyhow!("IDLE state has no Notify object")) - }); - - // Build an appropriate response - match maybe_stop { - Ok(stop) => ResponseOrIdle::IdleAccept(stop), - Err(e) => { - tracing::error!(err=?e, "unable to init idle due to a transition error"); - //ResponseOrIdle::IdleReject(tag) - let no = Response::build() - .tag(tag) - .message( - "Internal error, processing command triggered an illegal IMAP state transition", - ) - .no() - .unwrap(); - ResponseOrIdle::IdleReject(no) - } - } - } - - pub async fn idle_poll(&mut self) -> ResponseOrIdle { - match self.idle_poll_happy().await { - Ok(r) => r, - Err(e) => { - tracing::error!(err=?e, "something bad happened in idle"); - ResponseOrIdle::Response(Response::bye().unwrap()) - } - } - } - - pub async fn idle_poll_happy(&mut self) -> Result { - let (mbx, tag, stop) = match &mut self.state { - flow::State::Idle(_, ref mut mbx, _, tag, stop) => (mbx, tag.clone(), stop.clone()), - _ => bail!("Invalid session state, can't idle"), - }; - - tokio::select! 
{ - _ = stop.notified() => { - self.state.apply(flow::Transition::UnIdle)?; - return Ok(ResponseOrIdle::Response(Response::build() - .tag(tag.clone()) - .message("IDLE completed") - .ok()?)) - }, - change = mbx.idle_sync() => { - tracing::debug!("idle event"); - return Ok(ResponseOrIdle::IdleEvent(change?)); - } - } - } - - pub async fn command(&mut self, cmd: Command<'static>) -> ResponseOrIdle { - // Command behavior is modulated by the state. - // To prevent state error, we handle the same command in separate code paths. - let (resp, tr) = match &mut self.state { - flow::State::NotAuthenticated => { - let ctx = anonymous::AnonymousContext { - req: &cmd, - login_provider: &self.login_provider, - server_capabilities: &self.server_capabilities, - }; - anonymous::dispatch(ctx).await - } - flow::State::Authenticated(ref user) => { - let ctx = authenticated::AuthenticatedContext { - req: &cmd, - server_capabilities: &self.server_capabilities, - client_capabilities: &mut self.client_capabilities, - user, - }; - authenticated::dispatch(ctx).await - } - flow::State::Selected(ref user, ref mut mailbox, ref perm) => { - let ctx = selected::SelectedContext { - req: &cmd, - server_capabilities: &self.server_capabilities, - client_capabilities: &mut self.client_capabilities, - user, - mailbox, - perm, - }; - selected::dispatch(ctx).await - } - flow::State::Idle(..) => Err(anyhow!("can not receive command while idling")), - flow::State::Logout => Response::build() - .tag(cmd.tag.clone()) - .message("No commands are allowed in the LOGOUT state.") - .bad() - .map(|r| (r, flow::Transition::None)), - } - .unwrap_or_else(|err| { - tracing::error!("Command error {:?} occured while processing {:?}", err, cmd); - ( - Response::build() - .to_req(&cmd) - .message("Internal error while processing command") - .bad() - .unwrap(), - flow::Transition::None, - ) - }); - - if let Err(e) = self.state.apply(tr) { - tracing::error!( - "Transition error {:?} occured while processing on command {:?}", - e, - cmd - ); - return ResponseOrIdle::Response(Response::build() - .to_req(&cmd) - .message( - "Internal error, processing command triggered an illegal IMAP state transition", - ) - .bad() - .unwrap()); - } - ResponseOrIdle::Response(resp) - - /*match &self.state { - flow::State::Idle(_, _, _, _, n) => ResponseOrIdle::StartIdle(n.clone()), - _ => ResponseOrIdle::Response(resp), - }*/ - } -} diff --git a/aero-proto/lmtp.rs b/aero-proto/lmtp.rs deleted file mode 100644 index dcd4bcc..0000000 --- a/aero-proto/lmtp.rs +++ /dev/null @@ -1,221 +0,0 @@ -use std::net::SocketAddr; -use std::{pin::Pin, sync::Arc}; - -use anyhow::Result; -use async_trait::async_trait; -use duplexify::Duplex; -use futures::{io, AsyncRead, AsyncReadExt, AsyncWrite}; -use futures::{ - stream, - stream::{FuturesOrdered, FuturesUnordered}, - StreamExt, -}; -use log::*; -use tokio::net::TcpListener; -use tokio::select; -use tokio::sync::watch; -use tokio_util::compat::*; - -use smtp_message::{DataUnescaper, Email, EscapedDataReader, Reply, ReplyCode}; -use smtp_server::{reply, Config, ConnectionMetadata, Decision, MailMetadata}; - -use crate::config::*; -use crate::login::*; -use crate::mail::incoming::EncryptedMessage; - -pub struct LmtpServer { - bind_addr: SocketAddr, - hostname: String, - login_provider: Arc, -} - -impl LmtpServer { - pub fn new( - config: LmtpConfig, - login_provider: Arc, - ) -> Arc { - Arc::new(Self { - bind_addr: config.bind_addr, - hostname: config.hostname, - login_provider, - }) - } - - pub async fn run(self: &Arc, mut 
must_exit: watch::Receiver) -> Result<()> { - let tcp = TcpListener::bind(self.bind_addr).await?; - info!("LMTP server listening on {:#}", self.bind_addr); - - let mut connections = FuturesUnordered::new(); - - while !*must_exit.borrow() { - let wait_conn_finished = async { - if connections.is_empty() { - futures::future::pending().await - } else { - connections.next().await - } - }; - let (socket, remote_addr) = select! { - a = tcp.accept() => a?, - _ = wait_conn_finished => continue, - _ = must_exit.changed() => continue, - }; - info!("LMTP: accepted connection from {}", remote_addr); - - let conn = tokio::spawn(smtp_server::interact( - socket.compat(), - smtp_server::IsAlreadyTls::No, - (), - self.clone(), - )); - - connections.push(conn); - } - drop(tcp); - - info!("LMTP server shutting down, draining remaining connections..."); - while connections.next().await.is_some() {} - - Ok(()) - } -} - -// ---- - -pub struct Message { - to: Vec, -} - -#[async_trait] -impl Config for LmtpServer { - type Protocol = smtp_server::protocol::Lmtp; - - type ConnectionUserMeta = (); - type MailUserMeta = Message; - - fn hostname(&self, _conn_meta: &ConnectionMetadata<()>) -> &str { - &self.hostname - } - - async fn new_mail(&self, _conn_meta: &mut ConnectionMetadata<()>) -> Message { - Message { to: vec![] } - } - - async fn tls_accept( - &self, - _io: IO, - _conn_meta: &mut ConnectionMetadata<()>, - ) -> io::Result>, Pin>>> - where - IO: Send + AsyncRead + AsyncWrite, - { - Err(io::Error::new( - io::ErrorKind::InvalidInput, - "TLS not implemented for LMTP server", - )) - } - - async fn filter_from( - &self, - from: Option, - _meta: &mut MailMetadata, - _conn_meta: &mut ConnectionMetadata<()>, - ) -> Decision> { - Decision::Accept { - reply: reply::okay_from().convert(), - res: from, - } - } - - async fn filter_to( - &self, - to: Email, - meta: &mut MailMetadata, - _conn_meta: &mut ConnectionMetadata<()>, - ) -> Decision { - let to_str = match to.hostname.as_ref() { - Some(h) => format!("{}@{}", to.localpart, h), - None => to.localpart.to_string(), - }; - match self.login_provider.public_login(&to_str).await { - Ok(creds) => { - meta.user.to.push(creds); - Decision::Accept { - reply: reply::okay_to().convert(), - res: to, - } - } - Err(e) => Decision::Reject { - reply: Reply { - code: ReplyCode::POLICY_REASON, - ecode: None, - text: vec![smtp_message::MaybeUtf8::Utf8(e.to_string())], - }, - }, - } - } - - async fn handle_mail<'resp, R>( - &'resp self, - reader: &mut EscapedDataReader<'_, R>, - meta: MailMetadata, - _conn_meta: &'resp mut ConnectionMetadata<()>, - ) -> Pin> + Send + 'resp>> - where - R: Send + Unpin + AsyncRead, - { - let err_response_stream = |meta: MailMetadata, msg: String| { - Box::pin( - stream::iter(meta.user.to.into_iter()).map(move |_| Decision::Reject { - reply: Reply { - code: ReplyCode::POLICY_REASON, - ecode: None, - text: vec![smtp_message::MaybeUtf8::Utf8(msg.clone())], - }, - }), - ) - }; - - let mut text = Vec::new(); - if let Err(e) = reader.read_to_end(&mut text).await { - return err_response_stream(meta, format!("io error: {}", e)); - } - reader.complete(); - let raw_size = text.len(); - - // Unescape email, shrink it also to remove last dot - let unesc_res = DataUnescaper::new(true).unescape(&mut text); - text.truncate(unesc_res.written); - tracing::debug!(prev_sz = raw_size, new_sz = text.len(), "unescaped"); - - let encrypted_message = match EncryptedMessage::new(text) { - Ok(x) => Arc::new(x), - Err(e) => return err_response_stream(meta, e.to_string()), - }; - - 
Box::pin( - meta.user - .to - .into_iter() - .map(move |creds| { - let encrypted_message = encrypted_message.clone(); - async move { - match encrypted_message.deliver_to(creds).await { - Ok(()) => Decision::Accept { - reply: reply::okay_mail().convert(), - res: (), - }, - Err(e) => Decision::Reject { - reply: Reply { - code: ReplyCode::POLICY_REASON, - ecode: None, - text: vec![smtp_message::MaybeUtf8::Utf8(e.to_string())], - }, - }, - } - } - }) - .collect::>(), - ) - } -} diff --git a/aero-proto/sasl.rs b/aero-proto/sasl.rs deleted file mode 100644 index fe292e1..0000000 --- a/aero-proto/sasl.rs +++ /dev/null @@ -1,140 +0,0 @@ -use std::net::SocketAddr; - -use anyhow::{anyhow, bail, Result}; -use futures::stream::{FuturesUnordered, StreamExt}; -use tokio::io::BufStream; -use tokio::io::{AsyncBufReadExt, AsyncWriteExt}; -use tokio::net::{TcpListener, TcpStream}; -use tokio::sync::watch; - -use aero_user::config::AuthConfig; -use aero_user::login::ArcLoginProvider; - - -pub struct AuthServer { - login_provider: ArcLoginProvider, - bind_addr: SocketAddr, -} - -impl AuthServer { - pub fn new(config: AuthConfig, login_provider: ArcLoginProvider) -> Self { - Self { - bind_addr: config.bind_addr, - login_provider, - } - } - - pub async fn run(self: Self, mut must_exit: watch::Receiver) -> Result<()> { - let tcp = TcpListener::bind(self.bind_addr).await?; - tracing::info!( - "SASL Authentication Protocol listening on {:#}", - self.bind_addr - ); - - let mut connections = FuturesUnordered::new(); - - while !*must_exit.borrow() { - let wait_conn_finished = async { - if connections.is_empty() { - futures::future::pending().await - } else { - connections.next().await - } - }; - - let (socket, remote_addr) = tokio::select! { - a = tcp.accept() => a?, - _ = wait_conn_finished => continue, - _ = must_exit.changed() => continue, - }; - - tracing::info!("AUTH: accepted connection from {}", remote_addr); - let conn = tokio::spawn( - NetLoop::new(socket, self.login_provider.clone(), must_exit.clone()).run_error(), - ); - - connections.push(conn); - } - drop(tcp); - - tracing::info!("AUTH server shutting down, draining remaining connections..."); - while connections.next().await.is_some() {} - - Ok(()) - } -} - -struct NetLoop { - login: ArcLoginProvider, - stream: BufStream, - stop: watch::Receiver, - state: State, - read_buf: Vec, - write_buf: BytesMut, -} - -impl NetLoop { - fn new(stream: TcpStream, login: ArcLoginProvider, stop: watch::Receiver) -> Self { - Self { - login, - stream: BufStream::new(stream), - state: State::Init, - stop, - read_buf: Vec::new(), - write_buf: BytesMut::new(), - } - } - - async fn run_error(self) { - match self.run().await { - Ok(()) => tracing::info!("Auth session succeeded"), - Err(e) => tracing::error!(err=?e, "Auth session failed"), - } - } - - async fn run(mut self) -> Result<()> { - loop { - tokio::select! { - read_res = self.stream.read_until(b'\n', &mut self.read_buf) => { - // Detect EOF / socket close - let bread = read_res?; - if bread == 0 { - tracing::info!("Reading buffer empty, connection has been closed. 
Exiting AUTH session."); - return Ok(()) - } - - // Parse command - let (_, cmd) = client_command(&self.read_buf).map_err(|_| anyhow!("Unable to parse command"))?; - tracing::trace!(cmd=?cmd, "Received command"); - - // Make some progress in our local state - self.state.progress(cmd, &self.login).await; - if matches!(self.state, State::Error) { - bail!("Internal state is in error, previous logs explain what went wrong"); - } - - // Build response - let srv_cmds = self.state.response(); - srv_cmds.iter().try_for_each(|r| { - tracing::trace!(cmd=?r, "Sent command"); - r.encode(&mut self.write_buf) - })?; - - // Send responses if at least one command response has been generated - if !srv_cmds.is_empty() { - self.stream.write_all(&self.write_buf).await?; - self.stream.flush().await?; - } - - // Reset buffers - self.read_buf.clear(); - self.write_buf.clear(); - }, - _ = self.stop.changed() => { - tracing::debug!("Server is stopping, quitting this runner"); - return Ok(()) - } - } - } - } -} diff --git a/aero-proto/src/dav.rs b/aero-proto/src/dav.rs new file mode 100644 index 0000000..2852d34 --- /dev/null +++ b/aero-proto/src/dav.rs @@ -0,0 +1,146 @@ +use std::net::SocketAddr; + +use anyhow::{anyhow, Result}; +use base64::Engine; +use hyper::service::service_fn; +use hyper::{Request, Response, body::Bytes}; +use hyper::server::conn::http1 as http; +use hyper_util::rt::TokioIo; +use http_body_util::Full; +use futures::stream::{FuturesUnordered, StreamExt}; +use tokio::net::TcpListener; +use tokio::sync::watch; + +use aero_user::config::DavUnsecureConfig; +use aero_user::login::ArcLoginProvider; +use aero_collections::user::User; + +pub struct Server { + bind_addr: SocketAddr, + login_provider: ArcLoginProvider, +} + +pub fn new_unsecure(config: DavUnsecureConfig, login: ArcLoginProvider) -> Server { + Server { + bind_addr: config.bind_addr, + login_provider: login, + } +} + +impl Server { + pub async fn run(self: Self, mut must_exit: watch::Receiver) -> Result<()> { + let tcp = TcpListener::bind(self.bind_addr).await?; + tracing::info!("DAV server listening on {:#}", self.bind_addr); + + let mut connections = FuturesUnordered::new(); + while !*must_exit.borrow() { + let wait_conn_finished = async { + if connections.is_empty() { + futures::future::pending().await + } else { + connections.next().await + } + }; + let (socket, remote_addr) = tokio::select! 
{ + a = tcp.accept() => a?, + _ = wait_conn_finished => continue, + _ = must_exit.changed() => continue, + }; + tracing::info!("Accepted connection from {}", remote_addr); + let stream = TokioIo::new(socket); + let login = self.login_provider.clone(); + let conn = tokio::spawn(async move { + //@FIXME should create a generic "public web" server on which "routers" could be + //abitrarily bound + //@FIXME replace with a handler supporting http2 and TLS + match http::Builder::new().serve_connection(stream, service_fn(|req: Request| { + let login = login.clone(); + async move { + auth(login, req).await + } + })).await { + Err(e) => tracing::warn!(err=?e, "connection failed"), + Ok(()) => tracing::trace!("connection terminated with success"), + } + }); + connections.push(conn); + } + drop(tcp); + + tracing::info!("Server shutting down, draining remaining connections..."); + while connections.next().await.is_some() {} + + Ok(()) + } +} + +//@FIXME We should not support only BasicAuth +async fn auth( + login: ArcLoginProvider, + req: Request, +) -> Result>> { + + let auth_val = match req.headers().get("Authorization") { + Some(hv) => hv.to_str()?, + None => return Ok(Response::builder() + .status(401) + .body(Full::new(Bytes::from("Missing Authorization field")))?), + }; + + let b64_creds_maybe_padded = match auth_val.split_once(" ") { + Some(("Basic", b64)) => b64, + _ => return Ok(Response::builder() + .status(400) + .body(Full::new(Bytes::from("Unsupported Authorization field")))?), + }; + + // base64urlencoded may have trailing equals, base64urlsafe has not + // theoretically authorization is padded but "be liberal in what you accept" + let b64_creds_clean = b64_creds_maybe_padded.trim_end_matches('='); + + // Decode base64 + let creds = base64::engine::general_purpose::STANDARD_NO_PAD.decode(b64_creds_clean)?; + let str_creds = std::str::from_utf8(&creds)?; + + // Split username and password + let (username, password) = str_creds + .split_once(':') + .ok_or(anyhow!("Missing colon in Authorization, can't split decoded value into a username/password pair"))?; + + // Call login provider + let creds = match login.login(username, password).await { + Ok(c) => c, + Err(_) => return Ok(Response::builder() + .status(401) + .body(Full::new(Bytes::from("Wrong credentials")))?), + }; + + // Build a user + let user = User::new(username.into(), creds).await?; + + // Call router with user + router(user, req).await +} + +async fn router(user: std::sync::Arc, req: Request) -> Result>> { + let path_segments: Vec<_> = req.uri().path().split("/").filter(|s| *s != "").collect(); + match path_segments.as_slice() { + [] => tracing::info!("root"), + [ username, ..] 
if *username != user.username => return Ok(Response::builder() + .status(403) + .body(Full::new(Bytes::from("Accessing other user ressources is not allowed")))?), + [ _ ] => tracing::info!("user home"), + [ _, "calendar" ] => tracing::info!("user calendars"), + [ _, "calendar", colname ] => tracing::info!(name=colname, "selected calendar"), + [ _, "calendar", colname, member ] => tracing::info!(name=colname, obj=member, "selected event"), + _ => return Ok(Response::builder() + .status(404) + .body(Full::new(Bytes::from("Resource not found")))?), + } + Ok(Response::new(Full::new(Bytes::from("Hello World!")))) +} + +#[allow(dead_code)] +async fn collections(_user: std::sync::Arc, _req: Request) -> Result>> { + unimplemented!(); +} diff --git a/aero-proto/src/imap/attributes.rs b/aero-proto/src/imap/attributes.rs new file mode 100644 index 0000000..89446a8 --- /dev/null +++ b/aero-proto/src/imap/attributes.rs @@ -0,0 +1,77 @@ +use imap_codec::imap_types::command::FetchModifier; +use imap_codec::imap_types::fetch::{MacroOrMessageDataItemNames, MessageDataItemName, Section}; + +/// Internal decisions based on fetched attributes +/// passed by the client + +pub struct AttributesProxy { + pub attrs: Vec>, +} +impl AttributesProxy { + pub fn new( + attrs: &MacroOrMessageDataItemNames<'static>, + modifiers: &[FetchModifier], + is_uid_fetch: bool, + ) -> Self { + // Expand macros + let mut fetch_attrs = match attrs { + MacroOrMessageDataItemNames::Macro(m) => { + use imap_codec::imap_types::fetch::Macro; + use MessageDataItemName::*; + match m { + Macro::All => vec![Flags, InternalDate, Rfc822Size, Envelope], + Macro::Fast => vec![Flags, InternalDate, Rfc822Size], + Macro::Full => vec![Flags, InternalDate, Rfc822Size, Envelope, Body], + _ => { + tracing::error!("unimplemented macro"); + vec![] + } + } + } + MacroOrMessageDataItemNames::MessageDataItemNames(a) => a.clone(), + }; + + // Handle uids + if is_uid_fetch && !fetch_attrs.contains(&MessageDataItemName::Uid) { + fetch_attrs.push(MessageDataItemName::Uid); + } + + // Handle inferred MODSEQ tag + let is_changed_since = modifiers + .iter() + .any(|m| matches!(m, FetchModifier::ChangedSince(..))); + if is_changed_since && !fetch_attrs.contains(&MessageDataItemName::ModSeq) { + fetch_attrs.push(MessageDataItemName::ModSeq); + } + + Self { attrs: fetch_attrs } + } + + pub fn is_enabling_condstore(&self) -> bool { + self.attrs + .iter() + .any(|x| matches!(x, MessageDataItemName::ModSeq)) + } + + pub fn need_body(&self) -> bool { + self.attrs.iter().any(|x| match x { + MessageDataItemName::Body + | MessageDataItemName::Rfc822 + | MessageDataItemName::Rfc822Text + | MessageDataItemName::BodyStructure => true, + + MessageDataItemName::BodyExt { + section: Some(section), + partial: _, + peek: _, + } => match section { + Section::Header(None) + | Section::HeaderFields(None, _) + | Section::HeaderFieldsNot(None, _) => false, + _ => true, + }, + MessageDataItemName::BodyExt { .. 
} => true, + _ => false, + }) + } +} diff --git a/aero-proto/src/imap/capability.rs b/aero-proto/src/imap/capability.rs new file mode 100644 index 0000000..c76b51c --- /dev/null +++ b/aero-proto/src/imap/capability.rs @@ -0,0 +1,159 @@ +use imap_codec::imap_types::command::{FetchModifier, SelectExamineModifier, StoreModifier}; +use imap_codec::imap_types::core::Vec1; +use imap_codec::imap_types::extensions::enable::{CapabilityEnable, Utf8Kind}; +use imap_codec::imap_types::response::Capability; +use std::collections::HashSet; + +use crate::imap::attributes::AttributesProxy; + +fn capability_unselect() -> Capability<'static> { + Capability::try_from("UNSELECT").unwrap() +} + +fn capability_condstore() -> Capability<'static> { + Capability::try_from("CONDSTORE").unwrap() +} + +fn capability_uidplus() -> Capability<'static> { + Capability::try_from("UIDPLUS").unwrap() +} + +fn capability_liststatus() -> Capability<'static> { + Capability::try_from("LIST-STATUS").unwrap() +} + +/* +fn capability_qresync() -> Capability<'static> { + Capability::try_from("QRESYNC").unwrap() +} +*/ + +#[derive(Debug, Clone)] +pub struct ServerCapability(HashSet>); + +impl Default for ServerCapability { + fn default() -> Self { + Self(HashSet::from([ + Capability::Imap4Rev1, + Capability::Enable, + Capability::Move, + Capability::LiteralPlus, + Capability::Idle, + capability_unselect(), + capability_condstore(), + capability_uidplus(), + capability_liststatus(), + //capability_qresync(), + ])) + } +} + +impl ServerCapability { + pub fn to_vec(&self) -> Vec1> { + self.0 + .iter() + .map(|v| v.clone()) + .collect::>() + .try_into() + .unwrap() + } + + #[allow(dead_code)] + pub fn support(&self, cap: &Capability<'static>) -> bool { + self.0.contains(cap) + } +} + +#[derive(Clone)] +pub enum ClientStatus { + NotSupportedByServer, + Disabled, + Enabled, +} +impl ClientStatus { + pub fn is_enabled(&self) -> bool { + matches!(self, Self::Enabled) + } + + pub fn enable(&self) -> Self { + match self { + Self::Disabled => Self::Enabled, + other => other.clone(), + } + } +} + +pub struct ClientCapability { + pub condstore: ClientStatus, + pub utf8kind: Option, +} + +impl ClientCapability { + pub fn new(sc: &ServerCapability) -> Self { + Self { + condstore: match sc.0.contains(&capability_condstore()) { + true => ClientStatus::Disabled, + _ => ClientStatus::NotSupportedByServer, + }, + utf8kind: None, + } + } + + pub fn enable_condstore(&mut self) { + self.condstore = self.condstore.enable(); + } + + pub fn attributes_enable(&mut self, ap: &AttributesProxy) { + if ap.is_enabling_condstore() { + self.enable_condstore() + } + } + + pub fn fetch_modifiers_enable(&mut self, mods: &[FetchModifier]) { + if mods + .iter() + .any(|x| matches!(x, FetchModifier::ChangedSince(..))) + { + self.enable_condstore() + } + } + + pub fn store_modifiers_enable(&mut self, mods: &[StoreModifier]) { + if mods + .iter() + .any(|x| matches!(x, StoreModifier::UnchangedSince(..))) + { + self.enable_condstore() + } + } + + pub fn select_enable(&mut self, mods: &[SelectExamineModifier]) { + for m in mods.iter() { + match m { + SelectExamineModifier::Condstore => self.enable_condstore(), + } + } + } + + pub fn try_enable( + &mut self, + caps: &[CapabilityEnable<'static>], + ) -> Vec> { + let mut enabled = vec![]; + for cap in caps { + match cap { + CapabilityEnable::CondStore if matches!(self.condstore, ClientStatus::Disabled) => { + self.condstore = ClientStatus::Enabled; + enabled.push(cap.clone()); + } + CapabilityEnable::Utf8(kind) if Some(kind) != 
self.utf8kind.as_ref() => { + self.utf8kind = Some(kind.clone()); + enabled.push(cap.clone()); + } + _ => (), + } + } + + enabled + } +} diff --git a/aero-proto/src/imap/command/anonymous.rs b/aero-proto/src/imap/command/anonymous.rs new file mode 100644 index 0000000..2848c30 --- /dev/null +++ b/aero-proto/src/imap/command/anonymous.rs @@ -0,0 +1,84 @@ +use anyhow::Result; +use imap_codec::imap_types::command::{Command, CommandBody}; +use imap_codec::imap_types::core::AString; +use imap_codec::imap_types::response::Code; +use imap_codec::imap_types::secret::Secret; + +use aero_user::login::ArcLoginProvider; +use aero_collections::user::User; + +use crate::imap::capability::ServerCapability; +use crate::imap::command::anystate; +use crate::imap::flow; +use crate::imap::response::Response; + +//--- dispatching + +pub struct AnonymousContext<'a> { + pub req: &'a Command<'static>, + pub server_capabilities: &'a ServerCapability, + pub login_provider: &'a ArcLoginProvider, +} + +pub async fn dispatch(ctx: AnonymousContext<'_>) -> Result<(Response<'static>, flow::Transition)> { + match &ctx.req.body { + // Any State + CommandBody::Noop => anystate::noop_nothing(ctx.req.tag.clone()), + CommandBody::Capability => { + anystate::capability(ctx.req.tag.clone(), ctx.server_capabilities) + } + CommandBody::Logout => anystate::logout(), + + // Specific to anonymous context (3 commands) + CommandBody::Login { username, password } => ctx.login(username, password).await, + CommandBody::Authenticate { .. } => { + anystate::not_implemented(ctx.req.tag.clone(), "authenticate") + } + //StartTLS is not implemented for now, we will probably go full TLS. + + // Collect other commands + _ => anystate::wrong_state(ctx.req.tag.clone()), + } +} + +//--- Command controllers, private + +impl<'a> AnonymousContext<'a> { + async fn login( + self, + username: &AString<'a>, + password: &Secret>, + ) -> Result<(Response<'static>, flow::Transition)> { + let (u, p) = ( + std::str::from_utf8(username.as_ref())?, + std::str::from_utf8(password.declassify().as_ref())?, + ); + tracing::info!(user = %u, "command.login"); + + let creds = match self.login_provider.login(&u, &p).await { + Err(e) => { + tracing::debug!(error=%e, "authentication failed"); + return Ok(( + Response::build() + .to_req(self.req) + .message("Authentication failed") + .no()?, + flow::Transition::None, + )); + } + Ok(c) => c, + }; + + let user = User::new(u.to_string(), creds).await?; + + tracing::info!(username=%u, "connected"); + Ok(( + Response::build() + .to_req(self.req) + .code(Code::Capability(self.server_capabilities.to_vec())) + .message("Completed") + .ok()?, + flow::Transition::Authenticate(user), + )) + } +} diff --git a/aero-proto/src/imap/command/anystate.rs b/aero-proto/src/imap/command/anystate.rs new file mode 100644 index 0000000..718ba3f --- /dev/null +++ b/aero-proto/src/imap/command/anystate.rs @@ -0,0 +1,54 @@ +use anyhow::Result; +use imap_codec::imap_types::core::Tag; +use imap_codec::imap_types::response::Data; + +use crate::imap::capability::ServerCapability; +use crate::imap::flow; +use crate::imap::response::Response; + +pub(crate) fn capability( + tag: Tag<'static>, + cap: &ServerCapability, +) -> Result<(Response<'static>, flow::Transition)> { + let res = Response::build() + .tag(tag) + .message("Server capabilities") + .data(Data::Capability(cap.to_vec())) + .ok()?; + + Ok((res, flow::Transition::None)) +} + +pub(crate) fn noop_nothing(tag: Tag<'static>) -> Result<(Response<'static>, flow::Transition)> { + Ok(( + 
Response::build().tag(tag).message("Noop completed.").ok()?, + flow::Transition::None, + )) +} + +pub(crate) fn logout() -> Result<(Response<'static>, flow::Transition)> { + Ok((Response::bye()?, flow::Transition::Logout)) +} + +pub(crate) fn not_implemented<'a>( + tag: Tag<'a>, + what: &str, +) -> Result<(Response<'a>, flow::Transition)> { + Ok(( + Response::build() + .tag(tag) + .message(format!("Command not implemented {}", what)) + .bad()?, + flow::Transition::None, + )) +} + +pub(crate) fn wrong_state(tag: Tag<'static>) -> Result<(Response<'static>, flow::Transition)> { + Ok(( + Response::build() + .tag(tag) + .message("Command not authorized in this state") + .bad()?, + flow::Transition::None, + )) +} diff --git a/aero-proto/src/imap/command/authenticated.rs b/aero-proto/src/imap/command/authenticated.rs new file mode 100644 index 0000000..4c8d8c1 --- /dev/null +++ b/aero-proto/src/imap/command/authenticated.rs @@ -0,0 +1,682 @@ +use std::collections::BTreeMap; +use std::sync::Arc; +use thiserror::Error; + +use anyhow::{anyhow, bail, Result}; +use imap_codec::imap_types::command::{ + Command, CommandBody, ListReturnItem, SelectExamineModifier, +}; +use imap_codec::imap_types::core::{Atom, Literal, QuotedChar, Vec1}; +use imap_codec::imap_types::datetime::DateTime; +use imap_codec::imap_types::extensions::enable::CapabilityEnable; +use imap_codec::imap_types::flag::{Flag, FlagNameAttribute}; +use imap_codec::imap_types::mailbox::{ListMailbox, Mailbox as MailboxCodec}; +use imap_codec::imap_types::response::{Code, CodeOther, Data}; +use imap_codec::imap_types::status::{StatusDataItem, StatusDataItemName}; + +use aero_collections::mail::uidindex::*; +use aero_collections::user::User; +use aero_collections::mail::IMF; +use aero_collections::mail::namespace::MAILBOX_HIERARCHY_DELIMITER as MBX_HIER_DELIM_RAW; + +use crate::imap::capability::{ClientCapability, ServerCapability}; +use crate::imap::command::{anystate, MailboxName}; +use crate::imap::flow; +use crate::imap::mailbox_view::MailboxView; +use crate::imap::response::Response; + +pub struct AuthenticatedContext<'a> { + pub req: &'a Command<'static>, + pub server_capabilities: &'a ServerCapability, + pub client_capabilities: &'a mut ClientCapability, + pub user: &'a Arc, +} + +pub async fn dispatch<'a>( + mut ctx: AuthenticatedContext<'a>, +) -> Result<(Response<'static>, flow::Transition)> { + match &ctx.req.body { + // Any state + CommandBody::Noop => anystate::noop_nothing(ctx.req.tag.clone()), + CommandBody::Capability => { + anystate::capability(ctx.req.tag.clone(), ctx.server_capabilities) + } + CommandBody::Logout => anystate::logout(), + + // Specific to this state (11 commands) + CommandBody::Create { mailbox } => ctx.create(mailbox).await, + CommandBody::Delete { mailbox } => ctx.delete(mailbox).await, + CommandBody::Rename { from, to } => ctx.rename(from, to).await, + CommandBody::Lsub { + reference, + mailbox_wildcard, + } => ctx.list(reference, mailbox_wildcard, &[], true).await, + CommandBody::List { + reference, + mailbox_wildcard, + r#return, + } => ctx.list(reference, mailbox_wildcard, r#return, false).await, + CommandBody::Status { + mailbox, + item_names, + } => ctx.status(mailbox, item_names).await, + CommandBody::Subscribe { mailbox } => ctx.subscribe(mailbox).await, + CommandBody::Unsubscribe { mailbox } => ctx.unsubscribe(mailbox).await, + CommandBody::Select { mailbox, modifiers } => ctx.select(mailbox, modifiers).await, + CommandBody::Examine { mailbox, modifiers } => ctx.examine(mailbox, modifiers).await, + 
CommandBody::Append { + mailbox, + flags, + date, + message, + } => ctx.append(mailbox, flags, date, message).await, + + // rfc5161 ENABLE + CommandBody::Enable { capabilities } => ctx.enable(capabilities), + + // Collect other commands + _ => anystate::wrong_state(ctx.req.tag.clone()), + } +} + +// --- PRIVATE --- +impl<'a> AuthenticatedContext<'a> { + async fn create( + self, + mailbox: &MailboxCodec<'a>, + ) -> Result<(Response<'static>, flow::Transition)> { + let name = match mailbox { + MailboxCodec::Inbox => { + return Ok(( + Response::build() + .to_req(self.req) + .message("Cannot create INBOX") + .bad()?, + flow::Transition::None, + )); + } + MailboxCodec::Other(aname) => std::str::from_utf8(aname.as_ref())?, + }; + + match self.user.create_mailbox(&name).await { + Ok(()) => Ok(( + Response::build() + .to_req(self.req) + .message("CREATE complete") + .ok()?, + flow::Transition::None, + )), + Err(e) => Ok(( + Response::build() + .to_req(self.req) + .message(&e.to_string()) + .no()?, + flow::Transition::None, + )), + } + } + + async fn delete( + self, + mailbox: &MailboxCodec<'a>, + ) -> Result<(Response<'static>, flow::Transition)> { + let name: &str = MailboxName(mailbox).try_into()?; + + match self.user.delete_mailbox(&name).await { + Ok(()) => Ok(( + Response::build() + .to_req(self.req) + .message("DELETE complete") + .ok()?, + flow::Transition::None, + )), + Err(e) => Ok(( + Response::build() + .to_req(self.req) + .message(e.to_string()) + .no()?, + flow::Transition::None, + )), + } + } + + async fn rename( + self, + from: &MailboxCodec<'a>, + to: &MailboxCodec<'a>, + ) -> Result<(Response<'static>, flow::Transition)> { + let name: &str = MailboxName(from).try_into()?; + let new_name: &str = MailboxName(to).try_into()?; + + match self.user.rename_mailbox(&name, &new_name).await { + Ok(()) => Ok(( + Response::build() + .to_req(self.req) + .message("RENAME complete") + .ok()?, + flow::Transition::None, + )), + Err(e) => Ok(( + Response::build() + .to_req(self.req) + .message(e.to_string()) + .no()?, + flow::Transition::None, + )), + } + } + + async fn list( + &mut self, + reference: &MailboxCodec<'a>, + mailbox_wildcard: &ListMailbox<'a>, + must_return: &[ListReturnItem], + is_lsub: bool, + ) -> Result<(Response<'static>, flow::Transition)> { + let mbx_hier_delim: QuotedChar = QuotedChar::unvalidated(MBX_HIER_DELIM_RAW); + + let reference: &str = MailboxName(reference).try_into()?; + if !reference.is_empty() { + return Ok(( + Response::build() + .to_req(self.req) + .message("References not supported") + .bad()?, + flow::Transition::None, + )); + } + + let status_item_names = must_return.iter().find_map(|m| match m { + ListReturnItem::Status(v) => Some(v), + _ => None, + }); + + // @FIXME would probably need a rewrite to better use the imap_codec library + let wildcard = match mailbox_wildcard { + ListMailbox::Token(v) => std::str::from_utf8(v.as_ref())?, + ListMailbox::String(v) => std::str::from_utf8(v.as_ref())?, + }; + if wildcard.is_empty() { + if is_lsub { + return Ok(( + Response::build() + .to_req(self.req) + .message("LSUB complete") + .data(Data::Lsub { + items: vec![], + delimiter: Some(mbx_hier_delim), + mailbox: "".try_into().unwrap(), + }) + .ok()?, + flow::Transition::None, + )); + } else { + return Ok(( + Response::build() + .to_req(self.req) + .message("LIST complete") + .data(Data::List { + items: vec![], + delimiter: Some(mbx_hier_delim), + mailbox: "".try_into().unwrap(), + }) + .ok()?, + flow::Transition::None, + )); + } + } + + let mailboxes = 
self.user.list_mailboxes().await?; + let mut vmailboxes = BTreeMap::new(); + for mb in mailboxes.iter() { + for (i, _) in mb.match_indices(MBX_HIER_DELIM_RAW) { + if i > 0 { + let smb = &mb[..i]; + vmailboxes.entry(smb).or_insert(false); + } + } + vmailboxes.insert(mb, true); + } + + let mut ret = vec![]; + for (mb, is_real) in vmailboxes.iter() { + if matches_wildcard(&wildcard, mb) { + let mailbox: MailboxCodec = mb + .to_string() + .try_into() + .map_err(|_| anyhow!("invalid mailbox name"))?; + let mut items = vec![FlagNameAttribute::from(Atom::unvalidated("Subscribed"))]; + + // Decoration + if !*is_real { + items.push(FlagNameAttribute::Noselect); + } else { + match *mb { + "Drafts" => items.push(Atom::unvalidated("Drafts").into()), + "Archive" => items.push(Atom::unvalidated("Archive").into()), + "Sent" => items.push(Atom::unvalidated("Sent").into()), + "Trash" => items.push(Atom::unvalidated("Trash").into()), + _ => (), + }; + } + + // Result type + if is_lsub { + ret.push(Data::Lsub { + items, + delimiter: Some(mbx_hier_delim), + mailbox: mailbox.clone(), + }); + } else { + ret.push(Data::List { + items, + delimiter: Some(mbx_hier_delim), + mailbox: mailbox.clone(), + }); + } + + // Also collect status + if let Some(sin) = status_item_names { + let ret_attrs = match self.status_items(mb, sin).await { + Ok(a) => a, + Err(e) => { + tracing::error!(err=?e, mailbox=%mb, "Unable to fetch status for mailbox"); + continue; + } + }; + + let data = Data::Status { + mailbox, + items: ret_attrs.into(), + }; + + ret.push(data); + } + } + } + + let msg = if is_lsub { + "LSUB completed" + } else { + "LIST completed" + }; + Ok(( + Response::build() + .to_req(self.req) + .message(msg) + .many_data(ret) + .ok()?, + flow::Transition::None, + )) + } + + async fn status( + &mut self, + mailbox: &MailboxCodec<'static>, + attributes: &[StatusDataItemName], + ) -> Result<(Response<'static>, flow::Transition)> { + let name: &str = MailboxName(mailbox).try_into()?; + + let ret_attrs = match self.status_items(name, attributes).await { + Ok(v) => v, + Err(e) => match e.downcast_ref::() { + Some(CommandError::MailboxNotFound) => { + return Ok(( + Response::build() + .to_req(self.req) + .message("Mailbox does not exist") + .no()?, + flow::Transition::None, + )) + } + _ => return Err(e.into()), + }, + }; + + let data = Data::Status { + mailbox: mailbox.clone(), + items: ret_attrs.into(), + }; + + Ok(( + Response::build() + .to_req(self.req) + .message("STATUS completed") + .data(data) + .ok()?, + flow::Transition::None, + )) + } + + async fn status_items( + &mut self, + name: &str, + attributes: &[StatusDataItemName], + ) -> Result> { + let mb_opt = self.user.open_mailbox(name).await?; + let mb = match mb_opt { + Some(mb) => mb, + None => return Err(CommandError::MailboxNotFound.into()), + }; + + let view = MailboxView::new(mb, self.client_capabilities.condstore.is_enabled()).await; + + let mut ret_attrs = vec![]; + for attr in attributes.iter() { + ret_attrs.push(match attr { + StatusDataItemName::Messages => StatusDataItem::Messages(view.exists()?), + StatusDataItemName::Unseen => StatusDataItem::Unseen(view.unseen_count() as u32), + StatusDataItemName::Recent => StatusDataItem::Recent(view.recent()?), + StatusDataItemName::UidNext => StatusDataItem::UidNext(view.uidnext()), + StatusDataItemName::UidValidity => { + StatusDataItem::UidValidity(view.uidvalidity()) + } + StatusDataItemName::Deleted => { + bail!("quota not implemented, can't return deleted elements waiting for EXPUNGE"); + }, + 
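// --- Illustrative sketch (not part of the patch) ---------------------------
// The LIST handler above derives `\Noselect` parent entries by splitting each
// mailbox name on the hierarchy delimiter and registering every proper prefix.
// A minimal standalone version of that expansion is sketched below; the '.'
// delimiter and the `expand_hierarchy` name are assumptions for the example.
use std::collections::BTreeMap;

const DELIM: char = '.'; // assumed MAILBOX_HIERARCHY_DELIMITER

fn expand_hierarchy<'a>(mailboxes: &[&'a str]) -> BTreeMap<&'a str, bool> {
    let mut out = BTreeMap::new();
    for &mb in mailboxes {
        for (i, _) in mb.match_indices(DELIM) {
            if i > 0 {
                // synthetic parent: listed, but flagged \Noselect
                out.entry(&mb[..i]).or_insert(false);
            }
        }
        // real mailbox: selectable
        out.insert(mb, true);
    }
    out
}

// expand_hierarchy(&["INBOX", "Archive.2023.Q1"]) yields
//   "Archive" -> false, "Archive.2023" -> false,
//   "Archive.2023.Q1" -> true, "INBOX" -> true
// which is the shape consumed by the Data::List / Data::Lsub decoration loop.
// ----------------------------------------------------------------------------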
StatusDataItemName::DeletedStorage => { + bail!("quota not implemented, can't return freed storage after EXPUNGE will be run"); + }, + StatusDataItemName::HighestModSeq => { + self.client_capabilities.enable_condstore(); + StatusDataItem::HighestModSeq(view.highestmodseq().get()) + }, + }); + } + Ok(ret_attrs) + } + + async fn subscribe( + self, + mailbox: &MailboxCodec<'a>, + ) -> Result<(Response<'static>, flow::Transition)> { + let name: &str = MailboxName(mailbox).try_into()?; + + if self.user.has_mailbox(&name).await? { + Ok(( + Response::build() + .to_req(self.req) + .message("SUBSCRIBE complete") + .ok()?, + flow::Transition::None, + )) + } else { + Ok(( + Response::build() + .to_req(self.req) + .message(format!("Mailbox {} does not exist", name)) + .bad()?, + flow::Transition::None, + )) + } + } + + async fn unsubscribe( + self, + mailbox: &MailboxCodec<'a>, + ) -> Result<(Response<'static>, flow::Transition)> { + let name: &str = MailboxName(mailbox).try_into()?; + + if self.user.has_mailbox(&name).await? { + Ok(( + Response::build() + .to_req(self.req) + .message(format!( + "Cannot unsubscribe from mailbox {}: not supported by Aerogramme", + name + )) + .bad()?, + flow::Transition::None, + )) + } else { + Ok(( + Response::build() + .to_req(self.req) + .message(format!("Mailbox {} does not exist", name)) + .no()?, + flow::Transition::None, + )) + } + } + + /* + * TRACE BEGIN --- + + + Example: C: A142 SELECT INBOX + S: * 172 EXISTS + S: * 1 RECENT + S: * OK [UNSEEN 12] Message 12 is first unseen + S: * OK [UIDVALIDITY 3857529045] UIDs valid + S: * OK [UIDNEXT 4392] Predicted next UID + S: * FLAGS (\Answered \Flagged \Deleted \Seen \Draft) + S: * OK [PERMANENTFLAGS (\Deleted \Seen \*)] Limited + S: A142 OK [READ-WRITE] SELECT completed + + --- a mailbox with no unseen message -> no unseen entry + NOTES: + RFC3501 (imap4rev1) says if there is no OK [UNSEEN] response, client must make no assumption, + it is therefore correct to not return it even if there are unseen messages + RFC9051 (imap4rev2) says that OK [UNSEEN] responses are deprecated after SELECT and EXAMINE + For Aerogramme, we just don't send the OK [UNSEEN], it's correct to do in both specifications. + + + 20 select "INBOX.achats" + * FLAGS (\Answered \Flagged \Deleted \Seen \Draft $Forwarded JUNK $label1) + * OK [PERMANENTFLAGS (\Answered \Flagged \Deleted \Seen \Draft $Forwarded JUNK $label1 \*)] Flags permitted. + * 88 EXISTS + * 0 RECENT + * OK [UIDVALIDITY 1347986788] UIDs valid + * OK [UIDNEXT 91] Predicted next UID + * OK [HIGHESTMODSEQ 72] Highest + 20 OK [READ-WRITE] Select completed (0.001 + 0.000 secs). 
+ + * TRACE END --- + */ + async fn select( + self, + mailbox: &MailboxCodec<'a>, + modifiers: &[SelectExamineModifier], + ) -> Result<(Response<'static>, flow::Transition)> { + self.client_capabilities.select_enable(modifiers); + + let name: &str = MailboxName(mailbox).try_into()?; + + let mb_opt = self.user.open_mailbox(&name).await?; + let mb = match mb_opt { + Some(mb) => mb, + None => { + return Ok(( + Response::build() + .to_req(self.req) + .message("Mailbox does not exist") + .no()?, + flow::Transition::None, + )) + } + }; + tracing::info!(username=%self.user.username, mailbox=%name, "mailbox.selected"); + + let mb = MailboxView::new(mb, self.client_capabilities.condstore.is_enabled()).await; + let data = mb.summary()?; + + Ok(( + Response::build() + .message("Select completed") + .to_req(self.req) + .code(Code::ReadWrite) + .set_body(data) + .ok()?, + flow::Transition::Select(mb, flow::MailboxPerm::ReadWrite), + )) + } + + async fn examine( + self, + mailbox: &MailboxCodec<'a>, + modifiers: &[SelectExamineModifier], + ) -> Result<(Response<'static>, flow::Transition)> { + self.client_capabilities.select_enable(modifiers); + + let name: &str = MailboxName(mailbox).try_into()?; + + let mb_opt = self.user.open_mailbox(&name).await?; + let mb = match mb_opt { + Some(mb) => mb, + None => { + return Ok(( + Response::build() + .to_req(self.req) + .message("Mailbox does not exist") + .no()?, + flow::Transition::None, + )) + } + }; + tracing::info!(username=%self.user.username, mailbox=%name, "mailbox.examined"); + + let mb = MailboxView::new(mb, self.client_capabilities.condstore.is_enabled()).await; + let data = mb.summary()?; + + Ok(( + Response::build() + .to_req(self.req) + .message("Examine completed") + .code(Code::ReadOnly) + .set_body(data) + .ok()?, + flow::Transition::Select(mb, flow::MailboxPerm::ReadOnly), + )) + } + + //@FIXME we should write a specific version for the "selected" state + //that returns some unsollicited responses + async fn append( + self, + mailbox: &MailboxCodec<'a>, + flags: &[Flag<'a>], + date: &Option, + message: &Literal<'a>, + ) -> Result<(Response<'static>, flow::Transition)> { + let append_tag = self.req.tag.clone(); + match self.append_internal(mailbox, flags, date, message).await { + Ok((_mb_view, uidvalidity, uid, _modseq)) => Ok(( + Response::build() + .tag(append_tag) + .message("APPEND completed") + .code(Code::Other(CodeOther::unvalidated( + format!("APPENDUID {} {}", uidvalidity, uid).into_bytes(), + ))) + .ok()?, + flow::Transition::None, + )), + Err(e) => Ok(( + Response::build() + .tag(append_tag) + .message(e.to_string()) + .no()?, + flow::Transition::None, + )), + } + } + + fn enable( + self, + cap_enable: &Vec1>, + ) -> Result<(Response<'static>, flow::Transition)> { + let mut response_builder = Response::build().to_req(self.req); + let capabilities = self.client_capabilities.try_enable(cap_enable.as_ref()); + if capabilities.len() > 0 { + response_builder = response_builder.data(Data::Enabled { capabilities }); + } + Ok(( + response_builder.message("ENABLE completed").ok()?, + flow::Transition::None, + )) + } + + //@FIXME should be refactored and integrated to the mailbox view + pub(crate) async fn append_internal( + self, + mailbox: &MailboxCodec<'a>, + flags: &[Flag<'a>], + date: &Option, + message: &Literal<'a>, + ) -> Result<(MailboxView, ImapUidvalidity, ImapUid, ModSeq)> { + let name: &str = MailboxName(mailbox).try_into()?; + + let mb_opt = self.user.open_mailbox(&name).await?; + let mb = match mb_opt { + Some(mb) => mb, + None 
=> bail!("Mailbox does not exist"), + }; + let view = MailboxView::new(mb, self.client_capabilities.condstore.is_enabled()).await; + + if date.is_some() { + tracing::warn!("Cannot set date when appending message"); + } + + let msg = + IMF::try_from(message.data()).map_err(|_| anyhow!("Could not parse e-mail message"))?; + let flags = flags.iter().map(|x| x.to_string()).collect::>(); + // TODO: filter allowed flags? ping @Quentin + + let (uidvalidity, uid, modseq) = + view.internal.mailbox.append(msg, None, &flags[..]).await?; + //let unsollicited = view.update(UpdateParameters::default()).await?; + + Ok((view, uidvalidity, uid, modseq)) + } +} + +fn matches_wildcard(wildcard: &str, name: &str) -> bool { + let wildcard = wildcard.chars().collect::>(); + let name = name.chars().collect::>(); + + let mut matches = vec![vec![false; wildcard.len() + 1]; name.len() + 1]; + + for i in 0..=name.len() { + for j in 0..=wildcard.len() { + matches[i][j] = (i == 0 && j == 0) + || (j > 0 + && matches[i][j - 1] + && (wildcard[j - 1] == '%' || wildcard[j - 1] == '*')) + || (i > 0 + && j > 0 + && matches[i - 1][j - 1] + && wildcard[j - 1] == name[i - 1] + && wildcard[j - 1] != '%' + && wildcard[j - 1] != '*') + || (i > 0 + && j > 0 + && matches[i - 1][j] + && (wildcard[j - 1] == '*' + || (wildcard[j - 1] == '%' && name[i - 1] != MBX_HIER_DELIM_RAW))); + } + } + + matches[name.len()][wildcard.len()] +} + +#[derive(Error, Debug)] +pub enum CommandError { + #[error("Mailbox not found")] + MailboxNotFound, +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_wildcard_matches() { + assert!(matches_wildcard("INBOX", "INBOX")); + assert!(matches_wildcard("*", "INBOX")); + assert!(matches_wildcard("%", "INBOX")); + assert!(!matches_wildcard("%", "Test.Azerty")); + assert!(!matches_wildcard("INBOX.*", "INBOX")); + assert!(matches_wildcard("Sent.*", "Sent.A")); + assert!(matches_wildcard("Sent.*", "Sent.A.B")); + assert!(!matches_wildcard("Sent.%", "Sent.A.B")); + } +} diff --git a/aero-proto/src/imap/command/mod.rs b/aero-proto/src/imap/command/mod.rs new file mode 100644 index 0000000..5382d06 --- /dev/null +++ b/aero-proto/src/imap/command/mod.rs @@ -0,0 +1,20 @@ +pub mod anonymous; +pub mod anystate; +pub mod authenticated; +pub mod selected; + +use aero_collections::mail::namespace::INBOX; +use imap_codec::imap_types::mailbox::Mailbox as MailboxCodec; + +/// Convert an IMAP mailbox name/identifier representation +/// to an utf-8 string that is used internally in Aerogramme +struct MailboxName<'a>(&'a MailboxCodec<'a>); +impl<'a> TryInto<&'a str> for MailboxName<'a> { + type Error = std::str::Utf8Error; + fn try_into(self) -> Result<&'a str, Self::Error> { + match self.0 { + MailboxCodec::Inbox => Ok(INBOX), + MailboxCodec::Other(aname) => Ok(std::str::from_utf8(aname.as_ref())?), + } + } +} diff --git a/aero-proto/src/imap/command/selected.rs b/aero-proto/src/imap/command/selected.rs new file mode 100644 index 0000000..190949b --- /dev/null +++ b/aero-proto/src/imap/command/selected.rs @@ -0,0 +1,425 @@ +use std::num::NonZeroU64; +use std::sync::Arc; + +use anyhow::Result; +use imap_codec::imap_types::command::{Command, CommandBody, FetchModifier, StoreModifier}; +use imap_codec::imap_types::core::Charset; +use imap_codec::imap_types::fetch::MacroOrMessageDataItemNames; +use imap_codec::imap_types::flag::{Flag, StoreResponse, StoreType}; +use imap_codec::imap_types::mailbox::Mailbox as MailboxCodec; +use imap_codec::imap_types::response::{Code, CodeOther}; +use 
imap_codec::imap_types::search::SearchKey; +use imap_codec::imap_types::sequence::SequenceSet; + +use aero_collections::user::User; + +use crate::imap::attributes::AttributesProxy; +use crate::imap::capability::{ClientCapability, ServerCapability}; +use crate::imap::command::{anystate, authenticated, MailboxName}; +use crate::imap::flow; +use crate::imap::mailbox_view::{MailboxView, UpdateParameters}; +use crate::imap::response::Response; + +pub struct SelectedContext<'a> { + pub req: &'a Command<'static>, + pub user: &'a Arc, + pub mailbox: &'a mut MailboxView, + pub server_capabilities: &'a ServerCapability, + pub client_capabilities: &'a mut ClientCapability, + pub perm: &'a flow::MailboxPerm, +} + +pub async fn dispatch<'a>( + ctx: SelectedContext<'a>, +) -> Result<(Response<'static>, flow::Transition)> { + match &ctx.req.body { + // Any State + // noop is specific to this state + CommandBody::Capability => { + anystate::capability(ctx.req.tag.clone(), ctx.server_capabilities) + } + CommandBody::Logout => anystate::logout(), + + // Specific to this state (7 commands + NOOP) + CommandBody::Close => match ctx.perm { + flow::MailboxPerm::ReadWrite => ctx.close().await, + flow::MailboxPerm::ReadOnly => ctx.examine_close().await, + }, + CommandBody::Noop | CommandBody::Check => ctx.noop().await, + CommandBody::Fetch { + sequence_set, + macro_or_item_names, + modifiers, + uid, + } => { + ctx.fetch(sequence_set, macro_or_item_names, modifiers, uid) + .await + } + //@FIXME SearchKey::And is a legacy hack, should be refactored + CommandBody::Search { + charset, + criteria, + uid, + } => { + ctx.search(charset, &SearchKey::And(criteria.clone()), uid) + .await + } + CommandBody::Expunge { + // UIDPLUS (rfc4315) + uid_sequence_set, + } => ctx.expunge(uid_sequence_set).await, + CommandBody::Store { + sequence_set, + kind, + response, + flags, + modifiers, + uid, + } => { + ctx.store(sequence_set, kind, response, flags, modifiers, uid) + .await + } + CommandBody::Copy { + sequence_set, + mailbox, + uid, + } => ctx.copy(sequence_set, mailbox, uid).await, + CommandBody::Move { + sequence_set, + mailbox, + uid, + } => ctx.r#move(sequence_set, mailbox, uid).await, + + // UNSELECT extension (rfc3691) + CommandBody::Unselect => ctx.unselect().await, + + // In selected mode, we fallback to authenticated when needed + _ => { + authenticated::dispatch(authenticated::AuthenticatedContext { + req: ctx.req, + server_capabilities: ctx.server_capabilities, + client_capabilities: ctx.client_capabilities, + user: ctx.user, + }) + .await + } + } +} + +// --- PRIVATE --- + +impl<'a> SelectedContext<'a> { + async fn close(self) -> Result<(Response<'static>, flow::Transition)> { + // We expunge messages, + // but we don't send the untagged EXPUNGE responses + let tag = self.req.tag.clone(); + self.expunge(&None).await?; + Ok(( + Response::build().tag(tag).message("CLOSE completed").ok()?, + flow::Transition::Unselect, + )) + } + + /// CLOSE in examined state is not the same as in selected state + /// (in selected state it also does an EXPUNGE, here it doesn't) + async fn examine_close(self) -> Result<(Response<'static>, flow::Transition)> { + Ok(( + Response::build() + .to_req(self.req) + .message("CLOSE completed") + .ok()?, + flow::Transition::Unselect, + )) + } + + async fn unselect(self) -> Result<(Response<'static>, flow::Transition)> { + Ok(( + Response::build() + .to_req(self.req) + .message("UNSELECT completed") + .ok()?, + flow::Transition::Unselect, + )) + } + + pub async fn fetch( + self, + sequence_set: 
&SequenceSet, + attributes: &'a MacroOrMessageDataItemNames<'static>, + modifiers: &[FetchModifier], + uid: &bool, + ) -> Result<(Response<'static>, flow::Transition)> { + let ap = AttributesProxy::new(attributes, modifiers, *uid); + let mut changed_since: Option = None; + modifiers.iter().for_each(|m| match m { + FetchModifier::ChangedSince(val) => { + changed_since = Some(*val); + } + }); + + match self + .mailbox + .fetch(sequence_set, &ap, changed_since, uid) + .await + { + Ok(resp) => { + // Capabilities enabling logic only on successful command + // (according to my understanding of the spec) + self.client_capabilities.attributes_enable(&ap); + self.client_capabilities.fetch_modifiers_enable(modifiers); + + // Response to the client + Ok(( + Response::build() + .to_req(self.req) + .message("FETCH completed") + .set_body(resp) + .ok()?, + flow::Transition::None, + )) + } + Err(e) => Ok(( + Response::build() + .to_req(self.req) + .message(e.to_string()) + .no()?, + flow::Transition::None, + )), + } + } + + pub async fn search( + self, + charset: &Option>, + criteria: &SearchKey<'a>, + uid: &bool, + ) -> Result<(Response<'static>, flow::Transition)> { + let (found, enable_condstore) = self.mailbox.search(charset, criteria, *uid).await?; + if enable_condstore { + self.client_capabilities.enable_condstore(); + } + Ok(( + Response::build() + .to_req(self.req) + .set_body(found) + .message("SEARCH completed") + .ok()?, + flow::Transition::None, + )) + } + + pub async fn noop(self) -> Result<(Response<'static>, flow::Transition)> { + self.mailbox.internal.mailbox.force_sync().await?; + + let updates = self.mailbox.update(UpdateParameters::default()).await?; + Ok(( + Response::build() + .to_req(self.req) + .message("NOOP completed.") + .set_body(updates) + .ok()?, + flow::Transition::None, + )) + } + + async fn expunge( + self, + uid_sequence_set: &Option, + ) -> Result<(Response<'static>, flow::Transition)> { + if let Some(failed) = self.fail_read_only() { + return Ok((failed, flow::Transition::None)); + } + + let tag = self.req.tag.clone(); + let data = self.mailbox.expunge(uid_sequence_set).await?; + + Ok(( + Response::build() + .tag(tag) + .message("EXPUNGE completed") + .set_body(data) + .ok()?, + flow::Transition::None, + )) + } + + async fn store( + self, + sequence_set: &SequenceSet, + kind: &StoreType, + response: &StoreResponse, + flags: &[Flag<'a>], + modifiers: &[StoreModifier], + uid: &bool, + ) -> Result<(Response<'static>, flow::Transition)> { + if let Some(failed) = self.fail_read_only() { + return Ok((failed, flow::Transition::None)); + } + + let mut unchanged_since: Option = None; + modifiers.iter().for_each(|m| match m { + StoreModifier::UnchangedSince(val) => { + unchanged_since = Some(*val); + } + }); + + let (data, modified) = self + .mailbox + .store(sequence_set, kind, response, flags, unchanged_since, uid) + .await?; + + let mut ok_resp = Response::build() + .to_req(self.req) + .message("STORE completed") + .set_body(data); + + match modified[..] { + [] => (), + [_head, ..] 
=> { + let modified_str = format!( + "MODIFIED {}", + modified + .into_iter() + .map(|x| x.to_string()) + .collect::>() + .join(",") + ); + ok_resp = ok_resp.code(Code::Other(CodeOther::unvalidated( + modified_str.into_bytes(), + ))); + } + }; + + self.client_capabilities.store_modifiers_enable(modifiers); + + Ok((ok_resp.ok()?, flow::Transition::None)) + } + + async fn copy( + self, + sequence_set: &SequenceSet, + mailbox: &MailboxCodec<'a>, + uid: &bool, + ) -> Result<(Response<'static>, flow::Transition)> { + //@FIXME Could copy be valid in EXAMINE mode? + if let Some(failed) = self.fail_read_only() { + return Ok((failed, flow::Transition::None)); + } + + let name: &str = MailboxName(mailbox).try_into()?; + + let mb_opt = self.user.open_mailbox(&name).await?; + let mb = match mb_opt { + Some(mb) => mb, + None => { + return Ok(( + Response::build() + .to_req(self.req) + .message("Destination mailbox does not exist") + .code(Code::TryCreate) + .no()?, + flow::Transition::None, + )) + } + }; + + let (uidval, uid_map) = self.mailbox.copy(sequence_set, mb, uid).await?; + + let copyuid_str = format!( + "{} {} {}", + uidval, + uid_map + .iter() + .map(|(sid, _)| format!("{}", sid)) + .collect::>() + .join(","), + uid_map + .iter() + .map(|(_, tuid)| format!("{}", tuid)) + .collect::>() + .join(",") + ); + + Ok(( + Response::build() + .to_req(self.req) + .message("COPY completed") + .code(Code::Other(CodeOther::unvalidated( + format!("COPYUID {}", copyuid_str).into_bytes(), + ))) + .ok()?, + flow::Transition::None, + )) + } + + async fn r#move( + self, + sequence_set: &SequenceSet, + mailbox: &MailboxCodec<'a>, + uid: &bool, + ) -> Result<(Response<'static>, flow::Transition)> { + if let Some(failed) = self.fail_read_only() { + return Ok((failed, flow::Transition::None)); + } + + let name: &str = MailboxName(mailbox).try_into()?; + + let mb_opt = self.user.open_mailbox(&name).await?; + let mb = match mb_opt { + Some(mb) => mb, + None => { + return Ok(( + Response::build() + .to_req(self.req) + .message("Destination mailbox does not exist") + .code(Code::TryCreate) + .no()?, + flow::Transition::None, + )) + } + }; + + let (uidval, uid_map, data) = self.mailbox.r#move(sequence_set, mb, uid).await?; + + // compute code + let copyuid_str = format!( + "{} {} {}", + uidval, + uid_map + .iter() + .map(|(sid, _)| format!("{}", sid)) + .collect::>() + .join(","), + uid_map + .iter() + .map(|(_, tuid)| format!("{}", tuid)) + .collect::>() + .join(",") + ); + + Ok(( + Response::build() + .to_req(self.req) + .message("COPY completed") + .code(Code::Other(CodeOther::unvalidated( + format!("COPYUID {}", copyuid_str).into_bytes(), + ))) + .set_body(data) + .ok()?, + flow::Transition::None, + )) + } + + fn fail_read_only(&self) -> Option> { + match self.perm { + flow::MailboxPerm::ReadWrite => None, + flow::MailboxPerm::ReadOnly => Some( + Response::build() + .to_req(self.req) + .message("Write command are forbidden while exmining mailbox") + .no() + .unwrap(), + ), + } + } +} diff --git a/aero-proto/src/imap/flags.rs b/aero-proto/src/imap/flags.rs new file mode 100644 index 0000000..0f6ec64 --- /dev/null +++ b/aero-proto/src/imap/flags.rs @@ -0,0 +1,30 @@ +use imap_codec::imap_types::core::Atom; +use imap_codec::imap_types::flag::{Flag, FlagFetch}; + +pub fn from_str(f: &str) -> Option> { + match f.chars().next() { + Some('\\') => match f { + "\\Seen" => Some(FlagFetch::Flag(Flag::Seen)), + "\\Answered" => Some(FlagFetch::Flag(Flag::Answered)), + "\\Flagged" => Some(FlagFetch::Flag(Flag::Flagged)), + 
"\\Deleted" => Some(FlagFetch::Flag(Flag::Deleted)), + "\\Draft" => Some(FlagFetch::Flag(Flag::Draft)), + "\\Recent" => Some(FlagFetch::Recent), + _ => match Atom::try_from(f.strip_prefix('\\').unwrap().to_string()) { + Err(_) => { + tracing::error!(flag=%f, "Unable to encode flag as IMAP atom"); + None + } + Ok(a) => Some(FlagFetch::Flag(Flag::system(a))), + }, + }, + Some(_) => match Atom::try_from(f.to_string()) { + Err(_) => { + tracing::error!(flag=%f, "Unable to encode flag as IMAP atom"); + None + } + Ok(a) => Some(FlagFetch::Flag(Flag::keyword(a))), + }, + None => None, + } +} diff --git a/aero-proto/src/imap/flow.rs b/aero-proto/src/imap/flow.rs new file mode 100644 index 0000000..1986447 --- /dev/null +++ b/aero-proto/src/imap/flow.rs @@ -0,0 +1,115 @@ +use std::error::Error as StdError; +use std::fmt; +use std::sync::Arc; + +use imap_codec::imap_types::core::Tag; +use tokio::sync::Notify; + +use aero_collections::user::User; + +use crate::imap::mailbox_view::MailboxView; + +#[derive(Debug)] +pub enum Error { + ForbiddenTransition, +} +impl fmt::Display for Error { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "Forbidden Transition") + } +} +impl StdError for Error {} + +pub enum State { + NotAuthenticated, + Authenticated(Arc), + Selected(Arc, MailboxView, MailboxPerm), + Idle( + Arc, + MailboxView, + MailboxPerm, + Tag<'static>, + Arc, + ), + Logout, +} +impl State { + pub fn notify(&self) -> Option> { + match self { + Self::Idle(_, _, _, _, anotif) => Some(anotif.clone()), + _ => None, + } + } +} +impl fmt::Display for State { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + use State::*; + match self { + NotAuthenticated => write!(f, "NotAuthenticated"), + Authenticated(..) => write!(f, "Authenticated"), + Selected(..) => write!(f, "Selected"), + Idle(..) => write!(f, "Idle"), + Logout => write!(f, "Logout"), + } + } +} + +#[derive(Clone)] +pub enum MailboxPerm { + ReadOnly, + ReadWrite, +} + +pub enum Transition { + None, + Authenticate(Arc), + Select(MailboxView, MailboxPerm), + Idle(Tag<'static>, Notify), + UnIdle, + Unselect, + Logout, +} +impl fmt::Display for Transition { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + use Transition::*; + match self { + None => write!(f, "None"), + Authenticate(..) => write!(f, "Authenticated"), + Select(..) => write!(f, "Selected"), + Idle(..) => write!(f, "Idle"), + UnIdle => write!(f, "UnIdle"), + Unselect => write!(f, "Unselect"), + Logout => write!(f, "Logout"), + } + } +} + +// See RFC3501 section 3. 
+// https://datatracker.ietf.org/doc/html/rfc3501#page-13 +impl State { + pub fn apply(&mut self, tr: Transition) -> Result<(), Error> { + tracing::debug!(state=%self, transition=%tr, "try change state"); + + let new_state = match (std::mem::replace(self, State::Logout), tr) { + (s, Transition::None) => s, + (State::NotAuthenticated, Transition::Authenticate(u)) => State::Authenticated(u), + (State::Authenticated(u) | State::Selected(u, _, _), Transition::Select(m, p)) => { + State::Selected(u, m, p) + } + (State::Selected(u, _, _), Transition::Unselect) => State::Authenticated(u.clone()), + (State::Selected(u, m, p), Transition::Idle(t, s)) => { + State::Idle(u, m, p, t, Arc::new(s)) + } + (State::Idle(u, m, p, _, _), Transition::UnIdle) => State::Selected(u, m, p), + (_, Transition::Logout) => State::Logout, + (s, t) => { + tracing::error!(state=%s, transition=%t, "forbidden transition"); + return Err(Error::ForbiddenTransition); + } + }; + *self = new_state; + tracing::debug!(state=%self, "transition succeeded"); + + Ok(()) + } +} diff --git a/aero-proto/src/imap/imf_view.rs b/aero-proto/src/imap/imf_view.rs new file mode 100644 index 0000000..a4ca2e8 --- /dev/null +++ b/aero-proto/src/imap/imf_view.rs @@ -0,0 +1,109 @@ +use anyhow::{anyhow, Result}; +use chrono::naive::NaiveDate; + +use imap_codec::imap_types::core::{IString, NString}; +use imap_codec::imap_types::envelope::{Address, Envelope}; + +use eml_codec::imf; + +pub struct ImfView<'a>(pub &'a imf::Imf<'a>); + +impl<'a> ImfView<'a> { + pub fn naive_date(&self) -> Result { + Ok(self.0.date.ok_or(anyhow!("date is not set"))?.date_naive()) + } + + /// Envelope rules are defined in RFC 3501, section 7.4.2 + /// https://datatracker.ietf.org/doc/html/rfc3501#section-7.4.2 + /// + /// Some important notes: + /// + /// If the Sender or Reply-To lines are absent in the [RFC-2822] + /// header, or are present but empty, the server sets the + /// corresponding member of the envelope to be the same value as + /// the from member (the client is not expected to know to do + /// this). Note: [RFC-2822] requires that all messages have a valid + /// From header. Therefore, the from, sender, and reply-to + /// members in the envelope can not be NIL. + /// + /// If the Date, Subject, In-Reply-To, and Message-ID header lines + /// are absent in the [RFC-2822] header, the corresponding member + /// of the envelope is NIL; if these header lines are present but + /// empty the corresponding member of the envelope is the empty + /// string. + + //@FIXME return an error if the envelope is invalid instead of panicking + //@FIXME some fields must be defaulted if there are not set. 
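+    /// Illustration of the defaulting rules above (values are made up, not RFC text):
+    /// for a message whose header carries only `From: alice@example.com`, the generated
+    /// envelope reuses that mailbox for both `sender` and `reply_to`, while
+    /// `in_reply_to` and `message_id` are NIL since those header lines are absent.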
+ pub fn message_envelope(&self) -> Envelope<'static> { + let msg = self.0; + let from = msg.from.iter().map(convert_mbx).collect::>(); + + Envelope { + date: NString( + msg.date + .as_ref() + .map(|d| IString::try_from(d.to_rfc3339()).unwrap()), + ), + subject: NString( + msg.subject + .as_ref() + .map(|d| IString::try_from(d.to_string()).unwrap()), + ), + sender: msg + .sender + .as_ref() + .map(|v| vec![convert_mbx(v)]) + .unwrap_or(from.clone()), + reply_to: if msg.reply_to.is_empty() { + from.clone() + } else { + convert_addresses(&msg.reply_to) + }, + from, + to: convert_addresses(&msg.to), + cc: convert_addresses(&msg.cc), + bcc: convert_addresses(&msg.bcc), + in_reply_to: NString( + msg.in_reply_to + .iter() + .next() + .map(|d| IString::try_from(d.to_string()).unwrap()), + ), + message_id: NString( + msg.msg_id + .as_ref() + .map(|d| IString::try_from(d.to_string()).unwrap()), + ), + } + } +} + +pub fn convert_addresses(addrlist: &Vec) -> Vec> { + let mut acc = vec![]; + for item in addrlist { + match item { + imf::address::AddressRef::Single(a) => acc.push(convert_mbx(a)), + imf::address::AddressRef::Many(l) => acc.extend(l.participants.iter().map(convert_mbx)), + } + } + return acc; +} + +pub fn convert_mbx(addr: &imf::mailbox::MailboxRef) -> Address<'static> { + Address { + name: NString( + addr.name + .as_ref() + .map(|x| IString::try_from(x.to_string()).unwrap()), + ), + // SMTP at-domain-list (source route) seems obsolete since at least 1991 + // https://www.mhonarc.org/archive/html/ietf-822/1991-06/msg00060.html + adl: NString(None), + mailbox: NString(Some( + IString::try_from(addr.addrspec.local_part.to_string()).unwrap(), + )), + host: NString(Some( + IString::try_from(addr.addrspec.domain.to_string()).unwrap(), + )), + } +} diff --git a/aero-proto/src/imap/index.rs b/aero-proto/src/imap/index.rs new file mode 100644 index 0000000..3de46be --- /dev/null +++ b/aero-proto/src/imap/index.rs @@ -0,0 +1,211 @@ +use std::num::{NonZeroU32, NonZeroU64}; + +use anyhow::{anyhow, Result}; +use imap_codec::imap_types::sequence::{SeqOrUid, Sequence, SequenceSet}; + +use aero_collections::mail::uidindex::{ImapUid, ModSeq, UidIndex}; +use aero_collections::mail::unique_ident::UniqueIdent; + +pub struct Index<'a> { + pub imap_index: Vec>, + pub internal: &'a UidIndex, +} +impl<'a> Index<'a> { + pub fn new(internal: &'a UidIndex) -> Result { + let imap_index = internal + .idx_by_uid + .iter() + .enumerate() + .map(|(i_enum, (&uid, &uuid))| { + let (_, modseq, flags) = internal + .table + .get(&uuid) + .ok_or(anyhow!("mail is missing from index"))?; + let i_int: u32 = (i_enum + 1).try_into()?; + let i: NonZeroU32 = i_int.try_into()?; + + Ok(MailIndex { + i, + uid, + uuid, + modseq: *modseq, + flags, + }) + }) + .collect::>>()?; + + Ok(Self { + imap_index, + internal, + }) + } + + pub fn last(&'a self) -> Option<&'a MailIndex<'a>> { + self.imap_index.last() + } + + /// Fetch mail descriptors based on a sequence of UID + /// + /// Complexity analysis: + /// - Sort is O(n * log n) where n is the number of uid generated by the sequence + /// - Finding the starting point in the index O(log m) where m is the size of the mailbox + /// While n =< m, it's not clear if the difference is big or not. + /// + /// For now, the algorithm tries to be fast for small values of n, + /// as it is what is expected by clients. + /// + /// So we assume for our implementation that : n << m. + /// It's not true for full mailbox searches for example... 
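+    /// Illustrative walkthrough (made-up values): with mails at UIDs [1, 3, 5, 8] and
+    /// the sequence set `3:5`, the unrolled set is [3, 4, 5]; `partition_point` moves
+    /// the window to UID 3, UIDs 3 and 5 are collected, and 4 is skipped because no
+    /// mail carries that UID.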
+ pub fn fetch_on_uid(&'a self, sequence_set: &SequenceSet) -> Vec<&'a MailIndex<'a>> { + if self.imap_index.is_empty() { + return vec![]; + } + let largest = self.last().expect("The mailbox is not empty").uid; + let mut unroll_seq = sequence_set.iter(largest).collect::>(); + unroll_seq.sort(); + + let start_seq = match unroll_seq.iter().next() { + Some(elem) => elem, + None => return vec![], + }; + + // Quickly jump to the right point in the mailbox vector O(log m) instead + // of iterating one by one O(m). Works only because both unroll_seq & imap_index are sorted per uid. + let mut imap_idx = { + let start_idx = self + .imap_index + .partition_point(|mail_idx| &mail_idx.uid < start_seq); + &self.imap_index[start_idx..] + }; + + let mut acc = vec![]; + for wanted_uid in unroll_seq.iter() { + // Slide the window forward as long as its first element is lower than our wanted uid. + let start_idx = match imap_idx.iter().position(|midx| &midx.uid >= wanted_uid) { + Some(v) => v, + None => break, + }; + imap_idx = &imap_idx[start_idx..]; + + // If the beginning of our new window is the uid we want, we collect it + if &imap_idx[0].uid == wanted_uid { + acc.push(&imap_idx[0]); + } + } + + acc + } + + pub fn fetch_on_id(&'a self, sequence_set: &SequenceSet) -> Result>> { + if self.imap_index.is_empty() { + return Ok(vec![]); + } + let largest = NonZeroU32::try_from(self.imap_index.len() as u32)?; + let mut acc = sequence_set + .iter(largest) + .map(|wanted_id| { + self.imap_index + .get((wanted_id.get() as usize) - 1) + .ok_or(anyhow!("Mail not found")) + }) + .collect::>>()?; + + // Sort the result to be consistent with UID + acc.sort_by(|a, b| a.i.cmp(&b.i)); + + Ok(acc) + } + + pub fn fetch( + self: &'a Index<'a>, + sequence_set: &SequenceSet, + by_uid: bool, + ) -> Result>> { + match by_uid { + true => Ok(self.fetch_on_uid(sequence_set)), + _ => self.fetch_on_id(sequence_set), + } + } + + pub fn fetch_changed_since( + self: &'a Index<'a>, + sequence_set: &SequenceSet, + maybe_modseq: Option, + by_uid: bool, + ) -> Result>> { + let raw = self.fetch(sequence_set, by_uid)?; + let res = match maybe_modseq { + Some(pit) => raw.into_iter().filter(|midx| midx.modseq > pit).collect(), + None => raw, + }; + + Ok(res) + } + + pub fn fetch_unchanged_since( + self: &'a Index<'a>, + sequence_set: &SequenceSet, + maybe_modseq: Option, + by_uid: bool, + ) -> Result<(Vec<&'a MailIndex<'a>>, Vec<&'a MailIndex<'a>>)> { + let raw = self.fetch(sequence_set, by_uid)?; + let res = match maybe_modseq { + Some(pit) => raw.into_iter().partition(|midx| midx.modseq <= pit), + None => (raw, vec![]), + }; + + Ok(res) + } +} + +#[derive(Clone, Debug)] +pub struct MailIndex<'a> { + pub i: NonZeroU32, + pub uid: ImapUid, + pub uuid: UniqueIdent, + pub modseq: ModSeq, + pub flags: &'a Vec, +} + +impl<'a> MailIndex<'a> { + // The following functions are used to implement the SEARCH command + pub fn is_in_sequence_i(&self, seq: &Sequence) -> bool { + match seq { + Sequence::Single(SeqOrUid::Asterisk) => true, + Sequence::Single(SeqOrUid::Value(target)) => target == &self.i, + Sequence::Range(SeqOrUid::Asterisk, SeqOrUid::Value(x)) + | Sequence::Range(SeqOrUid::Value(x), SeqOrUid::Asterisk) => x <= &self.i, + Sequence::Range(SeqOrUid::Value(x1), SeqOrUid::Value(x2)) => { + if x1 < x2 { + x1 <= &self.i && &self.i <= x2 + } else { + x1 >= &self.i && &self.i >= x2 + } + } + Sequence::Range(SeqOrUid::Asterisk, SeqOrUid::Asterisk) => true, + } + } + + pub fn is_in_sequence_uid(&self, seq: &Sequence) -> bool { + match seq { + 
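+            // RFC 3501 allows range endpoints in either order ("4:2" is equivalent to
+            // "2:4"), hence the two branches below for x1 < x2 and x1 >= x2.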
Sequence::Single(SeqOrUid::Asterisk) => true, + Sequence::Single(SeqOrUid::Value(target)) => target == &self.uid, + Sequence::Range(SeqOrUid::Asterisk, SeqOrUid::Value(x)) + | Sequence::Range(SeqOrUid::Value(x), SeqOrUid::Asterisk) => x <= &self.uid, + Sequence::Range(SeqOrUid::Value(x1), SeqOrUid::Value(x2)) => { + if x1 < x2 { + x1 <= &self.uid && &self.uid <= x2 + } else { + x1 >= &self.uid && &self.uid >= x2 + } + } + Sequence::Range(SeqOrUid::Asterisk, SeqOrUid::Asterisk) => true, + } + } + + pub fn is_flag_set(&self, flag: &str) -> bool { + self.flags + .iter() + .any(|candidate| candidate.as_str() == flag) + } +} diff --git a/aero-proto/src/imap/mail_view.rs b/aero-proto/src/imap/mail_view.rs new file mode 100644 index 0000000..054014a --- /dev/null +++ b/aero-proto/src/imap/mail_view.rs @@ -0,0 +1,306 @@ +use std::num::NonZeroU32; + +use anyhow::{anyhow, bail, Result}; +use chrono::{naive::NaiveDate, DateTime as ChronoDateTime, Local, Offset, TimeZone, Utc}; + +use imap_codec::imap_types::core::NString; +use imap_codec::imap_types::datetime::DateTime; +use imap_codec::imap_types::fetch::{ + MessageDataItem, MessageDataItemName, Section as FetchSection, +}; +use imap_codec::imap_types::flag::Flag; +use imap_codec::imap_types::response::Data; + +use eml_codec::{ + imf, + part::{composite::Message, AnyPart}, +}; + +use aero_collections::mail::query::QueryResult; + +use crate::imap::attributes::AttributesProxy; +use crate::imap::flags; +use crate::imap::imf_view::ImfView; +use crate::imap::index::MailIndex; +use crate::imap::mime_view; +use crate::imap::response::Body; + +pub struct MailView<'a> { + pub in_idx: &'a MailIndex<'a>, + pub query_result: &'a QueryResult, + pub content: FetchedMail<'a>, +} + +impl<'a> MailView<'a> { + pub fn new(query_result: &'a QueryResult, in_idx: &'a MailIndex<'a>) -> Result> { + Ok(Self { + in_idx, + query_result, + content: match query_result { + QueryResult::FullResult { content, .. } => { + let (_, parsed) = + eml_codec::parse_message(&content).or(Err(anyhow!("Invalid mail body")))?; + FetchedMail::full_from_message(parsed) + } + QueryResult::PartialResult { metadata, .. } => { + let (_, parsed) = eml_codec::parse_message(&metadata.headers) + .or(Err(anyhow!("unable to parse email headers")))?; + FetchedMail::partial_from_message(parsed) + } + QueryResult::IndexResult { .. 
} => FetchedMail::IndexOnly, + }, + }) + } + + pub fn imf(&self) -> Option { + self.content.as_imf().map(ImfView) + } + + pub fn selected_mime(&'a self) -> Option> { + self.content.as_anypart().ok().map(mime_view::SelectedMime) + } + + pub fn filter(&self, ap: &AttributesProxy) -> Result<(Body<'static>, SeenFlag)> { + let mut seen = SeenFlag::DoNothing; + let res_attrs = ap + .attrs + .iter() + .map(|attr| match attr { + MessageDataItemName::Uid => Ok(self.uid()), + MessageDataItemName::Flags => Ok(self.flags()), + MessageDataItemName::Rfc822Size => self.rfc_822_size(), + MessageDataItemName::Rfc822Header => self.rfc_822_header(), + MessageDataItemName::Rfc822Text => self.rfc_822_text(), + MessageDataItemName::Rfc822 => { + if self.is_not_yet_seen() { + seen = SeenFlag::MustAdd; + } + self.rfc822() + } + MessageDataItemName::Envelope => Ok(self.envelope()), + MessageDataItemName::Body => self.body(), + MessageDataItemName::BodyStructure => self.body_structure(), + MessageDataItemName::BodyExt { + section, + partial, + peek, + } => { + let (body, has_seen) = self.body_ext(section, partial, peek)?; + seen = has_seen; + Ok(body) + } + MessageDataItemName::InternalDate => self.internal_date(), + MessageDataItemName::ModSeq => Ok(self.modseq()), + }) + .collect::, _>>()?; + + Ok(( + Body::Data(Data::Fetch { + seq: self.in_idx.i, + items: res_attrs.try_into()?, + }), + seen, + )) + } + + pub fn stored_naive_date(&self) -> Result { + let mail_meta = self.query_result.metadata().expect("metadata were fetched"); + let mail_ts: i64 = mail_meta.internaldate.try_into()?; + let msg_date: ChronoDateTime = ChronoDateTime::from_timestamp(mail_ts, 0) + .ok_or(anyhow!("unable to parse timestamp"))? + .with_timezone(&Local); + + Ok(msg_date.date_naive()) + } + + pub fn is_header_contains_pattern(&self, hdr: &[u8], pattern: &[u8]) -> bool { + let mime = match self.selected_mime() { + None => return false, + Some(x) => x, + }; + + let val = match mime.header_value(hdr) { + None => return false, + Some(x) => x, + }; + + val.windows(pattern.len()).any(|win| win == pattern) + } + + // Private function, mainly for filter! + fn uid(&self) -> MessageDataItem<'static> { + MessageDataItem::Uid(self.in_idx.uid.clone()) + } + + fn flags(&self) -> MessageDataItem<'static> { + MessageDataItem::Flags( + self.in_idx + .flags + .iter() + .filter_map(|f| flags::from_str(f)) + .collect(), + ) + } + + fn rfc_822_size(&self) -> Result> { + let sz = self + .query_result + .metadata() + .ok_or(anyhow!("mail metadata are required"))? + .rfc822_size; + Ok(MessageDataItem::Rfc822Size(sz as u32)) + } + + fn rfc_822_header(&self) -> Result> { + let hdrs: NString = self + .query_result + .metadata() + .ok_or(anyhow!("mail metadata are required"))? 
+ .headers + .to_vec() + .try_into()?; + Ok(MessageDataItem::Rfc822Header(hdrs)) + } + + fn rfc_822_text(&self) -> Result> { + let txt: NString = self.content.as_msg()?.raw_body.to_vec().try_into()?; + Ok(MessageDataItem::Rfc822Text(txt)) + } + + fn rfc822(&self) -> Result> { + let full: NString = self.content.as_msg()?.raw_part.to_vec().try_into()?; + Ok(MessageDataItem::Rfc822(full)) + } + + fn envelope(&self) -> MessageDataItem<'static> { + MessageDataItem::Envelope( + self.imf() + .expect("an imf object is derivable from fetchedmail") + .message_envelope(), + ) + } + + fn body(&self) -> Result> { + Ok(MessageDataItem::Body(mime_view::bodystructure( + self.content.as_msg()?.child.as_ref(), + false, + )?)) + } + + fn body_structure(&self) -> Result> { + Ok(MessageDataItem::BodyStructure(mime_view::bodystructure( + self.content.as_msg()?.child.as_ref(), + true, + )?)) + } + + fn is_not_yet_seen(&self) -> bool { + let seen_flag = Flag::Seen.to_string(); + !self.in_idx.flags.iter().any(|x| *x == seen_flag) + } + + /// maps to BODY[
<section>]<<partial>> and BODY.PEEK[<section>
]<> + /// peek does not implicitly set the \Seen flag + /// eg. BODY[HEADER.FIELDS (DATE FROM)] + /// eg. BODY[]<0.2048> + fn body_ext( + &self, + section: &Option>, + partial: &Option<(u32, NonZeroU32)>, + peek: &bool, + ) -> Result<(MessageDataItem<'static>, SeenFlag)> { + // Manage Seen flag + let mut seen = SeenFlag::DoNothing; + if !peek && self.is_not_yet_seen() { + // Add \Seen flag + //self.mailbox.add_flags(uuid, &[seen_flag]).await?; + seen = SeenFlag::MustAdd; + } + + // Process message + let (text, origin) = + match mime_view::body_ext(self.content.as_anypart()?, section, partial)? { + mime_view::BodySection::Full(body) => (body, None), + mime_view::BodySection::Slice { body, origin_octet } => (body, Some(origin_octet)), + }; + + let data: NString = text.to_vec().try_into()?; + + return Ok(( + MessageDataItem::BodyExt { + section: section.as_ref().map(|fs| fs.clone()), + origin, + data, + }, + seen, + )); + } + + fn internal_date(&self) -> Result> { + let dt = Utc + .fix() + .timestamp_opt( + i64::try_from( + self.query_result + .metadata() + .ok_or(anyhow!("mail metadata were not fetched"))? + .internaldate + / 1000, + )?, + 0, + ) + .earliest() + .ok_or(anyhow!("Unable to parse internal date"))?; + Ok(MessageDataItem::InternalDate(DateTime::unvalidated(dt))) + } + + fn modseq(&self) -> MessageDataItem<'static> { + MessageDataItem::ModSeq(self.in_idx.modseq) + } +} + +pub enum SeenFlag { + DoNothing, + MustAdd, +} + +// ------------------- + +pub enum FetchedMail<'a> { + IndexOnly, + Partial(AnyPart<'a>), + Full(AnyPart<'a>), +} +impl<'a> FetchedMail<'a> { + pub fn full_from_message(msg: Message<'a>) -> Self { + Self::Full(AnyPart::Msg(msg)) + } + + pub fn partial_from_message(msg: Message<'a>) -> Self { + Self::Partial(AnyPart::Msg(msg)) + } + + pub fn as_anypart(&self) -> Result<&AnyPart<'a>> { + match self { + FetchedMail::Full(x) => Ok(&x), + FetchedMail::Partial(x) => Ok(&x), + _ => bail!("The full message must be fetched, not only its headers"), + } + } + + pub fn as_msg(&self) -> Result<&Message<'a>> { + match self { + FetchedMail::Full(AnyPart::Msg(x)) => Ok(&x), + FetchedMail::Partial(AnyPart::Msg(x)) => Ok(&x), + _ => bail!("The full message must be fetched, not only its headers AND it must be an AnyPart::Msg."), + } + } + + pub fn as_imf(&self) -> Option<&imf::Imf<'a>> { + match self { + FetchedMail::Full(AnyPart::Msg(x)) => Some(&x.imf), + FetchedMail::Partial(AnyPart::Msg(x)) => Some(&x.imf), + _ => None, + } + } +} diff --git a/aero-proto/src/imap/mailbox_view.rs b/aero-proto/src/imap/mailbox_view.rs new file mode 100644 index 0000000..5154359 --- /dev/null +++ b/aero-proto/src/imap/mailbox_view.rs @@ -0,0 +1,772 @@ +use std::collections::HashSet; +use std::num::{NonZeroU32, NonZeroU64}; +use std::sync::Arc; + +use anyhow::{anyhow, Error, Result}; + +use futures::stream::{StreamExt, TryStreamExt}; + +use imap_codec::imap_types::core::Charset; +use imap_codec::imap_types::fetch::MessageDataItem; +use imap_codec::imap_types::flag::{Flag, FlagFetch, FlagPerm, StoreResponse, StoreType}; +use imap_codec::imap_types::response::{Code, CodeOther, Data, Status}; +use imap_codec::imap_types::search::SearchKey; +use imap_codec::imap_types::sequence::SequenceSet; + +use aero_collections::mail::mailbox::Mailbox; +use aero_collections::mail::query::QueryScope; +use aero_collections::mail::snapshot::FrozenMailbox; +use aero_collections::mail::uidindex::{ImapUid, ImapUidvalidity, ModSeq}; +use aero_collections::mail::unique_ident::UniqueIdent; + +use 
crate::imap::attributes::AttributesProxy; +use crate::imap::flags; +use crate::imap::index::Index; +use crate::imap::mail_view::{MailView, SeenFlag}; +use crate::imap::response::Body; +use crate::imap::search; + +const DEFAULT_FLAGS: [Flag; 5] = [ + Flag::Seen, + Flag::Answered, + Flag::Flagged, + Flag::Deleted, + Flag::Draft, +]; + +pub struct UpdateParameters { + pub silence: HashSet, + pub with_modseq: bool, + pub with_uid: bool, +} +impl Default for UpdateParameters { + fn default() -> Self { + Self { + silence: HashSet::new(), + with_modseq: false, + with_uid: false, + } + } +} + +/// A MailboxView is responsible for giving the client the information +/// it needs about a mailbox, such as an initial summary of the mailbox's +/// content and continuous updates indicating when the content +/// of the mailbox has been changed. +/// To do this, it keeps a variable `known_state` that corresponds to +/// what the client knows, and produces IMAP messages to be sent to the +/// client that go along updates to `known_state`. +pub struct MailboxView { + pub internal: FrozenMailbox, + pub is_condstore: bool, +} + +impl MailboxView { + /// Creates a new IMAP view into a mailbox. + pub async fn new(mailbox: Arc, is_cond: bool) -> Self { + Self { + internal: mailbox.frozen().await, + is_condstore: is_cond, + } + } + + /// Create an updated view, useful to make a diff + /// between what the client knows and new stuff + /// Produces a set of IMAP responses describing the change between + /// what the client knows and what is actually in the mailbox. + /// This does NOT trigger a sync, it bases itself on what is currently + /// loaded in RAM by Bayou. + pub async fn update(&mut self, params: UpdateParameters) -> Result>> { + let old_snapshot = self.internal.update().await; + let new_snapshot = &self.internal.snapshot; + + let mut data = Vec::::new(); + + // Calculate diff between two mailbox states + // See example in IMAP RFC in section on NOOP command: + // we want to produce something like this: + // C: a047 NOOP + // S: * 22 EXPUNGE + // S: * 23 EXISTS + // S: * 14 FETCH (UID 1305 FLAGS (\Seen \Deleted)) + // S: a047 OK Noop completed + // In other words: + // - notify client of expunged mails + // - if new mails arrived, notify client of number of existing mails + // - if flags changed for existing mails, tell client + // (for this last step: if uidvalidity changed, do nothing, + // just notify of new uidvalidity and they will resync) + + // - notify client of expunged mails + let mut n_expunge = 0; + for (i, (_uid, uuid)) in old_snapshot.idx_by_uid.iter().enumerate() { + if !new_snapshot.table.contains_key(uuid) { + data.push(Body::Data(Data::Expunge( + NonZeroU32::try_from((i + 1 - n_expunge) as u32).unwrap(), + ))); + n_expunge += 1; + } + } + + // - if new mails arrived, notify client of number of existing mails + if new_snapshot.table.len() != old_snapshot.table.len() - n_expunge + || new_snapshot.uidvalidity != old_snapshot.uidvalidity + { + data.push(self.exists_status()?); + } + + if new_snapshot.uidvalidity != old_snapshot.uidvalidity { + // TODO: do we want to push less/more info than this? 
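+            // For illustration (numbers are arbitrary), the client then sees:
+            //   * OK [UIDVALIDITY 42] UIDs valid
+            //   * OK [UIDNEXT 7] Predict next UID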
+ data.push(self.uidvalidity_status()?); + data.push(self.uidnext_status()?); + } else { + // - if flags changed for existing mails, tell client + for (i, (_uid, uuid)) in new_snapshot.idx_by_uid.iter().enumerate() { + if params.silence.contains(uuid) { + continue; + } + + let old_mail = old_snapshot.table.get(uuid); + let new_mail = new_snapshot.table.get(uuid); + if old_mail.is_some() && old_mail != new_mail { + if let Some((uid, modseq, flags)) = new_mail { + let mut items = vec![MessageDataItem::Flags( + flags.iter().filter_map(|f| flags::from_str(f)).collect(), + )]; + + if params.with_uid { + items.push(MessageDataItem::Uid(*uid)); + } + + if params.with_modseq { + items.push(MessageDataItem::ModSeq(*modseq)); + } + + data.push(Body::Data(Data::Fetch { + seq: NonZeroU32::try_from((i + 1) as u32).unwrap(), + items: items.try_into()?, + })); + } + } + } + } + Ok(data) + } + + /// Generates the necessary IMAP messages so that the client + /// has a satisfactory summary of the current mailbox's state. + /// These are the messages that are sent in response to a SELECT command. + pub fn summary(&self) -> Result>> { + let mut data = Vec::::new(); + data.push(self.exists_status()?); + data.push(self.recent_status()?); + data.extend(self.flags_status()?.into_iter()); + data.push(self.uidvalidity_status()?); + data.push(self.uidnext_status()?); + if self.is_condstore { + data.push(self.highestmodseq_status()?); + } + /*self.unseen_first_status()? + .map(|unseen_status| data.push(unseen_status));*/ + + Ok(data) + } + + pub async fn store<'a>( + &mut self, + sequence_set: &SequenceSet, + kind: &StoreType, + response: &StoreResponse, + flags: &[Flag<'a>], + unchanged_since: Option, + is_uid_store: &bool, + ) -> Result<(Vec>, Vec)> { + self.internal.sync().await?; + + let flags = flags.iter().map(|x| x.to_string()).collect::>(); + + let idx = self.index()?; + let (editable, in_conflict) = + idx.fetch_unchanged_since(sequence_set, unchanged_since, *is_uid_store)?; + + for mi in editable.iter() { + match kind { + StoreType::Add => { + self.internal.mailbox.add_flags(mi.uuid, &flags[..]).await?; + } + StoreType::Remove => { + self.internal.mailbox.del_flags(mi.uuid, &flags[..]).await?; + } + StoreType::Replace => { + self.internal.mailbox.set_flags(mi.uuid, &flags[..]).await?; + } + } + } + + let silence = match response { + StoreResponse::Answer => HashSet::new(), + StoreResponse::Silent => editable.iter().map(|midx| midx.uuid).collect(), + }; + + let conflict_id_or_uid = match is_uid_store { + true => in_conflict.into_iter().map(|midx| midx.uid).collect(), + _ => in_conflict.into_iter().map(|midx| midx.i).collect(), + }; + + let summary = self + .update(UpdateParameters { + with_uid: *is_uid_store, + with_modseq: unchanged_since.is_some(), + silence, + }) + .await?; + + Ok((summary, conflict_id_or_uid)) + } + + pub async fn idle_sync(&mut self) -> Result>> { + self.internal + .mailbox + .notify() + .await + .upgrade() + .ok_or(anyhow!("test"))? 
+ .notified() + .await; + self.internal.mailbox.opportunistic_sync().await?; + self.update(UpdateParameters::default()).await + } + + pub async fn expunge( + &mut self, + maybe_seq_set: &Option, + ) -> Result>> { + // Get a recent view to apply our change + self.internal.sync().await?; + let state = self.internal.peek().await; + let idx = Index::new(&state)?; + + // Build a default sequence set for the default case + use imap_codec::imap_types::sequence::{SeqOrUid, Sequence}; + let seq = match maybe_seq_set { + Some(s) => s.clone(), + None => SequenceSet( + vec![Sequence::Range( + SeqOrUid::Value(NonZeroU32::MIN), + SeqOrUid::Asterisk, + )] + .try_into() + .unwrap(), + ), + }; + + let deleted_flag = Flag::Deleted.to_string(); + let msgs = idx + .fetch_on_uid(&seq) + .into_iter() + .filter(|midx| midx.flags.iter().any(|x| *x == deleted_flag)) + .map(|midx| midx.uuid); + + for msg in msgs { + self.internal.mailbox.delete(msg).await?; + } + + self.update(UpdateParameters::default()).await + } + + pub async fn copy( + &self, + sequence_set: &SequenceSet, + to: Arc, + is_uid_copy: &bool, + ) -> Result<(ImapUidvalidity, Vec<(ImapUid, ImapUid)>)> { + let idx = self.index()?; + let mails = idx.fetch(sequence_set, *is_uid_copy)?; + + let mut new_uuids = vec![]; + for mi in mails.iter() { + new_uuids.push(to.copy_from(&self.internal.mailbox, mi.uuid).await?); + } + + let mut ret = vec![]; + let to_state = to.current_uid_index().await; + for (mi, new_uuid) in mails.iter().zip(new_uuids.iter()) { + let dest_uid = to_state + .table + .get(new_uuid) + .ok_or(anyhow!("copied mail not in destination mailbox"))? + .0; + ret.push((mi.uid, dest_uid)); + } + + Ok((to_state.uidvalidity, ret)) + } + + pub async fn r#move( + &mut self, + sequence_set: &SequenceSet, + to: Arc, + is_uid_copy: &bool, + ) -> Result<(ImapUidvalidity, Vec<(ImapUid, ImapUid)>, Vec>)> { + let idx = self.index()?; + let mails = idx.fetch(sequence_set, *is_uid_copy)?; + + for mi in mails.iter() { + to.move_from(&self.internal.mailbox, mi.uuid).await?; + } + + let mut ret = vec![]; + let to_state = to.current_uid_index().await; + for mi in mails.iter() { + let dest_uid = to_state + .table + .get(&mi.uuid) + .ok_or(anyhow!("moved mail not in destination mailbox"))? + .0; + ret.push((mi.uid, dest_uid)); + } + + let update = self + .update(UpdateParameters { + with_uid: *is_uid_copy, + ..UpdateParameters::default() + }) + .await?; + + Ok((to_state.uidvalidity, ret, update)) + } + + /// Looks up state changes in the mailbox and produces a set of IMAP + /// responses describing the new state. + pub async fn fetch<'b>( + &self, + sequence_set: &SequenceSet, + ap: &AttributesProxy, + changed_since: Option, + is_uid_fetch: &bool, + ) -> Result>> { + // [1/6] Pre-compute data + // a. what are the uuids of the emails we want? + // b. do we need to fetch the full body? 
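+        // Rough sketch of the intent (the exact attribute classification lives in
+        // AttributesProxy::need_body): attributes answerable from cached headers and
+        // metadata stay on QueryScope::Partial, while anything touching the body
+        // (e.g. BODY[...], RFC822) forces QueryScope::Full and a full blob fetch.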
+ //let ap = AttributesProxy::new(attributes, *is_uid_fetch); + let query_scope = match ap.need_body() { + true => QueryScope::Full, + _ => QueryScope::Partial, + }; + tracing::debug!("Query scope {:?}", query_scope); + let idx = self.index()?; + let mail_idx_list = idx.fetch_changed_since(sequence_set, changed_since, *is_uid_fetch)?; + + // [2/6] Fetch the emails + let uuids = mail_idx_list + .iter() + .map(|midx| midx.uuid) + .collect::>(); + + let query = self.internal.query(&uuids, query_scope); + //let query_result = self.internal.query(&uuids, query_scope).fetch().await?; + + let query_stream = query + .fetch() + .zip(futures::stream::iter(mail_idx_list)) + // [3/6] Derive an IMAP-specific view from the results, apply the filters + .map(|(maybe_qr, midx)| match maybe_qr { + Ok(qr) => Ok((MailView::new(&qr, midx)?.filter(&ap)?, midx)), + Err(e) => Err(e), + }) + // [4/6] Apply the IMAP transformation + .then(|maybe_ret| async move { + let ((body, seen), midx) = maybe_ret?; + + // [5/6] Register the \Seen flags + if matches!(seen, SeenFlag::MustAdd) { + let seen_flag = Flag::Seen.to_string(); + self.internal + .mailbox + .add_flags(midx.uuid, &[seen_flag]) + .await?; + } + + Ok::<_, anyhow::Error>(body) + }); + + // [6/6] Build the final result that will be sent to the client. + query_stream.try_collect().await + } + + /// A naive search implementation... + pub async fn search<'a>( + &self, + _charset: &Option>, + search_key: &SearchKey<'a>, + uid: bool, + ) -> Result<(Vec>, bool)> { + // 1. Compute the subset of sequence identifiers we need to fetch + // based on the search query + let crit = search::Criteria(search_key); + let (seq_set, seq_type) = crit.to_sequence_set(); + + // 2. Get the selection + let idx = self.index()?; + let selection = idx.fetch(&seq_set, seq_type.is_uid())?; + + // 3. Filter the selection based on the ID / UID / Flags + let (kept_idx, to_fetch) = crit.filter_on_idx(&selection); + + // 4.a Fetch additional info about the emails + let query_scope = crit.query_scope(); + let uuids = to_fetch.iter().map(|midx| midx.uuid).collect::>(); + let query = self.internal.query(&uuids, query_scope); + + // 4.b We don't want to keep all data in memory, so we do the computing in a stream + let query_stream = query + .fetch() + .zip(futures::stream::iter(&to_fetch)) + // 5.a Build a mailview with the body, might fail with an error + // 5.b If needed, filter the selection based on the body, but keep the errors + // 6. Drop the query+mailbox, keep only the mail index + // Here we release a lot of memory, this is the most important part ^^ + .filter_map(|(maybe_qr, midx)| { + let r = match maybe_qr { + Ok(qr) => match MailView::new(&qr, midx).map(|mv| crit.is_keep_on_query(&mv)) { + Ok(true) => Some(Ok(*midx)), + Ok(_) => None, + Err(e) => Some(Err(e)), + }, + Err(e) => Some(Err(e)), + }; + futures::future::ready(r) + }); + + // 7. Chain both streams (part resolved from index, part resolved from metadata+body) + let main_stream = futures::stream::iter(kept_idx) + .map(Ok) + .chain(query_stream) + .map_ok(|idx| match uid { + true => (idx.uid, idx.modseq), + _ => (idx.i, idx.modseq), + }); + + // 8. Do the actual computation + let internal_result: Vec<_> = main_stream.try_collect().await?; + let (selection, modseqs): (Vec<_>, Vec<_>) = internal_result.into_iter().unzip(); + + // 9. Aggregate the maximum modseq value + let maybe_modseq = match crit.is_modseq() { + true => modseqs.into_iter().max(), + _ => None, + }; + + // 10. 
Return the final result + Ok(( + vec![Body::Data(Data::Search(selection, maybe_modseq))], + maybe_modseq.is_some(), + )) + } + + // ---- + /// @FIXME index should be stored for longer than a single request + /// Instead they should be tied to the FrozenMailbox refresh + /// It's not trivial to refactor the code to do that, so we are doing + /// some useless computation for now... + fn index<'a>(&'a self) -> Result> { + Index::new(&self.internal.snapshot) + } + + /// Produce an OK [UIDVALIDITY _] message corresponding to `known_state` + fn uidvalidity_status(&self) -> Result> { + let uid_validity = Status::ok( + None, + Some(Code::UidValidity(self.uidvalidity())), + "UIDs valid", + ) + .map_err(Error::msg)?; + Ok(Body::Status(uid_validity)) + } + + pub(crate) fn uidvalidity(&self) -> ImapUidvalidity { + self.internal.snapshot.uidvalidity + } + + /// Produce an OK [UIDNEXT _] message corresponding to `known_state` + fn uidnext_status(&self) -> Result> { + let next_uid = Status::ok( + None, + Some(Code::UidNext(self.uidnext())), + "Predict next UID", + ) + .map_err(Error::msg)?; + Ok(Body::Status(next_uid)) + } + + pub(crate) fn uidnext(&self) -> ImapUid { + self.internal.snapshot.uidnext + } + + pub(crate) fn highestmodseq_status(&self) -> Result> { + Ok(Body::Status(Status::ok( + None, + Some(Code::Other(CodeOther::unvalidated( + format!("HIGHESTMODSEQ {}", self.highestmodseq()).into_bytes(), + ))), + "Highest", + )?)) + } + + pub(crate) fn highestmodseq(&self) -> ModSeq { + self.internal.snapshot.highestmodseq + } + + /// Produce an EXISTS message corresponding to the number of mails + /// in `known_state` + fn exists_status(&self) -> Result> { + Ok(Body::Data(Data::Exists(self.exists()?))) + } + + pub(crate) fn exists(&self) -> Result { + Ok(u32::try_from(self.internal.snapshot.idx_by_uid.len())?) + } + + /// Produce a RECENT message corresponding to the number of + /// recent mails in `known_state` + fn recent_status(&self) -> Result> { + Ok(Body::Data(Data::Recent(self.recent()?))) + } + + #[allow(dead_code)] + fn unseen_first_status(&self) -> Result>> { + Ok(self + .unseen_first()? + .map(|unseen_id| { + Status::ok(None, Some(Code::Unseen(unseen_id)), "First unseen.").map(Body::Status) + }) + .transpose()?) + } + + #[allow(dead_code)] + fn unseen_first(&self) -> Result> { + Ok(self + .internal + .snapshot + .table + .values() + .enumerate() + .find(|(_i, (_imap_uid, _modseq, flags))| !flags.contains(&"\\Seen".to_string())) + .map(|(i, _)| NonZeroU32::try_from(i as u32 + 1)) + .transpose()?) + } + + pub(crate) fn recent(&self) -> Result { + let recent = self + .internal + .snapshot + .idx_by_flag + .get(&"\\Recent".to_string()) + .map(|os| os.len()) + .unwrap_or(0); + Ok(u32::try_from(recent)?) + } + + /// Produce a FLAGS and a PERMANENTFLAGS message that indicates + /// the flags that are in `known_state` + default flags + fn flags_status(&self) -> Result>> { + let mut body = vec![]; + + // 1. Collecting all the possible flags in the mailbox + // 1.a Fetch them from our index + let mut known_flags: Vec = self + .internal + .snapshot + .idx_by_flag + .flags() + .filter_map(|f| match flags::from_str(f) { + Some(FlagFetch::Flag(fl)) => Some(fl), + _ => None, + }) + .collect(); + // 1.b Merge it with our default flags list + for f in DEFAULT_FLAGS.iter() { + if !known_flags.contains(f) { + known_flags.push(f.clone()); + } + } + // 1.c Create the IMAP message + body.push(Body::Data(Data::Flags(known_flags.clone()))); + + // 2. 
Returning flags that are persisted + // 2.a Always advertise our default flags + let mut permanent = DEFAULT_FLAGS + .iter() + .map(|f| FlagPerm::Flag(f.clone())) + .collect::>(); + // 2.b Say that we support any keyword flag + permanent.push(FlagPerm::Asterisk); + // 2.c Create the IMAP message + let permanent_flags = Status::ok( + None, + Some(Code::PermanentFlags(permanent)), + "Flags permitted", + ) + .map_err(Error::msg)?; + body.push(Body::Status(permanent_flags)); + + // Done! + Ok(body) + } + + pub(crate) fn unseen_count(&self) -> usize { + let total = self.internal.snapshot.table.len(); + let seen = self + .internal + .snapshot + .idx_by_flag + .get(&Flag::Seen.to_string()) + .map(|x| x.len()) + .unwrap_or(0); + total - seen + } +} + +#[cfg(test)] +mod tests { + use super::*; + use imap_codec::encode::Encoder; + use imap_codec::imap_types::core::Vec1; + use imap_codec::imap_types::fetch::Section; + use imap_codec::imap_types::fetch::{MacroOrMessageDataItemNames, MessageDataItemName}; + use imap_codec::imap_types::response::Response; + use imap_codec::ResponseCodec; + use std::fs; + + use crate::cryptoblob; + use crate::imap::index::MailIndex; + use crate::imap::mail_view::MailView; + use crate::imap::mime_view; + use crate::mail::mailbox::MailMeta; + use crate::mail::query::QueryResult; + use crate::mail::unique_ident; + + #[test] + fn mailview_body_ext() -> Result<()> { + let ap = AttributesProxy::new( + &MacroOrMessageDataItemNames::MessageDataItemNames(vec![ + MessageDataItemName::BodyExt { + section: Some(Section::Header(None)), + partial: None, + peek: false, + }, + ]), + &[], + false, + ); + + let key = cryptoblob::gen_key(); + let meta = MailMeta { + internaldate: 0u64, + headers: vec![], + message_key: key, + rfc822_size: 8usize, + }; + + let index_entry = (NonZeroU32::MIN, NonZeroU64::MIN, vec![]); + let mail_in_idx = MailIndex { + i: NonZeroU32::MIN, + uid: index_entry.0, + modseq: index_entry.1, + uuid: unique_ident::gen_ident(), + flags: &index_entry.2, + }; + let rfc822 = b"Subject: hello\r\nFrom: a@a.a\r\nTo: b@b.b\r\nDate: Thu, 12 Oct 2023 08:45:28 +0000\r\n\r\nhello world"; + let qr = QueryResult::FullResult { + uuid: mail_in_idx.uuid.clone(), + metadata: meta, + content: rfc822.to_vec(), + }; + + let mv = MailView::new(&qr, &mail_in_idx)?; + let (res_body, _seen) = mv.filter(&ap)?; + + let fattr = match res_body { + Body::Data(Data::Fetch { + seq: _seq, + items: attr, + }) => Ok(attr), + _ => Err(anyhow!("Not a fetch body")), + }?; + + assert_eq!(fattr.as_ref().len(), 1); + + let (sec, _orig, _data) = match &fattr.as_ref()[0] { + MessageDataItem::BodyExt { + section, + origin, + data, + } => Ok((section, origin, data)), + _ => Err(anyhow!("not a body ext message attribute")), + }?; + + assert_eq!(sec.as_ref().unwrap(), &Section::Header(None)); + + Ok(()) + } + + /// Future automated test. We use lossy utf8 conversion + lowercase everything, + /// so this test might allow invalid results. But at least it allows us to quickly test a + /// large variety of emails. + /// Keep in mind that special cases must still be tested manually! 
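+    /// Hypothetical invocation while iterating on the parser (assuming a standard cargo
+    /// setup): `cargo test fetch_body -- --nocapture` prints the aerogramme vs. dovecot
+    /// BODY comparison for every sample below.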
+ #[test] + fn fetch_body() -> Result<()> { + let prefixes = [ + /* *** MY OWN DATASET *** */ + "tests/emails/dxflrs/0001_simple", + "tests/emails/dxflrs/0002_mime", + "tests/emails/dxflrs/0003_mime-in-mime", + "tests/emails/dxflrs/0004_msg-in-msg", + // eml_codec do not support continuation for the moment + //"tests/emails/dxflrs/0005_mail-parser-readme", + "tests/emails/dxflrs/0006_single-mime", + "tests/emails/dxflrs/0007_raw_msg_in_rfc822", + /* *** (STRANGE) RFC *** */ + //"tests/emails/rfc/000", // must return text/enriched, we return text/plain + //"tests/emails/rfc/001", // does not recognize the multipart/external-body, breaks the + // whole parsing + //"tests/emails/rfc/002", // wrong date in email + + //"tests/emails/rfc/003", // dovecot fixes \r\r: the bytes number is wrong + text/enriched + + /* *** THIRD PARTY *** */ + //"tests/emails/thirdparty/000", // dovecot fixes \r\r: the bytes number is wrong + //"tests/emails/thirdparty/001", // same + "tests/emails/thirdparty/002", // same + + /* *** LEGACY *** */ + //"tests/emails/legacy/000", // same issue with \r\r + ]; + + for pref in prefixes.iter() { + println!("{}", pref); + let txt = fs::read(format!("{}.eml", pref))?; + let oracle = fs::read(format!("{}.dovecot.body", pref))?; + let message = eml_codec::parse_message(&txt).unwrap().1; + + let test_repr = Response::Data(Data::Fetch { + seq: NonZeroU32::new(1).unwrap(), + items: Vec1::from(MessageDataItem::Body(mime_view::bodystructure( + &message.child, + false, + )?)), + }); + let test_bytes = ResponseCodec::new().encode(&test_repr).dump(); + let test_str = String::from_utf8_lossy(&test_bytes).to_lowercase(); + + let oracle_str = + format!("* 1 FETCH {}\r\n", String::from_utf8_lossy(&oracle)).to_lowercase(); + + println!("aerogramme: {}\n\ndovecot: {}\n\n", test_str, oracle_str); + //println!("\n\n {} \n\n", String::from_utf8_lossy(&resp)); + assert_eq!(test_str, oracle_str); + } + + Ok(()) + } +} diff --git a/aero-proto/src/imap/mime_view.rs b/aero-proto/src/imap/mime_view.rs new file mode 100644 index 0000000..720f20a --- /dev/null +++ b/aero-proto/src/imap/mime_view.rs @@ -0,0 +1,582 @@ +use std::borrow::Cow; +use std::collections::HashSet; +use std::num::NonZeroU32; + +use anyhow::{anyhow, bail, Result}; + +use imap_codec::imap_types::body::{ + BasicFields, Body as FetchBody, BodyStructure, MultiPartExtensionData, SinglePartExtensionData, + SpecificFields, +}; +use imap_codec::imap_types::core::{AString, IString, NString, Vec1}; +use imap_codec::imap_types::fetch::{Part as FetchPart, Section as FetchSection}; + +use eml_codec::{ + header, mime, mime::r#type::Deductible, part::composite, part::discrete, part::AnyPart, +}; + +use crate::imap::imf_view::ImfView; + +pub enum BodySection<'a> { + Full(Cow<'a, [u8]>), + Slice { + body: Cow<'a, [u8]>, + origin_octet: u32, + }, +} + +/// Logic for BODY[
]<> +/// Works in 3 times: +/// 1. Find the section (RootMime::subset) +/// 2. Apply the extraction logic (SelectedMime::extract), like TEXT, HEADERS, etc. +/// 3. Keep only the given subset provided by partial +/// +/// Example of message sections: +/// +/// ``` +/// HEADER ([RFC-2822] header of the message) +/// TEXT ([RFC-2822] text body of the message) MULTIPART/MIXED +/// 1 TEXT/PLAIN +/// 2 APPLICATION/OCTET-STREAM +/// 3 MESSAGE/RFC822 +/// 3.HEADER ([RFC-2822] header of the message) +/// 3.TEXT ([RFC-2822] text body of the message) MULTIPART/MIXED +/// 3.1 TEXT/PLAIN +/// 3.2 APPLICATION/OCTET-STREAM +/// 4 MULTIPART/MIXED +/// 4.1 IMAGE/GIF +/// 4.1.MIME ([MIME-IMB] header for the IMAGE/GIF) +/// 4.2 MESSAGE/RFC822 +/// 4.2.HEADER ([RFC-2822] header of the message) +/// 4.2.TEXT ([RFC-2822] text body of the message) MULTIPART/MIXED +/// 4.2.1 TEXT/PLAIN +/// 4.2.2 MULTIPART/ALTERNATIVE +/// 4.2.2.1 TEXT/PLAIN +/// 4.2.2.2 TEXT/RICHTEXT +/// ``` +pub fn body_ext<'a>( + part: &'a AnyPart<'a>, + section: &'a Option>, + partial: &'a Option<(u32, NonZeroU32)>, +) -> Result> { + let root_mime = NodeMime(part); + let (extractor, path) = SubsettedSection::from(section); + let selected_mime = root_mime.subset(path)?; + let extracted_full = selected_mime.extract(&extractor)?; + Ok(extracted_full.to_body_section(partial)) +} + +/// Logic for BODY and BODYSTRUCTURE +/// +/// ```raw +/// b fetch 29878:29879 (BODY) +/// * 29878 FETCH (BODY (("text" "plain" ("charset" "utf-8") NIL NIL "quoted-printable" 3264 82)("text" "html" ("charset" "utf-8") NIL NIL "quoted-printable" 31834 643) "alternative")) +/// * 29879 FETCH (BODY ("text" "html" ("charset" "us-ascii") NIL NIL "7bit" 4107 131)) +/// ^^^^^^^^^^^^^^^^^^^^^^ ^^^ ^^^ ^^^^^^ ^^^^ ^^^ +/// | | | | | | number of lines +/// | | | | | size +/// | | | | content transfer encoding +/// | | | description +/// | | id +/// | parameter list +/// b OK Fetch completed (0.001 + 0.000 secs). +/// ``` +pub fn bodystructure(part: &AnyPart, is_ext: bool) -> Result> { + NodeMime(part).structure(is_ext) +} + +/// NodeMime +/// +/// Used for recursive logic on MIME. +/// See SelectedMime for inspection. +struct NodeMime<'a>(&'a AnyPart<'a>); +impl<'a> NodeMime<'a> { + /// A MIME object is a tree of elements. + /// The path indicates which element must be picked. + /// This function returns the picked element as the new view + fn subset(self, path: Option<&'a FetchPart>) -> Result> { + match path { + None => Ok(SelectedMime(self.0)), + Some(v) => self.rec_subset(v.0.as_ref()), + } + } + + fn rec_subset(self, path: &'a [NonZeroU32]) -> Result { + if path.is_empty() { + Ok(SelectedMime(self.0)) + } else { + match self.0 { + AnyPart::Mult(x) => { + let next = Self(x.children + .get(path[0].get() as usize - 1) + .ok_or(anyhow!("Unable to resolve subpath {:?}, current multipart has only {} elements", path, x.children.len()))?); + next.rec_subset(&path[1..]) + }, + AnyPart::Msg(x) => { + let next = Self(x.child.as_ref()); + next.rec_subset(path) + }, + _ => bail!("You tried to access a subpart on an atomic part (text or binary). 
Unresolved subpath {:?}", path), + } + } + } + + fn structure(&self, is_ext: bool) -> Result> { + match self.0 { + AnyPart::Txt(x) => NodeTxt(self, x).structure(is_ext), + AnyPart::Bin(x) => NodeBin(self, x).structure(is_ext), + AnyPart::Mult(x) => NodeMult(self, x).structure(is_ext), + AnyPart::Msg(x) => NodeMsg(self, x).structure(is_ext), + } + } +} + +//---------------------------------------------------------- + +/// A FetchSection must be handled in 2 times: +/// - First we must extract the MIME part +/// - Then we must process it as desired +/// The given struct mixes both work, so +/// we separate this work here. +enum SubsettedSection<'a> { + Part, + Header, + HeaderFields(&'a Vec1>), + HeaderFieldsNot(&'a Vec1>), + Text, + Mime, +} +impl<'a> SubsettedSection<'a> { + fn from(section: &'a Option) -> (Self, Option<&'a FetchPart>) { + match section { + Some(FetchSection::Text(maybe_part)) => (Self::Text, maybe_part.as_ref()), + Some(FetchSection::Header(maybe_part)) => (Self::Header, maybe_part.as_ref()), + Some(FetchSection::HeaderFields(maybe_part, fields)) => { + (Self::HeaderFields(fields), maybe_part.as_ref()) + } + Some(FetchSection::HeaderFieldsNot(maybe_part, fields)) => { + (Self::HeaderFieldsNot(fields), maybe_part.as_ref()) + } + Some(FetchSection::Mime(part)) => (Self::Mime, Some(part)), + Some(FetchSection::Part(part)) => (Self::Part, Some(part)), + None => (Self::Part, None), + } + } +} + +/// Used for current MIME inspection +/// +/// See NodeMime for recursive logic +pub struct SelectedMime<'a>(pub &'a AnyPart<'a>); +impl<'a> SelectedMime<'a> { + pub fn header_value(&'a self, to_match_ext: &[u8]) -> Option<&'a [u8]> { + let to_match = to_match_ext.to_ascii_lowercase(); + + self.eml_mime() + .kv + .iter() + .filter_map(|field| match field { + header::Field::Good(header::Kv2(k, v)) => Some((k, v)), + _ => None, + }) + .find(|(k, _)| k.to_ascii_lowercase() == to_match) + .map(|(_, v)| v) + .copied() + } + + /// The subsetted fetch section basically tells us the + /// extraction logic to apply on our selected MIME. + /// This function acts as a router for these logic. + fn extract(&self, extractor: &SubsettedSection<'a>) -> Result> { + match extractor { + SubsettedSection::Text => self.text(), + SubsettedSection::Header => self.header(), + SubsettedSection::HeaderFields(fields) => self.header_fields(fields, false), + SubsettedSection::HeaderFieldsNot(fields) => self.header_fields(fields, true), + SubsettedSection::Part => self.part(), + SubsettedSection::Mime => self.mime(), + } + } + + fn mime(&self) -> Result> { + let bytes = match &self.0 { + AnyPart::Txt(p) => p.mime.fields.raw, + AnyPart::Bin(p) => p.mime.fields.raw, + AnyPart::Msg(p) => p.child.mime().raw, + AnyPart::Mult(p) => p.mime.fields.raw, + }; + Ok(ExtractedFull(bytes.into())) + } + + fn part(&self) -> Result> { + let bytes = match &self.0 { + AnyPart::Txt(p) => p.body, + AnyPart::Bin(p) => p.body, + AnyPart::Msg(p) => p.raw_part, + AnyPart::Mult(_) => bail!("Multipart part has no body"), + }; + Ok(ExtractedFull(bytes.to_vec().into())) + } + + fn eml_mime(&self) -> &eml_codec::mime::NaiveMIME<'_> { + match &self.0 { + AnyPart::Msg(msg) => msg.child.mime(), + other => other.mime(), + } + } + + /// The [...] HEADER.FIELDS, and HEADER.FIELDS.NOT part + /// specifiers refer to the [RFC-2822] header of the message or of + /// an encapsulated [MIME-IMT] MESSAGE/RFC822 message. 
+ /// HEADER.FIELDS and HEADER.FIELDS.NOT are followed by a list of + /// field-name (as defined in [RFC-2822]) names, and return a + /// subset of the header. The subset returned by HEADER.FIELDS + /// contains only those header fields with a field-name that + /// matches one of the names in the list; similarly, the subset + /// returned by HEADER.FIELDS.NOT contains only the header fields + /// with a non-matching field-name. The field-matching is + /// case-insensitive but otherwise exact. + fn header_fields( + &self, + fields: &'a Vec1>, + invert: bool, + ) -> Result> { + // Build a lowercase ascii hashset with the fields to fetch + let index = fields + .as_ref() + .iter() + .map(|x| { + match x { + AString::Atom(a) => a.inner().as_bytes(), + AString::String(IString::Literal(l)) => l.as_ref(), + AString::String(IString::Quoted(q)) => q.inner().as_bytes(), + } + .to_ascii_lowercase() + }) + .collect::>(); + + // Extract MIME headers + let mime = self.eml_mime(); + + // Filter our MIME headers based on the field index + // 1. Keep only the correctly formatted headers + // 2. Keep only based on the index presence or absence + // 3. Reduce as a byte vector + let buffer = mime + .kv + .iter() + .filter_map(|field| match field { + header::Field::Good(header::Kv2(k, v)) => Some((k, v)), + _ => None, + }) + .filter(|(k, _)| index.contains(&k.to_ascii_lowercase()) ^ invert) + .fold(vec![], |mut acc, (k, v)| { + acc.extend(*k); + acc.extend(b": "); + acc.extend(*v); + acc.extend(b"\r\n"); + acc + }); + + Ok(ExtractedFull(buffer.into())) + } + + /// The HEADER [...] part specifiers refer to the [RFC-2822] header of the message or of + /// an encapsulated [MIME-IMT] MESSAGE/RFC822 message. + /// ```raw + /// HEADER ([RFC-2822] header of the message) + /// ``` + fn header(&self) -> Result> { + let msg = self + .0 + .as_message() + .ok_or(anyhow!("Selected part must be a message/rfc822"))?; + Ok(ExtractedFull(msg.raw_headers.into())) + } + + /// The TEXT part specifier refers to the text body of the message, omitting the [RFC-2822] header. 
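+    /// e.g. `BODY[TEXT]` on a raw message "Subject: hi\r\n\r\nhello world" is expected
+    /// to return only "hello world" (illustrative example, not an actual test vector).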
+ fn text(&self) -> Result> { + let msg = self + .0 + .as_message() + .ok_or(anyhow!("Selected part must be a message/rfc822"))?; + Ok(ExtractedFull(msg.raw_body.into())) + } + + // ------------ + + /// Basic field of a MIME part that is + /// common to all parts + fn basic_fields(&self) -> Result> { + let sz = match self.0 { + AnyPart::Txt(x) => x.body.len(), + AnyPart::Bin(x) => x.body.len(), + AnyPart::Msg(x) => x.raw_part.len(), + AnyPart::Mult(_) => 0, + }; + let m = self.0.mime(); + let parameter_list = m + .ctype + .as_ref() + .map(|x| { + x.params + .iter() + .map(|p| { + ( + IString::try_from(String::from_utf8_lossy(p.name).to_string()), + IString::try_from(p.value.to_string()), + ) + }) + .filter(|(k, v)| k.is_ok() && v.is_ok()) + .map(|(k, v)| (k.unwrap(), v.unwrap())) + .collect() + }) + .unwrap_or(vec![]); + + Ok(BasicFields { + parameter_list, + id: NString( + m.id.as_ref() + .and_then(|ci| IString::try_from(ci.to_string()).ok()), + ), + description: NString( + m.description + .as_ref() + .and_then(|cd| IString::try_from(cd.to_string()).ok()), + ), + content_transfer_encoding: match m.transfer_encoding { + mime::mechanism::Mechanism::_8Bit => unchecked_istring("8bit"), + mime::mechanism::Mechanism::Binary => unchecked_istring("binary"), + mime::mechanism::Mechanism::QuotedPrintable => { + unchecked_istring("quoted-printable") + } + mime::mechanism::Mechanism::Base64 => unchecked_istring("base64"), + _ => unchecked_istring("7bit"), + }, + // @FIXME we can't compute the size of the message currently... + size: u32::try_from(sz)?, + }) + } +} + +// --------------------------- +struct NodeMsg<'a>(&'a NodeMime<'a>, &'a composite::Message<'a>); +impl<'a> NodeMsg<'a> { + fn structure(&self, is_ext: bool) -> Result> { + let basic = SelectedMime(self.0 .0).basic_fields()?; + + Ok(BodyStructure::Single { + body: FetchBody { + basic, + specific: SpecificFields::Message { + envelope: Box::new(ImfView(&self.1.imf).message_envelope()), + body_structure: Box::new(NodeMime(&self.1.child).structure(is_ext)?), + number_of_lines: nol(self.1.raw_part), + }, + }, + extension_data: match is_ext { + true => Some(SinglePartExtensionData { + md5: NString(None), + tail: None, + }), + _ => None, + }, + }) + } +} + +#[allow(dead_code)] +struct NodeMult<'a>(&'a NodeMime<'a>, &'a composite::Multipart<'a>); +impl<'a> NodeMult<'a> { + fn structure(&self, is_ext: bool) -> Result> { + let itype = &self.1.mime.interpreted_type; + let subtype = IString::try_from(itype.subtype.to_string()) + .unwrap_or(unchecked_istring("alternative")); + + let inner_bodies = self + .1 + .children + .iter() + .filter_map(|inner| NodeMime(&inner).structure(is_ext).ok()) + .collect::>(); + + Vec1::validate(&inner_bodies)?; + let bodies = Vec1::unvalidated(inner_bodies); + + Ok(BodyStructure::Multi { + bodies, + subtype, + extension_data: match is_ext { + true => Some(MultiPartExtensionData { + parameter_list: vec![( + IString::try_from("boundary").unwrap(), + IString::try_from(self.1.mime.interpreted_type.boundary.to_string())?, + )], + tail: None, + }), + _ => None, + }, + }) + } +} +struct NodeTxt<'a>(&'a NodeMime<'a>, &'a discrete::Text<'a>); +impl<'a> NodeTxt<'a> { + fn structure(&self, is_ext: bool) -> Result> { + let mut basic = SelectedMime(self.0 .0).basic_fields()?; + + // Get the interpreted content type, set it + let itype = match &self.1.mime.interpreted_type { + Deductible::Inferred(v) | Deductible::Explicit(v) => v, + }; + let subtype = + 
IString::try_from(itype.subtype.to_string()).unwrap_or(unchecked_istring("plain")); + + // Add charset to the list of parameters if we know it has been inferred as it will be + // missing from the parsed content. + if let Deductible::Inferred(charset) = &itype.charset { + basic.parameter_list.push(( + unchecked_istring("charset"), + IString::try_from(charset.to_string()).unwrap_or(unchecked_istring("us-ascii")), + )); + } + + Ok(BodyStructure::Single { + body: FetchBody { + basic, + specific: SpecificFields::Text { + subtype, + number_of_lines: nol(self.1.body), + }, + }, + extension_data: match is_ext { + true => Some(SinglePartExtensionData { + md5: NString(None), + tail: None, + }), + _ => None, + }, + }) + } +} + +struct NodeBin<'a>(&'a NodeMime<'a>, &'a discrete::Binary<'a>); +impl<'a> NodeBin<'a> { + fn structure(&self, is_ext: bool) -> Result> { + let basic = SelectedMime(self.0 .0).basic_fields()?; + + let default = mime::r#type::NaiveType { + main: &b"application"[..], + sub: &b"octet-stream"[..], + params: vec![], + }; + let ct = self.1.mime.fields.ctype.as_ref().unwrap_or(&default); + + let r#type = IString::try_from(String::from_utf8_lossy(ct.main).to_string()).or(Err( + anyhow!("Unable to build IString from given Content-Type type given"), + ))?; + + let subtype = IString::try_from(String::from_utf8_lossy(ct.sub).to_string()).or(Err( + anyhow!("Unable to build IString from given Content-Type subtype given"), + ))?; + + Ok(BodyStructure::Single { + body: FetchBody { + basic, + specific: SpecificFields::Basic { r#type, subtype }, + }, + extension_data: match is_ext { + true => Some(SinglePartExtensionData { + md5: NString(None), + tail: None, + }), + _ => None, + }, + }) + } +} + +// --------------------------- + +struct ExtractedFull<'a>(Cow<'a, [u8]>); +impl<'a> ExtractedFull<'a> { + /// It is possible to fetch a substring of the designated text. + /// This is done by appending an open angle bracket ("<"), the + /// octet position of the first desired octet, a period, the + /// maximum number of octets desired, and a close angle bracket + /// (">") to the part specifier. If the starting octet is beyond + /// the end of the text, an empty string is returned. + /// + /// Any partial fetch that attempts to read beyond the end of the + /// text is truncated as appropriate. A partial fetch that starts + /// at octet 0 is returned as a partial fetch, even if this + /// truncation happened. + /// + /// Note: This means that BODY[]<0.2048> of a 1500-octet message + /// will return BODY[]<0> with a literal of size 1500, not + /// BODY[]. + /// + /// Note: A substring fetch of a HEADER.FIELDS or + /// HEADER.FIELDS.NOT part specifier is calculated after + /// subsetting the header. 
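+    /// Worked example (numbers are illustrative): on a 10-octet extracted text,
+    /// `<4.32>` returns octets 4..10 with origin_octet 4 (truncated as described above),
+    /// `<2.3>` returns octets 2..5 with origin_octet 2, and `<12.8>` returns an empty
+    /// slice with origin_octet 12 because the start lies past the end of the text.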
+ fn to_body_section(self, partial: &'_ Option<(u32, NonZeroU32)>) -> BodySection<'a> { + match partial { + Some((begin, len)) => self.partialize(*begin, *len), + None => BodySection::Full(self.0), + } + } + + fn partialize(self, begin: u32, len: NonZeroU32) -> BodySection<'a> { + // Asked range is starting after the end of the content, + // returning an empty buffer + if begin as usize > self.0.len() { + return BodySection::Slice { + body: Cow::Borrowed(&[][..]), + origin_octet: begin, + }; + } + + // Asked range is ending after the end of the content, + // slice only the beginning of the buffer + if (begin + len.get()) as usize >= self.0.len() { + return BodySection::Slice { + body: match self.0 { + Cow::Borrowed(body) => Cow::Borrowed(&body[begin as usize..]), + Cow::Owned(body) => Cow::Owned(body[begin as usize..].to_vec()), + }, + origin_octet: begin, + }; + } + + // Range is included inside the considered content, + // this is the "happy case" + BodySection::Slice { + body: match self.0 { + Cow::Borrowed(body) => { + Cow::Borrowed(&body[begin as usize..(begin + len.get()) as usize]) + } + Cow::Owned(body) => { + Cow::Owned(body[begin as usize..(begin + len.get()) as usize].to_vec()) + } + }, + origin_octet: begin, + } + } +} + +/// ---- LEGACY + +/// s is set to static to ensure that only compile time values +/// checked by developpers are passed. +fn unchecked_istring(s: &'static str) -> IString { + IString::try_from(s).expect("this value is expected to be a valid imap-codec::IString") +} + +// Number Of Lines +fn nol(input: &[u8]) -> u32 { + input + .iter() + .filter(|x| **x == b'\n') + .count() + .try_into() + .unwrap_or(0) +} diff --git a/aero-proto/src/imap/mod.rs b/aero-proto/src/imap/mod.rs new file mode 100644 index 0000000..ae3b58f --- /dev/null +++ b/aero-proto/src/imap/mod.rs @@ -0,0 +1,417 @@ +mod attributes; +mod capability; +mod command; +mod flags; +mod flow; +mod imf_view; +mod index; +mod mail_view; +mod mailbox_view; +mod mime_view; +mod request; +mod response; +mod search; +mod session; + +use std::net::SocketAddr; + +use anyhow::{anyhow, bail, Result}; +use futures::stream::{FuturesUnordered, StreamExt}; +use tokio::net::TcpListener; +use tokio::sync::mpsc; +use tokio::sync::watch; +use imap_codec::imap_types::response::{Code, CommandContinuationRequest, Response, Status}; +use imap_codec::imap_types::{core::Text, response::Greeting}; +use imap_flow::server::{ServerFlow, ServerFlowEvent, ServerFlowOptions}; +use imap_flow::stream::AnyStream; +use rustls_pemfile::{certs, private_key}; +use tokio_rustls::TlsAcceptor; + +use aero_user::config::{ImapConfig, ImapUnsecureConfig}; +use aero_user::login::ArcLoginProvider; + +use crate::imap::capability::ServerCapability; +use crate::imap::request::Request; +use crate::imap::response::{Body, ResponseOrIdle}; +use crate::imap::session::Instance; + +/// Server is a thin wrapper to register our Services in BàL +pub struct Server { + bind_addr: SocketAddr, + login_provider: ArcLoginProvider, + capabilities: ServerCapability, + tls: Option, +} + +#[derive(Clone)] +struct ClientContext { + addr: SocketAddr, + login_provider: ArcLoginProvider, + must_exit: watch::Receiver, + server_capabilities: ServerCapability, +} + +pub fn new(config: ImapConfig, login: ArcLoginProvider) -> Result { + let loaded_certs = certs(&mut std::io::BufReader::new(std::fs::File::open( + config.certs, + )?)) + .collect::, _>>()?; + let loaded_key = private_key(&mut std::io::BufReader::new(std::fs::File::open( + config.key, + )?))? 
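+    // `private_key` returns an Option wrapped in a Result: the `?` above propagates
+    // I/O and PEM parsing errors, while the `unwrap()` below panics if the file
+    // contains no private key at all.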
+ .unwrap(); + + let tls_config = rustls::ServerConfig::builder() + .with_no_client_auth() + .with_single_cert(loaded_certs, loaded_key)?; + let acceptor = TlsAcceptor::from(Arc::new(tls_config)); + + Ok(Server { + bind_addr: config.bind_addr, + login_provider: login, + capabilities: ServerCapability::default(), + tls: Some(acceptor), + }) +} + +pub fn new_unsecure(config: ImapUnsecureConfig, login: ArcLoginProvider) -> Server { + Server { + bind_addr: config.bind_addr, + login_provider: login, + capabilities: ServerCapability::default(), + tls: None, + } +} + +impl Server { + pub async fn run(self: Self, mut must_exit: watch::Receiver) -> Result<()> { + let tcp = TcpListener::bind(self.bind_addr).await?; + tracing::info!("IMAP server listening on {:#}", self.bind_addr); + + let mut connections = FuturesUnordered::new(); + + while !*must_exit.borrow() { + let wait_conn_finished = async { + if connections.is_empty() { + futures::future::pending().await + } else { + connections.next().await + } + }; + let (socket, remote_addr) = tokio::select! { + a = tcp.accept() => a?, + _ = wait_conn_finished => continue, + _ = must_exit.changed() => continue, + }; + tracing::info!("IMAP: accepted connection from {}", remote_addr); + let stream = match self.tls.clone() { + Some(acceptor) => { + let stream = match acceptor.accept(socket).await { + Ok(v) => v, + Err(e) => { + tracing::error!(err=?e, "TLS negociation failed"); + continue; + } + }; + AnyStream::new(stream) + } + None => AnyStream::new(socket), + }; + + let client = ClientContext { + addr: remote_addr.clone(), + login_provider: self.login_provider.clone(), + must_exit: must_exit.clone(), + server_capabilities: self.capabilities.clone(), + }; + let conn = tokio::spawn(NetLoop::handler(client, stream)); + connections.push(conn); + } + drop(tcp); + + tracing::info!("IMAP server shutting down, draining remaining connections..."); + while connections.next().await.is_some() {} + + Ok(()) + } +} + +use std::sync::Arc; +use tokio::sync::mpsc::*; +use tokio::sync::Notify; + +const PIPELINABLE_COMMANDS: usize = 64; + +// @FIXME a full refactor of this part of the code will be needed sooner or later +struct NetLoop { + ctx: ClientContext, + server: ServerFlow, + cmd_tx: Sender, + resp_rx: UnboundedReceiver, +} + +impl NetLoop { + async fn handler(ctx: ClientContext, sock: AnyStream) { + let addr = ctx.addr.clone(); + + let mut nl = match Self::new(ctx, sock).await { + Ok(nl) => { + tracing::debug!(addr=?addr, "netloop successfully initialized"); + nl + } + Err(e) => { + tracing::error!(addr=?addr, err=?e, "netloop can not be initialized, closing session"); + return; + } + }; + + match nl.core().await { + Ok(()) => { + tracing::debug!("closing successful netloop core for {:?}", addr); + } + Err(e) => { + tracing::error!("closing errored netloop core for {:?}: {}", addr, e); + } + } + } + + async fn new(ctx: ClientContext, sock: AnyStream) -> Result { + let mut opts = ServerFlowOptions::default(); + opts.crlf_relaxed = false; + opts.literal_accept_text = Text::unvalidated("OK"); + opts.literal_reject_text = Text::unvalidated("Literal rejected"); + + // Send greeting + let (server, _) = ServerFlow::send_greeting( + sock, + opts, + Greeting::ok( + Some(Code::Capability(ctx.server_capabilities.to_vec())), + "Aerogramme", + ) + .unwrap(), + ) + .await?; + + // Start a mailbox session in background + let (cmd_tx, cmd_rx) = mpsc::channel::(PIPELINABLE_COMMANDS); + let (resp_tx, resp_rx) = mpsc::unbounded_channel::(); + tokio::spawn(Self::session(ctx.clone(), 
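+            // The spawned session task owns the IMAP state machine; this network loop
+            // only forwards parsed commands through `cmd_tx` and reads answers back
+            // from `resp_rx`.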
cmd_rx, resp_tx)); + + // Return the object + Ok(NetLoop { + ctx, + server, + cmd_tx, + resp_rx, + }) + } + + /// Coms with the background session + async fn session( + ctx: ClientContext, + mut cmd_rx: Receiver, + resp_tx: UnboundedSender, + ) -> () { + let mut session = Instance::new(ctx.login_provider, ctx.server_capabilities); + loop { + let cmd = match cmd_rx.recv().await { + None => break, + Some(cmd_recv) => cmd_recv, + }; + + tracing::debug!(cmd=?cmd, sock=%ctx.addr, "command"); + let maybe_response = session.request(cmd).await; + tracing::debug!(cmd=?maybe_response, sock=%ctx.addr, "response"); + + match resp_tx.send(maybe_response) { + Err(_) => break, + Ok(_) => (), + }; + } + tracing::info!("runner is quitting"); + } + + async fn core(&mut self) -> Result<()> { + let mut maybe_idle: Option> = None; + loop { + tokio::select! { + // Managing imap_flow stuff + srv_evt = self.server.progress() => match srv_evt? { + ServerFlowEvent::ResponseSent { handle: _handle, response } => { + match response { + Response::Status(Status::Bye(_)) => return Ok(()), + _ => tracing::trace!("sent to {} content {:?}", self.ctx.addr, response), + } + }, + ServerFlowEvent::CommandReceived { command } => { + match self.cmd_tx.try_send(Request::ImapCommand(command)) { + Ok(_) => (), + Err(mpsc::error::TrySendError::Full(_)) => { + self.server.enqueue_status(Status::bye(None, "Too fast").unwrap()); + tracing::error!("client {:?} is sending commands too fast, closing.", self.ctx.addr); + } + _ => { + self.server.enqueue_status(Status::bye(None, "Internal session exited").unwrap()); + tracing::error!("session task exited for {:?}, quitting", self.ctx.addr); + } + } + }, + ServerFlowEvent::IdleCommandReceived { tag } => { + match self.cmd_tx.try_send(Request::IdleStart(tag)) { + Ok(_) => (), + Err(mpsc::error::TrySendError::Full(_)) => { + self.server.enqueue_status(Status::bye(None, "Too fast").unwrap()); + tracing::error!("client {:?} is sending commands too fast, closing.", self.ctx.addr); + } + _ => { + self.server.enqueue_status(Status::bye(None, "Internal session exited").unwrap()); + tracing::error!("session task exited for {:?}, quitting", self.ctx.addr); + } + } + } + ServerFlowEvent::IdleDoneReceived => { + tracing::trace!("client sent DONE and want to stop IDLE"); + maybe_idle.ok_or(anyhow!("Received IDLE done but not idling currently"))?.notify_one(); + maybe_idle = None; + } + flow => { + self.server.enqueue_status(Status::bye(None, "Unsupported server flow event").unwrap()); + tracing::error!("session task exited for {:?} due to unsupported flow {:?}", self.ctx.addr, flow); + } + }, + + // Managing response generated by Aerogramme + maybe_msg = self.resp_rx.recv() => match maybe_msg { + Some(ResponseOrIdle::Response(response)) => { + tracing::trace!("Interactive, server has a response for the client"); + for body_elem in response.body.into_iter() { + let _handle = match body_elem { + Body::Data(d) => self.server.enqueue_data(d), + Body::Status(s) => self.server.enqueue_status(s), + }; + } + self.server.enqueue_status(response.completion); + }, + Some(ResponseOrIdle::IdleAccept(stop)) => { + tracing::trace!("Interactive, server agreed to switch in idle mode"); + let cr = CommandContinuationRequest::basic(None, "Idling")?; + self.server.idle_accept(cr).or(Err(anyhow!("refused continuation for idle accept")))?; + self.cmd_tx.try_send(Request::IdlePoll)?; + if maybe_idle.is_some() { + bail!("Can't start IDLE if already idling"); + } + maybe_idle = Some(stop); + }, + 
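+                    // While idling, the session pushes IdleEvent batches: the untagged
+                    // updates are sent right away and another IdlePoll is queued so the
+                    // background task keeps watching the mailbox.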
Some(ResponseOrIdle::IdleEvent(elems)) => { + tracing::trace!("server imap session has some change to communicate to the client"); + for body_elem in elems.into_iter() { + let _handle = match body_elem { + Body::Data(d) => self.server.enqueue_data(d), + Body::Status(s) => self.server.enqueue_status(s), + }; + } + self.cmd_tx.try_send(Request::IdlePoll)?; + }, + Some(ResponseOrIdle::IdleReject(response)) => { + tracing::trace!("inform client that session rejected idle"); + self.server + .idle_reject(response.completion) + .or(Err(anyhow!("wrong reject command")))?; + }, + None => { + self.server.enqueue_status(Status::bye(None, "Internal session exited").unwrap()); + tracing::error!("session task exited for {:?}, quitting", self.ctx.addr); + }, + }, + + // When receiving a CTRL+C + _ = self.ctx.must_exit.changed() => { + tracing::trace!("Interactive, CTRL+C, exiting"); + self.server.enqueue_status(Status::bye(None, "Server is being shutdown").unwrap()); + }, + }; + } + } + + /* + async fn idle_mode(&mut self, mut buff: BytesMut, stop: Arc) -> Result { + // Flush send + loop { + tracing::trace!("flush server send"); + match self.server.progress_send().await? { + Some(..) => continue, + None => break, + } + } + + tokio::select! { + // Receiving IDLE event from background + maybe_msg = self.resp_rx.recv() => match maybe_msg { + // Session decided idle is terminated + Some(ResponseOrIdle::Response(response)) => { + tracing::trace!("server imap session said idle is done, sending response done, switching to interactive"); + for body_elem in response.body.into_iter() { + let _handle = match body_elem { + Body::Data(d) => self.server.enqueue_data(d), + Body::Status(s) => self.server.enqueue_status(s), + }; + } + self.server.enqueue_status(response.completion); + return Ok(LoopMode::Interactive) + }, + // Session has some information for user + Some(ResponseOrIdle::IdleEvent(elems)) => { + tracing::trace!("server imap session has some change to communicate to the client"); + for body_elem in elems.into_iter() { + let _handle = match body_elem { + Body::Data(d) => self.server.enqueue_data(d), + Body::Status(s) => self.server.enqueue_status(s), + }; + } + self.cmd_tx.try_send(Request::Idle)?; + return Ok(LoopMode::Idle(buff, stop)) + }, + + // Session crashed + None => { + self.server.enqueue_status(Status::bye(None, "Internal session exited").unwrap()); + tracing::error!("session task exited for {:?}, quitting", self.ctx.addr); + return Ok(LoopMode::Interactive) + }, + + // Session can't start idling while already idling, it's a logic error! + Some(ResponseOrIdle::StartIdle(..)) => bail!("can't start idling while already idling!"), + }, + + // User is trying to interact with us + read_client_result = self.server.stream.read(&mut buff) => { + let _bytes_read = read_client_result?; + use imap_codec::decode::Decoder; + let codec = imap_codec::IdleDoneCodec::new(); + tracing::trace!("client sent some data for the server IMAP session"); + match codec.decode(&buff) { + Ok(([], imap_codec::imap_types::extensions::idle::IdleDone)) => { + // Session will be informed that it must stop idle + // It will generate the "done" message and change the loop mode + tracing::trace!("client sent DONE and want to stop IDLE"); + stop.notify_one() + }, + Err(_) => { + tracing::trace!("Unable to decode DONE, maybe not enough data were sent?"); + }, + _ => bail!("Client sent data after terminating the continuation without waiting for the server. 
This is an unsupported behavior and bug in Aerogramme, quitting."), + }; + + return Ok(LoopMode::Idle(buff, stop)) + }, + + // When receiving a CTRL+C + _ = self.ctx.must_exit.changed() => { + tracing::trace!("CTRL+C sent, aborting IDLE for this session"); + self.server.enqueue_status(Status::bye(None, "Server is being shutdown").unwrap()); + return Ok(LoopMode::Interactive) + }, + }; + }*/ +} diff --git a/aero-proto/src/imap/request.rs b/aero-proto/src/imap/request.rs new file mode 100644 index 0000000..cff18a3 --- /dev/null +++ b/aero-proto/src/imap/request.rs @@ -0,0 +1,9 @@ +use imap_codec::imap_types::command::Command; +use imap_codec::imap_types::core::Tag; + +#[derive(Debug)] +pub enum Request { + ImapCommand(Command<'static>), + IdleStart(Tag<'static>), + IdlePoll, +} diff --git a/aero-proto/src/imap/response.rs b/aero-proto/src/imap/response.rs new file mode 100644 index 0000000..b6a0e98 --- /dev/null +++ b/aero-proto/src/imap/response.rs @@ -0,0 +1,124 @@ +use anyhow::Result; +use imap_codec::imap_types::command::Command; +use imap_codec::imap_types::core::Tag; +use imap_codec::imap_types::response::{Code, Data, Status}; +use std::sync::Arc; +use tokio::sync::Notify; + +#[derive(Debug)] +pub enum Body<'a> { + Data(Data<'a>), + Status(Status<'a>), +} + +pub struct ResponseBuilder<'a> { + tag: Option>, + code: Option>, + text: String, + body: Vec>, +} + +impl<'a> ResponseBuilder<'a> { + pub fn to_req(mut self, cmd: &Command<'a>) -> Self { + self.tag = Some(cmd.tag.clone()); + self + } + pub fn tag(mut self, tag: Tag<'a>) -> Self { + self.tag = Some(tag); + self + } + + pub fn message(mut self, txt: impl Into) -> Self { + self.text = txt.into(); + self + } + + pub fn code(mut self, code: Code<'a>) -> Self { + self.code = Some(code); + self + } + + pub fn data(mut self, data: Data<'a>) -> Self { + self.body.push(Body::Data(data)); + self + } + + pub fn many_data(mut self, data: Vec>) -> Self { + for d in data.into_iter() { + self = self.data(d); + } + self + } + + #[allow(dead_code)] + pub fn info(mut self, status: Status<'a>) -> Self { + self.body.push(Body::Status(status)); + self + } + + #[allow(dead_code)] + pub fn many_info(mut self, status: Vec>) -> Self { + for d in status.into_iter() { + self = self.info(d); + } + self + } + + pub fn set_body(mut self, body: Vec>) -> Self { + self.body = body; + self + } + + pub fn ok(self) -> Result> { + Ok(Response { + completion: Status::ok(self.tag, self.code, self.text)?, + body: self.body, + }) + } + + pub fn no(self) -> Result> { + Ok(Response { + completion: Status::no(self.tag, self.code, self.text)?, + body: self.body, + }) + } + + pub fn bad(self) -> Result> { + Ok(Response { + completion: Status::bad(self.tag, self.code, self.text)?, + body: self.body, + }) + } +} + +#[derive(Debug)] +pub struct Response<'a> { + pub body: Vec>, + pub completion: Status<'a>, +} + +impl<'a> Response<'a> { + pub fn build() -> ResponseBuilder<'a> { + ResponseBuilder { + tag: None, + code: None, + text: "".to_string(), + body: vec![], + } + } + + pub fn bye() -> Result> { + Ok(Response { + completion: Status::bye(None, "bye")?, + body: vec![], + }) + } +} + +#[derive(Debug)] +pub enum ResponseOrIdle { + Response(Response<'static>), + IdleAccept(Arc), + IdleReject(Response<'static>), + IdleEvent(Vec>), +} diff --git a/aero-proto/src/imap/search.rs b/aero-proto/src/imap/search.rs new file mode 100644 index 0000000..3634a3a --- /dev/null +++ b/aero-proto/src/imap/search.rs @@ -0,0 +1,478 @@ +use std::num::{NonZeroU32, NonZeroU64}; + +use 
imap_codec::imap_types::core::Vec1; +use imap_codec::imap_types::search::{MetadataItemSearch, SearchKey}; +use imap_codec::imap_types::sequence::{SeqOrUid, Sequence, SequenceSet}; + +use aero_collections::mail::query::QueryScope; + +use crate::imap::index::MailIndex; +use crate::imap::mail_view::MailView; + +pub enum SeqType { + Undefined, + NonUid, + Uid, +} +impl SeqType { + pub fn is_uid(&self) -> bool { + matches!(self, Self::Uid) + } +} + +pub struct Criteria<'a>(pub &'a SearchKey<'a>); +impl<'a> Criteria<'a> { + /// Returns a set of email identifiers that is greater or equal + /// to the set of emails to return + pub fn to_sequence_set(&self) -> (SequenceSet, SeqType) { + match self.0 { + SearchKey::All => (sequence_set_all(), SeqType::Undefined), + SearchKey::SequenceSet(seq_set) => (seq_set.clone(), SeqType::NonUid), + SearchKey::Uid(seq_set) => (seq_set.clone(), SeqType::Uid), + SearchKey::Not(_inner) => { + tracing::debug!( + "using NOT in a search request is slow: it selects all identifiers" + ); + (sequence_set_all(), SeqType::Undefined) + } + SearchKey::Or(left, right) => { + tracing::debug!("using OR in a search request is slow: no deduplication is done"); + let (base, base_seqtype) = Self(&left).to_sequence_set(); + let (ext, ext_seqtype) = Self(&right).to_sequence_set(); + + // Check if we have a UID/ID conflict in fetching: now we don't know how to handle them + match (base_seqtype, ext_seqtype) { + (SeqType::Uid, SeqType::NonUid) | (SeqType::NonUid, SeqType::Uid) => { + (sequence_set_all(), SeqType::Undefined) + } + (SeqType::Undefined, x) | (x, _) => { + let mut new_vec = base.0.into_inner(); + new_vec.extend_from_slice(ext.0.as_ref()); + let seq = SequenceSet( + Vec1::try_from(new_vec) + .expect("merging non empty vec lead to non empty vec"), + ); + (seq, x) + } + } + } + SearchKey::And(search_list) => { + tracing::debug!( + "using AND in a search request is slow: no intersection is performed" + ); + // As we perform no intersection, we don't care if we mix uid or id. + // We only keep the smallest range, being it ID or UID, depending of + // which one has the less items. This is an approximation as UID ranges + // can have holes while ID ones can't. + search_list + .as_ref() + .iter() + .map(|crit| Self(&crit).to_sequence_set()) + .min_by(|(x, _), (y, _)| { + let x_size = approx_sequence_set_size(x); + let y_size = approx_sequence_set_size(y); + x_size.cmp(&y_size) + }) + .unwrap_or((sequence_set_all(), SeqType::Undefined)) + } + _ => (sequence_set_all(), SeqType::Undefined), + } + } + + /// Not really clever as we can have cases where we filter out + /// the email before needing to inspect its meta. + /// But for now we are seeking the most basic/stupid algorithm. + pub fn query_scope(&self) -> QueryScope { + use SearchKey::*; + match self.0 { + // Combinators + And(and_list) => and_list + .as_ref() + .iter() + .fold(QueryScope::Index, |prev, sk| { + prev.union(&Criteria(sk).query_scope()) + }), + Not(inner) => Criteria(inner).query_scope(), + Or(left, right) => Criteria(left) + .query_scope() + .union(&Criteria(right).query_scope()), + All => QueryScope::Index, + + // IMF Headers + Bcc(_) | Cc(_) | From(_) | Header(..) | SentBefore(_) | SentOn(_) | SentSince(_) + | Subject(_) | To(_) => QueryScope::Partial, + // Internal Date is also stored in MailMeta + Before(_) | On(_) | Since(_) => QueryScope::Partial, + // Message size is also stored in MailMeta + Larger(_) | Smaller(_) => QueryScope::Partial, + // Text and Body require that we fetch the full content! 
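+            // e.g. `SEARCH TEXT "invoice"` must download each candidate message in full,
+            // while `SEARCH FROM "alice" SINCE 1-Jan-2024` only needs the Partial scope
+            // (assuming `union` keeps the broader of the two scopes).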
+            Text(_) | Body(_) => QueryScope::Full,
+
+            _ => QueryScope::Index,
+        }
+    }
+
+    pub fn is_modseq(&self) -> bool {
+        use SearchKey::*;
+        match self.0 {
+            And(and_list) => and_list
+                .as_ref()
+                .iter()
+                .any(|child| Criteria(child).is_modseq()),
+            Or(left, right) => Criteria(left).is_modseq() || Criteria(right).is_modseq(),
+            Not(child) => Criteria(child).is_modseq(),
+            ModSeq { .. } => true,
+            _ => false,
+        }
+    }
+
+    /// Returns emails that we know for sure we want to keep,
+    /// but also a second list of emails we need to investigate further by
+    /// fetching some remote data
+    pub fn filter_on_idx<'b>(
+        &self,
+        midx_list: &[&'b MailIndex<'b>],
+    ) -> (Vec<&'b MailIndex<'b>>, Vec<&'b MailIndex<'b>>) {
+        let (p1, p2): (Vec<_>, Vec<_>) = midx_list
+            .iter()
+            .map(|x| (x, self.is_keep_on_idx(x)))
+            .filter(|(_midx, decision)| decision.is_keep())
+            .map(|(midx, decision)| (*midx, decision))
+            .partition(|(_midx, decision)| matches!(decision, PartialDecision::Keep));
+
+        let to_keep = p1.into_iter().map(|(v, _)| v).collect();
+        let to_fetch = p2.into_iter().map(|(v, _)| v).collect();
+        (to_keep, to_fetch)
+    }
+
+    // ----
+
+    /// Here we are doing partial filtering: we do not have access
+    /// to the headers or to the body, so every time we encounter a rule
+    /// based on them, we need to keep it.
+    ///
+    /// @TODO Could be optimized on a per-email basis by also returning the QueryScope
+    /// when more information is needed!
+    fn is_keep_on_idx(&self, midx: &MailIndex) -> PartialDecision {
+        use SearchKey::*;
+        match self.0 {
+            // Combinator logic
+            And(expr_list) => expr_list
+                .as_ref()
+                .iter()
+                .fold(PartialDecision::Keep, |acc, cur| {
+                    acc.and(&Criteria(cur).is_keep_on_idx(midx))
+                }),
+            Or(left, right) => {
+                let left_decision = Criteria(left).is_keep_on_idx(midx);
+                let right_decision = Criteria(right).is_keep_on_idx(midx);
+                left_decision.or(&right_decision)
+            }
+            Not(expr) => Criteria(expr).is_keep_on_idx(midx).not(),
+            All => PartialDecision::Keep,
+
+            // Sequence logic
+            maybe_seq if is_sk_seq(maybe_seq) => is_keep_seq(maybe_seq, midx).into(),
+            maybe_flag if is_sk_flag(maybe_flag) => is_keep_flag(maybe_flag, midx).into(),
+            ModSeq {
+                metadata_item,
+                modseq,
+            } => is_keep_modseq(metadata_item, modseq, midx).into(),
+
+            // All the stuff we can't evaluate yet
+            Bcc(_) | Cc(_) | From(_) | Header(..) | SentBefore(_) | SentOn(_) | SentSince(_)
+            | Subject(_) | To(_) | Before(_) | On(_) | Since(_) | Larger(_) | Smaller(_)
+            | Text(_) | Body(_) => PartialDecision::Postpone,
+
+            unknown => {
+                tracing::error!("Unknown filter {:?}", unknown);
+                PartialDecision::Discard
+            }
+        }
+    }
+
+    /// @TODO we re-evaluate the same logic twice. The correct way would be, on each pass,
+    /// to simplify the search query by removing the elements that were already checked.
+    /// For example if we have AND(OR(seqid(X), body(Y)), body(X)), we can't keep for sure
+    /// the email, as body(x) might be false. So we need to check it. But as seqid(x) is true,
+    /// we could simplify the request to just body(x) and truncate the first OR. Today, we are
+    /// not doing that, and thus we re-evaluate everything.
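+    // Rough two-pass flow: `filter_on_idx` keeps the Keep + Postpone candidates using
+    // only the index, the caller then fetches the postponed ones, and
+    // `is_keep_on_query` re-checks each candidate against the fetched data.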
+ pub fn is_keep_on_query(&self, mail_view: &MailView) -> bool { + use SearchKey::*; + match self.0 { + // Combinator logic + And(expr_list) => expr_list + .as_ref() + .iter() + .all(|cur| Criteria(cur).is_keep_on_query(mail_view)), + Or(left, right) => { + Criteria(left).is_keep_on_query(mail_view) + || Criteria(right).is_keep_on_query(mail_view) + } + Not(expr) => !Criteria(expr).is_keep_on_query(mail_view), + All => true, + + //@FIXME Reevaluating our previous logic... + maybe_seq if is_sk_seq(maybe_seq) => is_keep_seq(maybe_seq, &mail_view.in_idx), + maybe_flag if is_sk_flag(maybe_flag) => is_keep_flag(maybe_flag, &mail_view.in_idx), + ModSeq { + metadata_item, + modseq, + } => is_keep_modseq(metadata_item, modseq, &mail_view.in_idx).into(), + + // Filter on mail meta + Before(search_naive) => match mail_view.stored_naive_date() { + Ok(msg_naive) => &msg_naive < search_naive.as_ref(), + _ => false, + }, + On(search_naive) => match mail_view.stored_naive_date() { + Ok(msg_naive) => &msg_naive == search_naive.as_ref(), + _ => false, + }, + Since(search_naive) => match mail_view.stored_naive_date() { + Ok(msg_naive) => &msg_naive > search_naive.as_ref(), + _ => false, + }, + + // Message size is also stored in MailMeta + Larger(size_ref) => { + mail_view + .query_result + .metadata() + .expect("metadata were fetched") + .rfc822_size + > *size_ref as usize + } + Smaller(size_ref) => { + mail_view + .query_result + .metadata() + .expect("metadata were fetched") + .rfc822_size + < *size_ref as usize + } + + // Filter on well-known headers + Bcc(txt) => mail_view.is_header_contains_pattern(&b"bcc"[..], txt.as_ref()), + Cc(txt) => mail_view.is_header_contains_pattern(&b"cc"[..], txt.as_ref()), + From(txt) => mail_view.is_header_contains_pattern(&b"from"[..], txt.as_ref()), + Subject(txt) => mail_view.is_header_contains_pattern(&b"subject"[..], txt.as_ref()), + To(txt) => mail_view.is_header_contains_pattern(&b"to"[..], txt.as_ref()), + Header(hdr, txt) => mail_view.is_header_contains_pattern(hdr.as_ref(), txt.as_ref()), + + // Filter on Date header + SentBefore(search_naive) => mail_view + .imf() + .map(|imf| imf.naive_date().ok()) + .flatten() + .map(|msg_naive| &msg_naive < search_naive.as_ref()) + .unwrap_or(false), + SentOn(search_naive) => mail_view + .imf() + .map(|imf| imf.naive_date().ok()) + .flatten() + .map(|msg_naive| &msg_naive == search_naive.as_ref()) + .unwrap_or(false), + SentSince(search_naive) => mail_view + .imf() + .map(|imf| imf.naive_date().ok()) + .flatten() + .map(|msg_naive| &msg_naive > search_naive.as_ref()) + .unwrap_or(false), + + // Filter on the full content of the email + Text(txt) => mail_view + .content + .as_msg() + .map(|msg| { + msg.raw_part + .windows(txt.as_ref().len()) + .any(|win| win == txt.as_ref()) + }) + .unwrap_or(false), + Body(txt) => mail_view + .content + .as_msg() + .map(|msg| { + msg.raw_body + .windows(txt.as_ref().len()) + .any(|win| win == txt.as_ref()) + }) + .unwrap_or(false), + + unknown => { + tracing::error!("Unknown filter {:?}", unknown); + false + } + } + } +} + +// ---- Sequence things ---- +fn sequence_set_all() -> SequenceSet { + SequenceSet::from(Sequence::Range( + SeqOrUid::Value(NonZeroU32::MIN), + SeqOrUid::Asterisk, + )) +} + +// This is wrong as sequences can overlap +fn approx_sequence_set_size(seq_set: &SequenceSet) -> u64 { + seq_set.0.as_ref().iter().fold(0u64, |acc, seq| { + acc.saturating_add(approx_sequence_size(seq)) + }) +} + +// This is wrong as sequence UID can have holes, +// as we don't know the number of 
messages in the mailbox also +// we gave to guess +fn approx_sequence_size(seq: &Sequence) -> u64 { + match seq { + Sequence::Single(_) => 1, + Sequence::Range(SeqOrUid::Asterisk, _) | Sequence::Range(_, SeqOrUid::Asterisk) => u64::MAX, + Sequence::Range(SeqOrUid::Value(x1), SeqOrUid::Value(x2)) => { + let x2 = x2.get() as i64; + let x1 = x1.get() as i64; + (x2 - x1).abs().try_into().unwrap_or(1) + } + } +} + +// --- Partial decision things ---- + +enum PartialDecision { + Keep, + Discard, + Postpone, +} +impl From for PartialDecision { + fn from(x: bool) -> Self { + match x { + true => PartialDecision::Keep, + _ => PartialDecision::Discard, + } + } +} +impl PartialDecision { + fn not(&self) -> Self { + match self { + Self::Keep => Self::Discard, + Self::Discard => Self::Keep, + Self::Postpone => Self::Postpone, + } + } + + fn or(&self, other: &Self) -> Self { + match (self, other) { + (Self::Keep, _) | (_, Self::Keep) => Self::Keep, + (Self::Postpone, _) | (_, Self::Postpone) => Self::Postpone, + (Self::Discard, Self::Discard) => Self::Discard, + } + } + + fn and(&self, other: &Self) -> Self { + match (self, other) { + (Self::Discard, _) | (_, Self::Discard) => Self::Discard, + (Self::Postpone, _) | (_, Self::Postpone) => Self::Postpone, + (Self::Keep, Self::Keep) => Self::Keep, + } + } + + fn is_keep(&self) -> bool { + !matches!(self, Self::Discard) + } +} + +// ----- Search Key things --- +fn is_sk_flag(sk: &SearchKey) -> bool { + use SearchKey::*; + match sk { + Answered | Deleted | Draft | Flagged | Keyword(..) | New | Old | Recent | Seen + | Unanswered | Undeleted | Undraft | Unflagged | Unkeyword(..) | Unseen => true, + _ => false, + } +} + +fn is_keep_flag(sk: &SearchKey, midx: &MailIndex) -> bool { + use SearchKey::*; + match sk { + Answered => midx.is_flag_set("\\Answered"), + Deleted => midx.is_flag_set("\\Deleted"), + Draft => midx.is_flag_set("\\Draft"), + Flagged => midx.is_flag_set("\\Flagged"), + Keyword(kw) => midx.is_flag_set(kw.inner()), + New => { + let is_recent = midx.is_flag_set("\\Recent"); + let is_seen = midx.is_flag_set("\\Seen"); + is_recent && !is_seen + } + Old => { + let is_recent = midx.is_flag_set("\\Recent"); + !is_recent + } + Recent => midx.is_flag_set("\\Recent"), + Seen => midx.is_flag_set("\\Seen"), + Unanswered => { + let is_answered = midx.is_flag_set("\\Recent"); + !is_answered + } + Undeleted => { + let is_deleted = midx.is_flag_set("\\Deleted"); + !is_deleted + } + Undraft => { + let is_draft = midx.is_flag_set("\\Draft"); + !is_draft + } + Unflagged => { + let is_flagged = midx.is_flag_set("\\Flagged"); + !is_flagged + } + Unkeyword(kw) => { + let is_keyword_set = midx.is_flag_set(kw.inner()); + !is_keyword_set + } + Unseen => { + let is_seen = midx.is_flag_set("\\Seen"); + !is_seen + } + + // Not flag logic + _ => unreachable!(), + } +} + +fn is_sk_seq(sk: &SearchKey) -> bool { + use SearchKey::*; + match sk { + SequenceSet(..) | Uid(..) 
=> true, + _ => false, + } +} +fn is_keep_seq(sk: &SearchKey, midx: &MailIndex) -> bool { + use SearchKey::*; + match sk { + SequenceSet(seq_set) => seq_set + .0 + .as_ref() + .iter() + .any(|seq| midx.is_in_sequence_i(seq)), + Uid(seq_set) => seq_set + .0 + .as_ref() + .iter() + .any(|seq| midx.is_in_sequence_uid(seq)), + _ => unreachable!(), + } +} + +fn is_keep_modseq( + filter: &Option, + modseq: &NonZeroU64, + midx: &MailIndex, +) -> bool { + if filter.is_some() { + tracing::warn!(filter=?filter, "Ignoring search metadata filter as it's not supported yet"); + } + modseq <= &midx.modseq +} diff --git a/aero-proto/src/imap/session.rs b/aero-proto/src/imap/session.rs new file mode 100644 index 0000000..92b5eb6 --- /dev/null +++ b/aero-proto/src/imap/session.rs @@ -0,0 +1,175 @@ +use anyhow::{anyhow, bail, Context, Result}; +use imap_codec::imap_types::{command::Command, core::Tag}; + +use aero_user::login::ArcLoginProvider; + +use crate::imap::capability::{ClientCapability, ServerCapability}; +use crate::imap::command::{anonymous, authenticated, selected}; +use crate::imap::flow; +use crate::imap::request::Request; +use crate::imap::response::{Response, ResponseOrIdle}; + +//----- +pub struct Instance { + pub login_provider: ArcLoginProvider, + pub server_capabilities: ServerCapability, + pub client_capabilities: ClientCapability, + pub state: flow::State, +} +impl Instance { + pub fn new(login_provider: ArcLoginProvider, cap: ServerCapability) -> Self { + let client_cap = ClientCapability::new(&cap); + Self { + login_provider, + state: flow::State::NotAuthenticated, + server_capabilities: cap, + client_capabilities: client_cap, + } + } + + pub async fn request(&mut self, req: Request) -> ResponseOrIdle { + match req { + Request::IdleStart(tag) => self.idle_init(tag), + Request::IdlePoll => self.idle_poll().await, + Request::ImapCommand(cmd) => self.command(cmd).await, + } + } + + pub fn idle_init(&mut self, tag: Tag<'static>) -> ResponseOrIdle { + // Build transition + //@FIXME the notifier should be hidden inside the state and thus not part of the transition! + let transition = flow::Transition::Idle(tag.clone(), tokio::sync::Notify::new()); + + // Try to apply the transition and get the stop notifier + let maybe_stop = self + .state + .apply(transition) + .context("IDLE transition failed") + .and_then(|_| { + self.state + .notify() + .ok_or(anyhow!("IDLE state has no Notify object")) + }); + + // Build an appropriate response + match maybe_stop { + Ok(stop) => ResponseOrIdle::IdleAccept(stop), + Err(e) => { + tracing::error!(err=?e, "unable to init idle due to a transition error"); + //ResponseOrIdle::IdleReject(tag) + let no = Response::build() + .tag(tag) + .message( + "Internal error, processing command triggered an illegal IMAP state transition", + ) + .no() + .unwrap(); + ResponseOrIdle::IdleReject(no) + } + } + } + + pub async fn idle_poll(&mut self) -> ResponseOrIdle { + match self.idle_poll_happy().await { + Ok(r) => r, + Err(e) => { + tracing::error!(err=?e, "something bad happened in idle"); + ResponseOrIdle::Response(Response::bye().unwrap()) + } + } + } + + pub async fn idle_poll_happy(&mut self) -> Result { + let (mbx, tag, stop) = match &mut self.state { + flow::State::Idle(_, ref mut mbx, _, tag, stop) => (mbx, tag.clone(), stop.clone()), + _ => bail!("Invalid session state, can't idle"), + }; + + tokio::select! 
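+        // Either the client sends DONE (the stop notifier fires) and we answer the
+        // pending IDLE tag with OK, or the mailbox reports a change and an IdleEvent
+        // is forwarded to the network loop.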
{ + _ = stop.notified() => { + self.state.apply(flow::Transition::UnIdle)?; + return Ok(ResponseOrIdle::Response(Response::build() + .tag(tag.clone()) + .message("IDLE completed") + .ok()?)) + }, + change = mbx.idle_sync() => { + tracing::debug!("idle event"); + return Ok(ResponseOrIdle::IdleEvent(change?)); + } + } + } + + pub async fn command(&mut self, cmd: Command<'static>) -> ResponseOrIdle { + // Command behavior is modulated by the state. + // To prevent state error, we handle the same command in separate code paths. + let (resp, tr) = match &mut self.state { + flow::State::NotAuthenticated => { + let ctx = anonymous::AnonymousContext { + req: &cmd, + login_provider: &self.login_provider, + server_capabilities: &self.server_capabilities, + }; + anonymous::dispatch(ctx).await + } + flow::State::Authenticated(ref user) => { + let ctx = authenticated::AuthenticatedContext { + req: &cmd, + server_capabilities: &self.server_capabilities, + client_capabilities: &mut self.client_capabilities, + user, + }; + authenticated::dispatch(ctx).await + } + flow::State::Selected(ref user, ref mut mailbox, ref perm) => { + let ctx = selected::SelectedContext { + req: &cmd, + server_capabilities: &self.server_capabilities, + client_capabilities: &mut self.client_capabilities, + user, + mailbox, + perm, + }; + selected::dispatch(ctx).await + } + flow::State::Idle(..) => Err(anyhow!("can not receive command while idling")), + flow::State::Logout => Response::build() + .tag(cmd.tag.clone()) + .message("No commands are allowed in the LOGOUT state.") + .bad() + .map(|r| (r, flow::Transition::None)), + } + .unwrap_or_else(|err| { + tracing::error!("Command error {:?} occured while processing {:?}", err, cmd); + ( + Response::build() + .to_req(&cmd) + .message("Internal error while processing command") + .bad() + .unwrap(), + flow::Transition::None, + ) + }); + + if let Err(e) = self.state.apply(tr) { + tracing::error!( + "Transition error {:?} occured while processing on command {:?}", + e, + cmd + ); + return ResponseOrIdle::Response(Response::build() + .to_req(&cmd) + .message( + "Internal error, processing command triggered an illegal IMAP state transition", + ) + .bad() + .unwrap()); + } + ResponseOrIdle::Response(resp) + + /*match &self.state { + flow::State::Idle(_, _, _, _, n) => ResponseOrIdle::StartIdle(n.clone()), + _ => ResponseOrIdle::Response(resp), + }*/ + } +} diff --git a/aero-proto/src/lib.rs b/aero-proto/src/lib.rs new file mode 100644 index 0000000..d5154cd --- /dev/null +++ b/aero-proto/src/lib.rs @@ -0,0 +1,6 @@ +#![feature(async_closure)] + +pub mod dav; +pub mod imap; +pub mod lmtp; +pub mod sasl; diff --git a/aero-proto/src/lmtp.rs b/aero-proto/src/lmtp.rs new file mode 100644 index 0000000..9d40296 --- /dev/null +++ b/aero-proto/src/lmtp.rs @@ -0,0 +1,219 @@ +use std::net::SocketAddr; +use std::{pin::Pin, sync::Arc}; + +use anyhow::Result; +use async_trait::async_trait; +use duplexify::Duplex; +use futures::{io, AsyncRead, AsyncReadExt, AsyncWrite}; +use futures::{ + stream, + stream::{FuturesOrdered, FuturesUnordered}, + StreamExt, +}; +use tokio::net::TcpListener; +use tokio::select; +use tokio::sync::watch; +use tokio_util::compat::*; +use smtp_message::{DataUnescaper, Email, EscapedDataReader, Reply, ReplyCode}; +use smtp_server::{reply, Config, ConnectionMetadata, Decision, MailMetadata}; + +use aero_user::config::*; +use aero_user::login::*; +use aero_collections::mail::incoming::EncryptedMessage; + +pub struct LmtpServer { + bind_addr: SocketAddr, + hostname: String, + 
login_provider: Arc, +} + +impl LmtpServer { + pub fn new( + config: LmtpConfig, + login_provider: Arc, + ) -> Arc { + Arc::new(Self { + bind_addr: config.bind_addr, + hostname: config.hostname, + login_provider, + }) + } + + pub async fn run(self: &Arc, mut must_exit: watch::Receiver) -> Result<()> { + let tcp = TcpListener::bind(self.bind_addr).await?; + tracing::info!("LMTP server listening on {:#}", self.bind_addr); + + let mut connections = FuturesUnordered::new(); + + while !*must_exit.borrow() { + let wait_conn_finished = async { + if connections.is_empty() { + futures::future::pending().await + } else { + connections.next().await + } + }; + let (socket, remote_addr) = select! { + a = tcp.accept() => a?, + _ = wait_conn_finished => continue, + _ = must_exit.changed() => continue, + }; + tracing::info!("LMTP: accepted connection from {}", remote_addr); + + let conn = tokio::spawn(smtp_server::interact( + socket.compat(), + smtp_server::IsAlreadyTls::No, + (), + self.clone(), + )); + + connections.push(conn); + } + drop(tcp); + + tracing::info!("LMTP server shutting down, draining remaining connections..."); + while connections.next().await.is_some() {} + + Ok(()) + } +} + +// ---- + +pub struct Message { + to: Vec, +} + +#[async_trait] +impl Config for LmtpServer { + type Protocol = smtp_server::protocol::Lmtp; + + type ConnectionUserMeta = (); + type MailUserMeta = Message; + + fn hostname(&self, _conn_meta: &ConnectionMetadata<()>) -> &str { + &self.hostname + } + + async fn new_mail(&self, _conn_meta: &mut ConnectionMetadata<()>) -> Message { + Message { to: vec![] } + } + + async fn tls_accept( + &self, + _io: IO, + _conn_meta: &mut ConnectionMetadata<()>, + ) -> io::Result>, Pin>>> + where + IO: Send + AsyncRead + AsyncWrite, + { + Err(io::Error::new( + io::ErrorKind::InvalidInput, + "TLS not implemented for LMTP server", + )) + } + + async fn filter_from( + &self, + from: Option, + _meta: &mut MailMetadata, + _conn_meta: &mut ConnectionMetadata<()>, + ) -> Decision> { + Decision::Accept { + reply: reply::okay_from().convert(), + res: from, + } + } + + async fn filter_to( + &self, + to: Email, + meta: &mut MailMetadata, + _conn_meta: &mut ConnectionMetadata<()>, + ) -> Decision { + let to_str = match to.hostname.as_ref() { + Some(h) => format!("{}@{}", to.localpart, h), + None => to.localpart.to_string(), + }; + match self.login_provider.public_login(&to_str).await { + Ok(creds) => { + meta.user.to.push(creds); + Decision::Accept { + reply: reply::okay_to().convert(), + res: to, + } + } + Err(e) => Decision::Reject { + reply: Reply { + code: ReplyCode::POLICY_REASON, + ecode: None, + text: vec![smtp_message::MaybeUtf8::Utf8(e.to_string())], + }, + }, + } + } + + async fn handle_mail<'resp, R>( + &'resp self, + reader: &mut EscapedDataReader<'_, R>, + meta: MailMetadata, + _conn_meta: &'resp mut ConnectionMetadata<()>, + ) -> Pin> + Send + 'resp>> + where + R: Send + Unpin + AsyncRead, + { + let err_response_stream = |meta: MailMetadata, msg: String| { + Box::pin( + stream::iter(meta.user.to.into_iter()).map(move |_| Decision::Reject { + reply: Reply { + code: ReplyCode::POLICY_REASON, + ecode: None, + text: vec![smtp_message::MaybeUtf8::Utf8(msg.clone())], + }, + }), + ) + }; + + let mut text = Vec::new(); + if let Err(e) = reader.read_to_end(&mut text).await { + return err_response_stream(meta, format!("io error: {}", e)); + } + reader.complete(); + let raw_size = text.len(); + + // Unescape email, shrink it also to remove last dot + let unesc_res = 
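+        // DataUnescaper undoes the DATA transparency encoding (presumably stripping
+        // the leading-dot escaping and the final terminator); `written` is the length
+        // of the clean payload that remains in `text`.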
DataUnescaper::new(true).unescape(&mut text); + text.truncate(unesc_res.written); + tracing::debug!(prev_sz = raw_size, new_sz = text.len(), "unescaped"); + + let encrypted_message = match EncryptedMessage::new(text) { + Ok(x) => Arc::new(x), + Err(e) => return err_response_stream(meta, e.to_string()), + }; + + Box::pin( + meta.user + .to + .into_iter() + .map(move |creds| { + let encrypted_message = encrypted_message.clone(); + async move { + match encrypted_message.deliver_to(creds).await { + Ok(()) => Decision::Accept { + reply: reply::okay_mail().convert(), + res: (), + }, + Err(e) => Decision::Reject { + reply: Reply { + code: ReplyCode::POLICY_REASON, + ecode: None, + text: vec![smtp_message::MaybeUtf8::Utf8(e.to_string())], + }, + }, + } + } + }) + .collect::>(), + ) + } +} diff --git a/aero-proto/src/sasl.rs b/aero-proto/src/sasl.rs new file mode 100644 index 0000000..dae89eb --- /dev/null +++ b/aero-proto/src/sasl.rs @@ -0,0 +1,142 @@ +use std::net::SocketAddr; + +use anyhow::{anyhow, bail, Result}; +use futures::stream::{FuturesUnordered, StreamExt}; +use tokio::io::BufStream; +use tokio::io::{AsyncBufReadExt, AsyncWriteExt}; +use tokio::net::{TcpListener, TcpStream}; +use tokio::sync::watch; +use tokio_util::bytes::BytesMut; + +use aero_user::config::AuthConfig; +use aero_user::login::ArcLoginProvider; +use aero_sasl::{flow::State, decode::client_command, encode::Encode}; + +pub struct AuthServer { + login_provider: ArcLoginProvider, + bind_addr: SocketAddr, +} + +impl AuthServer { + pub fn new(config: AuthConfig, login_provider: ArcLoginProvider) -> Self { + Self { + bind_addr: config.bind_addr, + login_provider, + } + } + + pub async fn run(self: Self, mut must_exit: watch::Receiver) -> Result<()> { + let tcp = TcpListener::bind(self.bind_addr).await?; + tracing::info!( + "SASL Authentication Protocol listening on {:#}", + self.bind_addr + ); + + let mut connections = FuturesUnordered::new(); + + while !*must_exit.borrow() { + let wait_conn_finished = async { + if connections.is_empty() { + futures::future::pending().await + } else { + connections.next().await + } + }; + + let (socket, remote_addr) = tokio::select! { + a = tcp.accept() => a?, + _ = wait_conn_finished => continue, + _ = must_exit.changed() => continue, + }; + + tracing::info!("AUTH: accepted connection from {}", remote_addr); + let conn = tokio::spawn( + NetLoop::new(socket, self.login_provider.clone(), must_exit.clone()).run_error(), + ); + + connections.push(conn); + } + drop(tcp); + + tracing::info!("AUTH server shutting down, draining remaining connections..."); + while connections.next().await.is_some() {} + + Ok(()) + } +} + +struct NetLoop { + login: ArcLoginProvider, + stream: BufStream, + stop: watch::Receiver, + state: State, + read_buf: Vec, + write_buf: BytesMut, +} + +impl NetLoop { + fn new(stream: TcpStream, login: ArcLoginProvider, stop: watch::Receiver) -> Self { + Self { + login, + stream: BufStream::new(stream), + state: State::Init, + stop, + read_buf: Vec::new(), + write_buf: BytesMut::new(), + } + } + + async fn run_error(self) { + match self.run().await { + Ok(()) => tracing::info!("Auth session succeeded"), + Err(e) => tracing::error!(err=?e, "Auth session failed"), + } + } + + async fn run(mut self) -> Result<()> { + loop { + tokio::select! { + read_res = self.stream.read_until(b'\n', &mut self.read_buf) => { + // Detect EOF / socket close + let bread = read_res?; + if bread == 0 { + tracing::info!("Reading buffer empty, connection has been closed. 
Exiting AUTH session."); + return Ok(()) + } + + // Parse command + let (_, cmd) = client_command(&self.read_buf).map_err(|_| anyhow!("Unable to parse command"))?; + tracing::trace!(cmd=?cmd, "Received command"); + + // Make some progress in our local state + let login = async |user: String, pass: String| self.login.login(user.as_str(), pass.as_str()).await.is_ok(); + self.state.progress(cmd, login).await; + if matches!(self.state, State::Error) { + bail!("Internal state is in error, previous logs explain what went wrong"); + } + + // Build response + let srv_cmds = self.state.response(); + srv_cmds.iter().try_for_each(|r| { + tracing::trace!(cmd=?r, "Sent command"); + r.encode(&mut self.write_buf) + })?; + + // Send responses if at least one command response has been generated + if !srv_cmds.is_empty() { + self.stream.write_all(&self.write_buf).await?; + self.stream.flush().await?; + } + + // Reset buffers + self.read_buf.clear(); + self.write_buf.clear(); + }, + _ = self.stop.changed() => { + tracing::debug!("Server is stopping, quitting this runner"); + return Ok(()) + } + } + } + } +} diff --git a/aero-sasl/src/flow.rs b/aero-sasl/src/flow.rs index 6cc698a..31c8bc5 100644 --- a/aero-sasl/src/flow.rs +++ b/aero-sasl/src/flow.rs @@ -28,9 +28,9 @@ impl State { Self::Init } - async fn try_auth_plain<'a, X, F>(&self, data: &'a [u8], login: X) -> AuthRes + async fn try_auth_plain(&self, data: &[u8], login: X) -> AuthRes where - X: FnOnce(&'a str, &'a str) -> F, + X: FnOnce(String, String) -> F, F: Future, { // Check that we can extract user's login+pass @@ -56,7 +56,7 @@ impl State { }; // Try to connect user - match login(user, password).await { + match login(user.to_string(), password.to_string()).await { true => AuthRes::Success(user.to_string()), false => { tracing::warn!("login failed"); @@ -67,7 +67,7 @@ impl State { pub async fn progress(&mut self, cmd: ClientCommand, login: X) where - X: FnOnce(&str, &str) -> F, + X: FnOnce(String, String) -> F, F: Future, { let new_state = 'state: { -- cgit v1.2.3 From b9f32d720ae5ec60cadeb492af781ade48cd6cbf Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Fri, 8 Mar 2024 10:20:45 +0100 Subject: Finalize Aerogramme's refactor --- .gitignore | 8 + Cargo.lock | 259 +++++++++++++++- Cargo.toml | 4 +- aerogramme/Cargo.toml | 15 + aerogramme/src/k2v_util.rs | 26 -- aerogramme/src/lib.rs | 19 -- aerogramme/src/main.rs | 10 +- aerogramme/src/server.rs | 14 +- aerogramme/tests/behavior.rs | 357 ++++++++++++++++++++++ aerogramme/tests/common/constants.rs | 54 ++++ aerogramme/tests/common/fragments.rs | 570 +++++++++++++++++++++++++++++++++++ aerogramme/tests/common/mod.rs | 99 ++++++ doc/.gitignore | 1 - doc/book.toml | 9 - doc/src/SUMMARY.md | 34 --- doc/src/aero-compo.png | Bin 26898 -> 0 bytes doc/src/aero-paranoid.png | Bin 27405 -> 0 bytes doc/src/aero-schema.png | Bin 74645 -> 0 bytes doc/src/aero-states.png | Bin 9090 -> 0 bytes doc/src/aero-states2.png | Bin 17869 -> 0 bytes doc/src/aerogramme.jpg | Bin 563365 -> 0 bytes doc/src/config.md | 126 -------- doc/src/crypt-key.md | 82 ----- doc/src/data_format.md | 50 --- doc/src/imap_uid.md | 203 ------------- doc/src/index.md | 22 -- doc/src/installation.md | 25 -- doc/src/log.md | 149 --------- doc/src/mailbox.md | 56 ---- doc/src/mailbox.png | Bin 10981 -> 0 bytes doc/src/mutt_mail.png | Bin 24325 -> 0 bytes doc/src/mutt_mb.png | Bin 39035 -> 0 bytes doc/src/notes.md | 42 --- doc/src/overview.md | 61 ---- doc/src/rfc.md | 3 - doc/src/setup.md | 90 ------ doc/src/validate.md | 40 --- 
tests/behavior.rs | 357 ---------------------- tests/common/constants.rs | 54 ---- tests/common/fragments.rs | 570 ----------------------------------- tests/common/mod.rs | 99 ------ 41 files changed, 1372 insertions(+), 2136 deletions(-) delete mode 100644 aerogramme/src/k2v_util.rs delete mode 100644 aerogramme/src/lib.rs create mode 100644 aerogramme/tests/behavior.rs create mode 100644 aerogramme/tests/common/constants.rs create mode 100644 aerogramme/tests/common/fragments.rs create mode 100644 aerogramme/tests/common/mod.rs delete mode 100644 doc/.gitignore delete mode 100644 doc/book.toml delete mode 100644 doc/src/SUMMARY.md delete mode 100644 doc/src/aero-compo.png delete mode 100644 doc/src/aero-paranoid.png delete mode 100644 doc/src/aero-schema.png delete mode 100644 doc/src/aero-states.png delete mode 100644 doc/src/aero-states2.png delete mode 100644 doc/src/aerogramme.jpg delete mode 100644 doc/src/config.md delete mode 100644 doc/src/crypt-key.md delete mode 100644 doc/src/data_format.md delete mode 100644 doc/src/imap_uid.md delete mode 100644 doc/src/index.md delete mode 100644 doc/src/installation.md delete mode 100644 doc/src/log.md delete mode 100644 doc/src/mailbox.md delete mode 100644 doc/src/mailbox.png delete mode 100644 doc/src/mutt_mail.png delete mode 100644 doc/src/mutt_mb.png delete mode 100644 doc/src/notes.md delete mode 100644 doc/src/overview.md delete mode 100644 doc/src/rfc.md delete mode 100644 doc/src/setup.md delete mode 100644 doc/src/validate.md delete mode 100644 tests/behavior.rs delete mode 100644 tests/common/constants.rs delete mode 100644 tests/common/fragments.rs delete mode 100644 tests/common/mod.rs diff --git a/.gitignore b/.gitignore index deb0fec..bfe0d50 100644 --- a/.gitignore +++ b/.gitignore @@ -4,3 +4,11 @@ env.sh aerogramme.toml *.swo *.swp +aerogramme.pid +cert.pem +ec_key.pem +provider-users.toml +setup.toml +test.eml +test.txt +users.toml diff --git a/Cargo.lock b/Cargo.lock index 32b798c..77dda64 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -144,6 +144,24 @@ dependencies = [ "zstd", ] +[[package]] +name = "aerogramme" +version = "0.3.0" +dependencies = [ + "aero-proto", + "aero-user", + "anyhow", + "backtrace", + "clap", + "futures", + "log", + "nix", + "rpassword", + "tokio", + "tracing", + "tracing-subscriber", +] + [[package]] name = "aerogramme-fuzz" version = "0.0.0" @@ -461,6 +479,17 @@ version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" +[[package]] +name = "atty" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" +dependencies = [ + "hermit-abi 0.1.19", + "libc", + "winapi", +] + [[package]] name = "auto_enums" version = "0.7.12" @@ -1080,6 +1109,45 @@ dependencies = [ "windows-targets 0.52.0", ] +[[package]] +name = "clap" +version = "3.2.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ea181bf566f71cb9a5d17a59e1871af638180a18fb0035c92ae62b705207123" +dependencies = [ + "atty", + "bitflags 1.3.2", + "clap_derive", + "clap_lex", + "indexmap 1.9.3", + "once_cell", + "strsim", + "termcolor", + "textwrap", +] + +[[package]] +name = "clap_derive" +version = "3.2.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae6371b8bdc8b7d3959e9cf7b22d4435ef3e79e138688421ec654acf8c81b008" +dependencies = [ + "heck", + "proc-macro-error", + 
"proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "clap_lex" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2850f2f5a82cbf437dd5af4d49848fbdfc27c157c3d010345776f952765261c5" +dependencies = [ + "os_str_bytes", +] + [[package]] name = "concurrent-queue" version = "2.4.0" @@ -1637,7 +1705,7 @@ dependencies = [ "futures-sink", "futures-util", "http 0.2.12", - "indexmap", + "indexmap 2.2.5", "slab", "tokio", "tokio-util", @@ -1656,19 +1724,40 @@ dependencies = [ "futures-sink", "futures-util", "http 1.1.0", - "indexmap", + "indexmap 2.2.5", "slab", "tokio", "tokio-util", "tracing", ] +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + [[package]] name = "hashbrown" version = "0.14.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" +[[package]] +name = "heck" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" + +[[package]] +name = "hermit-abi" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" +dependencies = [ + "libc", +] + [[package]] name = "hermit-abi" version = "0.3.4" @@ -1956,6 +2045,16 @@ dependencies = [ "thiserror", ] +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown 0.12.3", +] + [[package]] name = "indexmap" version = "2.2.5" @@ -1963,7 +2062,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7b0b929d511467233429c45a44ac1dcaa21ba0f5ba11e4879e6ed28ddb4f9df4" dependencies = [ "equivalent", - "hashbrown", + "hashbrown 0.14.3", ] [[package]] @@ -1981,7 +2080,7 @@ version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" dependencies = [ - "hermit-abi", + "hermit-abi 0.3.4", "libc", "windows-sys 0.48.0", ] @@ -2198,6 +2297,17 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "nix" +version = "0.27.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2eb04e9c688eff1c89d72b407f168cf79bb9e867a9d3323ed6c01519eb9cc053" +dependencies = [ + "bitflags 2.4.2", + "cfg-if", + "libc", +] + [[package]] name = "nom" version = "2.2.1" @@ -2227,6 +2337,16 @@ dependencies = [ "minimal-lexical", ] +[[package]] +name = "nu-ansi-term" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + "winapi", +] + [[package]] name = "num-bigint" version = "0.4.4" @@ -2263,7 +2383,7 @@ version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" dependencies = [ - "hermit-abi", + "hermit-abi 0.3.4", "libc", ] @@ -2294,12 +2414,24 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" +[[package]] +name = "os_str_bytes" +version = "6.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2355d85b9a3786f481747ced0e0ff2ba35213a1f9bd406ed906554d7af805a1" + [[package]] name = "outref" version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4030760ffd992bef45b0ae3f10ce1aba99e33464c90d14dd7c039884963ddc7a" +[[package]] +name = "overload" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + [[package]] name = "p256" version = "0.11.1" @@ -2447,6 +2579,30 @@ version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" +[[package]] +name = "proc-macro-error" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" +dependencies = [ + "proc-macro-error-attr", + "proc-macro2", + "quote", + "syn 1.0.109", + "version_check", +] + +[[package]] +name = "proc-macro-error-attr" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" +dependencies = [ + "proc-macro2", + "quote", + "version_check", +] + [[package]] name = "proc-macro2" version = "1.0.76" @@ -2603,6 +2759,27 @@ dependencies = [ "serde", ] +[[package]] +name = "rpassword" +version = "7.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80472be3c897911d0137b2d2b9055faf6eeac5b14e324073d83bc17b191d7e3f" +dependencies = [ + "libc", + "rtoolbox", + "windows-sys 0.48.0", +] + +[[package]] +name = "rtoolbox" +version = "0.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c247d24e63230cdb56463ae328478bd5eac8b8faa8c69461a77e8e323afac90e" +dependencies = [ + "libc", + "windows-sys 0.48.0", +] + [[package]] name = "rustc-demangle" version = "0.1.23" @@ -2893,6 +3070,15 @@ dependencies = [ "digest", ] +[[package]] +name = "sharded-slab" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +dependencies = [ + "lazy_static", +] + [[package]] name = "signal-hook-registry" version = "1.4.1" @@ -3052,6 +3238,12 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" +[[package]] +name = "strsim" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" + [[package]] name = "subtle" version = "2.5.0" @@ -3098,6 +3290,21 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" +[[package]] +name = "termcolor" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "textwrap" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"23d434d3f8967a09480fb04132ebe0a3e088c173e6d0ee7897abbdf4eab0f8b9" + [[package]] name = "thiserror" version = "1.0.56" @@ -3118,6 +3325,16 @@ dependencies = [ "syn 2.0.48", ] +[[package]] +name = "thread_local" +version = "1.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" +dependencies = [ + "cfg-if", + "once_cell", +] + [[package]] name = "time" version = "0.3.31" @@ -3316,6 +3533,32 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" dependencies = [ "once_cell", + "valuable", +] + +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" +dependencies = [ + "nu-ansi-term", + "sharded-slab", + "smallvec", + "thread_local", + "tracing-core", + "tracing-log", ] [[package]] @@ -3392,6 +3635,12 @@ version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f00cc9702ca12d3c81455259621e676d0f7251cec66a21e98fe2e9a37db93b2a" +[[package]] +name = "valuable" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" + [[package]] name = "value-bag" version = "1.7.0" diff --git a/Cargo.toml b/Cargo.toml index 406d5bd..a18c41c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -8,7 +8,7 @@ members = [ "aero-dav/fuzz", "aero-collections", "aero-proto", -# "aerogramme", + "aerogramme", ] default-members = ["aerogramme"] @@ -21,7 +21,7 @@ aero-sasl = { version = "0.3.0", path = "aero-sasl" } aero-dav = { version = "0.3.0", path = "aero-dav" } aero-collections = { version = "0.3.0", path = "aero-collections" } aero-proto = { version = "0.3.0", path = "aero-proto" } -#aerogramme = { version = "0.3.0", path = "aerogramme" } +aerogramme = { version = "0.3.0", path = "aerogramme" } # async runtime tokio = { version = "1.18", default-features = false, features = ["rt", "rt-multi-thread", "io-util", "net", "time", "macros", "sync", "signal", "fs"] } diff --git a/aerogramme/Cargo.toml b/aerogramme/Cargo.toml index e408aec..ab62e44 100644 --- a/aerogramme/Cargo.toml +++ b/aerogramme/Cargo.toml @@ -6,6 +6,21 @@ edition = "2021" license = "EUPL-1.2" description = "A robust email server" +[dependencies] +aero-user.workspace = true +aero-proto.workspace = true + +anyhow.workspace = true +backtrace.workspace = true +futures.workspace = true +tokio.workspace = true +log.workspace = true +nix.workspace = true +clap.workspace = true +tracing.workspace = true +tracing-subscriber.workspace = true +rpassword.workspace = true + [[test]] name = "behavior" path = "tests/behavior.rs" diff --git a/aerogramme/src/k2v_util.rs b/aerogramme/src/k2v_util.rs deleted file mode 100644 index 3cd969b..0000000 --- a/aerogramme/src/k2v_util.rs +++ /dev/null @@ -1,26 +0,0 @@ -/* -use anyhow::Result; -// ---- UTIL: function to wait for a value to have changed in K2V ---- - -pub async fn k2v_wait_value_changed( - k2v: &storage::RowStore, - key: &storage::RowRef, -) -> Result { - loop { - if let Some(ct) = prev_ct 
{ - match k2v.poll_item(pk, sk, ct.clone(), None).await? { - None => continue, - Some(cv) => return Ok(cv), - } - } else { - match k2v.read_item(pk, sk).await { - Err(k2v_client::Error::NotFound) => { - k2v.insert_item(pk, sk, vec![0u8], None).await?; - } - Err(e) => return Err(e.into()), - Ok(cv) => return Ok(cv), - } - } - } -} -*/ diff --git a/aerogramme/src/lib.rs b/aerogramme/src/lib.rs deleted file mode 100644 index f065478..0000000 --- a/aerogramme/src/lib.rs +++ /dev/null @@ -1,19 +0,0 @@ -#![feature(type_alias_impl_trait)] -#![feature(async_fn_in_trait)] -#![feature(async_closure)] -#![feature(trait_alias)] - -pub mod auth; -pub mod bayou; -pub mod config; -pub mod cryptoblob; -pub mod dav; -pub mod imap; -pub mod k2v_util; -pub mod lmtp; -pub mod login; -pub mod mail; -pub mod server; -pub mod storage; -pub mod timestamp; -pub mod user; diff --git a/aerogramme/src/main.rs b/aerogramme/src/main.rs index 43b4dca..4251520 100644 --- a/aerogramme/src/main.rs +++ b/aerogramme/src/main.rs @@ -1,3 +1,5 @@ +mod server; + use std::io::Read; use std::path::PathBuf; @@ -5,9 +7,9 @@ use anyhow::{bail, Context, Result}; use clap::{Parser, Subcommand}; use nix::{sys::signal, unistd::Pid}; -use aerogramme::config::*; -use aerogramme::login::{static_provider::*, *}; -use aerogramme::server::Server; +use aero_user::config::*; +use aero_user::login::{static_provider::*, *}; +use crate::server::Server; #[derive(Parser, Debug)] #[clap(author, version, about, long_about = None)] @@ -151,7 +153,7 @@ fn tracer() { #[tokio::main] async fn main() -> Result<()> { if std::env::var("RUST_LOG").is_err() { - std::env::set_var("RUST_LOG", "main=info,aerogramme=info,k2v_client=info") + std::env::set_var("RUST_LOG", "info") } // Abort on panic (same behavior as in Go) diff --git a/aerogramme/src/server.rs b/aerogramme/src/server.rs index 09e91ad..e302db3 100644 --- a/aerogramme/src/server.rs +++ b/aerogramme/src/server.rs @@ -7,13 +7,13 @@ use futures::try_join; use log::*; use tokio::sync::watch; -use crate::auth; -use crate::config::*; -use crate::dav; -use crate::imap; -use crate::lmtp::*; -use crate::login::ArcLoginProvider; -use crate::login::{demo_provider::*, ldap_provider::*, static_provider::*}; +use aero_user::config::*; +use aero_user::login::ArcLoginProvider; +use aero_user::login::{demo_provider::*, ldap_provider::*, static_provider::*}; +use aero_proto::sasl as auth; +use aero_proto::dav; +use aero_proto::imap; +use aero_proto::lmtp::*; pub struct Server { lmtp_server: Option>, diff --git a/aerogramme/tests/behavior.rs b/aerogramme/tests/behavior.rs new file mode 100644 index 0000000..13baf0e --- /dev/null +++ b/aerogramme/tests/behavior.rs @@ -0,0 +1,357 @@ +use anyhow::Context; + +mod common; +use crate::common::constants::*; +use crate::common::fragments::*; + +fn main() { + rfc3501_imap4rev1_base(); + rfc6851_imapext_move(); + rfc4551_imapext_condstore(); + rfc2177_imapext_idle(); + rfc5161_imapext_enable(); // 1 + rfc3691_imapext_unselect(); // 2 + rfc7888_imapext_literal(); // 3 + rfc4315_imapext_uidplus(); // 4 + rfc5819_imapext_liststatus(); // 5 + println!("✅ SUCCESS 🌟🚀🥳🙏🥹"); +} + +fn rfc3501_imap4rev1_base() { + println!("🧪 rfc3501_imap4rev1_base"); + common::aerogramme_provider_daemon_dev(|imap_socket, lmtp_socket| { + connect(imap_socket).context("server says hello")?; + capability(imap_socket, Extension::None).context("check server capabilities")?; + login(imap_socket, Account::Alice).context("login test")?; + create_mailbox(imap_socket, Mailbox::Archive).context("created mailbox 
archive")?; + let select_res = + select(imap_socket, Mailbox::Inbox, SelectMod::None).context("select inbox")?; + assert!(select_res.contains("* 0 EXISTS")); + + check(imap_socket).context("check must run")?; + status(imap_socket, Mailbox::Archive, StatusKind::UidNext) + .context("status of archive from inbox")?; + lmtp_handshake(lmtp_socket).context("handshake lmtp done")?; + lmtp_deliver_email(lmtp_socket, Email::Multipart).context("mail delivered successfully")?; + noop_exists(imap_socket, 1).context("noop loop must detect a new email")?; + + let srv_msg = fetch( + imap_socket, + Selection::FirstId, + FetchKind::Rfc822, + FetchMod::None, + ) + .context("fetch rfc822 message, should be our first message")?; + let orig_email = std::str::from_utf8(EMAIL1)?; + assert!(srv_msg.contains(orig_email)); + + copy(imap_socket, Selection::FirstId, Mailbox::Archive) + .context("copy message to the archive mailbox")?; + append(imap_socket, Email::Basic).context("insert email in INBOX")?; + noop_exists(imap_socket, 2).context("noop loop must detect a new email")?; + search(imap_socket, SearchKind::Text("OoOoO")).expect("search should return something"); + store( + imap_socket, + Selection::FirstId, + Flag::Deleted, + StoreAction::AddFlags, + StoreMod::None, + ) + .context("should add delete flag to the email")?; + expunge(imap_socket).context("expunge emails")?; + rename_mailbox(imap_socket, Mailbox::Archive, Mailbox::Drafts) + .context("Archive mailbox is renamed Drafts")?; + delete_mailbox(imap_socket, Mailbox::Drafts).context("Drafts mailbox is deleted")?; + Ok(()) + }) + .expect("test fully run"); +} + +fn rfc3691_imapext_unselect() { + println!("🧪 rfc3691_imapext_unselect"); + common::aerogramme_provider_daemon_dev(|imap_socket, lmtp_socket| { + connect(imap_socket).context("server says hello")?; + + lmtp_handshake(lmtp_socket).context("handshake lmtp done")?; + lmtp_deliver_email(lmtp_socket, Email::Basic).context("mail delivered successfully")?; + + capability(imap_socket, Extension::Unselect).context("check server capabilities")?; + login(imap_socket, Account::Alice).context("login test")?; + let select_res = + select(imap_socket, Mailbox::Inbox, SelectMod::None).context("select inbox")?; + assert!(select_res.contains("* 0 EXISTS")); + + noop_exists(imap_socket, 1).context("noop loop must detect a new email")?; + store( + imap_socket, + Selection::FirstId, + Flag::Deleted, + StoreAction::AddFlags, + StoreMod::None, + ) + .context("add delete flags to the email")?; + unselect(imap_socket) + .context("unselect inbox while preserving email with the \\Delete flag")?; + let select_res = + select(imap_socket, Mailbox::Inbox, SelectMod::None).context("select inbox again")?; + assert!(select_res.contains("* 1 EXISTS")); + + let srv_msg = fetch( + imap_socket, + Selection::FirstId, + FetchKind::Rfc822, + FetchMod::None, + ) + .context("message is still present")?; + let orig_email = std::str::from_utf8(EMAIL2)?; + assert!(srv_msg.contains(orig_email)); + + close(imap_socket).context("close inbox and expunge message")?; + let select_res = select(imap_socket, Mailbox::Inbox, SelectMod::None) + .context("select inbox again and check it's empty")?; + assert!(select_res.contains("* 0 EXISTS")); + + Ok(()) + }) + .expect("test fully run"); +} + +fn rfc5161_imapext_enable() { + println!("🧪 rfc5161_imapext_enable"); + common::aerogramme_provider_daemon_dev(|imap_socket, _lmtp_socket| { + connect(imap_socket).context("server says hello")?; + login(imap_socket, Account::Alice).context("login test")?; + 
enable(imap_socket, Enable::Utf8Accept, Some(Enable::Utf8Accept))?; + enable(imap_socket, Enable::Utf8Accept, None)?; + logout(imap_socket)?; + + Ok(()) + }) + .expect("test fully run"); +} + +fn rfc6851_imapext_move() { + println!("🧪 rfc6851_imapext_move"); + common::aerogramme_provider_daemon_dev(|imap_socket, lmtp_socket| { + connect(imap_socket).context("server says hello")?; + + capability(imap_socket, Extension::Move).context("check server capabilities")?; + login(imap_socket, Account::Alice).context("login test")?; + create_mailbox(imap_socket, Mailbox::Archive).context("created mailbox archive")?; + let select_res = + select(imap_socket, Mailbox::Inbox, SelectMod::None).context("select inbox")?; + assert!(select_res.contains("* 0 EXISTS")); + + lmtp_handshake(lmtp_socket).context("handshake lmtp done")?; + lmtp_deliver_email(lmtp_socket, Email::Basic).context("mail delivered successfully")?; + + noop_exists(imap_socket, 1).context("noop loop must detect a new email")?; + r#move(imap_socket, Selection::FirstId, Mailbox::Archive) + .context("message from inbox moved to archive")?; + + unselect(imap_socket) + .context("unselect inbox while preserving email with the \\Delete flag")?; + let select_res = + select(imap_socket, Mailbox::Archive, SelectMod::None).context("select archive")?; + assert!(select_res.contains("* 1 EXISTS")); + + let srv_msg = fetch( + imap_socket, + Selection::FirstId, + FetchKind::Rfc822, + FetchMod::None, + ) + .context("check mail exists")?; + let orig_email = std::str::from_utf8(EMAIL2)?; + assert!(srv_msg.contains(orig_email)); + + logout(imap_socket).context("must quit")?; + + Ok(()) + }) + .expect("test fully run"); +} + +fn rfc7888_imapext_literal() { + println!("🧪 rfc7888_imapext_literal"); + common::aerogramme_provider_daemon_dev(|imap_socket, _lmtp_socket| { + connect(imap_socket).context("server says hello")?; + + capability(imap_socket, Extension::LiteralPlus).context("check server capabilities")?; + login_with_literal(imap_socket, Account::Alice).context("use literal to connect Alice")?; + + Ok(()) + }) + .expect("test fully run"); +} + +fn rfc4551_imapext_condstore() { + println!("🧪 rfc4551_imapext_condstore"); + common::aerogramme_provider_daemon_dev(|imap_socket, lmtp_socket| { + // Setup the test + connect(imap_socket).context("server says hello")?; + + // RFC 3.1.1 Advertising Support for CONDSTORE + capability(imap_socket, Extension::Condstore).context("check server capabilities")?; + login(imap_socket, Account::Alice).context("login test")?; + + // RFC 3.1.8. CONDSTORE Parameter to SELECT and EXAMINE + let select_res = + select(imap_socket, Mailbox::Inbox, SelectMod::Condstore).context("select inbox")?; + // RFC 3.1.2 New OK Untagged Responses for SELECT and EXAMINE + assert!(select_res.contains("[HIGHESTMODSEQ 1]")); + + // RFC 3.1.3. STORE and UID STORE Commands + lmtp_handshake(lmtp_socket).context("handshake lmtp done")?; + lmtp_deliver_email(lmtp_socket, Email::Basic).context("mail delivered successfully")?; + lmtp_deliver_email(lmtp_socket, Email::Multipart).context("mail delivered successfully")?; + noop_exists(imap_socket, 2).context("noop loop must detect a new email")?; + let store_res = store( + imap_socket, + Selection::All, + Flag::Important, + StoreAction::AddFlags, + StoreMod::UnchangedSince(1), + )?; + assert!(store_res.contains("[MODIFIED 2]")); + assert!(store_res.contains("* 1 FETCH (FLAGS (\\Important) MODSEQ (3))")); + assert!(!store_res.contains("* 2 FETCH")); + assert_eq!(store_res.lines().count(), 2); + + // RFC 3.1.4. 
FETCH and UID FETCH Commands + let fetch_res = fetch( + imap_socket, + Selection::All, + FetchKind::Rfc822Size, + FetchMod::ChangedSince(2), + )?; + assert!(fetch_res.contains("* 1 FETCH (RFC822.SIZE 81 MODSEQ (3))")); + assert!(!fetch_res.contains("* 2 FETCH")); + assert_eq!(store_res.lines().count(), 2); + + // RFC 3.1.5. MODSEQ Search Criterion in SEARCH + let search_res = search(imap_socket, SearchKind::ModSeq(3))?; + // RFC 3.1.6. Modified SEARCH Untagged Response + assert!(search_res.contains("* SEARCH 1 (MODSEQ 3)")); + + // RFC 3.1.7 HIGHESTMODSEQ Status Data Items + let status_res = status(imap_socket, Mailbox::Inbox, StatusKind::HighestModSeq)?; + assert!(status_res.contains("HIGHESTMODSEQ 3")); + + Ok(()) + }) + .expect("test fully run"); +} + +fn rfc2177_imapext_idle() { + println!("🧪 rfc2177_imapext_idle"); + common::aerogramme_provider_daemon_dev(|imap_socket, lmtp_socket| { + // Test setup, check capability + connect(imap_socket).context("server says hello")?; + capability(imap_socket, Extension::Idle).context("check server capabilities")?; + login(imap_socket, Account::Alice).context("login test")?; + select(imap_socket, Mailbox::Inbox, SelectMod::None).context("select inbox")?; + + // Check that new messages from LMTP are correctly detected during idling + start_idle(imap_socket).context("can't start idling")?; + lmtp_handshake(lmtp_socket).context("handshake lmtp done")?; + lmtp_deliver_email(lmtp_socket, Email::Basic).context("mail delivered successfully")?; + let srv_msg = stop_idle(imap_socket).context("stop idling")?; + assert!(srv_msg.contains("* 1 EXISTS")); + + Ok(()) + }) + .expect("test fully run"); +} + +fn rfc4315_imapext_uidplus() { + println!("🧪 rfc4315_imapext_uidplus"); + common::aerogramme_provider_daemon_dev(|imap_socket, lmtp_socket| { + // Test setup, check capability, insert 2 emails + connect(imap_socket).context("server says hello")?; + capability(imap_socket, Extension::UidPlus).context("check server capabilities")?; + login(imap_socket, Account::Alice).context("login test")?; + select(imap_socket, Mailbox::Inbox, SelectMod::None).context("select inbox")?; + lmtp_handshake(lmtp_socket).context("handshake lmtp done")?; + lmtp_deliver_email(lmtp_socket, Email::Basic).context("mail delivered successfully")?; + lmtp_deliver_email(lmtp_socket, Email::Multipart).context("mail delivered successfully")?; + noop_exists(imap_socket, 2).context("noop loop must detect a new email")?; + + // Check UID EXPUNGE seqset + store( + imap_socket, + Selection::All, + Flag::Deleted, + StoreAction::AddFlags, + StoreMod::None, + )?; + let res = uid_expunge(imap_socket, Selection::FirstId)?; + assert_eq!(res.lines().count(), 2); + assert!(res.contains("* 1 EXPUNGE")); + + // APPENDUID check UID + UID VALIDITY + // Note: 4 and not 3, as we update the UID counter when we delete an email + // it's part of our UID proof + let res = append(imap_socket, Email::Multipart)?; + assert!(res.contains("[APPENDUID 1 4]")); + + // COPYUID, check + create_mailbox(imap_socket, Mailbox::Archive).context("created mailbox archive")?; + let res = copy(imap_socket, Selection::FirstId, Mailbox::Archive)?; + assert!(res.contains("[COPYUID 1 2 1]")); + + // MOVEUID, check + let res = r#move(imap_socket, Selection::FirstId, Mailbox::Archive)?; + assert!(res.contains("[COPYUID 1 2 2]")); + + Ok(()) + }) + .expect("test fully run"); +} + +/// +/// Example +/// +/// ```text +/// 30 list "" "*" RETURN (STATUS (MESSAGES UNSEEN)) +/// * LIST (\Subscribed) "." 
INBOX +/// * STATUS INBOX (MESSAGES 2 UNSEEN 1) +/// 30 OK LIST completed +/// ``` +fn rfc5819_imapext_liststatus() { + println!("🧪 rfc5819_imapext_liststatus"); + common::aerogramme_provider_daemon_dev(|imap_socket, lmtp_socket| { + // Test setup, check capability, add 2 emails, read 1 + connect(imap_socket).context("server says hello")?; + capability(imap_socket, Extension::ListStatus).context("check server capabilities")?; + login(imap_socket, Account::Alice).context("login test")?; + select(imap_socket, Mailbox::Inbox, SelectMod::None).context("select inbox")?; + lmtp_handshake(lmtp_socket).context("handshake lmtp done")?; + lmtp_deliver_email(lmtp_socket, Email::Basic).context("mail delivered successfully")?; + lmtp_deliver_email(lmtp_socket, Email::Multipart).context("mail delivered successfully")?; + noop_exists(imap_socket, 2).context("noop loop must detect a new email")?; + fetch( + imap_socket, + Selection::FirstId, + FetchKind::Rfc822, + FetchMod::None, + ) + .context("read one message")?; + close(imap_socket).context("close inbox")?; + + // Test return status MESSAGES UNSEEN + let ret = list( + imap_socket, + MbxSelect::All, + ListReturn::StatusMessagesUnseen, + )?; + assert!(ret.contains("* STATUS INBOX (MESSAGES 2 UNSEEN 1)")); + + // Test that without RETURN, no status is sent + let ret = list(imap_socket, MbxSelect::All, ListReturn::None)?; + assert!(!ret.contains("* STATUS")); + + Ok(()) + }) + .expect("test fully run"); +} diff --git a/aerogramme/tests/common/constants.rs b/aerogramme/tests/common/constants.rs new file mode 100644 index 0000000..c11a04d --- /dev/null +++ b/aerogramme/tests/common/constants.rs @@ -0,0 +1,54 @@ +use std::time; + +pub static SMALL_DELAY: time::Duration = time::Duration::from_millis(200); + +pub static EMAIL1: &[u8] = b"Date: Sat, 8 Jul 2023 07:14:29 +0200\r +From: Bob Robert \r +To: Alice Malice \r +CC: =?ISO-8859-1?Q?Andr=E9?= Pirard \r +Subject: =?ISO-8859-1?B?SWYgeW91IGNhbiByZWFkIHRoaXMgeW8=?=\r + =?ISO-8859-2?B?dSB1bmRlcnN0YW5kIHRoZSBleGFtcGxlLg==?=\r +X-Unknown: something something\r +Bad entry\r + on multiple lines\r +Message-ID: \r +MIME-Version: 1.0\r +Content-Type: multipart/alternative;\r + boundary=\"b1_e376dc71bafc953c0b0fdeb9983a9956\"\r +Content-Transfer-Encoding: 7bit\r +\r +This is a multi-part message in MIME format.\r +\r +--b1_e376dc71bafc953c0b0fdeb9983a9956\r +Content-Type: text/plain; charset=utf-8\r +Content-Transfer-Encoding: quoted-printable\r +\r +GZ\r +OoOoO\r +oOoOoOoOo\r +oOoOoOoOoOoOoOoOo\r +oOoOoOoOoOoOoOoOoOoOoOo\r +oOoOoOoOoOoOoOoOoOoOoOoOoOoOo\r +OoOoOoOoOoOoOoOoOoOoOoOoOoOoOoOoO\r +\r +--b1_e376dc71bafc953c0b0fdeb9983a9956\r +Content-Type: text/html; charset=us-ascii\r +\r +
<div>GZ<br />\r
+OoOoO<br />\r
+oOoOoOoOo<br />\r
+oOoOoOoOoOoOoOoOo<br />\r
+oOoOoOoOoOoOoOoOoOoOoOo<br />\r
+oOoOoOoOoOoOoOoOoOoOoOoOoOoOo<br />\r
+OoOoOoOoOoOoOoOoOoOoOoOoOoOoOoOoO<br />\r
+</div>
\r +\r +--b1_e376dc71bafc953c0b0fdeb9983a9956--\r +"; + +pub static EMAIL2: &[u8] = b"From: alice@example.com\r +To: alice@example.tld\r +Subject: Test\r +\r +Hello world!\r +"; diff --git a/aerogramme/tests/common/fragments.rs b/aerogramme/tests/common/fragments.rs new file mode 100644 index 0000000..606af2b --- /dev/null +++ b/aerogramme/tests/common/fragments.rs @@ -0,0 +1,570 @@ +use anyhow::{bail, Result}; +use std::io::Write; +use std::net::TcpStream; +use std::thread; + +use crate::common::constants::*; +use crate::common::*; + +/// These fragments are not a generic IMAP client +/// but specialized to our specific tests. They can't take +/// arbitrary values, only enum for which the code is known +/// to be correct. The idea is that the generated message is more +/// or less hardcoded by the developer, so its clear what's expected, +/// and not generated by a library. Also don't use vector of enum, +/// as it again introduce some kind of genericity we try so hard to avoid: +/// instead add a dedicated enum, for example "All" or anything relaevent that would +/// describe your list and then hardcode it in your fragment. +/// DON'T. TRY. TO. BE. GENERIC. HERE. + +pub fn connect(imap: &mut TcpStream) -> Result<()> { + let mut buffer: [u8; 1500] = [0; 1500]; + + let read = read_lines(imap, &mut buffer, None)?; + assert_eq!(&read[..4], &b"* OK"[..]); + + Ok(()) +} + +pub enum Account { + Alice, +} + +pub enum Extension { + None, + Unselect, + Move, + Condstore, + LiteralPlus, + Idle, + UidPlus, + ListStatus, +} + +pub enum Enable { + Utf8Accept, + CondStore, + All, +} + +pub enum Mailbox { + Inbox, + Archive, + Drafts, +} + +pub enum Flag { + Deleted, + Important, +} + +pub enum Email { + Basic, + Multipart, +} + +pub enum Selection { + FirstId, + SecondId, + All, +} + +pub enum SelectMod { + None, + Condstore, +} + +pub enum StoreAction { + AddFlags, + DelFlags, + SetFlags, + AddFlagsSilent, + DelFlagsSilent, + SetFlagsSilent, +} + +pub enum StoreMod { + None, + UnchangedSince(u64), +} + +pub enum FetchKind { + Rfc822, + Rfc822Size, +} + +pub enum FetchMod { + None, + ChangedSince(u64), +} + +pub enum SearchKind<'a> { + Text(&'a str), + ModSeq(u64), +} + +pub enum StatusKind { + UidNext, + HighestModSeq, +} + +pub enum MbxSelect { + All, +} + +pub enum ListReturn { + None, + StatusMessagesUnseen, +} + +pub fn capability(imap: &mut TcpStream, ext: Extension) -> Result<()> { + imap.write(&b"5 capability\r\n"[..])?; + + let maybe_ext = match ext { + Extension::None => None, + Extension::Unselect => Some("UNSELECT"), + Extension::Move => Some("MOVE"), + Extension::Condstore => Some("CONDSTORE"), + Extension::LiteralPlus => Some("LITERAL+"), + Extension::Idle => Some("IDLE"), + Extension::UidPlus => Some("UIDPLUS"), + Extension::ListStatus => Some("LIST-STATUS"), + }; + + let mut buffer: [u8; 6000] = [0; 6000]; + let read = read_lines(imap, &mut buffer, Some(&b"5 OK"[..]))?; + let srv_msg = std::str::from_utf8(read)?; + assert!(srv_msg.contains("IMAP4REV1")); + if let Some(ext) = maybe_ext { + assert!(srv_msg.contains(ext)); + } + + Ok(()) +} + +pub fn login(imap: &mut TcpStream, account: Account) -> Result<()> { + let mut buffer: [u8; 1500] = [0; 1500]; + + assert!(matches!(account, Account::Alice)); + imap.write(&b"10 login alice hunter2\r\n"[..])?; + + let read = read_lines(imap, &mut buffer, None)?; + assert_eq!(&read[..5], &b"10 OK"[..]); + + Ok(()) +} + +pub fn login_with_literal(imap: &mut TcpStream, account: Account) -> Result<()> { + let mut buffer: [u8; 1500] = [0; 1500]; + + 
assert!(matches!(account, Account::Alice)); + imap.write(&b"10 login {5+}\r\nalice {7+}\r\nhunter2\r\n"[..])?; + let _read = read_lines(imap, &mut buffer, Some(&b"10 OK"[..]))?; + Ok(()) +} + +pub fn create_mailbox(imap: &mut TcpStream, mbx: Mailbox) -> Result<()> { + let mut buffer: [u8; 1500] = [0; 1500]; + + let mbx_str = match mbx { + Mailbox::Inbox => "INBOX", + Mailbox::Archive => "ArchiveCustom", + Mailbox::Drafts => "DraftsCustom", + }; + + let cmd = format!("15 create {}\r\n", mbx_str); + imap.write(cmd.as_bytes())?; + let read = read_lines(imap, &mut buffer, None)?; + assert_eq!(&read[..12], &b"15 OK CREATE"[..]); + + Ok(()) +} + +pub fn list(imap: &mut TcpStream, select: MbxSelect, mod_return: ListReturn) -> Result { + let mut buffer: [u8; 6000] = [0; 6000]; + + let select_str = match select { + MbxSelect::All => "%", + }; + + let mod_return_str = match mod_return { + ListReturn::None => "", + ListReturn::StatusMessagesUnseen => " RETURN (STATUS (MESSAGES UNSEEN))", + }; + + imap.write(format!("19 LIST \"\" \"{}\"{}\r\n", select_str, mod_return_str).as_bytes())?; + + let read = read_lines(imap, &mut buffer, Some(&b"19 OK"[..]))?; + let srv_msg = std::str::from_utf8(read)?; + Ok(srv_msg.to_string()) +} + +pub fn select(imap: &mut TcpStream, mbx: Mailbox, modifier: SelectMod) -> Result { + let mut buffer: [u8; 6000] = [0; 6000]; + + let mbx_str = match mbx { + Mailbox::Inbox => "INBOX", + Mailbox::Archive => "ArchiveCustom", + Mailbox::Drafts => "DraftsCustom", + }; + + let mod_str = match modifier { + SelectMod::Condstore => " (CONDSTORE)", + SelectMod::None => "", + }; + + imap.write(format!("20 select {}{}\r\n", mbx_str, mod_str).as_bytes())?; + + let read = read_lines(imap, &mut buffer, Some(&b"20 OK"[..]))?; + let srv_msg = std::str::from_utf8(read)?; + + Ok(srv_msg.to_string()) +} + +pub fn unselect(imap: &mut TcpStream) -> Result<()> { + imap.write(&b"70 unselect\r\n"[..])?; + let mut buffer: [u8; 1500] = [0; 1500]; + let _read = read_lines(imap, &mut buffer, Some(&b"70 OK"[..]))?; + + Ok(()) +} + +pub fn check(imap: &mut TcpStream) -> Result<()> { + let mut buffer: [u8; 1500] = [0; 1500]; + + imap.write(&b"21 check\r\n"[..])?; + let _read = read_lines(imap, &mut buffer, Some(&b"21 OK"[..]))?; + + Ok(()) +} + +pub fn status(imap: &mut TcpStream, mbx: Mailbox, sk: StatusKind) -> Result { + let mbx_str = match mbx { + Mailbox::Inbox => "INBOX", + Mailbox::Archive => "ArchiveCustom", + Mailbox::Drafts => "DraftsCustom", + }; + let sk_str = match sk { + StatusKind::UidNext => "(UIDNEXT)", + StatusKind::HighestModSeq => "(HIGHESTMODSEQ)", + }; + imap.write(format!("25 STATUS {} {}\r\n", mbx_str, sk_str).as_bytes())?; + let mut buffer: [u8; 6000] = [0; 6000]; + let read = read_lines(imap, &mut buffer, Some(&b"25 OK"[..]))?; + let srv_msg = std::str::from_utf8(read)?; + + Ok(srv_msg.to_string()) +} + +pub fn lmtp_handshake(lmtp: &mut TcpStream) -> Result<()> { + let mut buffer: [u8; 1500] = [0; 1500]; + + let _read = read_lines(lmtp, &mut buffer, None)?; + assert_eq!(&buffer[..4], &b"220 "[..]); + + lmtp.write(&b"LHLO example.tld\r\n"[..])?; + let _read = read_lines(lmtp, &mut buffer, Some(&b"250 "[..]))?; + + Ok(()) +} + +pub fn lmtp_deliver_email(lmtp: &mut TcpStream, email_type: Email) -> Result<()> { + let mut buffer: [u8; 1500] = [0; 1500]; + + let email = match email_type { + Email::Basic => EMAIL2, + Email::Multipart => EMAIL1, + }; + lmtp.write(&b"MAIL FROM:\r\n"[..])?; + let _read = read_lines(lmtp, &mut buffer, Some(&b"250 2.0.0"[..]))?; + + lmtp.write(&b"RCPT 
TO:\r\n"[..])?; + let _read = read_lines(lmtp, &mut buffer, Some(&b"250 2.1.5"[..]))?; + + lmtp.write(&b"DATA\r\n"[..])?; + let _read = read_lines(lmtp, &mut buffer, Some(&b"354 "[..]))?; + + lmtp.write(email)?; + lmtp.write(&b"\r\n.\r\n"[..])?; + let _read = read_lines(lmtp, &mut buffer, Some(&b"250 2.0.0"[..]))?; + + Ok(()) +} + +pub fn noop_exists(imap: &mut TcpStream, must_exists: u32) -> Result<()> { + let mut buffer: [u8; 6000] = [0; 6000]; + + let mut max_retry = 20; + loop { + max_retry -= 1; + imap.write(&b"30 NOOP\r\n"[..])?; + let read = read_lines(imap, &mut buffer, Some(&b"30 OK"[..]))?; + let srv_msg = std::str::from_utf8(read)?; + + for line in srv_msg.lines() { + if line.contains("EXISTS") { + let got = read_first_u32(line)?; + if got == must_exists { + // Done + return Ok(()); + } + } + } + + if max_retry <= 0 { + // Failed + bail!("no more retry"); + } + + thread::sleep(SMALL_DELAY); + } +} + +pub fn fetch( + imap: &mut TcpStream, + selection: Selection, + kind: FetchKind, + modifier: FetchMod, +) -> Result { + let mut buffer: [u8; 65535] = [0; 65535]; + + let sel_str = match selection { + Selection::FirstId => "1", + Selection::SecondId => "2", + Selection::All => "1:*", + }; + + let kind_str = match kind { + FetchKind::Rfc822 => "RFC822", + FetchKind::Rfc822Size => "RFC822.SIZE", + }; + + let mod_str = match modifier { + FetchMod::None => "".into(), + FetchMod::ChangedSince(val) => format!(" (CHANGEDSINCE {})", val), + }; + + imap.write(format!("40 fetch {} {}{}\r\n", sel_str, kind_str, mod_str).as_bytes())?; + + let read = read_lines(imap, &mut buffer, Some(&b"40 OK FETCH"[..]))?; + let srv_msg = std::str::from_utf8(read)?; + + Ok(srv_msg.to_string()) +} + +pub fn copy(imap: &mut TcpStream, selection: Selection, to: Mailbox) -> Result { + let mut buffer: [u8; 65535] = [0; 65535]; + assert!(matches!(selection, Selection::FirstId)); + assert!(matches!(to, Mailbox::Archive)); + + imap.write(&b"45 copy 1 ArchiveCustom\r\n"[..])?; + let read = read_lines(imap, &mut buffer, None)?; + assert_eq!(&read[..5], &b"45 OK"[..]); + let srv_msg = std::str::from_utf8(read)?; + + Ok(srv_msg.to_string()) +} + +pub fn append(imap: &mut TcpStream, content: Email) -> Result { + let mut buffer: [u8; 6000] = [0; 6000]; + + let ref_mail = match content { + Email::Multipart => EMAIL1, + Email::Basic => EMAIL2, + }; + + let append_cmd = format!("47 append inbox (\\Seen) {{{}}}\r\n", ref_mail.len()); + println!("append cmd: {}", append_cmd); + imap.write(append_cmd.as_bytes())?; + + // wait for continuation + let read = read_lines(imap, &mut buffer, None)?; + assert_eq!(read[0], b'+'); + + // write our stuff + imap.write(ref_mail)?; + imap.write(&b"\r\n"[..])?; + let read = read_lines(imap, &mut buffer, Some(&b"47 OK"[..]))?; + let srv_msg = std::str::from_utf8(read)?; + + Ok(srv_msg.to_string()) +} + +pub fn search(imap: &mut TcpStream, sk: SearchKind) -> Result { + let sk_str = match sk { + SearchKind::Text(x) => format!("TEXT \"{}\"", x), + SearchKind::ModSeq(x) => format!("MODSEQ {}", x), + }; + imap.write(format!("55 SEARCH {}\r\n", sk_str).as_bytes())?; + let mut buffer: [u8; 1500] = [0; 1500]; + let read = read_lines(imap, &mut buffer, Some(&b"55 OK"[..]))?; + let srv_msg = std::str::from_utf8(read)?; + Ok(srv_msg.to_string()) +} + +pub fn store( + imap: &mut TcpStream, + sel: Selection, + flag: Flag, + action: StoreAction, + modifier: StoreMod, +) -> Result { + let mut buffer: [u8; 6000] = [0; 6000]; + + let seq = match sel { + Selection::FirstId => "1", + Selection::SecondId => "2", + 
Selection::All => "1:*", + }; + + let modif = match modifier { + StoreMod::None => "".into(), + StoreMod::UnchangedSince(val) => format!(" (UNCHANGEDSINCE {})", val), + }; + + let flags_str = match flag { + Flag::Deleted => "(\\Deleted)", + Flag::Important => "(\\Important)", + }; + + let action_str = match action { + StoreAction::AddFlags => "+FLAGS", + StoreAction::DelFlags => "-FLAGS", + StoreAction::SetFlags => "FLAGS", + StoreAction::AddFlagsSilent => "+FLAGS.SILENT", + StoreAction::DelFlagsSilent => "-FLAGS.SILENT", + StoreAction::SetFlagsSilent => "FLAGS.SILENT", + }; + + imap.write(format!("57 STORE {}{} {} {}\r\n", seq, modif, action_str, flags_str).as_bytes())?; + let read = read_lines(imap, &mut buffer, Some(&b"57 OK"[..]))?; + let srv_msg = std::str::from_utf8(read)?; + Ok(srv_msg.to_string()) +} + +pub fn expunge(imap: &mut TcpStream) -> Result<()> { + imap.write(&b"60 expunge\r\n"[..])?; + let mut buffer: [u8; 1500] = [0; 1500]; + let _read = read_lines(imap, &mut buffer, Some(&b"60 OK EXPUNGE"[..]))?; + + Ok(()) +} + +pub fn uid_expunge(imap: &mut TcpStream, sel: Selection) -> Result { + use Selection::*; + let mut buffer: [u8; 6000] = [0; 6000]; + let selstr = match sel { + FirstId => "1", + SecondId => "2", + All => "1:*", + }; + imap.write(format!("61 UID EXPUNGE {}\r\n", selstr).as_bytes())?; + let read = read_lines(imap, &mut buffer, Some(&b"61 OK"[..]))?; + let srv_msg = std::str::from_utf8(read)?; + Ok(srv_msg.to_string()) +} + +pub fn rename_mailbox(imap: &mut TcpStream, from: Mailbox, to: Mailbox) -> Result<()> { + assert!(matches!(from, Mailbox::Archive)); + assert!(matches!(to, Mailbox::Drafts)); + + imap.write(&b"70 rename ArchiveCustom DraftsCustom\r\n"[..])?; + let mut buffer: [u8; 1500] = [0; 1500]; + let read = read_lines(imap, &mut buffer, None)?; + assert_eq!(&read[..5], &b"70 OK"[..]); + + imap.write(&b"71 list \"\" *\r\n"[..])?; + let read = read_lines(imap, &mut buffer, Some(&b"71 OK LIST"[..]))?; + let srv_msg = std::str::from_utf8(read)?; + assert!(!srv_msg.contains(" ArchiveCustom\r\n")); + assert!(srv_msg.contains(" INBOX\r\n")); + assert!(srv_msg.contains(" DraftsCustom\r\n")); + + Ok(()) +} + +pub fn delete_mailbox(imap: &mut TcpStream, mbx: Mailbox) -> Result<()> { + let mbx_str = match mbx { + Mailbox::Inbox => "INBOX", + Mailbox::Archive => "ArchiveCustom", + Mailbox::Drafts => "DraftsCustom", + }; + let cmd = format!("80 delete {}\r\n", mbx_str); + + imap.write(cmd.as_bytes())?; + let mut buffer: [u8; 1500] = [0; 1500]; + let read = read_lines(imap, &mut buffer, None)?; + assert_eq!(&read[..5], &b"80 OK"[..]); + + imap.write(&b"81 list \"\" *\r\n"[..])?; + let read = read_lines(imap, &mut buffer, Some(&b"81 OK"[..]))?; + let srv_msg = std::str::from_utf8(read)?; + assert!(srv_msg.contains(" INBOX\r\n")); + assert!(!srv_msg.contains(format!(" {}\r\n", mbx_str).as_str())); + + Ok(()) +} + +pub fn close(imap: &mut TcpStream) -> Result<()> { + imap.write(&b"60 close\r\n"[..])?; + let mut buffer: [u8; 1500] = [0; 1500]; + let _read = read_lines(imap, &mut buffer, Some(&b"60 OK"[..]))?; + + Ok(()) +} + +pub fn r#move(imap: &mut TcpStream, selection: Selection, to: Mailbox) -> Result { + let mut buffer: [u8; 1500] = [0; 1500]; + assert!(matches!(to, Mailbox::Archive)); + assert!(matches!(selection, Selection::FirstId)); + + imap.write(&b"35 move 1 ArchiveCustom\r\n"[..])?; + let read = read_lines(imap, &mut buffer, Some(&b"35 OK"[..]))?; + let srv_msg = std::str::from_utf8(read)?; + assert!(srv_msg.contains("* 1 EXPUNGE")); + + Ok(srv_msg.to_string()) 
+} + +pub fn enable(imap: &mut TcpStream, ask: Enable, done: Option) -> Result<()> { + let mut buffer: [u8; 6000] = [0; 6000]; + assert!(matches!(ask, Enable::Utf8Accept)); + + imap.write(&b"36 enable UTF8=ACCEPT\r\n"[..])?; + let read = read_lines(imap, &mut buffer, Some(&b"36 OK"[..]))?; + let srv_msg = std::str::from_utf8(read)?; + match done { + None => assert_eq!(srv_msg.lines().count(), 1), + Some(Enable::Utf8Accept) => { + assert_eq!(srv_msg.lines().count(), 2); + assert!(srv_msg.contains("* ENABLED UTF8=ACCEPT")); + } + _ => unimplemented!(), + } + + Ok(()) +} + +pub fn start_idle(imap: &mut TcpStream) -> Result<()> { + let mut buffer: [u8; 1500] = [0; 1500]; + imap.write(&b"98 IDLE\r\n"[..])?; + let read = read_lines(imap, &mut buffer, None)?; + assert_eq!(read[0], b'+'); + Ok(()) +} + +pub fn stop_idle(imap: &mut TcpStream) -> Result { + let mut buffer: [u8; 16536] = [0; 16536]; + imap.write(&b"DONE\r\n"[..])?; + let read = read_lines(imap, &mut buffer, Some(&b"98 OK"[..]))?; + let srv_msg = std::str::from_utf8(read)?; + Ok(srv_msg.to_string()) +} + +pub fn logout(imap: &mut TcpStream) -> Result<()> { + imap.write(&b"99 logout\r\n"[..])?; + let mut buffer: [u8; 1500] = [0; 1500]; + let read = read_lines(imap, &mut buffer, None)?; + assert_eq!(&read[..5], &b"* BYE"[..]); + Ok(()) +} diff --git a/aerogramme/tests/common/mod.rs b/aerogramme/tests/common/mod.rs new file mode 100644 index 0000000..cbe0271 --- /dev/null +++ b/aerogramme/tests/common/mod.rs @@ -0,0 +1,99 @@ +#![allow(dead_code)] +pub mod constants; +pub mod fragments; + +use anyhow::{bail, Context, Result}; +use std::io::Read; +use std::net::{Shutdown, TcpStream}; +use std::process::Command; +use std::thread; + +use constants::SMALL_DELAY; + +pub fn aerogramme_provider_daemon_dev( + mut fx: impl FnMut(&mut TcpStream, &mut TcpStream) -> Result<()>, +) -> Result<()> { + // Check port is not used (= free) before starting the test + let mut max_retry = 20; + loop { + max_retry -= 1; + match (TcpStream::connect("[::1]:1143"), max_retry) { + (Ok(_), 0) => bail!("something is listening on [::1]:1143 and prevent the test from starting"), + (Ok(_), _) => println!("something is listening on [::1]:1143, maybe a previous daemon quitting, retrying soon..."), + (Err(_), _) => { + println!("test ready to start, [::1]:1143 is free!"); + break + } + } + thread::sleep(SMALL_DELAY); + } + + // Start daemon + let mut daemon = Command::new(env!("CARGO_BIN_EXE_aerogramme")) + .arg("--dev") + .arg("provider") + .arg("daemon") + .spawn()?; + + // Check that our daemon is correctly listening on the free port + let mut max_retry = 20; + let mut imap_socket = loop { + max_retry -= 1; + match (TcpStream::connect("[::1]:1143"), max_retry) { + (Err(e), 0) => bail!("no more retry, last error is: {}", e), + (Err(e), _) => { + println!("unable to connect: {} ; will retry soon...", e); + } + (Ok(v), _) => break v, + } + thread::sleep(SMALL_DELAY); + }; + + // Assuming now it's safe to open a LMTP socket + let mut lmtp_socket = + TcpStream::connect("[::1]:1025").context("lmtp socket must be connected")?; + + println!("-- ready to test imap features --"); + let result = fx(&mut imap_socket, &mut lmtp_socket); + println!("-- test teardown --"); + + imap_socket + .shutdown(Shutdown::Both) + .context("closing imap socket at the end of the test")?; + lmtp_socket + .shutdown(Shutdown::Both) + .context("closing lmtp socket at the end of the test")?; + daemon.kill().context("daemon should be killed")?; + + result.context("all tests passed") +} + +pub fn 
read_lines<'a, F: Read>( + reader: &mut F, + buffer: &'a mut [u8], + stop_marker: Option<&[u8]>, +) -> Result<&'a [u8]> { + let mut nbytes = 0; + loop { + nbytes += reader.read(&mut buffer[nbytes..])?; + //println!("partial read: {}", std::str::from_utf8(&buffer[..nbytes])?); + let pre_condition = match stop_marker { + None => true, + Some(mark) => buffer[..nbytes].windows(mark.len()).any(|w| w == mark), + }; + if pre_condition && nbytes >= 2 && &buffer[nbytes - 2..nbytes] == &b"\r\n"[..] { + break; + } + } + println!("read: {}", std::str::from_utf8(&buffer[..nbytes])?); + Ok(&buffer[..nbytes]) +} + +pub fn read_first_u32(inp: &str) -> Result { + Ok(inp + .chars() + .skip_while(|c| !c.is_digit(10)) + .take_while(|c| c.is_digit(10)) + .collect::() + .parse::()?) +} diff --git a/doc/.gitignore b/doc/.gitignore deleted file mode 100644 index 7585238..0000000 --- a/doc/.gitignore +++ /dev/null @@ -1 +0,0 @@ -book diff --git a/doc/book.toml b/doc/book.toml deleted file mode 100644 index 338ad63..0000000 --- a/doc/book.toml +++ /dev/null @@ -1,9 +0,0 @@ -[book] -authors = ["Quentin Dufour"] -language = "en" -multilingual = false -src = "src" -title = "Aerogramme - Encrypted e-mail storage over Garage" - -[output.html] -mathjax-support = true diff --git a/doc/src/SUMMARY.md b/doc/src/SUMMARY.md deleted file mode 100644 index 92d7932..0000000 --- a/doc/src/SUMMARY.md +++ /dev/null @@ -1,34 +0,0 @@ -# Summary - -[Introduction](./index.md) - -# Quick start - -- [Installation](./installation.md) -- [Setup](./setup.md) -- [Validation](./validate.md) - -# Cookbook - - - [Not ready for production]() - -# Reference - -- [Configuration file](./config.md) -- [RFC coverage](./rfc.md) - -# Design - -- [Overview](./overview.md) -- [Mailboxes](./mailbox.md) -- [Mutation Log](./log.md) -- [IMAP UID proof](./imap_uid.md) - -# Internals - -- [Persisted data structures](./data_format.md) -- [Cryptography & key management](./crypt-key.md) - -# Development - -- [Notes](./notes.md) diff --git a/doc/src/aero-compo.png b/doc/src/aero-compo.png deleted file mode 100644 index fb81b46..0000000 Binary files a/doc/src/aero-compo.png and /dev/null differ diff --git a/doc/src/aero-paranoid.png b/doc/src/aero-paranoid.png deleted file mode 100644 index f9e2df1..0000000 Binary files a/doc/src/aero-paranoid.png and /dev/null differ diff --git a/doc/src/aero-schema.png b/doc/src/aero-schema.png deleted file mode 100644 index 3206245..0000000 Binary files a/doc/src/aero-schema.png and /dev/null differ diff --git a/doc/src/aero-states.png b/doc/src/aero-states.png deleted file mode 100644 index c3b015a..0000000 Binary files a/doc/src/aero-states.png and /dev/null differ diff --git a/doc/src/aero-states2.png b/doc/src/aero-states2.png deleted file mode 100644 index ed2077d..0000000 Binary files a/doc/src/aero-states2.png and /dev/null differ diff --git a/doc/src/aerogramme.jpg b/doc/src/aerogramme.jpg deleted file mode 100644 index c1fe11b..0000000 Binary files a/doc/src/aerogramme.jpg and /dev/null differ diff --git a/doc/src/config.md b/doc/src/config.md deleted file mode 100644 index 732ecb7..0000000 --- a/doc/src/config.md +++ /dev/null @@ -1,126 +0,0 @@ -# Configuration file - -A configuration file that illustrate all the possible options, -in practise, many fields are omitted: - -```toml -s3_endpoint = "s3.garage.tld" -k2v_endpoint = "k2v.garage.tld" -aws_region = "garage" - -[lmtp] -bind_addr = "[::1]:2525" -hostname = "aerogramme.tld" - -[imap] -bind_addr = "[::1]:993" - -[login_static] -default_bucket = "aerogramme" - 
-[login_static.user.alan] -email_addresses = [ - "alan@smith.me" - "aln@example.com" -] -password = "$argon2id$v=19$m=4096,t=3,p=1$..." - -aws_access_key_id = "GK..." -aws_secret_access_key = "c0ffee" -bucket = "aerogramme-alan" - -user_secret = "s3cr3t" -alternate_user_secrets = [ "s3cr3t2" "s3cr3t3" ] - -master_key = "..." -secret_key = "..." - -[login_ldap] -ldap_server = "ldap.example.com" - -pre_bind_on_login = true -bind_dn = "cn=admin,dc=example,dc=com" -bind_password = "s3cr3t" - -search_base = "ou=users,dc=example,dc=com" -username_attr = "cn" -mail_attr = "mail" - -aws_access_key_id_attr = "garage_s3_access_key" -aws_secret_access_key_attr = "garage_s3_secret_key" -user_secret_attr = "secret" -alternate_user_secrets_attr = "secret_alt" - -# bucket = "aerogramme" -bucket_attr = "bucket" - -``` - -## Global configuration options - -### `s3_endpoint` - -### `k2v_endpoint` - -### `aws_region` - -## LMTP configuration options - -### `lmtp.bind_addr` - -### `lmtp.hostname` - -## IMAP configuration options - -### `imap.bind_addr` - -## Static login configuration options - -### `login_static.default_bucket` - -### `login_static.user..email_addresses` - -### `login_static.user..password` - -### `login_static.user..aws_access_key_id` - -### `login_static.user..aws_secret_access_key` - -### `login_static.user..bucket` - -### `login_static.user..user_secret` - -### `login_static.user..master_key` - -### `login_static.user..secret_key` - -## LDAP login configuration options - -### `login_ldap.ldap_server` - -### `login_ldap.pre_bind_on` - -### `login_ldap.bind_dn` - -### `login_ldap.bind_password` - -### `login_ldap.search_base` - -### `login_ldap.username_attr` - -### `login_ldap.mail_attr` - -### `login_ldap.aws_access_key_id_attr` - -### `login_ldap.aws_secret_access_key_attr` - -### `login_ldap.user_secret_attr` - -### `login_ldap.alternate_user_secrets_attr` - -### `login_ldap.bucket` - -### `login_ldap.bucket_attr` - - - diff --git a/doc/src/crypt-key.md b/doc/src/crypt-key.md deleted file mode 100644 index 9fb199b..0000000 --- a/doc/src/crypt-key.md +++ /dev/null @@ -1,82 +0,0 @@ -# Cryptography & key management - -Keys that are used: - -- master secret key (for indexes) -- curve25519 public/private key pair (for incoming mail) - -Keys that are stored in K2V under PK `keys`: - -- `public`: the public curve25519 key (plain text) -- `salt`: the 32-byte salt `S` used to calculate digests that index keys below -- if a password is used, `password:`: - - a 32-byte salt `Skey` - - followed a secret box - - that is encrypted with a strong argon2 digest of the password (using the salt `Skey`) and a user secret (see below) - - that contains the master secret key and the curve25519 private key - -User secret: an additionnal secret that is added to the password when deriving the encryption key for the secret box. -This additionnal secret should not be stored in K2V/S3, so that just knowing a user's password isn't enough to be able -to decrypt their mailbox (supposing the attacker has a dump of their K2V/S3 bucket). -This user secret should typically be stored in the LDAP database or just in the configuration file when using -the static login provider. 
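As a rough illustration of the derivation described above, the user secret is simply mixed into the password before the argon2 hash is computed with the per-password salt `Skey`. The sketch below assumes the RustCrypto `argon2` crate; the function name `derive_box_key` is hypothetical and not part of this patch.

```rust
use argon2::Argon2;

/// Hypothetical sketch (not the actual Aerogramme code): derive the 32-byte
/// key that seals the secret box, i.e. key = argon2_Skey(user_secret + password).
fn derive_box_key(
    user_secret: &str,
    password: &str,
    salt_key: &[u8; 32],
) -> Result<[u8; 32], argon2::Error> {
    let mut key = [0u8; 32];
    // Concatenate the user secret and the password, as described above,
    // so that the password alone is not enough to open the box.
    let material = format!("{}{}", user_secret, password);
    Argon2::default().hash_password_into(material.as_bytes(), salt_key, &mut key)?;
    Ok(key)
}
```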
- -Operations: - -- **Initialize**(`user_secret`, `password`): - - if `"salt"` or `"public"` already exist, BAIL - - generate salt `S` (32 random bytes) - - generate `public`, `private` (curve25519 keypair) - - generate `master` (secretbox secret key) - - calculate `digest = argon2_S(password)` - - generate salt `Skey` (32 random bytes) - - calculate `key = argon2_Skey(user_secret + password)` - - serialize `box_contents = (private, master)` - - seal box `blob = seal_key(box_contents)` - - write `S` at `"salt"` - - write `concat(Skey, blob)` at `"password:{hex(digest[..16])}"` - - write `public` at `"public"` - -- **InitializeWithoutPassword**(`private`, `master`): - - if `"salt"` or `"public"` already exist, BAIL - - generate salt `S` (32 random bytes) - - write `S` at `"salt"` - - calculate `public` the public key associated with `private` - - write `public` at `"public"` - -- **Open**(`user_secret`, `password`): - - load `S = read("salt")` - - calculate `digest = argon2_S(password)` - - load `blob = read("password:{hex(digest[..16])}") - - set `Skey = blob[..32]` - - calculate `key = argon2_Skey(user_secret + password)` - - open secret box `box_contents = open_key(blob[32..])` - - retrieve `master` and `private` from `box_contents` - - retrieve `public = read("public")` - -- **OpenWithoutPassword**(`private`, `master`): - - load `public = read("public")` - - check that `public` is the correct public key associated with `private` - -- **AddPassword**(`user_secret`, `existing_password`, `new_password`): - - load `S = read("salt")` - - calculate `digest = argon2_S(existing_password)` - - load `blob = read("existing_password:{hex(digest[..16])}") - - set `Skey = blob[..32]` - - calculate `key = argon2_Skey(user_secret + existing_password)` - - open secret box `box_contents = open_key(blob[32..])` - - retrieve `master` and `private` from `box_contents` - - - calculate `digest_new = argon2_S(new_password)` - - generate salt `Skeynew` (32 random bytes) - - calculate `key_new = argon2_Skeynew(user_secret + new_password)` - - serialize `box_contents_new = (private, master)` - - seal box `blob_new = seal_key_new(box_contents_new)` - - write `concat(Skeynew, blob_new)` at `"new_password:{hex(digest_new[..16])}"` - -- **RemovePassword**(`password`): - - load `S = read("salt")` - - calculate `digest = argon2_S(existing_password)` - - check that `"password:{hex(digest[..16])}"` exists - - check that other passwords exist ?? (or not) - - delete `"password:{hex(digest[..16])}"` diff --git a/doc/src/data_format.md b/doc/src/data_format.md deleted file mode 100644 index 32aa2c3..0000000 --- a/doc/src/data_format.md +++ /dev/null @@ -1,50 +0,0 @@ -# Data format - -## Bay(ou) - -Checkpoints are stored in S3 at `/checkpoint/`. Example: - -``` -348 TestMailbox/checkpoint/00000180d77400dc126b16aac546b769 -369 TestMailbox/checkpoint/00000180d776e509b68fdc5c376d0abc -357 TestMailbox/checkpoint/00000180d77a7fe68f4f76e3b45aa751 -``` - -Operations are stored in K2V at PK ``, SK ``. 
Example: - -``` -TestMailbox 00000180d77400dc126b16aac546b769 RcIsESv7WrjMuHwyI/dvCnkIfy6op5Tiylf0WSnn94aMS2uagl7YeMBwdv09TiSXBpu5nJ5e/9QFSfuEI/NqKrdQkX54MOsnaIGhRb0oqUG3KNaar3BiVSvYvXuzYhk4ii+TUS2Eyd6fCCaNVNM5 -TestMailbox 00000180d775f27f5542a13fc21c665e RrTSOup/zO1Ei+QrjBcDLt4vvFSY+WJPBodwY64wy2ftW+Oh3VSArvlO4SAEPmdsx1gt0HPBZYR/OkVWsZpmix1ZLFUmvdib+rjNkorHQW1p+oLVK8tolGrqk4SRwl88cqu466T4vBEpDu7tRbH0 -TestMailbox 00000180d775f292b3c8da00718389b4 VAwd8SRycIwsipZW5AcSG+EIYZVWn/Uj/TADbWhb4x5LVMceiRBHWVquY08RgT/lJKdhIcUqBA15bVG3klIg8tLsWJVG784NbsZwdGRczWmngcA= -TestMailbox 00000180d775f29d24842cf375d679e0 /FbXtEwm/bijtvOdqM1XFvKUalQFAOPHp+vF9jZThZn/viY5a6W1PyHeI8kTusF6EsVPAwPHpQyjIv/ghskC0f+zUEsSUhDwQANdwLNqDLAvTA== -TestMailbox 00000180d7768ab1dc01ff504e887c62 W/fF0WitpxJ05yHeOv96BlpGymT1kVOjkIW00t9e6UE7mxkvNflu9cZSCd8PDJd2ymC0sC9bLVFAXKmNZsmCFEEHMQSyrX61qTYo4KFCZMp5zm6fXubaYuurrzjXzfUP/R7kBvICFZlF0daf0SwX -TestMailbox 00000180d7768aba629c7ad6adf25228 IPzYGNsSepCX2AEnee/1Eas9a3c5esPSmrNkvaj4XcFb6Ft2KC8N6ubUR3wB+K0oYCTQym6nhHG5dlAxf6NRu7Rk8YtBTBmSqtGqd6kMZ3bU5b8= -TestMailbox 00000180d7768ac1870cda61784114d4 aaLiaWxfx1mxh6aoKE3xUUfZWhivZ/K7ixabflFDW7FO/qbpvCaa+Y6w4lQemTy6m+leAhXGN+Dbyv2qP20yJ9O4oJF5d3Lz5Iv5uF18OxhVZzw= -TestMailbox 00000180d776e4fb294ccdab2612b406 EtUPrLgEeOyab2QRnSie4I3Me9dDh10UdwWnUKdGa/8ezMJDtiy7XlW+tUfJdqtu6Vj7nduT0emDOXbBZsNwlcmzgYNwuNu3I9AfhZTFWtwLgB+wnAgB/jim82DDrJfLia8kB2eA2ao5jfJ3uMSZ -TestMailbox 00000180d776e501528546d340490291 Lz4Z9wCTk1lZ86lL01urhAan4oHcr1NBqdRe+CDpA51D9IncA5+Fhc8I6knUIh2qQ5/woWgISLAVwzSS+0+TxrYoqxf5FumIQtUJfwDER5La3n0= -TestMailbox 00000180d776e509b68fdc5c376d0abc RUGE2xB3fFX/wRH/p2fHIUa+rMaXSRd7fY9zglw0pRfVPqJfpniOjAe4GHIwGlwbwjtFOwS5a+Q7yr0Wez6QwD+ohhqRFKpbjcFcN7VfMyVAf+k= -TestMailbox 00000180d7784b987a8ad8106dc400c9 K+0LVEtBbTnWNS67jy9DtTvQyd5arovduvu490tLOE2TzVhuVoF4pfvTMTN12bH3KwEAHeDfuwKkKJFqldOywouTYPzEjZFkJzyagHrkl6dfnE5CqmlDv+Vc5TOQRskxjW+wQiZdjU8wGiBiBGYh -TestMailbox 00000180d7784bede69ac3cff2c6b724 XMFY3+b1r1//uolVz80JSI3g/84XCk3Tm7/S0BFv+Qe/Xv3/poLrOvAKEe+GzD2s22j8p/T2RXR/JSZckzgjEZeO0wbPDXVQd94di2Pff7jxAH8= -TestMailbox 00000180d7784bffe2595abe7ed81858 QQZhF+7wSHfikoAp93a+UY/XDIX7TVnnVYOtmQ2XHnDKA2F6snRJCPbYBO4IRHCRfVrjDGi32c41it2C3Mu5PBepabxapsW1rfIV3rlX2lkKHtI= -TestMailbox 00000180d77a7fb3f01dbb147c20cf7f IHOlOa1JI11RUKVvQUq3HQPxiRr4UCeE+pHmL8DtNMkOh62V4spuP0VvvQTJCQcPQ1EQR/QcxZ3s7uHLkrZAHF30BkpUkGqsLBWpnyug/puhdiixWsMyLLb6G90zFjiComUwptnDc/CCXtGEHdSW -TestMailbox 00000180d77a7fbb54b100f521ceb347 Ze4KyyTCgrYbZlXlJSY5hNob8sMXvBAmwIx2cADbX5P0M1IHXwXfloEzvvd6WYOtatFC2GnDSrmQ6RdCfeZ3WV9TZilqa0Fv0XEg48sVyVCcguw= -TestMailbox 00000180d77a7fe68f4f76e3b45aa751 cJJVvvRzTVNKUaIHPCCDY2uY7/HlmkxGgo3ozWBlBSRDeBqU65zgZD3QIPCxa6xaqB/Gc0bQ9BGzfU0cvVmO5jgNeeDnbqqs3oeA2jml/Qv2YO9upApfNQtDT1GiwJ8vrgaIow== -TestMailbox 00000180d8e513d3ea58c679a13178ac Ce5su2YOxNmTzk2dK8SX8V/Uue5uAC7oklEjhesY9wCMqGphhOkdWjzCqq0xOzcb/ZzzZ58t+mTksNSYIU4kddHIHBFPgqIwKthVk2mlUdqYiN/Y2vEGqv+YmtKY+GST/7Ee87ZHpU/5sv0GoXxT -TestMailbox 00000180d8e5145a23f8faee86283900 sp3D8xFZcM9icNlDJXIUDJb3mo6VGD9f1aDHD+4RbPdx6mTYF+qNTsPHKCxHHxT/9NfNe8XPg2+8xYRtm7SXfgERZBDB8ye+Xt3fM1k+wbL6RsaJmDHVECeXeL5KHuITzpI22A== -TestMailbox 00000180d8e51465c38f0585f9bb760e FF0VId2O/bBNzYD5ABWReMs5hHoHwynOoJRKj9vyaUMZ3JykInFmvvRgtCbJBDjTQPwPU8apphKQfwuicO76H7GtZqH009Cbv5l8ZTRJKrmzOQmtjzBQc2eGEUMPfbml5t0GCg== -``` - -The timestamp of a checkpoint corresponds to the timestamp of the first operation NOT included in the checkpoint. 
-In other words, to reconstruct the final state: - -- find timestamp `` of last checkpoint -- load checkpoint `` -- load and apply all operations starting from ``, included - -## UID index - -The UID index is an application of the Bayou storage module -used to assign UID numbers to e-mails. -See document we sent to NGI for properties on UIDVALIDITY. - - diff --git a/doc/src/imap_uid.md b/doc/src/imap_uid.md deleted file mode 100644 index ecdd52b..0000000 --- a/doc/src/imap_uid.md +++ /dev/null @@ -1,203 +0,0 @@ -# IMAP UID proof - -**Notations** - -- $h$: the hash of a message, $\mathbb{H}$ is the set of hashes -- $i$: the UID of a message $(i \in \mathbb{N})$ -- $f$: a flag attributed to a message (it's a string), we write - $\mathbb{F}$ the set of possible flags -- if $M$ is a map (aka a dictionnary), if $x$ has no assigned value in - $M$ we write $M [x] = \bot$ or equivalently $x \not\in M$. If $x$ has a value - in the map we write $x \in M$ and $M [x] \neq \bot$ - -**State** - -- A map $I$ such that $I [h]$ is the UID of the message whose hash is - $h$ is the mailbox, or $\bot$ if there is no such message - -- A map $F$ such that $F [h]$ is the set of flags attributed to the - message whose hash is $h$ - -- $v$: the UIDVALIDITY value - -- $n$: the UIDNEXT value - -- $s$: an internal sequence number that is mostly equal to UIDNEXT but - also grows when mails are deleted - -**Operations** - - - MAIL\_ADD$(h, i)$: the value of $i$ that is put in this operation is - the value of $s$ in the state resulting of all already known operations, - i.e. $s (O_{gen})$ in the notation below where $O_{gen}$ is - the set of all operations known at the time when the MAIL\_ADD is generated. - Moreover, such an operation can only be generated if $I (O_{gen}) [h] - = \bot$, i.e. for a mail $h$ that is not already in the state at - $O_{gen}$. - - - MAIL\_DEL$(h)$ - - - FLAG\_ADD$(h, f)$ - - - FLAG\_DEL$(h, f)$ - -**Algorithms** - - -**apply** MAIL\_ADD$(h, i)$: -   *if* $i < s$: -     $v \leftarrow v + s - i$ -   *if* $F [h] = \bot$: -     $F [h] \leftarrow F_{initial}$ -  $I [h] \leftarrow s$ -  $s \leftarrow s + 1$ -  $n \leftarrow s$ - -**apply** MAIL\_DEL$(h)$: -   $I [h] \leftarrow \bot$ -  $F [h] \leftarrow \bot$ -  $s \leftarrow s + 1$ - -**apply** FLAG\_ADD$(h, f)$: -   *if* $h \in F$: -     $F [h] \leftarrow F [h] \cup \{ f \}$ - -**apply** FLAG\_DEL$(h, f)$: -   *if* $h \in F$: -     $F [h] \leftarrow F [h] \backslash \{ f \}$ - - -**More notations** - -- $o$ is an operation such as MAIL\_ADD, MAIL\_DEL, etc. $O$ is a set of - operations. Operations embed a timestamp, so a set of operations $O$ can be - written as $O = [o_1, o_2, \ldots, o_n]$ by ordering them by timestamp. - -- if $o \in O$, we write $O_{\leqslant o}$, $O_{< o}$, $O_{\geqslant - o}$, $O_{> o}$ the set of items of $O$ that are respectively earlier or - equal, strictly earlier, later or equal, or strictly later than $o$. In - other words, if we write $O = [o_1, \ldots, o_n]$, where $o$ is a certain - $o_i$ in this sequence, then: -$$ -\begin{aligned} -O_{\leqslant o} &= \{ o_1, \ldots, o_i \}\\ -O_{< o} &= \{ o_1, \ldots, o_{i - 1} \}\\ -O_{\geqslant o} &= \{ o_i, \ldots, o_n \}\\ -O_{> o} &= \{ o_{i + 1}, \ldots, o_n \} -\end{aligned} -$$ - -- If $O$ is a set of operations, we write $I (O)$, $F (O)$, $n (O), s - (O)$, and $v (O)$ the values of $I, F, n, s$ and $v$ in the state that - results of applying all of the operations in $O$ in their sorted order. 
(we - thus write $I (O) [h]$ the value of $I [h]$ in this state) - -**Hypothesis:** -An operation $o$ can only be in a set $O$ if it was -generated after applying operations of a set $O_{gen}$ such that -$O_{gen} \subset O$ (because causality is respected in how we deliver -operations). Sets of operations that do not respect this property are excluded -from all of the properties, lemmas and proofs below. - -**Simplification:** We will now exclude FLAG\_ADD and FLAG\_DEL -operations, as they do not manipulate $n$, $s$ and $v$, and adding them should -have no impact on the properties below. - -**Small lemma:** If there are no FLAG\_ADD and FLAG\_DEL operations, -then $s (O) = | O |$. This is easy to see because the possible operations are -only MAIL\_ADD and MAIL\_DEL, and both increment the value of $s$ by 1. - -**Defnition:** If $o$ is a MAIL\_ADD$(h, i)$ operation, and $O$ is a -set of operations such that $o \in O$, then we define the following value: -$$ -C (o, O) = s (O_{< o}) - i -$$ -We say that $C (o, O)$ is the *number of conflicts of $o$ in $O$*: it -corresponds to the number of operations that were added before $o$ in $O$ that -were not in $O_{gen}$. - -**Property:** - -We have that: - -$$ -v (O) = \sum_{o \in O} C (o, O) -$$ - -Or in English: $v (O)$ is the sum of the number of conflicts of all of the -MAIL\_ADD operations in $O$. This is easy to see because indeed $v$ is -incremented by $C (o, O)$ for each operation $o \in O$ that is applied. - - -**Property:** - If $O$ and $O'$ are two sets of operations, and $O \subseteq O'$, then: - -$$ -\begin{aligned} -\forall o \in O, \qquad C (o, O) \leqslant C (o, O') -\end{aligned} -$$ - -This is easy to see because $O_{< o} \subseteq O'_{< o}$ and $C (o, O') - C - (o, O) = s (O'_{< o}) - s (O_{< o}) = | O'_{< o} | - | O_{< o} | \geqslant - 0$ - -**Theorem:** - -If $O$ and $O'$ are two sets of operations: - -$$ -\begin{aligned} -O \subseteq O' & \Rightarrow & v (O) \leqslant v (O') -\end{aligned} -$$ - -**Proof:** - -$$ -\begin{aligned} -v (O') &= \sum_{o \in O'} C (o, O')\\ - & \geqslant \sum_{o \in O} C (o, O') \qquad \text{(because $O \subseteq - O'$)}\\ - & \geqslant \sum_{o \in O} C (o, O) \qquad \text{(because $\forall o \in - O, C (o, O) \leqslant C (o, O')$)}\\ - & \geqslant v (O) -\end{aligned} -$$ - -**Theorem:** - -If $O$ and $O'$ are two sets of operations, such that $O \subset O'$, - -and if there are two different mails $h$ and $h'$ $(h \neq h')$ such that $I - (O) [h] = I (O') [h']$ - - then: - $$v (O) < v (O')$$ - -**Proof:** - -We already know that $v (O) \leqslant v (O')$ because of the previous theorem. -We will now look at the sum: -$$ -v (O') = \sum_{o \in O'} C (o, O') -$$ -and show that there is at least one term in this sum that is strictly larger -than the corresponding term in the other sum: -$$ -v (O) = \sum_{o \in O} C (o, O) -$$ -Let $o$ be the last MAIL\_ADD$(h, \_)$ operation in $O$, i.e. the operation -that gives its definitive UID to mail $h$ in $O$, and similarly $o'$ be the -last MAIL\_ADD($h', \_$) operation in $O'$. - -Let us write $I = I (O) [h] = I (O') [h']$ - -$o$ is the operation at position $I$ in $O$, and $o'$ is the operation at -position $I$ in $O'$. But $o \neq o'$, so if $o$ is not the operation at -position $I$ in $O'$ then it has to be at a later position $I' > I$ in $O'$, -because no operations are removed between $O$ and $O'$, the only possibility -is that some other operations (including $o'$) are added before $o$. Therefore -we have that $C (o, O') > C (o, O)$, i.e. 
at least one term in the sum above -is strictly larger in the first sum than in the second one. Since all other -terms are greater or equal, we have $v (O') > v (O)$. diff --git a/doc/src/index.md b/doc/src/index.md deleted file mode 100644 index 9d8f910..0000000 --- a/doc/src/index.md +++ /dev/null @@ -1,22 +0,0 @@ -# Introduction - -

- A scan of an Aerogramme dating from 1955 -
-[ Documentation -| Git repository -] -
-stability status: technical preview (do not use in production) -

- -Aerogramme is an open-source **IMAP server** targeted at **distributed** infrastructures and written in **Rust**. -It is designed to be resilient, easy to operate and private by design. - -**Resilient** - Aerogramme is built on top of Garage, a (geographically) distributed object storage software. Aerogramme thus inherits Garage resiliency: its mailboxes are spread on multiple distant regions, regions can go offline while keeping mailboxes available, storage nodes can be added or removed on the fly, etc. - -**Easy to operate** - Aerogramme mutualizes the burden of data management by storing all its data in an object store and nothing on the local filesystem or any relational database. It can be seen as a proxy between the IMAP protocol and Garage protocols (S3 and K2V). It can thus be freely moved between machines. Multiple instances can also be run in parallel. - -**Private by design** - As emails are very sensitive, Aerogramme encrypts users' mailboxes with their passwords. Data is decrypted in RAM upon user login: the Garage storage layer handles only encrypted blobs. It is even possible to run locally Aerogramme while connecting it to a remote, third-party, untrusted Garage provider; in this case clear text emails never leak outside of your computer. - -Our main use case is to provide a modern email stack for autonomously hosted communities such as [Deuxfleurs](https://deuxfleurs.fr). More generally, we want to set new standards in term of email ethic by lowering the bar to become an email provider while making it harder to spy users' emails. diff --git a/doc/src/installation.md b/doc/src/installation.md deleted file mode 100644 index 7f722e7..0000000 --- a/doc/src/installation.md +++ /dev/null @@ -1,25 +0,0 @@ -# Installation - -Install a Rust nightly toolchain: [go to Rustup](https://rustup.rs/). - -Install and deploy a Garage cluster: [go to Garage documentation](https://garagehq.deuxfleurs.fr/documentation/quick-start/). Make sure that you download a binary that supports K2V. Currently, you will find them in the "Extra build" section of the Download page. - -Clone Aerogramme's repository: - -```bash -git clone https://git.deuxfleurs.fr/Deuxfleurs/aerogramme/ -``` - -Compile Aerogramme: - -```bash -cargo build -``` - -Check that your compiled binary works: - -```bash -cargo run -``` - -You are now ready to [setup Aerogramme!](./setup.md) diff --git a/doc/src/log.md b/doc/src/log.md deleted file mode 100644 index f29ecee..0000000 --- a/doc/src/log.md +++ /dev/null @@ -1,149 +0,0 @@ -# Mutation Log - - -Back to our data structure, we note that one major challenge with this project is to *correctly* handle mutable data. -With our current design, multiple processes can interact with the same mutable data without coordination, and we need a way to detect and solve conflicts. -Directly storing the result in a single k2v key would not work as we have no transaction or lock mechanism, and our state would be always corrupted. -Instead, we choose to record an ordered log of operations, ie. transitions, that each client can use locally to rebuild the state, each transition has its own immutable identifier. -This technique is sometimes referred to as event sourcing. - -With this system, we can't have conflict anymore at Garage level, but conflicts at the IMAP level can still occur, like 2 processes assigning the same identifier to different emails. -We thus need a logic to handle these conflicts that is flexible enough to accommodate the application's specific logic. 
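-
-To make the event-sourcing idea concrete, here is a deliberately minimal sketch of deterministic state reconstruction from an ordered log. The types (`EntryId`, `Op`, `State`) are illustrative placeholders only, not Aerogramme's actual implementation; the real command set and the conflict-resolution rules are defined below.
-
-```rust
-use std::collections::BTreeMap;
-
-// A log entry key: a timestamp plus a unique identifier gives a total order,
-// so every client that holds the same set of entries replays them in the
-// same sequence and ends up in the same state.
-#[derive(Clone, PartialEq, Eq, PartialOrd, Ord)]
-struct EntryId { timestamp: u64, unique: u128 }
-
-// Placeholder operations; the actual commands are listed further down.
-#[derive(Clone)]
-enum Op {
-    MailAdd { uuid: u128, uid: u32 },
-    MailDel { uuid: u128 },
-}
-
-#[derive(Default)]
-struct State { mails: BTreeMap<u32, u128> }
-
-impl State {
-    // Applying one operation is a pure, deterministic transition.
-    fn apply(&mut self, op: &Op) {
-        match op {
-            Op::MailAdd { uuid, uid } => { self.mails.insert(*uid, *uuid); }
-            Op::MailDel { uuid } => { self.mails.retain(|_, v| *v != *uuid); }
-        }
-    }
-}
-
-// Rebuild the state by folding the log in (timestamp, unique id) order.
-fn replay(log: &BTreeMap<EntryId, Op>) -> State {
-    let mut state = State::default();
-    for op in log.values() {
-        state.apply(op);
-    }
-    state
-}
-```
-
-Because every client folds the same totally ordered set of entries, two clients that have fetched the same log necessarily compute the same state; what remains is deciding what the transitions do when concurrent entries conflict, which is the topic of the rest of this section.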
- -Our solution is inspired by the work conducted by Terry et al. on [Bayou](https://dl.acm.org/doi/10.1145/224056.224070). -Clients fetch regularly the log from Garage, each entry is ordered by a timestamp and a unique identifier. -One of the 2 conflicting clients will be in the state where it has executed a log entry in the wrong order according to the specified ordering. -This client will need to roll back its changes to reapply the log in the same order as the others, and on conflicts, the same logic will be applied by all the clients to get, in the end, the same state. - -**Command definitions** - -The log is made of a sequence of ordered commands that can be run to get a deterministic state in the end. -We define the following commands: - -`FLAG_ADD ` - Add a flag to the target email -`FLAG_DEL ` - Remove a flag from a target email -`MAIL_DEL ` - Remove an email -`MAIL_ADD ` - Register an email in the mailbox with the given identifier -`REMOTE ` - Command is not directly stored here, instead it must be fetched from S3, see batching to understand why. - -*Note: FLAG commands could be enhanced with a MODSEQ field similar to the uid field for the emails, in order to implement IMAP RFC4551. Adding this field would force us to handle conflicts on flags -the same way as on emails, as MODSEQ must be monotonically incremented but is reset by a uid-validity change. This is out of the scope of this document.* - -**A note on UUID** - -When adding an email to the system, we associate it with a *universally unique identifier* or *UUID.* -We can then reference this email in the rest of the system without fearing a conflict or a race condition are we are confident that this UUID is unique. - -We could have used the email hash instead, but we identified some benefits in using UUID. -First, sometimes a mail must be duplicated, because the user received it from 2 different sources, so it is more correct to have 2 entries in the system. -Additionally, UUIDs are smaller and better compressible than a hash, which will lead to better performances. - -**Batching commands** - -Commands that are executed at the same time can be batched together. -Let's imagine a user is deleting its trash containing thousands of emails. -Instead of writing thousands of log lines, we can append them in a single entry. -If this entry becomes big (eg. > 100 commands), we can store it to S3 with the `REMOTE` command. -Batching is important as we want to keep the number of log entries small to be able to fetch them regularly and quickly. - -## Fixing conflicts in the operation log - -The log is applied in order from the last checkpoint. -To stay in sync, the client regularly asks the server for the last commands. - -When the log is applied, our system must enforce the following invariants: - -- For all emails e1 and e2 in the log, such as e2.order > e1.order, then e2.uid > e1.uid - -- For all emails e1 and e2 in the log, such as e1.uuid == e2.uuid, then e1.order == e2.order - -If an invariant is broken, the conflict is solved with the following algorithm and the `uidvalidity` value is increased. - - -```python -def apply_mail_add(uuid, imap_uid): - if imap_uid < internalseq: - uidvalidity += internalseq - imap_uid - mails.insert(uuid, internalseq, flags=["\Recent"]) - internalseq = internalseq + 1 - uidnext = internalseq - -def apply_mail_del(uuid): - mails.remove(uuid) - internalseq = internalseq + 1 -``` - -A mathematical demonstration in Appendix D. 
shows that this algorithm indeed guarantees that under the same `uidvalidity`, different e-mails cannot share the same IMAP UID. - -To illustrate, let us imagine two processes that have a first operation A in common, and then had a divergent state when one applied an operation B, and another one applied an operation C. For process 1, we have: - -```python -# state: uid-validity = 1, uid_next = 1, internalseq = 1 -(A) MAIL_ADD x 1 -# state: uid-validity = 1, x = 1, uid_next = 2, internalseq = 2 -(B) MAIL_ADD y 2 -# state: uid-validity = 1, x = 1, y = 2, uid_next = 3, internalseq = 3 -``` - -And for process 2 we have: - -```python -# state: uid-validity = 1, uid_next = 1, internalseq = 1 -(A) MAIL_ADD x 1 -# state: uid-validity = 1, x = 1, uid_next = 2, internalseq = 2 -(C) MAIL_ADD z 2 -# state: uid-validity = 1, x = 1, z = 2, uid_next = 3, internalseq = 3 -``` - -Suppose that a new client connects to one of the two processes after the conflicting operations have been communicated between them. They may have before connected either to process 1 or to process 2, so they might have observed either mail `y` or mail `z` with UID 2. The only way to make sure that the client will not be confused about mail UIDs is to bump the uidvalidity when the conflict is solved. This is indeed what happens with our algorithm: for both processes, once they have learned of the other's conflicting operation, they will execute the following set of operations and end in a deterministic state: - -```python -# state: uid-validity = 1, uid_next = 1, internalseq = 1 -(A) MAIL_ADD x 1 -# state: uid-validity = 1, x = 1, uid_next = 2, internalseq = 2 -(B) MAIL_ADD y 2 -# state: uid-validity = 1, x = 1, y = 2, uid_next = 3, internalseq = 3 -(C) MAIL_ADD z 2 -# conflict detected ! -# state: uid-validity = 2, x = 1, y = 2, z = 3, uid_next = 4, internalseq = 4 -``` - -## A computed state for efficient requests - -From a data structure perspective, a list of commands is very inefficient to get the current state of the mailbox. -Indeed, we don't want an `O(n)` complexity (where `n` is the number of log commands in the log) each time we want to know how many emails are stored in the mailbox. - -To address this issue, and thus query the mailbox efficiently, the MDA keeps an in-memory computed version of the logs, ie. the computed state. - -**Mapping IMAP identifiers to email identifiers with B-Tree** - -Core features of IMAP are synchronization and listing of emails. -Its associated command is `FETCH`, it has 2 parameters, a range of `uid` (or `seq`) and a filter. -For us, it means that we must be able to efficiently select a range of emails by their identifier, otherwise the user experience will be bad, and compute resources will be wasted. - -We identified that by using an ordered map based on a B-Tree, we can satisfy this requirement in an optimal manner. -For example, Rust defines a [BTreeMap](https://doc.rust-lang.org/std/collections/struct.BTreeMap.html) object in its standard library. -We define the following structure for our mailbox: - -```rust -struct mailbox { - emails: BTreeMap, - flags: BTreeMap>, - name: String, - uid_next: u32, - uid_validity: u32, - /* other fields */ -} -``` - -This data structure allows us to efficiently select a range of emails by their identifier by walking the tree, allowing the server to be responsive to syncronisation request from clients. 
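-
-For instance, answering a request such as `UID FETCH 1000:2000` only needs a bounded walk of the tree. A minimal sketch, assuming the `emails` map is keyed by the IMAP `uid`; the concrete key and value types below are placeholders, not Aerogramme's actual definitions:
-
-```rust
-use std::collections::BTreeMap;
-
-type ImapUid = u32;
-type MailUuid = [u8; 16];
-
-struct Mailbox {
-    emails: BTreeMap<ImapUid, MailUuid>,
-}
-
-impl Mailbox {
-    // Return the (uid, uuid) pairs whose UID falls in `lo..=hi`.
-    // BTreeMap::range only visits the relevant part of the tree.
-    fn uid_range(&self, lo: ImapUid, hi: ImapUid) -> Vec<(ImapUid, MailUuid)> {
-        self.emails
-            .range(lo..=hi)
-            .map(|(uid, uuid)| (*uid, *uuid))
-            .collect()
-    }
-}
-```
-
-Finding the start of the range costs `O(log n)`, and the walk itself is proportional to the number of entries returned, which is exactly the access pattern that `FETCH` and client synchronization need.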
- -**Checkpoints** - -Having an in-memory computed state does not solve all the problems of operation on a log only, as 1) bootstrapping a fresh client is expensive as we have to replay possibly thousand of logs, and 2) logs would be kept indefinitely, wasting valuable storage resources. - -As a solution to these limitations, the MDA regularly checkpoints the in-memory state. More specifically, it serializes it (eg. with MessagePack), compresses it (eg. with zstd), and then stores it on Garage through the S3 API. -A fresh client would then only have to download the latest checkpoint and the range of logs between the checkpoint and now, allowing swift bootstraping while retaining all of the value of the log model. - -Old logs and old checkpoints can be garbage collected after a few days for example as long as 1) the most recent checkpoint remains, 2) that all the logs after this checkpoint remain and 3) that we are confident enough that no log before this checkpoint will appear in the future. - diff --git a/doc/src/mailbox.md b/doc/src/mailbox.md deleted file mode 100644 index 02d0e5a..0000000 --- a/doc/src/mailbox.md +++ /dev/null @@ -1,56 +0,0 @@ -# Mailboxes - -IMAP servers, at their root, handle mailboxes. -In this document, we explain the domain logic of IMAP and how we map it to Garage data -with Aerogramme. - -## IMAP Domain Logic - -The main specification of IMAP is defined in [RFC3501](https://datatracker.ietf.org/doc/html/rfc3501). -It defines 3 main objects: Mailboxes, Emails, and Flags. The following figure depicts how they work together: - -![An IMAP mailbox schema](./mailbox.png) - -Emails are stored ordered inside the mailbox, and for legacy reasons, the mailbox assigns 2 identifiers to each email we name `uid` and `seq`. - -`seq` is the legacy identifier, it numbers messages in a sequence. Each time an email is deleted, the message numbering will change to keep a continuous sequence without holes. -While this numbering is convenient for interactive operations, it is not efficient to synchronize mail locally and quickly detect missing new emails. - -To solve this problem, `uid` identifiers were introduced later. They are monotonically increasing integers that must remain stable across time and sessions: when an email is deleted, its identifier is never reused. -This is what Thunderbird uses for example when it synchronizes its mailboxes. - -If this ordering cannot be kept, for example because two independent IMAP daemons were adding an email to the same mailbox at the same time, it is possible to change the ordering as long as we change a value named `uid-validity` to trigger a full resynchronization of all clients. As this operation is expensive, we want to minimize the probability of having to trigger a full resynchronization, but in practice, having this recovery mechanism simplifies the operation of an IMAP server by providing a rather simple solution to rare collision situations. - -Flags are tags put on an email, some are defined at the protocol level, like `\Recent`, `\Deleted` or `\Seen`, which can be assigned or removed directly by the IMAP daemon. -Others can be defined arbitrarily by the client, for which the MUA will apply its own logic. -There is no mechanism in RFC3501 to synchronize flags between MUA besides listing the flags of all the emails. - -IMAP has many extensions, such as [RFC5465](https://www.rfc-editor.org/rfc/rfc5465.html) or [RFC7162](https://datatracker.ietf.org/doc/html/rfc7162). 
-They are referred to as capabilities and are [referenced by the IANA](https://www.iana.org/assignments/imap-capabilities/imap-capabilities.xhtml). -For this project, we are aiming to implement only IMAP4rev1 and no extension at all. - - -## Aerogramme Implementation - -From a high-level perspective, we will handle _immutable_ emails differently from _mutable_ mailboxes and flags. -Immutable data can be stored directly on Garage, as we do not fear reading an outdated value. -For mutable data, we cannot store them directly in Garage. -Instead, we choose to store a log of operations. Each client then applies this log of operation locally to rebuild its local state. - -During this design phase, we noted that the S3 API semantic was too limited for us, so we introduced a second API, K2V, to have more flexibility. -K2V is designed to store and fetch small values in batches, it uses 2 different keys: one to spread the data on the cluster (`P`), and one to sort linked data on the same node (`S`). -Having data on the same node allows for more efficient queries among this data. - -For performance reasons, we plan to introduce 2 optimizations. -First, we store an email summary in K2V that allows fetching multiple entries at once. -Second, we also store checkpoints of the logs in S3 to avoid keeping and replaying all the logs each time a client starts a session. -We have the following data handled by Garage: - -![Aerogramme Datatypes](./aero-states.png) - -In Garage, it is important to carefully choose the key(s) that are used to store data to have fast queries, we propose the following model: - -![Aerogramme Key Choice](./aero-states2.png) - - - diff --git a/doc/src/mailbox.png b/doc/src/mailbox.png deleted file mode 100644 index 038e3ac..0000000 Binary files a/doc/src/mailbox.png and /dev/null differ diff --git a/doc/src/mutt_mail.png b/doc/src/mutt_mail.png deleted file mode 100644 index e8d04e4..0000000 Binary files a/doc/src/mutt_mail.png and /dev/null differ diff --git a/doc/src/mutt_mb.png b/doc/src/mutt_mb.png deleted file mode 100644 index d1bafaf..0000000 Binary files a/doc/src/mutt_mb.png and /dev/null differ diff --git a/doc/src/notes.md b/doc/src/notes.md deleted file mode 100644 index 3a4c954..0000000 --- a/doc/src/notes.md +++ /dev/null @@ -1,42 +0,0 @@ -# Notes - -An IMAP trace extracted from Aerogramme: - -``` -S: * OK Hello -C: A1 LOGIN alan p455w0rd -S: A1 OK Completed -C: A2 SELECT INBOX -S: * 0 EXISTS -S: * 0 RECENT -S: * FLAGS (\Seen \Answered \Flagged \Deleted \Draft) -S: * OK [PERMANENTFLAGS (\Seen \Answered \Flagged \Deleted \Draft \*)] Flags permitted -S: * OK [UIDVALIDITY 1] UIDs valid -S: * OK [UIDNEXT 1] Predict next UID -S: A2 OK [READ-WRITE] Select completed -C: A3 NOOP -S: A3 OK NOOP completed. - <---- e-mail arrives through LMTP server ----> -C: A4 NOOP -S: * 1 EXISTS -S: A4 OK NOOP completed. -C: A5 FETCH 1 FULL -S: * 1 FETCH (UID 1 FLAGS () INTERNALDATE "06-Jul-2022 14:46:42 +0000" - RFC822.SIZE 117 ENVELOPE (NIL "test" (("Alan Smith" NIL "alan" "smith.me")) - NIL NIL (("Alan Smith" NIL "alan" "aerogramme.tld")) NIL NIL NIL NIL) - BODY ("TEXT" "test" NIL "test" "test" "test" 1 1)) -S: A5 OK FETCH completed -C: A6 FETCH 1 (RFC822) -S: * 1 FETCH (UID 1 RFC822 {117} -S: Subject: test -S: From: Alan Smith -S: To: Alan Smith -S: -S: Hello, world! -S: . 
-S: ) -S: A6 OK FETCH completed -C: A7 LOGOUT -S: * BYE Logging out -S: A7 OK Logout completed -``` diff --git a/doc/src/overview.md b/doc/src/overview.md deleted file mode 100644 index ca75a29..0000000 --- a/doc/src/overview.md +++ /dev/null @@ -1,61 +0,0 @@ -# Overview - -Aérogramme stands at the interface between the Garage storage server, and the user's e-mail client. It provides regular IMAP access on the client-side, and stores encrypted e-mail data on the server-side. Aérogramme also provides an LMTP server interface through which incoming mail can be forwarded by the MTA (e.g. Postfix). - -
-Aerogramme components -
-Figure 1: Aérogramme, our IMAP daemon, stores its data encrypted in Garage and provides regular IMAP access to mail clients
- - -**Overview of architecture** - -Figure 2 below shows an overview of Aérogramme's architecture. Each user has a personal Garage bucket in which to store their mailbox contents. We will document below the details of the components that make up Aérogramme, but let us first provide a high-level overview. The two main classes, `User` and `Mailbox`, define how data is stored in this bucket, and provide a high-level interface with primitives such as reading the message index, loading a mail's content, copying, moving, and deleting messages, etc. This mail storage system is supported by two important primitives: a cryptography management system that provides encryption keys for user's data, and a simple log-like database system inspired by Bayou [1] which we have called Bay, that we use to store the index of messages in each mailbox. The mail storage system is made accessible to the outside world by two subsystems: an LMTP server that allows for incoming mail to be received and stored in a user's bucket, in a staging area, and the IMAP server itself which allows full-fledged manipulation of mailbox data by users. - -
-Aerogramme internals -Figure 2: Overview of Aérogramme's architecture and internal data structures for a given user, Alice
- - -**Cryptography** - -Our cryptography module is taking care of: authenticating users against a data source (using their IMAP login and password), returning a set of credentials that allow read/write access to a Garage bucket, as well as a set of secret encryption keys used to encrypt and decrypt data stored in the bucket. -The cryptography module makes use of the user's authentication password as a passphrase to decrypt the user's secret keys, which are stored in the user's bucket in a dedicated K2V section. - -This module can use either of two data sources for user authentication: - -- LDAP, in which case the password (which is also the passphrase for decrypting the user's secret keys) must match the LDAP password of the user. -- Static, in which case the users are statically declared in Aérogramme's configuration file, and can have any password. - -The static authentication source can be used in a deployment scenario shown in Figure 3, where Aérogramme is not running on the side of the service provider, but on the user's device itself. In this case, the user can use any password to encrypt their data in the bucket; the only credentials they need for authentication against the service provider are the S3 and K2V API access keys. - -
-user side encryption -
-Figure 3: alternative deployment of Aérogramme on the user's device: the service provider never gets access to the plaintext data.
- -The cryptography module also has a "public authentication" method, which allows the LMTP module to retrieve only a public key for the user to write incoming messages to the user's bucket but without having access to all of the existing encrypted data. - -The cryptography module of Aérogramme is based on standard cryptographic primitives from `libsodium` and follows best practices in the domain. - -**Bay, a simplification of Bayou** - -In our last milestone report, we described how we intended to implement the message index for IMAP mailboxes, based on an eventually-consistent log-like data structure. The principles of this system have been established in Bayou in 1995 [1], allowing users to use a weakly-coordinated datastore to exchange data and solve write conflicts. Bayou is based on a sequential specification, which defines the action that operations in the log have on the shared object's state. To handle concurrent modification, Bayou allows for log entries to be appended in non-sequential order: in case a process reads a log entry that was written earlier by another process, it can rewind its execution of the sequential specification to the point where the newly acquired operation should have been executed, and then execute the log again starting from this point. The challenge then consists in defining a sequential specification that provides the desired semantics for the application. In our last milestone report (milestone 3.A), we described a sequential specification that solves the UID assignment problem in IMAP and proved it correct. We refer the reader to that document for more details. - -For milestone 3B, we have implemented our customized version of Bayou, which we call Bay. Bay implements the log-like semantics and the rewind ability of Bayou, however, it makes use of a much simpler data system: Bay is not operating on a relational database that is stored on disk, but simply on a data structure in RAM, for which a full checkpoint is written regularly. We decided against using a complex database as we observed that the expected size of the data structures we would be handling (the message indexes for each mailbox) wouldn't be so big most of the time, and having a full copy in RAM was perfectly acceptable. This allows for a drastic simplification in comparison to the proposal of the original Bayou paper [1]. On the other side, we added encryption in Bay so that both log entries and checkpoints are stored encrypted in Garage using the user's secret key, meaning that a malicious Garage administrator cannot read the content of a user's mailbox index. - -**LMTP server and incoming mail handler** - -To handle incoming mail, we had to add a simple LMTP server to Aérogramme. This server uses the public authentication method of the cryptography module to retrieve a set of public credentials (in particular, a public key for asymmetric encryption) for storing incoming messages. The incoming messages are stored in their raw RFC822 form (encrypted) in a specific folder of the Garage bucket called `incoming/`. When a user logs in with their username and password, at which time Aérogramme can decrypt the user's secret keys, a special process is launched that watches the incoming folder and moves these messages to the `INBOX` folder. This task can only be done by a process that knows the user's secret keys, as it has to modify the mailbox index of the `INBOX` folder, which is encrypted using the user's secret keys. 
In later versions of Aérogramme, this process would be the perfect place to implement mail filtering logic using user-specified rules. These rules could be stored in a dedicated section of the bucket, again encrypted with the user's secret keys.
-
-To implement the LMTP server, we chose to make use of the `smtp-server` crate from the [Kannader](https://github.com/Ekleog/kannader) project (an MTA written in Rust). The `smtp-server` crate had all of the necessary functionality for building SMTP servers; however, it did not handle LMTP. As LMTP is extremely close to SMTP, we were able to extend the `smtp-server` module to allow it to be used for the implementation of both SMTP and LMTP servers. Our work has been proposed as a [pull request](https://github.com/Ekleog/kannader/pull/178) to be merged back upstream in Kannader, which should be integrated soon.
-
-**IMAP server**
-
-The last part remaining to build Aérogramme is to implement the logic behind the IMAP protocol and to link it with the mail storage primitives. We started by implementing a state machine that handles the transitions between the different states of the IMAP protocol: ANONYMOUS (before login), AUTHENTICATED (after login), and SELECTED (once a mailbox has been selected for reading/writing). In the SELECTED state, the IMAP session is linked to a given mailbox of the user. In addition, the IMAP server has to keep track of which updates to the mailbox it has sent (or not) to the client, so that it can produce IMAP messages consistent with what the client believes to be in the mailbox. In particular, many IMAP commands make use of mail sequence numbers to identify messages, which are indices in the sorted array of all of the messages in the mailbox. However, if messages are added or removed concurrently, these sequence numbers change: hence we must keep a snapshot of the mailbox's index *as the client knows it*, which is not necessarily the same as what is _actually_ in the mailbox, to generate messages that the client will understand correctly. This snapshot is called a *mailbox view* and is synced regularly with the actual mailbox, at which time the corresponding IMAP updates are sent. This can be done only at specific moments when permitted by the IMAP protocol.
-
-The second part of this task consisted of implementing all of the IMAP protocol commands. Most are relatively straightforward; however, one command in particular needed special care: the FETCH command. The FETCH command in the IMAP protocol can return the contents of a message to the client. However, it must also understand precisely the semantics of the content of an e-mail message, as the client can specify very precisely how the message should be returned. For instance, in the case of a multipart message with attachments, the client can emit a FETCH command requesting only a certain attachment of the message to be returned, and not the whole message. To implement such semantics, we built on the [`mail-parser`](https://docs.rs/mail-parser/latest/mail_parser/) crate, which can fully parse an RFC822-formatted e-mail message and also supports some extensions such as MIME. To validate that we were correctly converting the parsed message structure to IMAP messages, we designed a test suite composed of several weirdly shaped e-mail messages, whose IMAP structure definition we extracted by taking Dovecot as a reference.
We were then able to compare the output of Aérogramme on these messages with the reference consisting in what was returned by Dovecot. - -## References - -- [1] Terry, D. B., Theimer, M. M., Petersen, K., Demers, A. J., Spreitzer, M. J., & Hauser, C. H. (1995). Managing update conflicts in Bayou, a weakly connected replicated storage system. *ACM SIGOPS Operating Systems Review*, 29(5), 172-182. ([PDF](https://dl.acm.org/doi/pdf/10.1145/224057.224070)) diff --git a/doc/src/rfc.md b/doc/src/rfc.md deleted file mode 100644 index 5b42c92..0000000 --- a/doc/src/rfc.md +++ /dev/null @@ -1,3 +0,0 @@ -# RFC coverage - -*Not yet written* diff --git a/doc/src/setup.md b/doc/src/setup.md deleted file mode 100644 index f954ae3..0000000 --- a/doc/src/setup.md +++ /dev/null @@ -1,90 +0,0 @@ -# Setup - -You must start by creating a user profile in Garage. Run the following command after adjusting the parameters to your configuration: - -```bash -cargo run -- first-login \ - --region garage \ - --k2v-endpoint http://127.0.0.1:3904 \ - --s3-endpoint http://127.0.0.1:3900 \ - --aws-access-key-id GK... \ - --aws-secret-access-key c0ffee... \ - --bucket mailrage-me \ - --user-secret s3cr3t -``` - -*Note: user-secret is not the user's password. It is an additional secret used when deriving user's secret key from their password. The idea is that, even if user leaks their password, their encrypted data remain safe as long as this additional secret does not leak. You can generate it with openssl for example: `openssl rand -base64 30`. Read [Cryptography & key management](./crypt-key.md) for more details.* - - -The program will interactively ask you some questions and finally generates for you a snippet of configuration: - -``` -Please enter your password for key decryption. -If you are using LDAP login, this must be your LDAP password. -If you are using the static login provider, enter any password, and this will also become your password for local IMAP access. -Enter password: -Confirm password: - -Cryptographic key setup is complete. - -If you are using the static login provider, add the following section to your .toml configuration file: - -[login_static.users.] -password = "$argon2id$v=19$m=4096,t=3,p=1$..." -aws_access_key_id = "GK..." -aws_secret_access_key = "c0ffee..." -``` - -In this tutorial, we will use the static login provider (and not the LDAP one). -We will thus create a config file named `aerogramme.toml` in which we will paste the previous snippet. You also need to enter some other keys. In the end, your file should look like that: - -```toml -s3_endpoint = "http://127.0.0.1:3900" -k2v_endpoint = "http://127.0.0.1:3904" -aws_region = "garage" - -[lmtp] -bind_addr = "[::1]:12024" -hostname = "aerogramme.tld" - -[imap] -bind_addr = "[::1]:1993" - -[login_static] -default_bucket = "mailrage" - -[login_static.users.me] -bucket = "mailrage-me" -user_secret = "s3cr3t" -email_addresses = [ - "me@aerogramme.tld" -] - -# copy pasted values from first-login -password = "$argon2id$v=19$m=4096,t=3,p=1$..." -aws_access_key_id = "GK..." -aws_secret_access_key = "c0ffee..." -``` - -If you fear to loose your password, you can backup your key with the following command: - -```bash -cargo run -- show-keys \ - --region garage \ - --k2v-endpoint http://127.0.0.1:3904 \ - --s3-endpoint http://127.0.0.1:3900 \ - --aws-access-key-id GK... \ - --aws-secret-access-key c0ffee... 
\ - --bucket mailrage-me \ - --user-secret s3cr3t -``` - -You will then be asked for your key decryption password: - -``` -Enter key decryption password: -master_key = "..." -secret_key = "..." -``` - -You are now ready to [validate your installation](./validate.md). diff --git a/doc/src/validate.md b/doc/src/validate.md deleted file mode 100644 index 57903f6..0000000 --- a/doc/src/validate.md +++ /dev/null @@ -1,40 +0,0 @@ -# Validate - -Start a server as follow: - -```bash -cargo run -- server -``` - -Inject emails: - -```bash -./test/inject_emails.sh '' dxflrs -``` - -Now you can connect your mailbox with `mutt`. -Start by creating a config file, for example we used the following `~/.muttrc` file: - -```ini -set imap_user = quentin -set imap_pass = p455w0rd -set folder = imap://localhost:1993 -set spoolfile = +INBOX -set ssl_starttls = no -set ssl_force_tls = no -mailboxes = +INBOX -bind index G imap-fetch-mail -``` - -And then simply launch `mutt`. -The first time nothing will happen as Aerogramme must -process your incoming emails. Just ask `mutt` to refresh its -view by pressing `G` (for *Get*). - -Now, you should see some emails: - -![Screenshot of mutt mailbox](./mutt_mb.png) - -And you can read them: - -![Screenshot of mutt mail view](./mutt_mail.png) diff --git a/tests/behavior.rs b/tests/behavior.rs deleted file mode 100644 index 13baf0e..0000000 --- a/tests/behavior.rs +++ /dev/null @@ -1,357 +0,0 @@ -use anyhow::Context; - -mod common; -use crate::common::constants::*; -use crate::common::fragments::*; - -fn main() { - rfc3501_imap4rev1_base(); - rfc6851_imapext_move(); - rfc4551_imapext_condstore(); - rfc2177_imapext_idle(); - rfc5161_imapext_enable(); // 1 - rfc3691_imapext_unselect(); // 2 - rfc7888_imapext_literal(); // 3 - rfc4315_imapext_uidplus(); // 4 - rfc5819_imapext_liststatus(); // 5 - println!("✅ SUCCESS 🌟🚀🥳🙏🥹"); -} - -fn rfc3501_imap4rev1_base() { - println!("🧪 rfc3501_imap4rev1_base"); - common::aerogramme_provider_daemon_dev(|imap_socket, lmtp_socket| { - connect(imap_socket).context("server says hello")?; - capability(imap_socket, Extension::None).context("check server capabilities")?; - login(imap_socket, Account::Alice).context("login test")?; - create_mailbox(imap_socket, Mailbox::Archive).context("created mailbox archive")?; - let select_res = - select(imap_socket, Mailbox::Inbox, SelectMod::None).context("select inbox")?; - assert!(select_res.contains("* 0 EXISTS")); - - check(imap_socket).context("check must run")?; - status(imap_socket, Mailbox::Archive, StatusKind::UidNext) - .context("status of archive from inbox")?; - lmtp_handshake(lmtp_socket).context("handshake lmtp done")?; - lmtp_deliver_email(lmtp_socket, Email::Multipart).context("mail delivered successfully")?; - noop_exists(imap_socket, 1).context("noop loop must detect a new email")?; - - let srv_msg = fetch( - imap_socket, - Selection::FirstId, - FetchKind::Rfc822, - FetchMod::None, - ) - .context("fetch rfc822 message, should be our first message")?; - let orig_email = std::str::from_utf8(EMAIL1)?; - assert!(srv_msg.contains(orig_email)); - - copy(imap_socket, Selection::FirstId, Mailbox::Archive) - .context("copy message to the archive mailbox")?; - append(imap_socket, Email::Basic).context("insert email in INBOX")?; - noop_exists(imap_socket, 2).context("noop loop must detect a new email")?; - search(imap_socket, SearchKind::Text("OoOoO")).expect("search should return something"); - store( - imap_socket, - Selection::FirstId, - Flag::Deleted, - StoreAction::AddFlags, - StoreMod::None, - 
) - .context("should add delete flag to the email")?; - expunge(imap_socket).context("expunge emails")?; - rename_mailbox(imap_socket, Mailbox::Archive, Mailbox::Drafts) - .context("Archive mailbox is renamed Drafts")?; - delete_mailbox(imap_socket, Mailbox::Drafts).context("Drafts mailbox is deleted")?; - Ok(()) - }) - .expect("test fully run"); -} - -fn rfc3691_imapext_unselect() { - println!("🧪 rfc3691_imapext_unselect"); - common::aerogramme_provider_daemon_dev(|imap_socket, lmtp_socket| { - connect(imap_socket).context("server says hello")?; - - lmtp_handshake(lmtp_socket).context("handshake lmtp done")?; - lmtp_deliver_email(lmtp_socket, Email::Basic).context("mail delivered successfully")?; - - capability(imap_socket, Extension::Unselect).context("check server capabilities")?; - login(imap_socket, Account::Alice).context("login test")?; - let select_res = - select(imap_socket, Mailbox::Inbox, SelectMod::None).context("select inbox")?; - assert!(select_res.contains("* 0 EXISTS")); - - noop_exists(imap_socket, 1).context("noop loop must detect a new email")?; - store( - imap_socket, - Selection::FirstId, - Flag::Deleted, - StoreAction::AddFlags, - StoreMod::None, - ) - .context("add delete flags to the email")?; - unselect(imap_socket) - .context("unselect inbox while preserving email with the \\Delete flag")?; - let select_res = - select(imap_socket, Mailbox::Inbox, SelectMod::None).context("select inbox again")?; - assert!(select_res.contains("* 1 EXISTS")); - - let srv_msg = fetch( - imap_socket, - Selection::FirstId, - FetchKind::Rfc822, - FetchMod::None, - ) - .context("message is still present")?; - let orig_email = std::str::from_utf8(EMAIL2)?; - assert!(srv_msg.contains(orig_email)); - - close(imap_socket).context("close inbox and expunge message")?; - let select_res = select(imap_socket, Mailbox::Inbox, SelectMod::None) - .context("select inbox again and check it's empty")?; - assert!(select_res.contains("* 0 EXISTS")); - - Ok(()) - }) - .expect("test fully run"); -} - -fn rfc5161_imapext_enable() { - println!("🧪 rfc5161_imapext_enable"); - common::aerogramme_provider_daemon_dev(|imap_socket, _lmtp_socket| { - connect(imap_socket).context("server says hello")?; - login(imap_socket, Account::Alice).context("login test")?; - enable(imap_socket, Enable::Utf8Accept, Some(Enable::Utf8Accept))?; - enable(imap_socket, Enable::Utf8Accept, None)?; - logout(imap_socket)?; - - Ok(()) - }) - .expect("test fully run"); -} - -fn rfc6851_imapext_move() { - println!("🧪 rfc6851_imapext_move"); - common::aerogramme_provider_daemon_dev(|imap_socket, lmtp_socket| { - connect(imap_socket).context("server says hello")?; - - capability(imap_socket, Extension::Move).context("check server capabilities")?; - login(imap_socket, Account::Alice).context("login test")?; - create_mailbox(imap_socket, Mailbox::Archive).context("created mailbox archive")?; - let select_res = - select(imap_socket, Mailbox::Inbox, SelectMod::None).context("select inbox")?; - assert!(select_res.contains("* 0 EXISTS")); - - lmtp_handshake(lmtp_socket).context("handshake lmtp done")?; - lmtp_deliver_email(lmtp_socket, Email::Basic).context("mail delivered successfully")?; - - noop_exists(imap_socket, 1).context("noop loop must detect a new email")?; - r#move(imap_socket, Selection::FirstId, Mailbox::Archive) - .context("message from inbox moved to archive")?; - - unselect(imap_socket) - .context("unselect inbox while preserving email with the \\Delete flag")?; - let select_res = - select(imap_socket, Mailbox::Archive, 
SelectMod::None).context("select archive")?; - assert!(select_res.contains("* 1 EXISTS")); - - let srv_msg = fetch( - imap_socket, - Selection::FirstId, - FetchKind::Rfc822, - FetchMod::None, - ) - .context("check mail exists")?; - let orig_email = std::str::from_utf8(EMAIL2)?; - assert!(srv_msg.contains(orig_email)); - - logout(imap_socket).context("must quit")?; - - Ok(()) - }) - .expect("test fully run"); -} - -fn rfc7888_imapext_literal() { - println!("🧪 rfc7888_imapext_literal"); - common::aerogramme_provider_daemon_dev(|imap_socket, _lmtp_socket| { - connect(imap_socket).context("server says hello")?; - - capability(imap_socket, Extension::LiteralPlus).context("check server capabilities")?; - login_with_literal(imap_socket, Account::Alice).context("use literal to connect Alice")?; - - Ok(()) - }) - .expect("test fully run"); -} - -fn rfc4551_imapext_condstore() { - println!("🧪 rfc4551_imapext_condstore"); - common::aerogramme_provider_daemon_dev(|imap_socket, lmtp_socket| { - // Setup the test - connect(imap_socket).context("server says hello")?; - - // RFC 3.1.1 Advertising Support for CONDSTORE - capability(imap_socket, Extension::Condstore).context("check server capabilities")?; - login(imap_socket, Account::Alice).context("login test")?; - - // RFC 3.1.8. CONDSTORE Parameter to SELECT and EXAMINE - let select_res = - select(imap_socket, Mailbox::Inbox, SelectMod::Condstore).context("select inbox")?; - // RFC 3.1.2 New OK Untagged Responses for SELECT and EXAMINE - assert!(select_res.contains("[HIGHESTMODSEQ 1]")); - - // RFC 3.1.3. STORE and UID STORE Commands - lmtp_handshake(lmtp_socket).context("handshake lmtp done")?; - lmtp_deliver_email(lmtp_socket, Email::Basic).context("mail delivered successfully")?; - lmtp_deliver_email(lmtp_socket, Email::Multipart).context("mail delivered successfully")?; - noop_exists(imap_socket, 2).context("noop loop must detect a new email")?; - let store_res = store( - imap_socket, - Selection::All, - Flag::Important, - StoreAction::AddFlags, - StoreMod::UnchangedSince(1), - )?; - assert!(store_res.contains("[MODIFIED 2]")); - assert!(store_res.contains("* 1 FETCH (FLAGS (\\Important) MODSEQ (3))")); - assert!(!store_res.contains("* 2 FETCH")); - assert_eq!(store_res.lines().count(), 2); - - // RFC 3.1.4. FETCH and UID FETCH Commands - let fetch_res = fetch( - imap_socket, - Selection::All, - FetchKind::Rfc822Size, - FetchMod::ChangedSince(2), - )?; - assert!(fetch_res.contains("* 1 FETCH (RFC822.SIZE 81 MODSEQ (3))")); - assert!(!fetch_res.contains("* 2 FETCH")); - assert_eq!(store_res.lines().count(), 2); - - // RFC 3.1.5. MODSEQ Search Criterion in SEARCH - let search_res = search(imap_socket, SearchKind::ModSeq(3))?; - // RFC 3.1.6. 
Modified SEARCH Untagged Response - assert!(search_res.contains("* SEARCH 1 (MODSEQ 3)")); - - // RFC 3.1.7 HIGHESTMODSEQ Status Data Items - let status_res = status(imap_socket, Mailbox::Inbox, StatusKind::HighestModSeq)?; - assert!(status_res.contains("HIGHESTMODSEQ 3")); - - Ok(()) - }) - .expect("test fully run"); -} - -fn rfc2177_imapext_idle() { - println!("🧪 rfc2177_imapext_idle"); - common::aerogramme_provider_daemon_dev(|imap_socket, lmtp_socket| { - // Test setup, check capability - connect(imap_socket).context("server says hello")?; - capability(imap_socket, Extension::Idle).context("check server capabilities")?; - login(imap_socket, Account::Alice).context("login test")?; - select(imap_socket, Mailbox::Inbox, SelectMod::None).context("select inbox")?; - - // Check that new messages from LMTP are correctly detected during idling - start_idle(imap_socket).context("can't start idling")?; - lmtp_handshake(lmtp_socket).context("handshake lmtp done")?; - lmtp_deliver_email(lmtp_socket, Email::Basic).context("mail delivered successfully")?; - let srv_msg = stop_idle(imap_socket).context("stop idling")?; - assert!(srv_msg.contains("* 1 EXISTS")); - - Ok(()) - }) - .expect("test fully run"); -} - -fn rfc4315_imapext_uidplus() { - println!("🧪 rfc4315_imapext_uidplus"); - common::aerogramme_provider_daemon_dev(|imap_socket, lmtp_socket| { - // Test setup, check capability, insert 2 emails - connect(imap_socket).context("server says hello")?; - capability(imap_socket, Extension::UidPlus).context("check server capabilities")?; - login(imap_socket, Account::Alice).context("login test")?; - select(imap_socket, Mailbox::Inbox, SelectMod::None).context("select inbox")?; - lmtp_handshake(lmtp_socket).context("handshake lmtp done")?; - lmtp_deliver_email(lmtp_socket, Email::Basic).context("mail delivered successfully")?; - lmtp_deliver_email(lmtp_socket, Email::Multipart).context("mail delivered successfully")?; - noop_exists(imap_socket, 2).context("noop loop must detect a new email")?; - - // Check UID EXPUNGE seqset - store( - imap_socket, - Selection::All, - Flag::Deleted, - StoreAction::AddFlags, - StoreMod::None, - )?; - let res = uid_expunge(imap_socket, Selection::FirstId)?; - assert_eq!(res.lines().count(), 2); - assert!(res.contains("* 1 EXPUNGE")); - - // APPENDUID check UID + UID VALIDITY - // Note: 4 and not 3, as we update the UID counter when we delete an email - // it's part of our UID proof - let res = append(imap_socket, Email::Multipart)?; - assert!(res.contains("[APPENDUID 1 4]")); - - // COPYUID, check - create_mailbox(imap_socket, Mailbox::Archive).context("created mailbox archive")?; - let res = copy(imap_socket, Selection::FirstId, Mailbox::Archive)?; - assert!(res.contains("[COPYUID 1 2 1]")); - - // MOVEUID, check - let res = r#move(imap_socket, Selection::FirstId, Mailbox::Archive)?; - assert!(res.contains("[COPYUID 1 2 2]")); - - Ok(()) - }) - .expect("test fully run"); -} - -/// -/// Example -/// -/// ```text -/// 30 list "" "*" RETURN (STATUS (MESSAGES UNSEEN)) -/// * LIST (\Subscribed) "." 
INBOX -/// * STATUS INBOX (MESSAGES 2 UNSEEN 1) -/// 30 OK LIST completed -/// ``` -fn rfc5819_imapext_liststatus() { - println!("🧪 rfc5819_imapext_liststatus"); - common::aerogramme_provider_daemon_dev(|imap_socket, lmtp_socket| { - // Test setup, check capability, add 2 emails, read 1 - connect(imap_socket).context("server says hello")?; - capability(imap_socket, Extension::ListStatus).context("check server capabilities")?; - login(imap_socket, Account::Alice).context("login test")?; - select(imap_socket, Mailbox::Inbox, SelectMod::None).context("select inbox")?; - lmtp_handshake(lmtp_socket).context("handshake lmtp done")?; - lmtp_deliver_email(lmtp_socket, Email::Basic).context("mail delivered successfully")?; - lmtp_deliver_email(lmtp_socket, Email::Multipart).context("mail delivered successfully")?; - noop_exists(imap_socket, 2).context("noop loop must detect a new email")?; - fetch( - imap_socket, - Selection::FirstId, - FetchKind::Rfc822, - FetchMod::None, - ) - .context("read one message")?; - close(imap_socket).context("close inbox")?; - - // Test return status MESSAGES UNSEEN - let ret = list( - imap_socket, - MbxSelect::All, - ListReturn::StatusMessagesUnseen, - )?; - assert!(ret.contains("* STATUS INBOX (MESSAGES 2 UNSEEN 1)")); - - // Test that without RETURN, no status is sent - let ret = list(imap_socket, MbxSelect::All, ListReturn::None)?; - assert!(!ret.contains("* STATUS")); - - Ok(()) - }) - .expect("test fully run"); -} diff --git a/tests/common/constants.rs b/tests/common/constants.rs deleted file mode 100644 index c11a04d..0000000 --- a/tests/common/constants.rs +++ /dev/null @@ -1,54 +0,0 @@ -use std::time; - -pub static SMALL_DELAY: time::Duration = time::Duration::from_millis(200); - -pub static EMAIL1: &[u8] = b"Date: Sat, 8 Jul 2023 07:14:29 +0200\r -From: Bob Robert \r -To: Alice Malice \r -CC: =?ISO-8859-1?Q?Andr=E9?= Pirard \r -Subject: =?ISO-8859-1?B?SWYgeW91IGNhbiByZWFkIHRoaXMgeW8=?=\r - =?ISO-8859-2?B?dSB1bmRlcnN0YW5kIHRoZSBleGFtcGxlLg==?=\r -X-Unknown: something something\r -Bad entry\r - on multiple lines\r -Message-ID: \r -MIME-Version: 1.0\r -Content-Type: multipart/alternative;\r - boundary=\"b1_e376dc71bafc953c0b0fdeb9983a9956\"\r -Content-Transfer-Encoding: 7bit\r -\r -This is a multi-part message in MIME format.\r -\r ---b1_e376dc71bafc953c0b0fdeb9983a9956\r -Content-Type: text/plain; charset=utf-8\r -Content-Transfer-Encoding: quoted-printable\r -\r -GZ\r -OoOoO\r -oOoOoOoOo\r -oOoOoOoOoOoOoOoOo\r -oOoOoOoOoOoOoOoOoOoOoOo\r -oOoOoOoOoOoOoOoOoOoOoOoOoOoOo\r -OoOoOoOoOoOoOoOoOoOoOoOoOoOoOoOoO\r -\r ---b1_e376dc71bafc953c0b0fdeb9983a9956\r -Content-Type: text/html; charset=us-ascii\r -\r -
GZ
\r -OoOoO
\r -oOoOoOoOo
\r -oOoOoOoOoOoOoOoOo
\r -oOoOoOoOoOoOoOoOoOoOoOo
\r -oOoOoOoOoOoOoOoOoOoOoOoOoOoOo
\r -OoOoOoOoOoOoOoOoOoOoOoOoOoOoOoOoO
\r -
\r -\r ---b1_e376dc71bafc953c0b0fdeb9983a9956--\r -"; - -pub static EMAIL2: &[u8] = b"From: alice@example.com\r -To: alice@example.tld\r -Subject: Test\r -\r -Hello world!\r -"; diff --git a/tests/common/fragments.rs b/tests/common/fragments.rs deleted file mode 100644 index 606af2b..0000000 --- a/tests/common/fragments.rs +++ /dev/null @@ -1,570 +0,0 @@ -use anyhow::{bail, Result}; -use std::io::Write; -use std::net::TcpStream; -use std::thread; - -use crate::common::constants::*; -use crate::common::*; - -/// These fragments are not a generic IMAP client -/// but specialized to our specific tests. They can't take -/// arbitrary values, only enum for which the code is known -/// to be correct. The idea is that the generated message is more -/// or less hardcoded by the developer, so its clear what's expected, -/// and not generated by a library. Also don't use vector of enum, -/// as it again introduce some kind of genericity we try so hard to avoid: -/// instead add a dedicated enum, for example "All" or anything relaevent that would -/// describe your list and then hardcode it in your fragment. -/// DON'T. TRY. TO. BE. GENERIC. HERE. - -pub fn connect(imap: &mut TcpStream) -> Result<()> { - let mut buffer: [u8; 1500] = [0; 1500]; - - let read = read_lines(imap, &mut buffer, None)?; - assert_eq!(&read[..4], &b"* OK"[..]); - - Ok(()) -} - -pub enum Account { - Alice, -} - -pub enum Extension { - None, - Unselect, - Move, - Condstore, - LiteralPlus, - Idle, - UidPlus, - ListStatus, -} - -pub enum Enable { - Utf8Accept, - CondStore, - All, -} - -pub enum Mailbox { - Inbox, - Archive, - Drafts, -} - -pub enum Flag { - Deleted, - Important, -} - -pub enum Email { - Basic, - Multipart, -} - -pub enum Selection { - FirstId, - SecondId, - All, -} - -pub enum SelectMod { - None, - Condstore, -} - -pub enum StoreAction { - AddFlags, - DelFlags, - SetFlags, - AddFlagsSilent, - DelFlagsSilent, - SetFlagsSilent, -} - -pub enum StoreMod { - None, - UnchangedSince(u64), -} - -pub enum FetchKind { - Rfc822, - Rfc822Size, -} - -pub enum FetchMod { - None, - ChangedSince(u64), -} - -pub enum SearchKind<'a> { - Text(&'a str), - ModSeq(u64), -} - -pub enum StatusKind { - UidNext, - HighestModSeq, -} - -pub enum MbxSelect { - All, -} - -pub enum ListReturn { - None, - StatusMessagesUnseen, -} - -pub fn capability(imap: &mut TcpStream, ext: Extension) -> Result<()> { - imap.write(&b"5 capability\r\n"[..])?; - - let maybe_ext = match ext { - Extension::None => None, - Extension::Unselect => Some("UNSELECT"), - Extension::Move => Some("MOVE"), - Extension::Condstore => Some("CONDSTORE"), - Extension::LiteralPlus => Some("LITERAL+"), - Extension::Idle => Some("IDLE"), - Extension::UidPlus => Some("UIDPLUS"), - Extension::ListStatus => Some("LIST-STATUS"), - }; - - let mut buffer: [u8; 6000] = [0; 6000]; - let read = read_lines(imap, &mut buffer, Some(&b"5 OK"[..]))?; - let srv_msg = std::str::from_utf8(read)?; - assert!(srv_msg.contains("IMAP4REV1")); - if let Some(ext) = maybe_ext { - assert!(srv_msg.contains(ext)); - } - - Ok(()) -} - -pub fn login(imap: &mut TcpStream, account: Account) -> Result<()> { - let mut buffer: [u8; 1500] = [0; 1500]; - - assert!(matches!(account, Account::Alice)); - imap.write(&b"10 login alice hunter2\r\n"[..])?; - - let read = read_lines(imap, &mut buffer, None)?; - assert_eq!(&read[..5], &b"10 OK"[..]); - - Ok(()) -} - -pub fn login_with_literal(imap: &mut TcpStream, account: Account) -> Result<()> { - let mut buffer: [u8; 1500] = [0; 1500]; - - assert!(matches!(account, 
Account::Alice)); - imap.write(&b"10 login {5+}\r\nalice {7+}\r\nhunter2\r\n"[..])?; - let _read = read_lines(imap, &mut buffer, Some(&b"10 OK"[..]))?; - Ok(()) -} - -pub fn create_mailbox(imap: &mut TcpStream, mbx: Mailbox) -> Result<()> { - let mut buffer: [u8; 1500] = [0; 1500]; - - let mbx_str = match mbx { - Mailbox::Inbox => "INBOX", - Mailbox::Archive => "ArchiveCustom", - Mailbox::Drafts => "DraftsCustom", - }; - - let cmd = format!("15 create {}\r\n", mbx_str); - imap.write(cmd.as_bytes())?; - let read = read_lines(imap, &mut buffer, None)?; - assert_eq!(&read[..12], &b"15 OK CREATE"[..]); - - Ok(()) -} - -pub fn list(imap: &mut TcpStream, select: MbxSelect, mod_return: ListReturn) -> Result { - let mut buffer: [u8; 6000] = [0; 6000]; - - let select_str = match select { - MbxSelect::All => "%", - }; - - let mod_return_str = match mod_return { - ListReturn::None => "", - ListReturn::StatusMessagesUnseen => " RETURN (STATUS (MESSAGES UNSEEN))", - }; - - imap.write(format!("19 LIST \"\" \"{}\"{}\r\n", select_str, mod_return_str).as_bytes())?; - - let read = read_lines(imap, &mut buffer, Some(&b"19 OK"[..]))?; - let srv_msg = std::str::from_utf8(read)?; - Ok(srv_msg.to_string()) -} - -pub fn select(imap: &mut TcpStream, mbx: Mailbox, modifier: SelectMod) -> Result { - let mut buffer: [u8; 6000] = [0; 6000]; - - let mbx_str = match mbx { - Mailbox::Inbox => "INBOX", - Mailbox::Archive => "ArchiveCustom", - Mailbox::Drafts => "DraftsCustom", - }; - - let mod_str = match modifier { - SelectMod::Condstore => " (CONDSTORE)", - SelectMod::None => "", - }; - - imap.write(format!("20 select {}{}\r\n", mbx_str, mod_str).as_bytes())?; - - let read = read_lines(imap, &mut buffer, Some(&b"20 OK"[..]))?; - let srv_msg = std::str::from_utf8(read)?; - - Ok(srv_msg.to_string()) -} - -pub fn unselect(imap: &mut TcpStream) -> Result<()> { - imap.write(&b"70 unselect\r\n"[..])?; - let mut buffer: [u8; 1500] = [0; 1500]; - let _read = read_lines(imap, &mut buffer, Some(&b"70 OK"[..]))?; - - Ok(()) -} - -pub fn check(imap: &mut TcpStream) -> Result<()> { - let mut buffer: [u8; 1500] = [0; 1500]; - - imap.write(&b"21 check\r\n"[..])?; - let _read = read_lines(imap, &mut buffer, Some(&b"21 OK"[..]))?; - - Ok(()) -} - -pub fn status(imap: &mut TcpStream, mbx: Mailbox, sk: StatusKind) -> Result { - let mbx_str = match mbx { - Mailbox::Inbox => "INBOX", - Mailbox::Archive => "ArchiveCustom", - Mailbox::Drafts => "DraftsCustom", - }; - let sk_str = match sk { - StatusKind::UidNext => "(UIDNEXT)", - StatusKind::HighestModSeq => "(HIGHESTMODSEQ)", - }; - imap.write(format!("25 STATUS {} {}\r\n", mbx_str, sk_str).as_bytes())?; - let mut buffer: [u8; 6000] = [0; 6000]; - let read = read_lines(imap, &mut buffer, Some(&b"25 OK"[..]))?; - let srv_msg = std::str::from_utf8(read)?; - - Ok(srv_msg.to_string()) -} - -pub fn lmtp_handshake(lmtp: &mut TcpStream) -> Result<()> { - let mut buffer: [u8; 1500] = [0; 1500]; - - let _read = read_lines(lmtp, &mut buffer, None)?; - assert_eq!(&buffer[..4], &b"220 "[..]); - - lmtp.write(&b"LHLO example.tld\r\n"[..])?; - let _read = read_lines(lmtp, &mut buffer, Some(&b"250 "[..]))?; - - Ok(()) -} - -pub fn lmtp_deliver_email(lmtp: &mut TcpStream, email_type: Email) -> Result<()> { - let mut buffer: [u8; 1500] = [0; 1500]; - - let email = match email_type { - Email::Basic => EMAIL2, - Email::Multipart => EMAIL1, - }; - lmtp.write(&b"MAIL FROM:\r\n"[..])?; - let _read = read_lines(lmtp, &mut buffer, Some(&b"250 2.0.0"[..]))?; - - lmtp.write(&b"RCPT TO:\r\n"[..])?; - let _read = 
read_lines(lmtp, &mut buffer, Some(&b"250 2.1.5"[..]))?;
-
-    lmtp.write(&b"DATA\r\n"[..])?;
-    let _read = read_lines(lmtp, &mut buffer, Some(&b"354 "[..]))?;
-
-    lmtp.write(email)?;
-    lmtp.write(&b"\r\n.\r\n"[..])?;
-    let _read = read_lines(lmtp, &mut buffer, Some(&b"250 2.0.0"[..]))?;
-
-    Ok(())
-}
-
-pub fn noop_exists(imap: &mut TcpStream, must_exists: u32) -> Result<()> {
-    let mut buffer: [u8; 6000] = [0; 6000];
-
-    let mut max_retry = 20;
-    loop {
-        max_retry -= 1;
-        imap.write(&b"30 NOOP\r\n"[..])?;
-        let read = read_lines(imap, &mut buffer, Some(&b"30 OK"[..]))?;
-        let srv_msg = std::str::from_utf8(read)?;
-
-        for line in srv_msg.lines() {
-            if line.contains("EXISTS") {
-                let got = read_first_u32(line)?;
-                if got == must_exists {
-                    // Done
-                    return Ok(());
-                }
-            }
-        }
-
-        if max_retry <= 0 {
-            // Failed
-            bail!("no more retry");
-        }
-
-        thread::sleep(SMALL_DELAY);
-    }
-}
-
-pub fn fetch(
-    imap: &mut TcpStream,
-    selection: Selection,
-    kind: FetchKind,
-    modifier: FetchMod,
-) -> Result<String> {
-    let mut buffer: [u8; 65535] = [0; 65535];
-
-    let sel_str = match selection {
-        Selection::FirstId => "1",
-        Selection::SecondId => "2",
-        Selection::All => "1:*",
-    };
-
-    let kind_str = match kind {
-        FetchKind::Rfc822 => "RFC822",
-        FetchKind::Rfc822Size => "RFC822.SIZE",
-    };
-
-    let mod_str = match modifier {
-        FetchMod::None => "".into(),
-        FetchMod::ChangedSince(val) => format!(" (CHANGEDSINCE {})", val),
-    };
-
-    imap.write(format!("40 fetch {} {}{}\r\n", sel_str, kind_str, mod_str).as_bytes())?;
-
-    let read = read_lines(imap, &mut buffer, Some(&b"40 OK FETCH"[..]))?;
-    let srv_msg = std::str::from_utf8(read)?;
-
-    Ok(srv_msg.to_string())
-}
-
-pub fn copy(imap: &mut TcpStream, selection: Selection, to: Mailbox) -> Result<String> {
-    let mut buffer: [u8; 65535] = [0; 65535];
-    assert!(matches!(selection, Selection::FirstId));
-    assert!(matches!(to, Mailbox::Archive));
-
-    imap.write(&b"45 copy 1 ArchiveCustom\r\n"[..])?;
-    let read = read_lines(imap, &mut buffer, None)?;
-    assert_eq!(&read[..5], &b"45 OK"[..]);
-    let srv_msg = std::str::from_utf8(read)?;
-
-    Ok(srv_msg.to_string())
-}
-
-pub fn append(imap: &mut TcpStream, content: Email) -> Result<String> {
-    let mut buffer: [u8; 6000] = [0; 6000];
-
-    let ref_mail = match content {
-        Email::Multipart => EMAIL1,
-        Email::Basic => EMAIL2,
-    };
-
-    let append_cmd = format!("47 append inbox (\\Seen) {{{}}}\r\n", ref_mail.len());
-    println!("append cmd: {}", append_cmd);
-    imap.write(append_cmd.as_bytes())?;
-
-    // wait for continuation
-    let read = read_lines(imap, &mut buffer, None)?;
-    assert_eq!(read[0], b'+');
-
-    // write our stuff
-    imap.write(ref_mail)?;
-    imap.write(&b"\r\n"[..])?;
-    let read = read_lines(imap, &mut buffer, Some(&b"47 OK"[..]))?;
-    let srv_msg = std::str::from_utf8(read)?;
-
-    Ok(srv_msg.to_string())
-}
-
-pub fn search(imap: &mut TcpStream, sk: SearchKind) -> Result<String> {
-    let sk_str = match sk {
-        SearchKind::Text(x) => format!("TEXT \"{}\"", x),
-        SearchKind::ModSeq(x) => format!("MODSEQ {}", x),
-    };
-    imap.write(format!("55 SEARCH {}\r\n", sk_str).as_bytes())?;
-    let mut buffer: [u8; 1500] = [0; 1500];
-    let read = read_lines(imap, &mut buffer, Some(&b"55 OK"[..]))?;
-    let srv_msg = std::str::from_utf8(read)?;
-    Ok(srv_msg.to_string())
-}
-
-pub fn store(
-    imap: &mut TcpStream,
-    sel: Selection,
-    flag: Flag,
-    action: StoreAction,
-    modifier: StoreMod,
-) -> Result<String> {
-    let mut buffer: [u8; 6000] = [0; 6000];
-
-    let seq = match sel {
-        Selection::FirstId => "1",
-        Selection::SecondId => "2",
-        Selection::All => "1:*",
-    };
-
-    let modif = match modifier {
-        StoreMod::None => "".into(),
-        StoreMod::UnchangedSince(val) => format!(" (UNCHANGEDSINCE {})", val),
-    };
-
-    let flags_str = match flag {
-        Flag::Deleted => "(\\Deleted)",
-        Flag::Important => "(\\Important)",
-    };
-
-    let action_str = match action {
-        StoreAction::AddFlags => "+FLAGS",
-        StoreAction::DelFlags => "-FLAGS",
-        StoreAction::SetFlags => "FLAGS",
-        StoreAction::AddFlagsSilent => "+FLAGS.SILENT",
-        StoreAction::DelFlagsSilent => "-FLAGS.SILENT",
-        StoreAction::SetFlagsSilent => "FLAGS.SILENT",
-    };
-
-    imap.write(format!("57 STORE {}{} {} {}\r\n", seq, modif, action_str, flags_str).as_bytes())?;
-    let read = read_lines(imap, &mut buffer, Some(&b"57 OK"[..]))?;
-    let srv_msg = std::str::from_utf8(read)?;
-    Ok(srv_msg.to_string())
-}
-
-pub fn expunge(imap: &mut TcpStream) -> Result<()> {
-    imap.write(&b"60 expunge\r\n"[..])?;
-    let mut buffer: [u8; 1500] = [0; 1500];
-    let _read = read_lines(imap, &mut buffer, Some(&b"60 OK EXPUNGE"[..]))?;
-
-    Ok(())
-}
-
-pub fn uid_expunge(imap: &mut TcpStream, sel: Selection) -> Result<String> {
-    use Selection::*;
-    let mut buffer: [u8; 6000] = [0; 6000];
-    let selstr = match sel {
-        FirstId => "1",
-        SecondId => "2",
-        All => "1:*",
-    };
-    imap.write(format!("61 UID EXPUNGE {}\r\n", selstr).as_bytes())?;
-    let read = read_lines(imap, &mut buffer, Some(&b"61 OK"[..]))?;
-    let srv_msg = std::str::from_utf8(read)?;
-    Ok(srv_msg.to_string())
-}
-
-pub fn rename_mailbox(imap: &mut TcpStream, from: Mailbox, to: Mailbox) -> Result<()> {
-    assert!(matches!(from, Mailbox::Archive));
-    assert!(matches!(to, Mailbox::Drafts));
-
-    imap.write(&b"70 rename ArchiveCustom DraftsCustom\r\n"[..])?;
-    let mut buffer: [u8; 1500] = [0; 1500];
-    let read = read_lines(imap, &mut buffer, None)?;
-    assert_eq!(&read[..5], &b"70 OK"[..]);
-
-    imap.write(&b"71 list \"\" *\r\n"[..])?;
-    let read = read_lines(imap, &mut buffer, Some(&b"71 OK LIST"[..]))?;
-    let srv_msg = std::str::from_utf8(read)?;
-    assert!(!srv_msg.contains(" ArchiveCustom\r\n"));
-    assert!(srv_msg.contains(" INBOX\r\n"));
-    assert!(srv_msg.contains(" DraftsCustom\r\n"));
-
-    Ok(())
-}
-
-pub fn delete_mailbox(imap: &mut TcpStream, mbx: Mailbox) -> Result<()> {
-    let mbx_str = match mbx {
-        Mailbox::Inbox => "INBOX",
-        Mailbox::Archive => "ArchiveCustom",
-        Mailbox::Drafts => "DraftsCustom",
-    };
-    let cmd = format!("80 delete {}\r\n", mbx_str);
-
-    imap.write(cmd.as_bytes())?;
-    let mut buffer: [u8; 1500] = [0; 1500];
-    let read = read_lines(imap, &mut buffer, None)?;
-    assert_eq!(&read[..5], &b"80 OK"[..]);
-
-    imap.write(&b"81 list \"\" *\r\n"[..])?;
-    let read = read_lines(imap, &mut buffer, Some(&b"81 OK"[..]))?;
-    let srv_msg = std::str::from_utf8(read)?;
-    assert!(srv_msg.contains(" INBOX\r\n"));
-    assert!(!srv_msg.contains(format!(" {}\r\n", mbx_str).as_str()));
-
-    Ok(())
-}
-
-pub fn close(imap: &mut TcpStream) -> Result<()> {
-    imap.write(&b"60 close\r\n"[..])?;
-    let mut buffer: [u8; 1500] = [0; 1500];
-    let _read = read_lines(imap, &mut buffer, Some(&b"60 OK"[..]))?;
-
-    Ok(())
-}
-
-pub fn r#move(imap: &mut TcpStream, selection: Selection, to: Mailbox) -> Result<String> {
-    let mut buffer: [u8; 1500] = [0; 1500];
-    assert!(matches!(to, Mailbox::Archive));
-    assert!(matches!(selection, Selection::FirstId));
-
-    imap.write(&b"35 move 1 ArchiveCustom\r\n"[..])?;
-    let read = read_lines(imap, &mut buffer, Some(&b"35 OK"[..]))?;
-    let srv_msg = std::str::from_utf8(read)?;
-    assert!(srv_msg.contains("* 1 EXPUNGE"));
-
-    Ok(srv_msg.to_string())
-}
-
-pub fn enable(imap: &mut TcpStream, ask: Enable, done: Option<Enable>) -> Result<()> {
-    let mut buffer: [u8; 6000] = [0; 6000];
-    assert!(matches!(ask, Enable::Utf8Accept));
-
-    imap.write(&b"36 enable UTF8=ACCEPT\r\n"[..])?;
-    let read = read_lines(imap, &mut buffer, Some(&b"36 OK"[..]))?;
-    let srv_msg = std::str::from_utf8(read)?;
-    match done {
-        None => assert_eq!(srv_msg.lines().count(), 1),
-        Some(Enable::Utf8Accept) => {
-            assert_eq!(srv_msg.lines().count(), 2);
-            assert!(srv_msg.contains("* ENABLED UTF8=ACCEPT"));
-        }
-        _ => unimplemented!(),
-    }
-
-    Ok(())
-}
-
-pub fn start_idle(imap: &mut TcpStream) -> Result<()> {
-    let mut buffer: [u8; 1500] = [0; 1500];
-    imap.write(&b"98 IDLE\r\n"[..])?;
-    let read = read_lines(imap, &mut buffer, None)?;
-    assert_eq!(read[0], b'+');
-    Ok(())
-}
-
-pub fn stop_idle(imap: &mut TcpStream) -> Result<String> {
-    let mut buffer: [u8; 16536] = [0; 16536];
-    imap.write(&b"DONE\r\n"[..])?;
-    let read = read_lines(imap, &mut buffer, Some(&b"98 OK"[..]))?;
-    let srv_msg = std::str::from_utf8(read)?;
-    Ok(srv_msg.to_string())
-}
-
-pub fn logout(imap: &mut TcpStream) -> Result<()> {
-    imap.write(&b"99 logout\r\n"[..])?;
-    let mut buffer: [u8; 1500] = [0; 1500];
-    let read = read_lines(imap, &mut buffer, None)?;
-    assert_eq!(&read[..5], &b"* BYE"[..]);
-    Ok(())
-}
diff --git a/tests/common/mod.rs b/tests/common/mod.rs
deleted file mode 100644
index cbe0271..0000000
--- a/tests/common/mod.rs
+++ /dev/null
@@ -1,99 +0,0 @@
-#![allow(dead_code)]
-pub mod constants;
-pub mod fragments;
-
-use anyhow::{bail, Context, Result};
-use std::io::Read;
-use std::net::{Shutdown, TcpStream};
-use std::process::Command;
-use std::thread;
-
-use constants::SMALL_DELAY;
-
-pub fn aerogramme_provider_daemon_dev(
-    mut fx: impl FnMut(&mut TcpStream, &mut TcpStream) -> Result<()>,
-) -> Result<()> {
-    // Check port is not used (= free) before starting the test
-    let mut max_retry = 20;
-    loop {
-        max_retry -= 1;
-        match (TcpStream::connect("[::1]:1143"), max_retry) {
-            (Ok(_), 0) => bail!("something is listening on [::1]:1143 and prevent the test from starting"),
-            (Ok(_), _) => println!("something is listening on [::1]:1143, maybe a previous daemon quitting, retrying soon..."),
-            (Err(_), _) => {
-                println!("test ready to start, [::1]:1143 is free!");
-                break
-            }
-        }
-        thread::sleep(SMALL_DELAY);
-    }
-
-    // Start daemon
-    let mut daemon = Command::new(env!("CARGO_BIN_EXE_aerogramme"))
-        .arg("--dev")
-        .arg("provider")
-        .arg("daemon")
-        .spawn()?;
-
-    // Check that our daemon is correctly listening on the free port
-    let mut max_retry = 20;
-    let mut imap_socket = loop {
-        max_retry -= 1;
-        match (TcpStream::connect("[::1]:1143"), max_retry) {
-            (Err(e), 0) => bail!("no more retry, last error is: {}", e),
-            (Err(e), _) => {
-                println!("unable to connect: {} ; will retry soon...", e);
-            }
-            (Ok(v), _) => break v,
-        }
-        thread::sleep(SMALL_DELAY);
-    };
-
-    // Assuming now it's safe to open a LMTP socket
-    let mut lmtp_socket =
-        TcpStream::connect("[::1]:1025").context("lmtp socket must be connected")?;
-
-    println!("-- ready to test imap features --");
-    let result = fx(&mut imap_socket, &mut lmtp_socket);
-    println!("-- test teardown --");
-
-    imap_socket
-        .shutdown(Shutdown::Both)
-        .context("closing imap socket at the end of the test")?;
-    lmtp_socket
-        .shutdown(Shutdown::Both)
-        .context("closing lmtp socket at the end of the test")?;
-    daemon.kill().context("daemon should be killed")?;
-
-    result.context("all tests passed")
-}
-
-pub fn read_lines<'a, F: Read>(
-    reader: &mut F,
-    buffer: &'a mut [u8],
-
stop_marker: Option<&[u8]>, -) -> Result<&'a [u8]> { - let mut nbytes = 0; - loop { - nbytes += reader.read(&mut buffer[nbytes..])?; - //println!("partial read: {}", std::str::from_utf8(&buffer[..nbytes])?); - let pre_condition = match stop_marker { - None => true, - Some(mark) => buffer[..nbytes].windows(mark.len()).any(|w| w == mark), - }; - if pre_condition && nbytes >= 2 && &buffer[nbytes - 2..nbytes] == &b"\r\n"[..] { - break; - } - } - println!("read: {}", std::str::from_utf8(&buffer[..nbytes])?); - Ok(&buffer[..nbytes]) -} - -pub fn read_first_u32(inp: &str) -> Result { - Ok(inp - .chars() - .skip_while(|c| !c.is_digit(10)) - .take_while(|c| c.is_digit(10)) - .collect::() - .parse::()?) -} -- cgit v1.2.3 From 4d65366ff368cc9ea35115cb7e701bfebb166bc6 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Fri, 8 Mar 2024 11:34:24 +0100 Subject: Fixed some parsing bugs --- aero-dav/fuzz/fuzz_targets/dav.rs | 4 + aero-dav/src/caldecoder.rs | 37 ++++- aero-dav/src/calencoder.rs | 2 +- aero-dav/src/decoder.rs | 7 +- aero-dav/src/encoder.rs | 301 ++++++++++++++++++++------------------ aero-dav/src/xml.rs | 7 +- 6 files changed, 206 insertions(+), 152 deletions(-) diff --git a/aero-dav/fuzz/fuzz_targets/dav.rs b/aero-dav/fuzz/fuzz_targets/dav.rs index a3c6ece..5bd28bc 100644 --- a/aero-dav/fuzz/fuzz_targets/dav.rs +++ b/aero-dav/fuzz/fuzz_targets/dav.rs @@ -9,6 +9,7 @@ use quick_xml::reader::NsReader; use tokio::runtime::Runtime; use tokio::io::AsyncWriteExt; +// Split this file const tokens: [&str; 63] = [ "0", "1", @@ -125,6 +126,9 @@ impl Tag { #[derive(Arbitrary)] enum XmlNode { + //@FIXME: build RFC3339 and RFC822 Dates with chrono based on timestamps + //@FIXME: add small numbers + //@FIXME: add http status code Node(Tag, Vec), Number(u64), Text(Token), diff --git a/aero-dav/src/caldecoder.rs b/aero-dav/src/caldecoder.rs index fb840d6..3aae4ad 100644 --- a/aero-dav/src/caldecoder.rs +++ b/aero-dav/src/caldecoder.rs @@ -1,9 +1,39 @@ -//use super::types as dav; +use super::types as dav; use super::caltypes::*; use super::xml; use super::error; // ---- ROOT ELEMENTS --- +impl xml::QRead> for MkCalendar { + async fn qread(_xml: &mut xml::Reader) -> Result { + unreachable!(); + } +} + +impl> xml::QRead> for MkCalendarResponse { + async fn qread(_xml: &mut xml::Reader) -> Result { + unreachable!(); + } +} + +impl xml::QRead> for CalendarQuery { + async fn qread(_xml: &mut xml::Reader) -> Result { + unreachable!(); + } +} + +impl xml::QRead> for CalendarMultiget { + async fn qread(_xml: &mut xml::Reader) -> Result { + unreachable!(); + } +} + +impl xml::QRead for FreeBusyQuery { + async fn qread(_xml: &mut xml::Reader) -> Result { + unreachable!(); + } +} + // ---- EXTENSIONS --- impl xml::QRead for Violation { @@ -31,3 +61,8 @@ impl xml::QRead for ResourceType { } // ---- INNER XML ---- +impl xml::QRead for SupportedCollation { + async fn qread(_xml: &mut xml::Reader) -> Result { + unreachable!(); + } +} diff --git a/aero-dav/src/calencoder.rs b/aero-dav/src/calencoder.rs index 67892ed..a25d767 100644 --- a/aero-dav/src/calencoder.rs +++ b/aero-dav/src/calencoder.rs @@ -666,7 +666,7 @@ mod tests { use crate::types as dav; use crate::realization::Calendar; use tokio::io::AsyncWriteExt; - use chrono::{Utc,TimeZone,DateTime}; + use chrono::{Utc,TimeZone}; async fn serialize(elem: &impl QWrite) -> String { let mut buffer = Vec::new(); diff --git a/aero-dav/src/decoder.rs b/aero-dav/src/decoder.rs index 02bc376..28442a6 100644 --- a/aero-dav/src/decoder.rs +++ b/aero-dav/src/decoder.rs @@ -23,8 
+23,8 @@ impl QRead> for PropFind { let propfind: PropFind = loop { // allprop if let Some(_) = xml.maybe_open(DAV_URN, "allprop").await? { - let includ = xml.maybe_find::>().await?; xml.close().await?; + let includ = xml.maybe_find::>().await?; break PropFind::AllProp(includ) } @@ -594,8 +594,9 @@ impl QRead for Href { #[cfg(test)] mod tests { use super::*; - use chrono::{FixedOffset, DateTime, TimeZone, Utc}; + use chrono::{FixedOffset, TimeZone}; use crate::realization::Core; + use quick_xml::reader::NsReader; #[tokio::test] async fn basic_propfind_propname() { @@ -910,7 +911,7 @@ mod tests { Property::GetContentType("text/html".into()), Property::GetEtag(r#""zzyzx""#.into()), Property::GetLastModified(FixedOffset::west_opt(0).unwrap().with_ymd_and_hms(1998, 01, 12, 09, 25, 56).unwrap()), - //Property::ResourceType(vec![]), + Property::ResourceType(vec![]), Property::SupportedLock(vec![ LockEntry { lockscope: LockScope::Exclusive, diff --git a/aero-dav/src/encoder.rs b/aero-dav/src/encoder.rs index fd2f9ca..813efe6 100644 --- a/aero-dav/src/encoder.rs +++ b/aero-dav/src/encoder.rs @@ -633,6 +633,7 @@ impl QWrite for Violation { #[cfg(test)] mod tests { use super::*; + use super::super::xml; use crate::realization::Core; use tokio::io::AsyncWriteExt; @@ -653,43 +654,47 @@ mod tests { return got.into() } + async fn deserialize>(src: &str) -> T { + let mut rdr = xml::Reader::new(quick_xml::reader::NsReader::from_reader(src.as_bytes())).await.unwrap(); + rdr.find().await.unwrap() + } + #[tokio::test] async fn basic_href() { + let orig = Href("/SOGo/dav/so/".into()); - let got = serialize( - &Href("/SOGo/dav/so/".into()) - ).await; + let got = serialize(&orig).await; let expected = r#"/SOGo/dav/so/"#; assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + assert_eq!(deserialize::(got.as_str()).await, orig) } #[tokio::test] async fn basic_multistatus() { - let got = serialize( - &Multistatus::> { - responses: vec![], - responsedescription: Some(ResponseDescription("Hello world".into())) - }, - ).await; + let orig = Multistatus::> { + responses: vec![], + responsedescription: Some(ResponseDescription("Hello world".into())) + }; + let got = serialize(&orig).await; let expected = r#" Hello world "#; assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + assert_eq!(deserialize::>>(got.as_str()).await, orig) } #[tokio::test] async fn rfc_error_delete_locked() { - let got = serialize( - &Error::(vec![ + let orig = Error::(vec![ Violation::LockTokenSubmitted(vec![ Href("/locked/".into()) ]) - ]), - ).await; + ]); + let got = serialize(&orig).await; let expected = r#" @@ -698,72 +703,74 @@ mod tests { "#; assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + assert_eq!(deserialize::>(got.as_str()).await, orig) } #[tokio::test] async fn rfc_propname_req() { - let got = serialize( - &PropFind::::PropName, - ).await; + let orig = PropFind::::PropName; + + let got = serialize(&orig).await; let expected = r#" "#; assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + assert_eq!(deserialize::>(got.as_str()).await, orig) } #[tokio::test] async fn rfc_propname_res() { - let got = serialize( - &Multistatus::> { - responses: vec![ - Response { - status_or_propstat: StatusOrPropstat::PropStat( - Href("http://www.example.com/container/".into()), - vec![PropStat { - prop: PropName(vec![ - PropertyRequest::CreationDate, - PropertyRequest::DisplayName, - PropertyRequest::ResourceType, - PropertyRequest::SupportedLock, - ]), - 
status: Status(http::status::StatusCode::OK), - error: None, - responsedescription: None, - }] - ), - error: None, - responsedescription: None, - location: None, - }, - Response { - status_or_propstat: StatusOrPropstat::PropStat( - Href("http://www.example.com/container/front.html".into()), - vec![PropStat { - prop: PropName(vec![ - PropertyRequest::CreationDate, - PropertyRequest::DisplayName, - PropertyRequest::GetContentLength, - PropertyRequest::GetContentType, - PropertyRequest::GetEtag, - PropertyRequest::GetLastModified, - PropertyRequest::ResourceType, - PropertyRequest::SupportedLock, - ]), - status: Status(http::status::StatusCode::OK), - error: None, - responsedescription: None, - } - ]), - error: None, - responsedescription: None, - location: None, - }, - ], - responsedescription: None, - }, - ).await; + let orig = Multistatus::> { + responses: vec![ + Response { + status_or_propstat: StatusOrPropstat::PropStat( + Href("http://www.example.com/container/".into()), + vec![PropStat { + prop: PropName(vec![ + PropertyRequest::CreationDate, + PropertyRequest::DisplayName, + PropertyRequest::ResourceType, + PropertyRequest::SupportedLock, + ]), + status: Status(http::status::StatusCode::OK), + error: None, + responsedescription: None, + }] + ), + error: None, + responsedescription: None, + location: None, + }, + Response { + status_or_propstat: StatusOrPropstat::PropStat( + Href("http://www.example.com/container/front.html".into()), + vec![PropStat { + prop: PropName(vec![ + PropertyRequest::CreationDate, + PropertyRequest::DisplayName, + PropertyRequest::GetContentLength, + PropertyRequest::GetContentType, + PropertyRequest::GetEtag, + PropertyRequest::GetLastModified, + PropertyRequest::ResourceType, + PropertyRequest::SupportedLock, + ]), + status: Status(http::status::StatusCode::OK), + error: None, + responsedescription: None, + } + ]), + error: None, + responsedescription: None, + location: None, + }, + ], + responsedescription: None, + }; + + let got = serialize(&orig).await; let expected = r#" @@ -798,100 +805,102 @@ mod tests { assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + assert_eq!(deserialize::>>(got.as_str()).await, orig) } #[tokio::test] async fn rfc_allprop_req() { - let got = serialize( - &PropFind::::AllProp(None), - ).await; + let orig = PropFind::::AllProp(None); + let got = serialize(&orig).await; let expected = r#" "#; assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + assert_eq!(deserialize::>(got.as_str()).await, orig) } #[tokio::test] async fn rfc_allprop_res() { - use chrono::{DateTime,FixedOffset,TimeZone}; - let got = serialize( - &Multistatus::> { - responses: vec![ - Response { - status_or_propstat: StatusOrPropstat::PropStat( - Href("/container/".into()), - vec![PropStat { - prop: PropValue(vec![ - Property::CreationDate(FixedOffset::west_opt(8 * 3600) - .unwrap() - .with_ymd_and_hms(1997, 12, 1, 17, 42, 21) - .unwrap()), - Property::DisplayName("Example collection".into()), - Property::ResourceType(vec![ResourceType::Collection]), - Property::SupportedLock(vec![ - LockEntry { - lockscope: LockScope::Exclusive, - locktype: LockType::Write, - }, - LockEntry { - lockscope: LockScope::Shared, - locktype: LockType::Write, - }, - ]), + use chrono::{FixedOffset,TimeZone}; + + let orig = Multistatus::> { + responses: vec![ + Response { + status_or_propstat: StatusOrPropstat::PropStat( + Href("/container/".into()), + vec![PropStat { + prop: PropValue(vec![ + Property::CreationDate(FixedOffset::west_opt(8 * 
3600) + .unwrap() + .with_ymd_and_hms(1997, 12, 1, 17, 42, 21) + .unwrap()), + Property::DisplayName("Example collection".into()), + Property::ResourceType(vec![ResourceType::Collection]), + Property::SupportedLock(vec![ + LockEntry { + lockscope: LockScope::Exclusive, + locktype: LockType::Write, + }, + LockEntry { + lockscope: LockScope::Shared, + locktype: LockType::Write, + }, ]), - status: Status(http::status::StatusCode::OK), - error: None, - responsedescription: None, - }] - ), + ]), + status: Status(http::status::StatusCode::OK), error: None, responsedescription: None, - location: None, - }, - Response { - status_or_propstat: StatusOrPropstat::PropStat( - Href("/container/front.html".into()), - vec![PropStat { - prop: PropValue(vec![ - Property::CreationDate(FixedOffset::west_opt(8 * 3600) - .unwrap() - .with_ymd_and_hms(1997, 12, 1, 18, 27, 21) - .unwrap()), - Property::DisplayName("Example HTML resource".into()), - Property::GetContentLength(4525), - Property::GetContentType("text/html".into()), - Property::GetEtag(r#""zzyzx""#.into()), - Property::GetLastModified(FixedOffset::east_opt(0) - .unwrap() - .with_ymd_and_hms(1998, 1, 12, 9, 25, 56) - .unwrap()), - Property::ResourceType(vec![]), - Property::SupportedLock(vec![ - LockEntry { - lockscope: LockScope::Exclusive, - locktype: LockType::Write, - }, - LockEntry { - lockscope: LockScope::Shared, - locktype: LockType::Write, - }, - ]), + }] + ), + error: None, + responsedescription: None, + location: None, + }, + Response { + status_or_propstat: StatusOrPropstat::PropStat( + Href("/container/front.html".into()), + vec![PropStat { + prop: PropValue(vec![ + Property::CreationDate(FixedOffset::west_opt(8 * 3600) + .unwrap() + .with_ymd_and_hms(1997, 12, 1, 18, 27, 21) + .unwrap()), + Property::DisplayName("Example HTML resource".into()), + Property::GetContentLength(4525), + Property::GetContentType("text/html".into()), + Property::GetEtag(r#""zzyzx""#.into()), + Property::GetLastModified(FixedOffset::east_opt(0) + .unwrap() + .with_ymd_and_hms(1998, 1, 12, 9, 25, 56) + .unwrap()), + Property::ResourceType(vec![]), + Property::SupportedLock(vec![ + LockEntry { + lockscope: LockScope::Exclusive, + locktype: LockType::Write, + }, + LockEntry { + lockscope: LockScope::Shared, + locktype: LockType::Write, + }, ]), - status: Status(http::status::StatusCode::OK), - error: None, - responsedescription: None, - }] - ), + ]), + status: Status(http::status::StatusCode::OK), error: None, responsedescription: None, - location: None, - }, - ], - responsedescription: None, - } - ).await; + }] + ), + error: None, + responsedescription: None, + location: None, + }, + ], + responsedescription: None, + }; + + let got = serialize(&orig).await; let expected = r#" @@ -961,16 +970,17 @@ mod tests { "#; assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + assert_eq!(deserialize::>>(got.as_str()).await, orig) } #[tokio::test] async fn rfc_allprop_include() { - let got = serialize( - &PropFind::::AllProp(Some(Include(vec![ - PropertyRequest::DisplayName, - PropertyRequest::ResourceType, - ]))), - ).await; + let orig = PropFind::::AllProp(Some(Include(vec![ + PropertyRequest::DisplayName, + PropertyRequest::ResourceType, + ]))); + + let got = serialize(&orig).await; let expected = r#" @@ -981,6 +991,7 @@ mod tests { "#; assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + assert_eq!(deserialize::>(got.as_str()).await, orig) } #[tokio::test] diff --git a/aero-dav/src/xml.rs b/aero-dav/src/xml.rs index 
98037ac..f9e04eb 100644 --- a/aero-dav/src/xml.rs +++ b/aero-dav/src/xml.rs @@ -79,7 +79,7 @@ impl Reader { /// skip a node at current level /// I would like to make this one private but not ready pub async fn skip(&mut self) -> Result, ParsingError> { - //println!("skipping inside node {:?}", self.parents.last()); + //println!("skipping inside node {:?} value {:?}", self.parents.last(), self.cur); match &self.cur { Event::Start(b) => { let _span = self.rdr.read_to_end_into_async(b.to_end().name(), &mut self.buf).await?; @@ -212,8 +212,10 @@ impl Reader { } pub async fn collect>(&mut self) -> Result, ParsingError> { - self.ensure_parent_has_child()?; let mut acc = Vec::new(); + if !self.parent_has_child() { + return Ok(acc) + } loop { match N::qread(self).await { @@ -230,6 +232,7 @@ impl Reader { } pub async fn open(&mut self, ns: &[u8], key: &str) -> Result, ParsingError> { + //println!("try open tag {:?}", key); let evt = match self.peek() { Event::Empty(_) if self.is_tag(ns, key) => self.cur.clone(), Event::Start(_) if self.is_tag(ns, key) => self.next().await?, -- cgit v1.2.3 From b786573e08c78b672880cd212db45fc58ab82c4c Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Fri, 8 Mar 2024 11:42:44 +0100 Subject: Fixed 2 more bugs --- aero-dav/src/decoder.rs | 6 ++-- aero-dav/src/encoder.rs | 89 +++++++++++++++++++++++++------------------------ 2 files changed, 49 insertions(+), 46 deletions(-) diff --git a/aero-dav/src/decoder.rs b/aero-dav/src/decoder.rs index 28442a6..766d19c 100644 --- a/aero-dav/src/decoder.rs +++ b/aero-dav/src/decoder.rs @@ -473,7 +473,7 @@ impl QRead for Owner { impl QRead for Timeout { async fn qread(xml: &mut Reader) -> Result { - const SEC_PFX: &str = "SEC_PFX"; + const SEC_PFX: &str = "Second-"; xml.open(DAV_URN, "timeout").await?; let timeout = match xml.tag_string().await?.as_str() { @@ -492,7 +492,7 @@ impl QRead for Timeout { impl QRead for LockToken { async fn qread(xml: &mut Reader) -> Result { xml.open(DAV_URN, "locktoken").await?; - let href = Href::qread(xml).await?; + let href = xml.find::().await?; xml.close().await?; Ok(LockToken(href)) } @@ -501,7 +501,7 @@ impl QRead for LockToken { impl QRead for LockRoot { async fn qread(xml: &mut Reader) -> Result { xml.open(DAV_URN, "lockroot").await?; - let href = Href::qread(xml).await?; + let href = xml.find::().await?; xml.close().await?; Ok(LockRoot(href)) } diff --git a/aero-dav/src/encoder.rs b/aero-dav/src/encoder.rs index 813efe6..3b0bfda 100644 --- a/aero-dav/src/encoder.rs +++ b/aero-dav/src/encoder.rs @@ -996,16 +996,15 @@ mod tests { #[tokio::test] async fn rfc_propertyupdate() { - let got = serialize( - &PropertyUpdate::(vec![ - PropertyUpdateItem::Set(Set(PropValue(vec![ - Property::GetContentLanguage("fr-FR".into()), - ]))), - PropertyUpdateItem::Remove(Remove(PropName(vec![ - PropertyRequest::DisplayName, - ]))), - ]), - ).await; + let orig = PropertyUpdate::(vec![ + PropertyUpdateItem::Set(Set(PropValue(vec![ + Property::GetContentLanguage("fr-FR".into()), + ]))), + PropertyUpdateItem::Remove(Remove(PropName(vec![ + PropertyRequest::DisplayName, + ]))), + ]); + let got = serialize(&orig).await; let expected = r#" @@ -1021,24 +1020,25 @@ mod tests { "#; assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + assert_eq!(deserialize::>(got.as_str()).await, orig) } #[tokio::test] async fn rfc_delete_locked2() { - let got = serialize( - &Multistatus::> { - responses: vec![Response { - status_or_propstat: StatusOrPropstat::Status( - 
vec![Href("http://www.example.com/container/resource3".into())], - Status(http::status::StatusCode::from_u16(423).unwrap()) - ), - error: Some(Error(vec![Violation::LockTokenSubmitted(vec![])])), - responsedescription: None, - location: None, - }], + let orig = Multistatus::> { + responses: vec![Response { + status_or_propstat: StatusOrPropstat::Status( + vec![Href("http://www.example.com/container/resource3".into())], + Status(http::status::StatusCode::from_u16(423).unwrap()) + ), + error: Some(Error(vec![Violation::LockTokenSubmitted(vec![])])), responsedescription: None, - }, - ).await; + location: None, + }], + responsedescription: None, + }; + + let got = serialize(&orig).await; let expected = r#" @@ -1051,17 +1051,18 @@ mod tests { "#; assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + assert_eq!(deserialize::>>(got.as_str()).await, orig) } #[tokio::test] async fn rfc_simple_lock_request() { - let got = serialize( - &LockInfo { - lockscope: LockScope::Exclusive, - locktype: LockType::Write, - owner: Some(Owner::Href(Href("http://example.org/~ejw/contact.html".into()))), - }, - ).await; + let orig = LockInfo { + lockscope: LockScope::Exclusive, + locktype: LockType::Write, + owner: Some(Owner::Href(Href("http://example.org/~ejw/contact.html".into()))), + }; + + let got = serialize(&orig).await; let expected = r#" @@ -1076,23 +1077,24 @@ mod tests { "#; assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + assert_eq!(deserialize::(got.as_str()).await, orig) } #[tokio::test] async fn rfc_simple_lock_response() { - let got = serialize( - &PropValue::(vec![ - Property::LockDiscovery(vec![ActiveLock { - lockscope: LockScope::Exclusive, - locktype: LockType::Write, - depth: Depth::Infinity, - owner: Some(Owner::Href(Href("http://example.org/~ejw/contact.html".into()))), - timeout: Some(Timeout::Seconds(604800)), - locktoken: Some(LockToken(Href("urn:uuid:e71d4fae-5dec-22d6-fea5-00a0c91e6be4".into()))), - lockroot: LockRoot(Href("http://example.com/workspace/webdav/proposal.doc".into())), - }]), - ]), - ).await; + let orig = PropValue::(vec![ + Property::LockDiscovery(vec![ActiveLock { + lockscope: LockScope::Exclusive, + locktype: LockType::Write, + depth: Depth::Infinity, + owner: Some(Owner::Href(Href("http://example.org/~ejw/contact.html".into()))), + timeout: Some(Timeout::Seconds(604800)), + locktoken: Some(LockToken(Href("urn:uuid:e71d4fae-5dec-22d6-fea5-00a0c91e6be4".into()))), + lockroot: LockRoot(Href("http://example.com/workspace/webdav/proposal.doc".into())), + }]), + ]); + + let got = serialize(&orig).await; let expected = r#" @@ -1119,5 +1121,6 @@ mod tests { "#; assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + assert_eq!(deserialize::>(got.as_str()).await, orig) } } -- cgit v1.2.3 From 7459f50b5486a137bc90b7e6e04e915d82230e28 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Fri, 8 Mar 2024 18:23:23 +0100 Subject: WIP implem cal decoder --- aero-dav/src/caldecoder.rs | 411 ++++++++++++++++++++++++++++++++++++++++++--- aero-dav/src/calencoder.rs | 13 +- aero-dav/src/caltypes.rs | 21 +++ aero-dav/src/decoder.rs | 4 +- aero-dav/src/error.rs | 1 + aero-dav/src/xml.rs | 18 +- 6 files changed, 439 insertions(+), 29 deletions(-) diff --git a/aero-dav/src/caldecoder.rs b/aero-dav/src/caldecoder.rs index 3aae4ad..49d1c9e 100644 --- a/aero-dav/src/caldecoder.rs +++ b/aero-dav/src/caldecoder.rs @@ -1,68 +1,431 @@ +use quick_xml::events::Event; +use chrono::NaiveDateTime; + use super::types as dav; use 
super::caltypes::*; -use super::xml; -use super::error; +use super::xml::{QRead, IRead, Reader, Node, CAL_URN}; +use super::error::ParsingError; // ---- ROOT ELEMENTS --- -impl xml::QRead> for MkCalendar { - async fn qread(_xml: &mut xml::Reader) -> Result { - unreachable!(); +impl QRead> for MkCalendar { + async fn qread(xml: &mut Reader) -> Result { + xml.open(CAL_URN, "mkcalendar").await?; + let set = xml.find().await?; + xml.close().await?; + Ok(MkCalendar(set)) } } -impl> xml::QRead> for MkCalendarResponse { - async fn qread(_xml: &mut xml::Reader) -> Result { +impl> QRead> for MkCalendarResponse { + async fn qread(_xml: &mut Reader) -> Result { unreachable!(); } } -impl xml::QRead> for CalendarQuery { - async fn qread(_xml: &mut xml::Reader) -> Result { +impl QRead> for CalendarQuery { + async fn qread(_xml: &mut Reader) -> Result { unreachable!(); } } -impl xml::QRead> for CalendarMultiget { - async fn qread(_xml: &mut xml::Reader) -> Result { +impl QRead> for CalendarMultiget { + async fn qread(_xml: &mut Reader) -> Result { unreachable!(); } } -impl xml::QRead for FreeBusyQuery { - async fn qread(_xml: &mut xml::Reader) -> Result { +impl QRead for FreeBusyQuery { + async fn qread(_xml: &mut Reader) -> Result { unreachable!(); } } // ---- EXTENSIONS --- -impl xml::QRead for Violation { - async fn qread(_xml: &mut xml::Reader) -> Result { +impl QRead for Violation { + async fn qread(_xml: &mut Reader) -> Result { unreachable!(); } } -impl xml::QRead for Property { - async fn qread(_xml: &mut xml::Reader) -> Result { - unreachable!(); +impl QRead for Property { + async fn qread(xml: &mut Reader) -> Result { + if xml.maybe_open(CAL_URN, "calendar-description").await?.is_some() { + let lang = xml.prev_attr("xml:lang"); + let text = xml.tag_string().await?; + xml.close().await?; + return Ok(Property::CalendarDescription { lang, text }) + } + + if xml.maybe_open(CAL_URN, "calendar-timezone").await?.is_some() { + let tz = xml.tag_string().await?; + xml.close().await?; + return Ok(Property::CalendarTimezone(tz)) + } + + if xml.maybe_open(CAL_URN, "supported-calendar-component-set").await?.is_some() { + let comp = xml.collect().await?; + xml.close().await?; + return Ok(Property::SupportedCalendarComponentSet(comp)) + } + + if xml.maybe_open(CAL_URN, "supported-calendar-data").await?.is_some() { + let mime = xml.collect().await?; + xml.close().await?; + return Ok(Property::SupportedCalendarData(mime)) + } + + if xml.maybe_open(CAL_URN, "max-resource-size").await?.is_some() { + let sz = xml.tag_string().await?.parse::()?; + xml.close().await?; + return Ok(Property::MaxResourceSize(sz)) + } + + if xml.maybe_open(CAL_URN, "max-date-time").await?.is_some() { + let dtstr = xml.tag_string().await?; + let dt = NaiveDateTime::parse_from_str(dtstr.as_str(), ICAL_DATETIME_FMT)?.and_utc(); + xml.close().await?; + return Ok(Property::MaxDateTime(dt)) + } + + if xml.maybe_open(CAL_URN, "max-instances").await?.is_some() { + let sz = xml.tag_string().await?.parse::()?; + xml.close().await?; + return Ok(Property::MaxInstances(sz)) + } + + if xml.maybe_open(CAL_URN, "max-attendees-per-instance").await?.is_some() { + let sz = xml.tag_string().await?.parse::()?; + xml.close().await?; + return Ok(Property::MaxAttendeesPerInstance(sz)) + } + + if xml.maybe_open(CAL_URN, "supported-collation-set").await?.is_some() { + let cols = xml.collect().await?; + xml.close().await?; + return Ok(Property::SupportedCollationSet(cols)) + } + + let mut dirty = false; + let mut caldata: Option = None; + xml.maybe_read(&mut 
caldata, &mut dirty).await?; + if let Some(cal) = caldata { + return Ok(Property::CalendarData(cal)) + } + + Err(ParsingError::Recoverable) } } -impl xml::QRead for PropertyRequest { - async fn qread(_xml: &mut xml::Reader) -> Result { +impl QRead for PropertyRequest { + async fn qread(_xml: &mut Reader) -> Result { unreachable!(); } } -impl xml::QRead for ResourceType { - async fn qread(_xml: &mut xml::Reader) -> Result { +impl QRead for ResourceType { + async fn qread(_xml: &mut Reader) -> Result { unreachable!(); } } // ---- INNER XML ---- -impl xml::QRead for SupportedCollation { - async fn qread(_xml: &mut xml::Reader) -> Result { +impl QRead for SupportedCollation { + async fn qread(xml: &mut Reader) -> Result { + xml.open(CAL_URN, "supported-collation").await?; + let col = Collation::new(xml.tag_string().await?); + xml.close().await?; + Ok(SupportedCollation(col)) + } +} + +impl QRead for CalendarDataPayload { + async fn qread(xml: &mut Reader) -> Result { + xml.open(CAL_URN, "calendar-data").await?; + let mime = CalendarDataSupport::qread(xml).await.ok(); + let payload = xml.tag_string().await?; + xml.close().await?; + Ok(CalendarDataPayload { mime, payload }) + } +} + +impl QRead for CalendarDataSupport { + async fn qread(xml: &mut Reader) -> Result { + let ct = xml.prev_attr("content-type"); + let vs = xml.prev_attr("version"); + match (ct, vs) { + (Some(content_type), Some(version)) => Ok(Self { content_type, version }), + _ => Err(ParsingError::Recoverable), + } + } +} + +impl QRead for CalendarDataRequest { + async fn qread(xml: &mut Reader) -> Result { + xml.open(CAL_URN, "calendar-data").await?; + let mime = CalendarDataSupport::qread(xml).await.ok(); + + let (mut comp, mut recurrence, mut limit_freebusy_set) = (None, None, None); + + loop { + let mut dirty = false; + xml.maybe_read(&mut comp, &mut dirty).await?; + xml.maybe_read(&mut recurrence, &mut dirty).await?; + xml.maybe_read(&mut limit_freebusy_set, &mut dirty).await?; + + if !dirty { + match xml.peek() { + Event::End(_) => break, + _ => xml.skip().await?, + }; + } + + } + + xml.close().await?; + Ok(Self { mime, comp, recurrence, limit_freebusy_set }) + } +} + +impl QRead for CalendarDataEmpty { + async fn qread(xml: &mut Reader) -> Result { + xml.open(CAL_URN, "calendar-data").await?; + let mime = CalendarDataSupport::qread(xml).await.ok(); + xml.close().await?; + Ok(Self(mime)) + } +} + +impl QRead for Comp { + async fn qread(xml: &mut Reader) -> Result { + xml.open(CAL_URN, "comp").await?; + let name = Component::new(xml.prev_attr("name").ok_or(ParsingError::MissingAttribute)?); + let additional_rules = Box::pin(xml.maybe_find()).await?; + xml.close().await?; + Ok(Self { name, additional_rules }) + } +} + +impl QRead for CompInner { + async fn qread(xml: &mut Reader) -> Result { + let (mut prop_kind, mut comp_kind) = (None, None); + + loop { + let mut dirty = false; + + xml.maybe_read(&mut prop_kind, &mut dirty).await?; + xml.maybe_read(&mut comp_kind, &mut dirty).await?; + + if !dirty { + match xml.peek() { + Event::End(_) => break, + _ => xml.skip().await?, + }; + } + }; + + match (prop_kind, comp_kind) { + (Some(prop_kind), Some(comp_kind)) => Ok(Self { prop_kind, comp_kind }), + _ => Err(ParsingError::MissingChild), + } + } +} + +impl QRead for CompSupport { + async fn qread(xml: &mut Reader) -> Result { + xml.open(CAL_URN, "comp").await?; + let inner = Component::new(xml.prev_attr("name").ok_or(ParsingError::MissingAttribute)?); + xml.close().await?; + Ok(Self(inner)) + } +} + +impl QRead for CompKind { + 
async fn qread(xml: &mut Reader) -> Result { + let mut comp = Vec::new(); + loop { + let mut dirty = false; + + if xml.maybe_open(CAL_URN, "allcomp").await?.is_some() { + xml.close().await?; + return Ok(CompKind::AllComp) + } + + xml.maybe_push(&mut comp, &mut dirty).await?; + + if !dirty { + match xml.peek() { + Event::End(_) => break, + _ => xml.skip().await?, + }; + } + } + Ok(CompKind::Comp(comp)) + } +} + +impl QRead for PropKind { + async fn qread(xml: &mut Reader) -> Result { + let mut prop = Vec::new(); + loop { + let mut dirty = false; + + if xml.maybe_open(CAL_URN, "allprop").await?.is_some() { + xml.close().await?; + return Ok(PropKind::AllProp) + } + + xml.maybe_push(&mut prop, &mut dirty).await?; + + if !dirty { + match xml.peek() { + Event::End(_) => break, + _ => xml.skip().await?, + }; + } + } + Ok(PropKind::Prop(prop)) + } +} + +impl QRead for RecurrenceModifier { + async fn qread(_xml: &mut Reader) -> Result { + unreachable!(); + } +} + +impl QRead for Expand { + async fn qread(_xml: &mut Reader) -> Result { + unreachable!(); + } +} + +impl QRead for LimitRecurrenceSet { + async fn qread(_xml: &mut Reader) -> Result { + unreachable!(); + } +} + +impl QRead for LimitFreebusySet { + async fn qread(_xml: &mut Reader) -> Result { + unreachable!(); + } +} + +impl QRead> for CalendarSelector { + async fn qread(_xml: &mut Reader) -> Result { + unreachable!(); + } +} + +impl QRead for CompFilter { + async fn qread(_xml: &mut Reader) -> Result { + unreachable!(); + } +} + +impl QRead for CompFilterRules { + async fn qread(_xml: &mut Reader) -> Result { + unreachable!(); + } +} + +impl QRead for CompFilterMatch { + async fn qread(_xml: &mut Reader) -> Result { + unreachable!(); + } +} + +impl QRead for PropFilter { + async fn qread(_xml: &mut Reader) -> Result { + unreachable!(); + } +} + +impl QRead for PropFilterRules { + async fn qread(_xml: &mut Reader) -> Result { + unreachable!(); + } +} + +impl QRead for PropFilterMatch { + async fn qread(_xml: &mut Reader) -> Result { + unreachable!(); + } +} + +impl QRead for TimeOrText { + async fn qread(_xml: &mut Reader) -> Result { unreachable!(); } } + +impl QRead for TextMatch { + async fn qread(_xml: &mut Reader) -> Result { + unreachable!(); + } +} + +impl QRead for ParamFilterMatch { + async fn qread(_xml: &mut Reader) -> Result { + unreachable!(); + } +} + +impl QRead for TimeZone { + async fn qread(_xml: &mut Reader) -> Result { + unreachable!(); + } +} + +impl QRead for Filter { + async fn qread(_xml: &mut Reader) -> Result { + unreachable!(); + } +} + +impl QRead for TimeRange { + async fn qread(_xml: &mut Reader) -> Result { + unreachable!(); + } +} + +impl QRead for CalProp { + async fn qread(_xml: &mut Reader) -> Result { + unreachable!(); + } +} + +#[cfg(test)] +mod tests { + use super::*; + //use chrono::{FixedOffset, TimeZone}; + use crate::realization::Calendar; + //use quick_reader::NsReader; + + async fn deserialize>(src: &str) -> T { + let mut rdr = Reader::new(quick_xml::NsReader::from_reader(src.as_bytes())).await.unwrap(); + rdr.find().await.unwrap() + } + + #[tokio::test] + async fn basic_mkcalendar() { + let expected = MkCalendar(dav::Set(dav::PropValue(vec![ + dav::Property::DisplayName("Lisa's Events".into()), + ]))); + + let src = r#" + + + + + Lisa's Events + + + +"#; + let got = deserialize::>(src).await; + assert_eq!(got, expected) + } +} diff --git a/aero-dav/src/calencoder.rs b/aero-dav/src/calencoder.rs index a25d767..55778db 100644 --- a/aero-dav/src/calencoder.rs +++ b/aero-dav/src/calencoder.rs @@ 
-5,7 +5,6 @@ use super::caltypes::*; use super::xml::{Node, QWrite, IWrite, Writer}; use super::types::Extension; -const ICAL_DATETIME_FMT: &str = "%Y%m%dT%H%M%SZ"; // ==================== Calendar Types Serialization ========================= @@ -300,6 +299,12 @@ impl QWrite for Collation { } } +impl QWrite for CalendarDataSupport { + async fn qwrite(&self, _xml: &mut Writer) -> Result<(), QError> { + unreachable!(); + } +} + impl QWrite for CalendarDataPayload { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { let mut start = xml.create_cal_element("calendar-data"); @@ -348,6 +353,12 @@ impl QWrite for CalendarDataEmpty { } } +impl QWrite for CompInner { + async fn qwrite(&self, _xml: &mut Writer) -> Result<(), QError> { + unreachable!(); + } +} + impl QWrite for Comp { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { let mut start = xml.create_cal_element("comp"); diff --git a/aero-dav/src/caltypes.rs b/aero-dav/src/caltypes.rs index 9b9091e..d04c67a 100644 --- a/aero-dav/src/caltypes.rs +++ b/aero-dav/src/caltypes.rs @@ -4,6 +4,8 @@ use chrono::{DateTime,Utc}; use super::types as dav; use super::xml; +pub const ICAL_DATETIME_FMT: &str = "%Y%m%dT%H%M%SZ"; + //@FIXME ACL (rfc3744) is missing, required //@FIXME Versioning (rfc3253) is missing, required //@FIXME WebDAV sync (rfc6578) is missing, optional @@ -1418,6 +1420,18 @@ impl Component { Self::Unknown(c) => c, } } + pub fn new(v: String) -> Self { + match v.as_str() { + "VCALENDAR" => Self::VCalendar, + "VJOURNAL" => Self::VJournal, + "VFREEBUSY" => Self::VFreeBusy, + "VEVENT" => Self::VEvent, + "VTODO" => Self::VTodo, + "VALARM" => Self::VAlarm, + "VTIMEZONE" => Self::VTimeZone, + _ => Self::Unknown(v), + } + } } /// name="VERSION", name="SUMMARY", etc. @@ -1450,4 +1464,11 @@ impl Collation { Self::Unknown(c) => c.as_str(), } } + pub fn new(v: String) -> Self { + match v.as_str() { + "i;ascii-casemap" => Self::AsciiCaseMap, + "i;octet" => Self::Octet, + _ => Self::Unknown(v), + } + } } diff --git a/aero-dav/src/decoder.rs b/aero-dav/src/decoder.rs index 766d19c..de04dd4 100644 --- a/aero-dav/src/decoder.rs +++ b/aero-dav/src/decoder.rs @@ -551,7 +551,9 @@ impl QRead for LockScope { if xml.maybe_open(DAV_URN, "exclusive").await?.is_some() { xml.close().await?; break LockScope::Exclusive - } else if xml.maybe_open(DAV_URN, "shared").await?.is_some() { + } + + if xml.maybe_open(DAV_URN, "shared").await?.is_some() { xml.close().await?; break LockScope::Shared } diff --git a/aero-dav/src/error.rs b/aero-dav/src/error.rs index 78c6d6b..f1b5cba 100644 --- a/aero-dav/src/error.rs +++ b/aero-dav/src/error.rs @@ -4,6 +4,7 @@ use quick_xml::events::attributes::AttrError; pub enum ParsingError { Recoverable, MissingChild, + MissingAttribute, NamespacePrefixAlreadyUsed, WrongToken, TagNotFound, diff --git a/aero-dav/src/xml.rs b/aero-dav/src/xml.rs index f9e04eb..e021543 100644 --- a/aero-dav/src/xml.rs +++ b/aero-dav/src/xml.rs @@ -55,6 +55,7 @@ impl Writer { pub struct Reader { pub rdr: NsReader, cur: Event<'static>, + prev: Event<'static>, parents: Vec>, buf: Vec, } @@ -63,8 +64,9 @@ impl Reader { let mut buf: Vec = vec![]; let cur = rdr.read_event_into_async(&mut buf).await?.into_owned(); let parents = vec![]; + let prev = Event::Eof; buf.clear(); - Ok(Self { cur, parents, rdr, buf }) + Ok(Self { cur, prev, parents, rdr, buf }) } /// read one more tag @@ -72,8 +74,8 @@ impl Reader { async fn next(&mut self) -> Result, ParsingError> { let evt = self.rdr.read_event_into_async(&mut self.buf).await?.into_owned(); 
self.buf.clear(); - let old_evt = std::mem::replace(&mut self.cur, evt); - Ok(old_evt) + self.prev = std::mem::replace(&mut self.cur, evt); + Ok(self.prev.clone()) } /// skip a node at current level @@ -252,6 +254,16 @@ impl Reader { } } + pub fn prev_attr(&self, attr: &str) -> Option { + match &self.prev { + Event::Start(bs) | Event::Empty(bs) => match bs.try_get_attribute(attr) { + Ok(Some(attr)) => attr.decode_and_unescape_value(&self.rdr).ok().map(|v| v.into_owned()), + _ => None, + } + _ => None, + } + } + // find stop tag pub async fn close(&mut self) -> Result, ParsingError> { //println!("close tag {:?}", self.parents.last()); -- cgit v1.2.3 From 17e42874f5a881d5cf55fb8f5e554b003fb59b96 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Fri, 8 Mar 2024 21:39:12 +0100 Subject: WIP decoder --- aero-dav/src/caldecoder.rs | 382 +++++++++++++++++++++++++++++++++++++++------ aero-dav/src/calencoder.rs | 2 +- aero-dav/src/caltypes.rs | 2 +- 3 files changed, 337 insertions(+), 49 deletions(-) diff --git a/aero-dav/src/caldecoder.rs b/aero-dav/src/caldecoder.rs index 49d1c9e..11e191f 100644 --- a/aero-dav/src/caldecoder.rs +++ b/aero-dav/src/caldecoder.rs @@ -3,7 +3,7 @@ use chrono::NaiveDateTime; use super::types as dav; use super::caltypes::*; -use super::xml::{QRead, IRead, Reader, Node, CAL_URN}; +use super::xml::{QRead, IRead, Reader, Node, DAV_URN, CAL_URN}; use super::error::ParsingError; // ---- ROOT ELEMENTS --- @@ -17,34 +17,146 @@ impl QRead> for MkCalendar { } impl> QRead> for MkCalendarResponse { - async fn qread(_xml: &mut Reader) -> Result { - unreachable!(); + async fn qread(xml: &mut Reader) -> Result { + xml.open(CAL_URN, "mkcalendar-response").await?; + let propstats = xml.collect().await?; + xml.close().await?; + Ok(MkCalendarResponse(propstats)) } } impl QRead> for CalendarQuery { - async fn qread(_xml: &mut Reader) -> Result { - unreachable!(); + async fn qread(xml: &mut Reader) -> Result { + xml.open(CAL_URN, "calendar-query").await?; + let (mut selector, mut filter, mut timezone) = (None, None, None); + loop { + let mut dirty = false; + xml.maybe_read(&mut selector, &mut dirty).await?; + xml.maybe_read(&mut filter, &mut dirty).await?; + xml.maybe_read(&mut timezone, &mut dirty).await?; + + if !dirty { + match xml.peek() { + Event::End(_) => break, + _ => xml.skip().await?, + }; + } + } + xml.close().await?; + + match filter { + Some(filter) => Ok(CalendarQuery { selector, filter, timezone }), + _ => Err(ParsingError::MissingChild), + } } } impl QRead> for CalendarMultiget { - async fn qread(_xml: &mut Reader) -> Result { - unreachable!(); + async fn qread(xml: &mut Reader) -> Result { + xml.open(CAL_URN, "free-busy-query").await?; + let mut selector = None; + let mut href = Vec::new(); + + loop { + let mut dirty = false; + xml.maybe_read(&mut selector, &mut dirty).await?; + xml.maybe_push(&mut href, &mut dirty).await?; + + if !dirty { + match xml.peek() { + Event::End(_) => break, + _ => xml.skip().await?, + }; + } + } + + xml.close().await?; + Ok(CalendarMultiget { selector, href }) } } impl QRead for FreeBusyQuery { - async fn qread(_xml: &mut Reader) -> Result { - unreachable!(); + async fn qread(xml: &mut Reader) -> Result { + xml.open(CAL_URN, "calendar-multiple-get").await?; + let range = xml.find().await?; + xml.close().await?; + Ok(FreeBusyQuery(range)) } } // ---- EXTENSIONS --- impl QRead for Violation { - async fn qread(_xml: &mut Reader) -> Result { - unreachable!(); + async fn qread(xml: &mut Reader) -> Result { + if xml.maybe_open(DAV_URN, 
"resource-must-be-null").await?.is_some() { + xml.close().await?; + Ok(Self::ResourceMustBeNull) + } else if xml.maybe_open(DAV_URN, "need-privileges").await?.is_some() { + xml.close().await?; + Ok(Self::NeedPrivileges) + } else if xml.maybe_open(CAL_URN, "calendar-collection-location-ok").await?.is_some() { + xml.close().await?; + Ok(Self::CalendarCollectionLocationOk) + } else if xml.maybe_open(CAL_URN, "valid-calendar-data").await?.is_some() { + xml.close().await?; + Ok(Self::ValidCalendarData) + } else if xml.maybe_open(CAL_URN, "initialize-calendar-collection").await?.is_some() { + xml.close().await?; + Ok(Self::InitializeCalendarCollection) + } else if xml.maybe_open(CAL_URN, "supported-calendar-data").await?.is_some() { + xml.close().await?; + Ok(Self::SupportedCalendarData) + } else if xml.maybe_open(CAL_URN, "valid-calendar-object-resource").await?.is_some() { + xml.close().await?; + Ok(Self::ValidCalendarObjectResource) + } else if xml.maybe_open(CAL_URN, "supported-calendar-component").await?.is_some() { + xml.close().await?; + Ok(Self::SupportedCalendarComponent) + } else if xml.maybe_open(CAL_URN, "no-uid-conflict").await?.is_some() { + let href = xml.find().await?; + xml.close().await?; + Ok(Self::NoUidConflict(href)) + } else if xml.maybe_open(CAL_URN, "max-resource-size").await?.is_some() { + xml.close().await?; + Ok(Self::MaxResourceSize) + } else if xml.maybe_open(CAL_URN, "min-date-time").await?.is_some() { + xml.close().await?; + Ok(Self::MinDateTime) + } else if xml.maybe_open(CAL_URN, "max-date-time").await?.is_some() { + xml.close().await?; + Ok(Self::MaxDateTime) + } else if xml.maybe_open(CAL_URN, "max-instances").await?.is_some() { + xml.close().await?; + Ok(Self::MaxInstances) + } else if xml.maybe_open(CAL_URN, "max-attendees-per-instance").await?.is_some() { + xml.close().await?; + Ok(Self::MaxAttendeesPerInstance) + } else if xml.maybe_open(CAL_URN, "valid-filter").await?.is_some() { + xml.close().await?; + Ok(Self::ValidFilter) + } else if xml.maybe_open(CAL_URN, "supported-filter").await?.is_some() { + let (mut comp, mut prop, mut param) = (Vec::new(), Vec::new(), Vec::new()); + loop { + let mut dirty = false; + xml.maybe_push(&mut comp, &mut dirty).await?; + xml.maybe_push(&mut prop, &mut dirty).await?; + xml.maybe_push(&mut param, &mut dirty).await?; + + if !dirty { + match xml.peek() { + Event::End(_) => break, + _ => xml.skip().await?, + }; + } + } + xml.close().await?; + Ok(Self::SupportedFilter { comp, prop, param }) + } else if xml.maybe_open(CAL_URN, "number-of-matches-within-limits").await?.is_some() { + xml.close().await?; + Ok(Self::NumberOfMatchesWithinLimits) + } else { + Err(ParsingError::Recoverable) + } } } @@ -289,110 +401,286 @@ impl QRead for PropKind { } impl QRead for RecurrenceModifier { - async fn qread(_xml: &mut Reader) -> Result { - unreachable!(); + async fn qread(xml: &mut Reader) -> Result { + match Expand::qread(xml).await { + Err(ParsingError::Recoverable) => (), + otherwise => return otherwise.map(RecurrenceModifier::Expand), + } + LimitRecurrenceSet::qread(xml).await.map(RecurrenceModifier::LimitRecurrenceSet) } } impl QRead for Expand { - async fn qread(_xml: &mut Reader) -> Result { - unreachable!(); + async fn qread(xml: &mut Reader) -> Result { + xml.open(CAL_URN, "expand").await?; + let (rstart, rend) = match (xml.prev_attr("start"), xml.prev_attr("end")) { + (Some(start), Some(end)) => (start, end), + _ => return Err(ParsingError::MissingAttribute), + }; + + let start = NaiveDateTime::parse_from_str(rstart.as_str(), 
ICAL_DATETIME_FMT)?.and_utc(); + let end = NaiveDateTime::parse_from_str(rend.as_str(), ICAL_DATETIME_FMT)?.and_utc(); + if start > end { + return Err(ParsingError::InvalidValue) + } + + xml.close().await?; + Ok(Expand(start, end)) } } impl QRead for LimitRecurrenceSet { - async fn qread(_xml: &mut Reader) -> Result { - unreachable!(); + async fn qread(xml: &mut Reader) -> Result { + xml.open(CAL_URN, "limit-recurrence-set").await?; + let (rstart, rend) = match (xml.prev_attr("start"), xml.prev_attr("end")) { + (Some(start), Some(end)) => (start, end), + _ => return Err(ParsingError::MissingAttribute), + }; + + let start = NaiveDateTime::parse_from_str(rstart.as_str(), ICAL_DATETIME_FMT)?.and_utc(); + let end = NaiveDateTime::parse_from_str(rend.as_str(), ICAL_DATETIME_FMT)?.and_utc(); + if start > end { + return Err(ParsingError::InvalidValue) + } + + xml.close().await?; + Ok(LimitRecurrenceSet(start, end)) } } impl QRead for LimitFreebusySet { - async fn qread(_xml: &mut Reader) -> Result { - unreachable!(); + async fn qread(xml: &mut Reader) -> Result { + xml.open(CAL_URN, "limit-freebusy-set").await?; + let (rstart, rend) = match (xml.prev_attr("start"), xml.prev_attr("end")) { + (Some(start), Some(end)) => (start, end), + _ => return Err(ParsingError::MissingAttribute), + }; + + let start = NaiveDateTime::parse_from_str(rstart.as_str(), ICAL_DATETIME_FMT)?.and_utc(); + let end = NaiveDateTime::parse_from_str(rend.as_str(), ICAL_DATETIME_FMT)?.and_utc(); + if start > end { + return Err(ParsingError::InvalidValue) + } + + xml.close().await?; + Ok(LimitFreebusySet(start, end)) } } impl QRead> for CalendarSelector { - async fn qread(_xml: &mut Reader) -> Result { - unreachable!(); + async fn qread(xml: &mut Reader) -> Result { + // allprop + if let Some(_) = xml.maybe_open(DAV_URN, "allprop").await? { + xml.close().await?; + return Ok(Self::AllProp) + } + + // propname + if let Some(_) = xml.maybe_open(DAV_URN, "propname").await? 
{ + xml.close().await?; + return Ok(Self::PropName) + } + + // prop + let (mut maybe_prop, mut dirty) = (None, false); + xml.maybe_read::>(&mut maybe_prop, &mut dirty).await?; + if let Some(prop) = maybe_prop { + return Ok(Self::Prop(prop)) + } + + Err(ParsingError::Recoverable) } } impl QRead for CompFilter { - async fn qread(_xml: &mut Reader) -> Result { - unreachable!(); + async fn qread(xml: &mut Reader) -> Result { + xml.open(CAL_URN, "comp-filter").await?; + let name = Component::new(xml.prev_attr("name").ok_or(ParsingError::MissingAttribute)?); + let additional_rules = Box::pin(xml.maybe_find()).await?; + xml.close().await?; + Ok(Self { name, additional_rules }) } } impl QRead for CompFilterRules { - async fn qread(_xml: &mut Reader) -> Result { - unreachable!(); + async fn qread(xml: &mut Reader) -> Result { + if xml.maybe_open(CAL_URN, "is-not-defined").await?.is_some() { + xml.close().await?; + return Ok(Self::IsNotDefined) + } + CompFilterMatch::qread(xml).await.map(CompFilterRules::Matches) } } impl QRead for CompFilterMatch { - async fn qread(_xml: &mut Reader) -> Result { - unreachable!(); + async fn qread(xml: &mut Reader) -> Result { + let mut time_range = None; + let mut prop_filter = Vec::new(); + let mut comp_filter = Vec::new(); + + loop { + let mut dirty = false; + xml.maybe_read(&mut time_range, &mut dirty).await?; + xml.maybe_push(&mut prop_filter, &mut dirty).await?; + xml.maybe_push(&mut comp_filter, &mut dirty).await?; + + if !dirty { + match xml.peek() { + Event::End(_) => break, + _ => xml.skip().await?, + }; + } + } + + match (&time_range, &prop_filter[..], &comp_filter[..]) { + (None, [], []) => Err(ParsingError::Recoverable), + _ => Ok(CompFilterMatch { time_range, prop_filter, comp_filter }), + } } } impl QRead for PropFilter { - async fn qread(_xml: &mut Reader) -> Result { - unreachable!(); + async fn qread(xml: &mut Reader) -> Result { + xml.open(CAL_URN, "prop-filter").await?; + let name = ComponentProperty(xml.prev_attr("name").ok_or(ParsingError::MissingAttribute)?); + let additional_rules = xml.maybe_find().await?; + xml.close().await?; + Ok(Self { name, additional_rules }) } } impl QRead for PropFilterRules { - async fn qread(_xml: &mut Reader) -> Result { - unreachable!(); + async fn qread(xml: &mut Reader) -> Result { + if xml.maybe_open(CAL_URN, "is-not-defined").await?.is_some() { + xml.close().await?; + return Ok(Self::IsNotDefined) + } + PropFilterMatch::qread(xml).await.map(PropFilterRules::Match) } } impl QRead for PropFilterMatch { - async fn qread(_xml: &mut Reader) -> Result { - unreachable!(); + async fn qread(xml: &mut Reader) -> Result { + let mut time_range = None; + let mut time_or_text = None; + let mut param_filter = Vec::new(); + + loop { + let mut dirty = false; + xml.maybe_read(&mut time_range, &mut dirty).await?; + xml.maybe_read(&mut time_or_text, &mut dirty).await?; + xml.maybe_push(&mut param_filter, &mut dirty).await?; + + if !dirty { + match xml.peek() { + Event::End(_) => break, + _ => xml.skip().await?, + }; + } + } + + match (&time_range, &time_or_text, ¶m_filter[..]) { + (None, None, []) => Err(ParsingError::Recoverable), + _ => Ok(PropFilterMatch { time_range, time_or_text, param_filter }), + } + } +} + +impl QRead for ParamFilter { + async fn qread(xml: &mut Reader) -> Result { + xml.open(CAL_URN, "param-filter").await?; + let name = PropertyParameter(xml.prev_attr("name").ok_or(ParsingError::MissingAttribute)?); + let additional_rules = xml.maybe_find().await?; + xml.close().await?; + Ok(Self { name, 
additional_rules }) } } impl QRead for TimeOrText { - async fn qread(_xml: &mut Reader) -> Result { - unreachable!(); + async fn qread(xml: &mut Reader) -> Result { + match TimeRange::qread(xml).await { + Err(ParsingError::Recoverable) => (), + otherwise => return otherwise.map(Self::Time), + } + TextMatch::qread(xml).await.map(Self::Text) } } impl QRead for TextMatch { - async fn qread(_xml: &mut Reader) -> Result { - unreachable!(); + async fn qread(xml: &mut Reader) -> Result { + xml.open(CAL_URN, "text-match").await?; + let collation = xml.prev_attr("collation").map(Collation::new); + let negate_condition = xml.prev_attr("negate-condition").map(|v| v == "yes"); + let text = xml.tag_string().await?; + xml.close().await?; + Ok(Self { collation, negate_condition, text }) } } impl QRead for ParamFilterMatch { - async fn qread(_xml: &mut Reader) -> Result { - unreachable!(); + async fn qread(xml: &mut Reader) -> Result { + if xml.maybe_open(CAL_URN, "is-not-defined").await?.is_some() { + xml.close().await?; + return Ok(Self::IsNotDefined) + } + TextMatch::qread(xml).await.map(Self::Match) } } impl QRead for TimeZone { - async fn qread(_xml: &mut Reader) -> Result { - unreachable!(); + async fn qread(xml: &mut Reader) -> Result { + xml.open(CAL_URN, "timezone").await?; + let inner = xml.tag_string().await?; + xml.close().await?; + Ok(Self(inner)) } } impl QRead for Filter { - async fn qread(_xml: &mut Reader) -> Result { - unreachable!(); + async fn qread(xml: &mut Reader) -> Result { + xml.open(CAL_URN, "timezone").await?; + let comp_filter = xml.find().await?; + xml.close().await?; + Ok(Self(comp_filter)) } } impl QRead for TimeRange { - async fn qread(_xml: &mut Reader) -> Result { - unreachable!(); + async fn qread(xml: &mut Reader) -> Result { + xml.open(CAL_URN, "time-range").await?; + + let start = match xml.prev_attr("start") { + Some(r) => Some(NaiveDateTime::parse_from_str(r.as_str(), ICAL_DATETIME_FMT)?.and_utc()), + _ => None, + }; + let end = match xml.prev_attr("end") { + Some(r) => Some(NaiveDateTime::parse_from_str(r.as_str(), ICAL_DATETIME_FMT)?.and_utc()), + _ => None, + }; + + xml.close().await?; + + match (start, end) { + (Some(start), Some(end)) => { + if start > end { + return Err(ParsingError::InvalidValue) + } + Ok(TimeRange::FullRange(start, end)) + }, + (Some(start), None) => Ok(TimeRange::OnlyStart(start)), + (None, Some(end)) => Ok(TimeRange::OnlyEnd(end)), + (None, None) => Err(ParsingError::MissingAttribute), + } } } impl QRead for CalProp { - async fn qread(_xml: &mut Reader) -> Result { - unreachable!(); + async fn qread(xml: &mut Reader) -> Result { + xml.open(CAL_URN, "prop").await?; + let name = ComponentProperty(xml.prev_attr("name").ok_or(ParsingError::MissingAttribute)?); + let novalue = xml.prev_attr("novalue").map(|v| v == "yes"); + xml.close().await?; + Ok(Self { name, novalue }) } } diff --git a/aero-dav/src/calencoder.rs b/aero-dav/src/calencoder.rs index 55778db..6ad9f41 100644 --- a/aero-dav/src/calencoder.rs +++ b/aero-dav/src/calencoder.rs @@ -536,7 +536,7 @@ impl QWrite for CompFilterMatch { impl QWrite for PropFilter { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { let mut start = xml.create_cal_element("prop-filter"); - start.push_attribute(("name", self.name.as_str())); + start.push_attribute(("name", self.name.0.as_str())); match &self.additional_rules { None => xml.q.write_event_async(Event::Empty(start.clone())).await, diff --git a/aero-dav/src/caltypes.rs b/aero-dav/src/caltypes.rs index d04c67a..a6ea7ce 100644 --- 
a/aero-dav/src/caltypes.rs +++ b/aero-dav/src/caltypes.rs @@ -1208,7 +1208,7 @@ pub struct CompFilterMatch { /// ``` #[derive(Debug, PartialEq)] pub struct PropFilter { - pub name: Component, + pub name: ComponentProperty, // None = Option 1, Some() = Option 2, 3 & 4 pub additional_rules: Option, } -- cgit v1.2.3 From f50f6d68aa8c3bc363fb71bc75a2c1f78f14803f Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Fri, 8 Mar 2024 21:51:34 +0100 Subject: finalize decoder caldav impl --- aero-dav/src/caldecoder.rs | 55 ++++++++++++++++++++++++++++++++++++++++++---- 1 file changed, 51 insertions(+), 4 deletions(-) diff --git a/aero-dav/src/caldecoder.rs b/aero-dav/src/caldecoder.rs index 11e191f..1a096c1 100644 --- a/aero-dav/src/caldecoder.rs +++ b/aero-dav/src/caldecoder.rs @@ -230,14 +230,61 @@ impl QRead for Property { } impl QRead for PropertyRequest { - async fn qread(_xml: &mut Reader) -> Result { - unreachable!(); + async fn qread(xml: &mut Reader) -> Result { + if xml.maybe_open(CAL_URN, "calendar-description").await?.is_some() { + xml.close().await?; + return Ok(Self::CalendarDescription) + } + if xml.maybe_open(CAL_URN, "calendar-timezone").await?.is_some() { + xml.close().await?; + return Ok(Self::CalendarTimezone) + } + if xml.maybe_open(CAL_URN, "supported-calendar-component-set").await?.is_some() { + xml.close().await?; + return Ok(Self::SupportedCalendarComponentSet) + } + if xml.maybe_open(CAL_URN, "supported-calendar-data").await?.is_some() { + xml.close().await?; + return Ok(Self::SupportedCalendarData) + } + if xml.maybe_open(CAL_URN, "max-resource-size").await?.is_some() { + xml.close().await?; + return Ok(Self::MaxResourceSize) + } + if xml.maybe_open(CAL_URN, "min-date-time").await?.is_some() { + xml.close().await?; + return Ok(Self::MinDateTime) + } + if xml.maybe_open(CAL_URN, "max-date-time").await?.is_some() { + xml.close().await?; + return Ok(Self::MaxDateTime) + } + if xml.maybe_open(CAL_URN, "max-instances").await?.is_some() { + xml.close().await?; + return Ok(Self::MaxInstances) + } + if xml.maybe_open(CAL_URN, "max-attendees-per-instance").await?.is_some() { + xml.close().await?; + return Ok(Self::MaxAttendeesPerInstance) + } + if xml.maybe_open(CAL_URN, "supported-collation-set").await?.is_some() { + xml.close().await?; + return Ok(Self::SupportedCollationSet) + } + let mut dirty = false; + let mut m_cdr = None; + xml.maybe_read(&mut m_cdr, &mut dirty).await?; + m_cdr.ok_or(ParsingError::Recoverable).map(Self::CalendarData) } } impl QRead for ResourceType { - async fn qread(_xml: &mut Reader) -> Result { - unreachable!(); + async fn qread(xml: &mut Reader) -> Result { + if xml.maybe_open(CAL_URN, "calendar").await?.is_some() { + xml.close().await?; + return Ok(Self::Calendar) + } + Err(ParsingError::Recoverable) } } -- cgit v1.2.3 From 6d1f538091ca9445cdc0d72b051fa5090b6ec68a Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Fri, 8 Mar 2024 22:03:46 +0100 Subject: Improve my XML parser --- aero-dav/src/caldecoder.rs | 40 ++++++++++++++++++++++++++++++++++++++++ aero-dav/src/xml.rs | 14 +++++++++++--- 2 files changed, 51 insertions(+), 3 deletions(-) diff --git a/aero-dav/src/caldecoder.rs b/aero-dav/src/caldecoder.rs index 1a096c1..239b005 100644 --- a/aero-dav/src/caldecoder.rs +++ b/aero-dav/src/caldecoder.rs @@ -393,7 +393,9 @@ impl QRead for CompInner { impl QRead for CompSupport { async fn qread(xml: &mut Reader) -> Result { xml.open(CAL_URN, "comp").await?; + println!("before"); let inner = 
Component::new(xml.prev_attr("name").ok_or(ParsingError::MissingAttribute)?); + println!("after"); xml.close().await?; Ok(Self(inner)) } @@ -763,4 +765,42 @@ mod tests { let got = deserialize::>(src).await; assert_eq!(got, expected) } + + #[tokio::test] + async fn rfc_mkcalendar() { + let expected = MkCalendar(dav::Set(dav::PropValue(vec![ + dav::Property::DisplayName("Lisa's Events".into()), + dav::Property::Extension(Property::CalendarDescription { + lang: Some("en".into()), + text: "Calendar restricted to events.".into(), + }), + dav::Property::Extension(Property::SupportedCalendarComponentSet(vec![ + CompSupport(Component::VEvent) + ])), + dav::Property::Extension(Property::CalendarTimezone("BEGIN:VCALENDAR\nPRODID:-//Example Corp.//CalDAV Client//EN\nVERSION:2.0\nEND:VCALENDAR".into())), + ]))); + + let src = r#" + + + + + Lisa's Events + Calendar restricted to events. + + + + + + + "#; + + let got = deserialize::>(src).await; + assert_eq!(got, expected) + } } diff --git a/aero-dav/src/xml.rs b/aero-dav/src/xml.rs index e021543..347a123 100644 --- a/aero-dav/src/xml.rs +++ b/aero-dav/src/xml.rs @@ -236,12 +236,20 @@ impl Reader { pub async fn open(&mut self, ns: &[u8], key: &str) -> Result, ParsingError> { //println!("try open tag {:?}", key); let evt = match self.peek() { - Event::Empty(_) if self.is_tag(ns, key) => self.cur.clone(), + Event::Empty(_) if self.is_tag(ns, key) => { + // hack to make `prev_attr` works + // here we duplicate the current tag + // as in other words, we virtually moved one token + // which is useful for prev_attr and any logic based on + // self.prev + self.open() on empty nodes + self.prev = self.cur.clone(); + self.cur.clone() + }, Event::Start(_) if self.is_tag(ns, key) => self.next().await?, _ => return Err(ParsingError::Recoverable), }; - //println!("open tag {:?}", evt); + println!("open tag {:?}", evt); self.parents.push(evt.clone()); Ok(evt) } @@ -266,7 +274,7 @@ impl Reader { // find stop tag pub async fn close(&mut self) -> Result, ParsingError> { - //println!("close tag {:?}", self.parents.last()); + println!("close tag {:?}", self.parents.last()); // Handle the empty case if !self.parent_has_child() { -- cgit v1.2.3 From 442433d70bf13b1641ec76077d9955f5d63ee965 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Tue, 12 Mar 2024 10:18:13 +0100 Subject: fix parsing --- aero-dav/src/caldecoder.rs | 181 +++++++++++++++++++++++++++++++++++++-------- aero-dav/src/calencoder.rs | 61 +++++++-------- aero-dav/src/caltypes.rs | 9 +-- aero-dav/src/xml.rs | 4 + 4 files changed, 184 insertions(+), 71 deletions(-) diff --git a/aero-dav/src/caldecoder.rs b/aero-dav/src/caldecoder.rs index 239b005..2e833a1 100644 --- a/aero-dav/src/caldecoder.rs +++ b/aero-dav/src/caldecoder.rs @@ -357,25 +357,41 @@ impl QRead for CalendarDataEmpty { impl QRead for Comp { async fn qread(xml: &mut Reader) -> Result { - xml.open(CAL_URN, "comp").await?; + let (mut prop_kind, mut comp_kind) = (None, None); + + let bs = xml.open(CAL_URN, "comp").await?; let name = Component::new(xml.prev_attr("name").ok_or(ParsingError::MissingAttribute)?); - let additional_rules = Box::pin(xml.maybe_find()).await?; - xml.close().await?; - Ok(Self { name, additional_rules }) - } -} -impl QRead for CompInner { - async fn qread(xml: &mut Reader) -> Result { - let (mut prop_kind, mut comp_kind) = (None, None); + // Return early if it's an empty tag + if matches!(bs, Event::Empty(_)) { + xml.close().await?; + return Ok(Self { name, prop_kind, comp_kind }) + } loop { let mut dirty = false; - - 
xml.maybe_read(&mut prop_kind, &mut dirty).await?; - xml.maybe_read(&mut comp_kind, &mut dirty).await?; + let (mut tmp_prop_kind, mut tmp_comp_kind): (Option, Option) = (None, None); + + xml.maybe_read(&mut tmp_prop_kind, &mut dirty).await?; + Box::pin(xml.maybe_read(&mut tmp_comp_kind, &mut dirty)).await?; + + //@FIXME hack + // Merge + match (tmp_prop_kind, &mut prop_kind) { + (Some(PropKind::Prop(mut a)), Some(PropKind::Prop(ref mut b))) => b.append(&mut a), + (Some(PropKind::AllProp), v) => *v = Some(PropKind::AllProp), + (Some(x), b) => *b = Some(x), + (None, _) => (), + }; + match (tmp_comp_kind, &mut comp_kind) { + (Some(CompKind::Comp(mut a)), Some(CompKind::Comp(ref mut b))) => b.append(&mut a), + (Some(CompKind::AllComp), v) => *v = Some(CompKind::AllComp), + (Some(a), b) => *b = Some(a), + (None, _) => (), + }; + - if !dirty { + if !dirty { match xml.peek() { Event::End(_) => break, _ => xml.skip().await?, @@ -383,19 +399,15 @@ impl QRead for CompInner { } }; - match (prop_kind, comp_kind) { - (Some(prop_kind), Some(comp_kind)) => Ok(Self { prop_kind, comp_kind }), - _ => Err(ParsingError::MissingChild), - } + xml.close().await?; + Ok(Self { name, prop_kind, comp_kind }) } } impl QRead for CompSupport { async fn qread(xml: &mut Reader) -> Result { xml.open(CAL_URN, "comp").await?; - println!("before"); let inner = Component::new(xml.prev_attr("name").ok_or(ParsingError::MissingAttribute)?); - println!("after"); xml.close().await?; Ok(Self(inner)) } @@ -415,13 +427,13 @@ impl QRead for CompKind { xml.maybe_push(&mut comp, &mut dirty).await?; if !dirty { - match xml.peek() { - Event::End(_) => break, - _ => xml.skip().await?, - }; + break } } - Ok(CompKind::Comp(comp)) + match &comp[..] { + [] => Err(ParsingError::Recoverable), + _ => Ok(CompKind::Comp(comp)), + } } } @@ -439,13 +451,14 @@ impl QRead for PropKind { xml.maybe_push(&mut prop, &mut dirty).await?; if !dirty { - match xml.peek() { - Event::End(_) => break, - _ => xml.skip().await?, - }; + break } } - Ok(PropKind::Prop(prop)) + + match &prop[..] 
{ + [] => Err(ParsingError::Recoverable), + _ => Ok(PropKind::Prop(prop)), + } } } @@ -687,7 +700,7 @@ impl QRead for TimeZone { impl QRead for Filter { async fn qread(xml: &mut Reader) -> Result { - xml.open(CAL_URN, "timezone").await?; + xml.open(CAL_URN, "filter").await?; let comp_filter = xml.find().await?; xml.close().await?; Ok(Self(comp_filter)) @@ -736,7 +749,7 @@ impl QRead for CalProp { #[cfg(test)] mod tests { use super::*; - //use chrono::{FixedOffset, TimeZone}; + use chrono::{Utc, TimeZone}; use crate::realization::Calendar; //use quick_reader::NsReader; @@ -803,4 +816,110 @@ END:VCALENDAR]]> let got = deserialize::>(src).await; assert_eq!(got, expected) } + + #[tokio::test] + async fn rfc_calendar_query() { + let expected = CalendarQuery { + selector: Some(CalendarSelector::Prop(dav::PropName(vec![ + dav::PropertyRequest::GetEtag, + dav::PropertyRequest::Extension(PropertyRequest::CalendarData(CalendarDataRequest { + mime: None, + comp: Some(Comp { + name: Component::VCalendar, + prop_kind: Some(PropKind::Prop(vec![ + CalProp { + name: ComponentProperty("VERSION".into()), + novalue: None, + } + ])), + comp_kind: Some(CompKind::Comp(vec![ + Comp { + name: Component::VEvent, + prop_kind: Some(PropKind::Prop(vec![ + CalProp { name: ComponentProperty("SUMMARY".into()), novalue: None }, + CalProp { name: ComponentProperty("UID".into()), novalue: None }, + CalProp { name: ComponentProperty("DTSTART".into()), novalue: None }, + CalProp { name: ComponentProperty("DTEND".into()), novalue: None }, + CalProp { name: ComponentProperty("DURATION".into()), novalue: None }, + CalProp { name: ComponentProperty("RRULE".into()), novalue: None }, + CalProp { name: ComponentProperty("RDATE".into()), novalue: None }, + CalProp { name: ComponentProperty("EXRULE".into()), novalue: None }, + CalProp { name: ComponentProperty("EXDATE".into()), novalue: None }, + CalProp { name: ComponentProperty("RECURRENCE-ID".into()), novalue: None }, + ])), + comp_kind: None, + }, + Comp { + name: Component::VTimeZone, + prop_kind: None, + comp_kind: None, + } + ])), + }), + recurrence: None, + limit_freebusy_set: None, + })), + ]))), + filter: Filter(CompFilter { + name: Component::VCalendar, + additional_rules: Some(CompFilterRules::Matches(CompFilterMatch { + prop_filter: vec![], + comp_filter: vec![ + CompFilter { + name: Component::VEvent, + additional_rules: Some(CompFilterRules::Matches(CompFilterMatch { + prop_filter: vec![], + comp_filter: vec![], + time_range: Some(TimeRange::FullRange( + Utc.with_ymd_and_hms(2006, 1, 4, 0, 0, 0).unwrap(), + Utc.with_ymd_and_hms(2006, 1, 5, 0, 0, 0).unwrap(), + )), + })), + }, + ], + time_range: None, + })), + }), + timezone: None, + }; + + let src = r#" + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +"#; + + let got = deserialize::>(src).await; + assert_eq!(got, expected) + } } diff --git a/aero-dav/src/calencoder.rs b/aero-dav/src/calencoder.rs index 6ad9f41..fff4d47 100644 --- a/aero-dav/src/calencoder.rs +++ b/aero-dav/src/calencoder.rs @@ -353,23 +353,21 @@ impl QWrite for CalendarDataEmpty { } } -impl QWrite for CompInner { - async fn qwrite(&self, _xml: &mut Writer) -> Result<(), QError> { - unreachable!(); - } -} - impl QWrite for Comp { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { let mut start = xml.create_cal_element("comp"); start.push_attribute(("name", self.name.as_str())); - match &self.additional_rules { - None => xml.q.write_event_async(Event::Empty(start)).await, - Some(rules) => { + match (&self.prop_kind, 
&self.comp_kind) { + (None, None) => xml.q.write_event_async(Event::Empty(start)).await, + _ => { let end = start.to_end(); xml.q.write_event_async(Event::Start(start.clone())).await?; - rules.prop_kind.qwrite(xml).await?; - rules.comp_kind.qwrite(xml).await?; + if let Some(prop_kind) = &self.prop_kind { + prop_kind.qwrite(xml).await?; + } + if let Some(comp_kind) = &self.comp_kind { + comp_kind.qwrite(xml).await?; + } xml.q.write_event_async(Event::End(end)).await }, } @@ -721,39 +719,36 @@ mod tests { mime: None, comp: Some(Comp { name: Component::VCalendar, - additional_rules: Some(CompInner { - prop_kind: PropKind::Prop(vec![ + prop_kind: Some(PropKind::Prop(vec![ CalProp { name: ComponentProperty("VERSION".into()), novalue: None, } - ]), - comp_kind: CompKind::Comp(vec![ + ])), + comp_kind: Some(CompKind::Comp(vec![ Comp { name: Component::VEvent, - additional_rules: Some(CompInner { - prop_kind: PropKind::Prop(vec![ - CalProp { name: ComponentProperty("SUMMARY".into()), novalue: None }, - CalProp { name: ComponentProperty("UID".into()), novalue: None }, - CalProp { name: ComponentProperty("DTSTART".into()), novalue: None }, - CalProp { name: ComponentProperty("DTEND".into()), novalue: None }, - CalProp { name: ComponentProperty("DURATION".into()), novalue: None }, - CalProp { name: ComponentProperty("RRULE".into()), novalue: None }, - CalProp { name: ComponentProperty("RDATE".into()), novalue: None }, - CalProp { name: ComponentProperty("EXRULE".into()), novalue: None }, - CalProp { name: ComponentProperty("EXDATE".into()), novalue: None }, - CalProp { name: ComponentProperty("RECURRENCE-ID".into()), novalue: None }, - ]), - comp_kind: CompKind::Comp(vec![]), - }), + prop_kind: Some(PropKind::Prop(vec![ + CalProp { name: ComponentProperty("SUMMARY".into()), novalue: None }, + CalProp { name: ComponentProperty("UID".into()), novalue: None }, + CalProp { name: ComponentProperty("DTSTART".into()), novalue: None }, + CalProp { name: ComponentProperty("DTEND".into()), novalue: None }, + CalProp { name: ComponentProperty("DURATION".into()), novalue: None }, + CalProp { name: ComponentProperty("RRULE".into()), novalue: None }, + CalProp { name: ComponentProperty("RDATE".into()), novalue: None }, + CalProp { name: ComponentProperty("EXRULE".into()), novalue: None }, + CalProp { name: ComponentProperty("EXDATE".into()), novalue: None }, + CalProp { name: ComponentProperty("RECURRENCE-ID".into()), novalue: None }, + ])), + comp_kind: None, }, Comp { name: Component::VTimeZone, - additional_rules: None, + prop_kind: None, + comp_kind: None, } - ]), + ])), }), - }), recurrence: None, limit_freebusy_set: None, })), diff --git a/aero-dav/src/caltypes.rs b/aero-dav/src/caltypes.rs index a6ea7ce..628ec4b 100644 --- a/aero-dav/src/caltypes.rs +++ b/aero-dav/src/caltypes.rs @@ -855,13 +855,8 @@ pub struct CalendarDataSupport { #[derive(Debug, PartialEq)] pub struct Comp { pub name: Component, - pub additional_rules: Option, -} - -#[derive(Debug, PartialEq)] -pub struct CompInner { - pub prop_kind: PropKind, - pub comp_kind: CompKind, + pub prop_kind: Option, + pub comp_kind: Option, } /// For SupportedCalendarComponentSet diff --git a/aero-dav/src/xml.rs b/aero-dav/src/xml.rs index 347a123..2f3b7a6 100644 --- a/aero-dav/src/xml.rs +++ b/aero-dav/src/xml.rs @@ -128,6 +128,10 @@ impl Reader { &self.cur } + pub fn previous(&self) -> &Event<'static> { + &self.prev + } + // NEW API pub async fn tag_string(&mut self) -> Result { self.ensure_parent_has_child()?; -- cgit v1.2.3 From 
98adb1e20d90a1538b474659a96450d4c7b264c5 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Wed, 13 Mar 2024 09:11:52 +0100 Subject: fix caldecoder + xml --- aero-dav/src/caldecoder.rs | 272 +++++++++++++++++++++++++++++++++++++++++---- aero-dav/src/xml.rs | 8 +- 2 files changed, 253 insertions(+), 27 deletions(-) diff --git a/aero-dav/src/caldecoder.rs b/aero-dav/src/caldecoder.rs index 2e833a1..d3c68f6 100644 --- a/aero-dav/src/caldecoder.rs +++ b/aero-dav/src/caldecoder.rs @@ -323,9 +323,13 @@ impl QRead for CalendarDataRequest { async fn qread(xml: &mut Reader) -> Result { xml.open(CAL_URN, "calendar-data").await?; let mime = CalendarDataSupport::qread(xml).await.ok(); - let (mut comp, mut recurrence, mut limit_freebusy_set) = (None, None, None); + if !xml.parent_has_child() { + return Ok(Self { mime, comp, recurrence, limit_freebusy_set }) + } + + loop { let mut dirty = false; xml.maybe_read(&mut comp, &mut dirty).await?; @@ -565,16 +569,6 @@ impl QRead for CompFilter { } impl QRead for CompFilterRules { - async fn qread(xml: &mut Reader) -> Result { - if xml.maybe_open(CAL_URN, "is-not-defined").await?.is_some() { - xml.close().await?; - return Ok(Self::IsNotDefined) - } - CompFilterMatch::qread(xml).await.map(CompFilterRules::Matches) - } -} - -impl QRead for CompFilterMatch { async fn qread(xml: &mut Reader) -> Result { let mut time_range = None; let mut prop_filter = Vec::new(); @@ -582,6 +576,12 @@ impl QRead for CompFilterMatch { loop { let mut dirty = false; + + if xml.maybe_open(CAL_URN, "is-not-defined").await?.is_some() { + xml.close().await?; + return Ok(Self::IsNotDefined) + } + xml.maybe_read(&mut time_range, &mut dirty).await?; xml.maybe_push(&mut prop_filter, &mut dirty).await?; xml.maybe_push(&mut comp_filter, &mut dirty).await?; @@ -596,11 +596,17 @@ impl QRead for CompFilterMatch { match (&time_range, &prop_filter[..], &comp_filter[..]) { (None, [], []) => Err(ParsingError::Recoverable), - _ => Ok(CompFilterMatch { time_range, prop_filter, comp_filter }), + _ => Ok(Self::Matches(CompFilterMatch { time_range, prop_filter, comp_filter })), } } } +impl QRead for CompFilterMatch { + async fn qread(_xml: &mut Reader) -> Result { + unreachable!(); + } +} + impl QRead for PropFilter { async fn qread(xml: &mut Reader) -> Result { xml.open(CAL_URN, "prop-filter").await?; @@ -612,16 +618,6 @@ impl QRead for PropFilter { } impl QRead for PropFilterRules { - async fn qread(xml: &mut Reader) -> Result { - if xml.maybe_open(CAL_URN, "is-not-defined").await?.is_some() { - xml.close().await?; - return Ok(Self::IsNotDefined) - } - PropFilterMatch::qread(xml).await.map(PropFilterRules::Match) - } -} - -impl QRead for PropFilterMatch { async fn qread(xml: &mut Reader) -> Result { let mut time_range = None; let mut time_or_text = None; @@ -629,6 +625,12 @@ impl QRead for PropFilterMatch { loop { let mut dirty = false; + + if xml.maybe_open(CAL_URN, "is-not-defined").await?.is_some() { + xml.close().await?; + return Ok(Self::IsNotDefined) + } + xml.maybe_read(&mut time_range, &mut dirty).await?; xml.maybe_read(&mut time_or_text, &mut dirty).await?; xml.maybe_push(&mut param_filter, &mut dirty).await?; @@ -643,11 +645,17 @@ impl QRead for PropFilterMatch { match (&time_range, &time_or_text, ¶m_filter[..]) { (None, None, []) => Err(ParsingError::Recoverable), - _ => Ok(PropFilterMatch { time_range, time_or_text, param_filter }), + _ => Ok(PropFilterRules::Match(PropFilterMatch { time_range, time_or_text, param_filter })), } } } +impl QRead for PropFilterMatch { + async fn qread(_xml: &mut 
Reader) -> Result { + unreachable!(); + } +} + impl QRead for ParamFilter { async fn qread(xml: &mut Reader) -> Result { xml.open(CAL_URN, "param-filter").await?; @@ -922,4 +930,222 @@ END:VCALENDAR]]> let got = deserialize::>(src).await; assert_eq!(got, expected) } + + #[tokio::test] + async fn rfc_calendar_query_res() { + let expected = dav::Multistatus::> { + responses: vec![ + dav::Response { + status_or_propstat: dav::StatusOrPropstat::PropStat( + dav::Href("http://cal.example.com/bernard/work/abcd2.ics".into()), + vec![ + dav::PropStat { + prop: dav::PropValue(vec![ + dav::Property::GetEtag("\"fffff-abcd2\"".into()), + dav::Property::Extension(Property::CalendarData(CalendarDataPayload { + mime: None, + payload: "BEGIN:VCALENDAR".into(), + })), + ]), + status: dav::Status(http::status::StatusCode::OK), + error: None, + responsedescription: None, + }, + ], + ), + error: None, + location: None, + responsedescription: None, + }, + dav::Response { + status_or_propstat: dav::StatusOrPropstat::PropStat( + dav::Href("http://cal.example.com/bernard/work/abcd3.ics".into()), + vec![ + dav::PropStat { + prop: dav::PropValue(vec![ + dav::Property::GetEtag("\"fffff-abcd3\"".into()), + dav::Property::Extension(Property::CalendarData(CalendarDataPayload { + mime: None, + payload: "BEGIN:VCALENDAR".into(), + })), + ]), + status: dav::Status(http::status::StatusCode::OK), + error: None, + responsedescription: None, + }, + ], + ), + error: None, + location: None, + responsedescription: None, + }, + ], + responsedescription: None, + }; + + let src = r#" + + http://cal.example.com/bernard/work/abcd2.ics + + + "fffff-abcd2" + BEGIN:VCALENDAR + + HTTP/1.1 200 OK + + + + http://cal.example.com/bernard/work/abcd3.ics + + + "fffff-abcd3" + BEGIN:VCALENDAR + + HTTP/1.1 200 OK + + + +"#; + + let got = deserialize::>>(src).await; + assert_eq!(got, expected) + } + + #[tokio::test] + async fn rfc_recurring_evt() { + let expected = CalendarQuery:: { + selector: Some(CalendarSelector::Prop(dav::PropName(vec![ + dav::PropertyRequest::Extension(PropertyRequest::CalendarData(CalendarDataRequest{ + mime: None, + comp: None, + recurrence: Some(RecurrenceModifier::LimitRecurrenceSet(LimitRecurrenceSet ( + Utc.with_ymd_and_hms(2006, 1, 3, 0, 0, 0).unwrap(), + Utc.with_ymd_and_hms(2006, 1, 5, 0, 0, 0).unwrap(), + ))), + limit_freebusy_set: None, + })), + ]))), + filter: Filter(CompFilter { + name: Component::VCalendar, + additional_rules: Some(CompFilterRules::Matches(CompFilterMatch { + prop_filter: vec![], + comp_filter: vec![ + CompFilter { + name: Component::VEvent, + additional_rules: Some(CompFilterRules::Matches(CompFilterMatch { + prop_filter: vec![], + comp_filter: vec![], + time_range: Some(TimeRange::FullRange( + Utc.with_ymd_and_hms(2006, 1, 3, 0, 0, 0).unwrap(), + Utc.with_ymd_and_hms(2006, 1, 5, 0, 0, 0).unwrap(), + )), + })), + }, + ], + time_range: None, + })), + }), + timezone: None, + }; + + let src = r#" + + + + + + + + + + + + + + + "#; + + let got = deserialize::>(src).await; + assert_eq!(got, expected) + } + + #[tokio::test] + async fn rfc_pending_todos() { + let expected = CalendarQuery:: { + selector: Some(CalendarSelector::Prop(dav::PropName(vec![ + dav::PropertyRequest::GetEtag, + dav::PropertyRequest::Extension(PropertyRequest::CalendarData(CalendarDataRequest { + mime: None, + comp: None, + recurrence: None, + limit_freebusy_set: None, + })) + ]))), + filter: Filter(CompFilter { + name: Component::VCalendar, + additional_rules: Some(CompFilterRules::Matches(CompFilterMatch { + time_range: None, 
+ prop_filter: vec![], + comp_filter: vec![ + CompFilter { + name: Component::VTodo, + additional_rules: Some(CompFilterRules::Matches(CompFilterMatch { + time_range: None, + comp_filter: vec![], + prop_filter: vec![ + PropFilter { + name: ComponentProperty("COMPLETED".into()), + additional_rules: Some(PropFilterRules::IsNotDefined), + }, + PropFilter { + name: ComponentProperty("STATUS".into()), + additional_rules: Some(PropFilterRules::Match(PropFilterMatch { + time_range: None, + param_filter: vec![], + time_or_text: Some(TimeOrText::Text(TextMatch { + collation: None, + negate_condition: Some(true), + text: "CANCELLED".into(), + })), + })), + }, + ], + })), + } + ], + })), + }), + timezone: None, + }; + + let src = r#" + + + + + + + + + + + + + CANCELLED + + + + + "#; + + + let got = deserialize::>(src).await; + assert_eq!(got, expected) + + } } diff --git a/aero-dav/src/xml.rs b/aero-dav/src/xml.rs index 2f3b7a6..1f8a6b1 100644 --- a/aero-dav/src/xml.rs +++ b/aero-dav/src/xml.rs @@ -113,7 +113,7 @@ impl Reader { } } - fn parent_has_child(&self) -> bool { + pub fn parent_has_child(&self) -> bool { matches!(self.parents.last(), Some(Event::Start(_)) | None) } @@ -238,7 +238,7 @@ impl Reader { } pub async fn open(&mut self, ns: &[u8], key: &str) -> Result, ParsingError> { - //println!("try open tag {:?}", key); + //println!("try open tag {:?}, on {:?}", key, self.peek()); let evt = match self.peek() { Event::Empty(_) if self.is_tag(ns, key) => { // hack to make `prev_attr` works @@ -253,7 +253,7 @@ impl Reader { _ => return Err(ParsingError::Recoverable), }; - println!("open tag {:?}", evt); + //println!("open tag {:?}", evt); self.parents.push(evt.clone()); Ok(evt) } @@ -278,7 +278,7 @@ impl Reader { // find stop tag pub async fn close(&mut self) -> Result, ParsingError> { - println!("close tag {:?}", self.parents.last()); + //println!("close tag {:?}", self.parents.last()); // Handle the empty case if !self.parent_has_child() { -- cgit v1.2.3 From 3abdafb0dbbc9290329e4974e821933426b32f91 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Wed, 13 Mar 2024 15:45:36 +0100 Subject: TLS + Fix auth --- aero-proto/src/dav.rs | 90 +++++++++++++++++++++++++++++++++++++++------- aero-proto/src/imap/mod.rs | 81 ----------------------------------------- aero-user/src/config.rs | 8 +++++ aerogramme/src/main.rs | 1 + aerogramme/src/server.rs | 13 +++++++ 5 files changed, 100 insertions(+), 93 deletions(-) diff --git a/aero-proto/src/dav.rs b/aero-proto/src/dav.rs index 2852d34..42fde2c 100644 --- a/aero-proto/src/dav.rs +++ b/aero-proto/src/dav.rs @@ -1,4 +1,5 @@ use std::net::SocketAddr; +use std::sync::Arc; use anyhow::{anyhow, Result}; use base64::Engine; @@ -10,23 +11,55 @@ use http_body_util::Full; use futures::stream::{FuturesUnordered, StreamExt}; use tokio::net::TcpListener; use tokio::sync::watch; +use tokio_rustls::TlsAcceptor; +use tokio::net::TcpStream; +use hyper::rt::{Read, Write}; +use tokio::io::{AsyncRead, AsyncWrite}; +use rustls_pemfile::{certs, private_key}; -use aero_user::config::DavUnsecureConfig; +use aero_user::config::{DavConfig, DavUnsecureConfig}; use aero_user::login::ArcLoginProvider; use aero_collections::user::User; pub struct Server { bind_addr: SocketAddr, login_provider: ArcLoginProvider, + tls: Option, } pub fn new_unsecure(config: DavUnsecureConfig, login: ArcLoginProvider) -> Server { Server { bind_addr: config.bind_addr, login_provider: login, + tls: None, } } +pub fn new(config: DavConfig, login: ArcLoginProvider) -> Result { + let loaded_certs = certs(&mut 
std::io::BufReader::new(std::fs::File::open( + config.certs, + )?)) + .collect::, _>>()?; + let loaded_key = private_key(&mut std::io::BufReader::new(std::fs::File::open( + config.key, + )?))? + .unwrap(); + + let tls_config = rustls::ServerConfig::builder() + .with_no_client_auth() + .with_single_cert(loaded_certs, loaded_key)?; + let acceptor = TlsAcceptor::from(Arc::new(tls_config)); + + Ok(Server { + bind_addr: config.bind_addr, + login_provider: login, + tls: Some(acceptor), + }) +} + +trait Stream: Read + Write + Send + Unpin {} +impl Stream for TokioIo {} + impl Server { pub async fn run(self: Self, mut must_exit: watch::Receiver) -> Result<()> { let tcp = TcpListener::bind(self.bind_addr).await?; @@ -47,14 +80,24 @@ impl Server { _ = must_exit.changed() => continue, }; tracing::info!("Accepted connection from {}", remote_addr); - let stream = TokioIo::new(socket); + let stream = match self.build_stream(socket).await { + Ok(v) => v, + Err(e) => { + tracing::error!(err=?e, "TLS acceptor failed"); + continue + } + }; + let login = self.login_provider.clone(); let conn = tokio::spawn(async move { //@FIXME should create a generic "public web" server on which "routers" could be //abitrarily bound //@FIXME replace with a handler supporting http2 and TLS + + match http::Builder::new().serve_connection(stream, service_fn(|req: Request| { let login = login.clone(); + tracing::info!("{:?} {:?}", req.method(), req.uri()); async move { auth(login, req).await } @@ -72,6 +115,16 @@ impl Server { Ok(()) } + + async fn build_stream(&self, socket: TcpStream) -> Result> { + match self.tls.clone() { + Some(acceptor) => { + let stream = acceptor.accept(socket).await?; + Ok(Box::new(TokioIo::new(stream))) + } + None => Ok(Box::new(TokioIo::new(socket))), + } + } } //@FIXME We should not support only BasicAuth @@ -80,18 +133,26 @@ async fn auth( req: Request, ) -> Result>> { - let auth_val = match req.headers().get("Authorization") { + tracing::info!("headers: {:?}", req.headers()); + let auth_val = match req.headers().get(hyper::header::AUTHORIZATION) { Some(hv) => hv.to_str()?, - None => return Ok(Response::builder() - .status(401) - .body(Full::new(Bytes::from("Missing Authorization field")))?), + None => { + tracing::info!("Missing authorization field"); + return Ok(Response::builder() + .status(401) + .header("WWW-Authenticate", "Basic realm=\"Aerogramme\"") + .body(Full::new(Bytes::from("Missing Authorization field")))?) + }, }; let b64_creds_maybe_padded = match auth_val.split_once(" ") { Some(("Basic", b64)) => b64, - _ => return Ok(Response::builder() - .status(400) - .body(Full::new(Bytes::from("Unsupported Authorization field")))?), + _ => { + tracing::info!("Unsupported authorization field"); + return Ok(Response::builder() + .status(400) + .body(Full::new(Bytes::from("Unsupported Authorization field")))?) + }, }; // base64urlencoded may have trailing equals, base64urlsafe has not @@ -110,9 +171,13 @@ async fn auth( // Call login provider let creds = match login.login(username, password).await { Ok(c) => c, - Err(_) => return Ok(Response::builder() - .status(401) - .body(Full::new(Bytes::from("Wrong credentials")))?), + Err(_) => { + tracing::info!(user=username, "Wrong credentials"); + return Ok(Response::builder() + .status(401) + .header("WWW-Authenticate", "Basic realm=\"Aerogramme\"") + .body(Full::new(Bytes::from("Wrong credentials")))?) 
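// Illustrative sketch, not part of this patch: how a client builds the
// `Authorization` value this handler expects, i.e. "Basic " followed by the
// base64 encoding of "username:password". It relies only on the `base64` 0.21
// engine API already in the workspace; the helper name is made up.
fn basic_auth_header(username: &str, password: &str) -> String {
    use base64::Engine as _;
    let credentials = format!("{}:{}", username, password);
    format!(
        "Basic {}",
        base64::engine::general_purpose::STANDARD.encode(credentials.as_bytes())
    )
}
// Usage: `basic_auth_header("alice", "hunter2")` produces the same header that
// `curl -u alice:hunter2 https://example.com/alice/calendar/` would send.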
+ }, }; // Build a user @@ -124,6 +189,7 @@ async fn auth( async fn router(user: std::sync::Arc, req: Request) -> Result>> { let path_segments: Vec<_> = req.uri().path().split("/").filter(|s| *s != "").collect(); + tracing::info!("router"); match path_segments.as_slice() { [] => tracing::info!("root"), [ username, ..] if *username != user.username => return Ok(Response::builder() diff --git a/aero-proto/src/imap/mod.rs b/aero-proto/src/imap/mod.rs index ae3b58f..7183a78 100644 --- a/aero-proto/src/imap/mod.rs +++ b/aero-proto/src/imap/mod.rs @@ -333,85 +333,4 @@ impl NetLoop { }; } } - - /* - async fn idle_mode(&mut self, mut buff: BytesMut, stop: Arc) -> Result { - // Flush send - loop { - tracing::trace!("flush server send"); - match self.server.progress_send().await? { - Some(..) => continue, - None => break, - } - } - - tokio::select! { - // Receiving IDLE event from background - maybe_msg = self.resp_rx.recv() => match maybe_msg { - // Session decided idle is terminated - Some(ResponseOrIdle::Response(response)) => { - tracing::trace!("server imap session said idle is done, sending response done, switching to interactive"); - for body_elem in response.body.into_iter() { - let _handle = match body_elem { - Body::Data(d) => self.server.enqueue_data(d), - Body::Status(s) => self.server.enqueue_status(s), - }; - } - self.server.enqueue_status(response.completion); - return Ok(LoopMode::Interactive) - }, - // Session has some information for user - Some(ResponseOrIdle::IdleEvent(elems)) => { - tracing::trace!("server imap session has some change to communicate to the client"); - for body_elem in elems.into_iter() { - let _handle = match body_elem { - Body::Data(d) => self.server.enqueue_data(d), - Body::Status(s) => self.server.enqueue_status(s), - }; - } - self.cmd_tx.try_send(Request::Idle)?; - return Ok(LoopMode::Idle(buff, stop)) - }, - - // Session crashed - None => { - self.server.enqueue_status(Status::bye(None, "Internal session exited").unwrap()); - tracing::error!("session task exited for {:?}, quitting", self.ctx.addr); - return Ok(LoopMode::Interactive) - }, - - // Session can't start idling while already idling, it's a logic error! - Some(ResponseOrIdle::StartIdle(..)) => bail!("can't start idling while already idling!"), - }, - - // User is trying to interact with us - read_client_result = self.server.stream.read(&mut buff) => { - let _bytes_read = read_client_result?; - use imap_codec::decode::Decoder; - let codec = imap_codec::IdleDoneCodec::new(); - tracing::trace!("client sent some data for the server IMAP session"); - match codec.decode(&buff) { - Ok(([], imap_codec::imap_types::extensions::idle::IdleDone)) => { - // Session will be informed that it must stop idle - // It will generate the "done" message and change the loop mode - tracing::trace!("client sent DONE and want to stop IDLE"); - stop.notify_one() - }, - Err(_) => { - tracing::trace!("Unable to decode DONE, maybe not enough data were sent?"); - }, - _ => bail!("Client sent data after terminating the continuation without waiting for the server. 
This is an unsupported behavior and bug in Aerogramme, quitting."), - }; - - return Ok(LoopMode::Idle(buff, stop)) - }, - - // When receiving a CTRL+C - _ = self.ctx.must_exit.changed() => { - tracing::trace!("CTRL+C sent, aborting IDLE for this session"); - self.server.enqueue_status(Status::bye(None, "Server is being shutdown").unwrap()); - return Ok(LoopMode::Interactive) - }, - }; - }*/ } diff --git a/aero-user/src/config.rs b/aero-user/src/config.rs index 7de2eac..44b1239 100644 --- a/aero-user/src/config.rs +++ b/aero-user/src/config.rs @@ -23,6 +23,7 @@ pub struct ProviderConfig { pub imap_unsecure: Option, pub lmtp: Option, pub auth: Option, + pub dav: Option, pub dav_unsecure: Option, pub users: UserManagement, } @@ -58,6 +59,13 @@ pub struct DavUnsecureConfig { pub bind_addr: SocketAddr, } +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct DavConfig { + pub bind_addr: SocketAddr, + pub certs: PathBuf, + pub key: PathBuf, +} + #[derive(Serialize, Deserialize, Debug, Clone)] pub struct ImapUnsecureConfig { pub bind_addr: SocketAddr, diff --git a/aerogramme/src/main.rs b/aerogramme/src/main.rs index 4251520..624e8e2 100644 --- a/aerogramme/src/main.rs +++ b/aerogramme/src/main.rs @@ -171,6 +171,7 @@ async fn main() -> Result<()> { AnyConfig::Provider(ProviderConfig { pid: None, imap: None, + dav: None, imap_unsecure: Some(ImapUnsecureConfig { bind_addr: SocketAddr::new(IpAddr::V6(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1)), 1143), }), diff --git a/aerogramme/src/server.rs b/aerogramme/src/server.rs index e302db3..e57cd72 100644 --- a/aerogramme/src/server.rs +++ b/aerogramme/src/server.rs @@ -21,6 +21,7 @@ pub struct Server { imap_server: Option, auth_server: Option, dav_unsecure_server: Option, + dav_server: Option, pid_file: Option, } @@ -37,6 +38,7 @@ impl Server { imap_server: None, auth_server: None, dav_unsecure_server: None, + dav_server: None, pid_file: config.pid, }) } @@ -63,12 +65,17 @@ impl Server { let dav_unsecure_server = config .dav_unsecure .map(|dav_config| dav::new_unsecure(dav_config, login.clone())); + let dav_server = config + .dav + .map(|dav_config| dav::new(dav_config, login.clone())) + .transpose()?; Ok(Self { lmtp_server, imap_unsecure_server, imap_server, dav_unsecure_server, + dav_server, auth_server, pid_file: config.pid, }) @@ -125,6 +132,12 @@ impl Server { None => Ok(()), Some(s) => s.run(exit_signal.clone()).await, } + }, + async { + match self.dav_server { + None => Ok(()), + Some(s) => s.run(exit_signal.clone()).await, + } } )?; -- cgit v1.2.3 From 902d33c4344f4e38c021ff20b2197ee1dfbd347f Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Sat, 16 Mar 2024 16:48:46 +0100 Subject: bind streaming codec to hyper 1.x --- Cargo.lock | 12 ++-- Cargo.toml | 7 +- aero-dav/src/calencoder.rs | 18 ++++- aero-dav/src/error.rs | 19 ++++++ aero-dav/src/realization.rs | 4 +- aero-dav/src/xml.rs | 6 +- aero-proto/Cargo.toml | 2 + aero-proto/src/dav.rs | 157 +++++++++++++++++++++++++++++++++++++------- 8 files changed, 186 insertions(+), 39 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 77dda64..c6228af 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -91,6 +91,7 @@ dependencies = [ "hyper-util", "imap-codec", "imap-flow", + "quick-xml", "rustls 0.22.2", "rustls-pemfile 2.1.1", "smtp-message", @@ -98,6 +99,7 @@ dependencies = [ "thiserror", "tokio", "tokio-rustls 0.25.0", + "tokio-stream", "tokio-util", "tracing", ] @@ -1824,12 +1826,12 @@ dependencies = [ [[package]] name = "http-body-util" -version = "0.1.0" +version = "0.1.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "41cb79eb393015dadd30fc252023adb0b2400a0caee0fa2a077e6e21a551e840" +checksum = "0475f8b2ac86659c21b64320d5d653f9efe42acd2a4e560073ec61a155a34f1d" dependencies = [ "bytes", - "futures-util", + "futures-core", "http 1.1.0", "http-body 1.0.0", "pin-project-lite 0.2.13", @@ -3381,9 +3383,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.35.1" +version = "1.36.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c89b4efa943be685f629b149f53829423f8f5531ea21249408e8e2f8671ec104" +checksum = "61285f6515fa018fb2d1e46eb21223fff441ee8db5d0f1435e8ab4f5cdb80931" dependencies = [ "backtrace", "bytes", diff --git a/Cargo.toml b/Cargo.toml index a18c41c..d4bc543 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -24,8 +24,9 @@ aero-proto = { version = "0.3.0", path = "aero-proto" } aerogramme = { version = "0.3.0", path = "aerogramme" } # async runtime -tokio = { version = "1.18", default-features = false, features = ["rt", "rt-multi-thread", "io-util", "net", "time", "macros", "sync", "signal", "fs"] } +tokio = { version = "1.36", default-features = false, features = ["rt", "rt-multi-thread", "io-util", "net", "time", "macros", "sync", "signal", "fs"] } tokio-util = { version = "0.7", features = [ "compat" ] } +tokio-stream = { version = "0.1" } futures = "0.3" # debug @@ -57,8 +58,8 @@ imap-codec = { version = "2.0.0", features = ["bounded-static", "ext_condstore_q imap-flow = { git = "https://github.com/duesee/imap-flow.git", branch = "main" } # http & web -http = "1.0" -http-body-util = "0.1" +http = "1.1" +http-body-util = "0.1.1" hyper = "1.2" hyper-rustls = { version = "0.26", features = ["http2"] } hyper-util = { version = "0.1", features = ["full"] } diff --git a/aero-dav/src/calencoder.rs b/aero-dav/src/calencoder.rs index fff4d47..e00876d 100644 --- a/aero-dav/src/calencoder.rs +++ b/aero-dav/src/calencoder.rs @@ -393,7 +393,14 @@ impl QWrite for CompKind { for comp in many_comp.iter() { // Required: recursion in an async fn requires boxing // rustc --explain E0733 - Box::pin(comp.qwrite(xml)).await?; + // Cycle detected when computing type of ... + // For more information about this error, try `rustc --explain E0391`. + // https://github.com/rust-lang/rust/issues/78649 + #[inline(always)] + fn recurse<'a>(comp: &'a Comp, xml: &'a mut Writer) -> futures::future::BoxFuture<'a, Result<(), QError>> { + Box::pin(comp.qwrite(xml)) + } + recurse(comp, xml).await?; } Ok(()) } @@ -525,7 +532,14 @@ impl QWrite for CompFilterMatch { for comp_item in self.comp_filter.iter() { // Required: recursion in an async fn requires boxing // rustc --explain E0733 - Box::pin(comp_item.qwrite(xml)).await?; + // Cycle detected when computing type of ... + // For more information about this error, try `rustc --explain E0391`. 
+ // https://github.com/rust-lang/rust/issues/78649 + #[inline(always)] + fn recurse<'a>(comp: &'a CompFilter, xml: &'a mut Writer) -> futures::future::BoxFuture<'a, Result<(), QError>> { + Box::pin(comp.qwrite(xml)) + } + recurse(comp_item, xml).await?; } Ok(()) } diff --git a/aero-dav/src/error.rs b/aero-dav/src/error.rs index f1b5cba..570f779 100644 --- a/aero-dav/src/error.rs +++ b/aero-dav/src/error.rs @@ -15,6 +15,25 @@ pub enum ParsingError { Int(std::num::ParseIntError), Eof } +impl std::fmt::Display for ParsingError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Recoverable => write!(f, "Recoverable"), + Self::MissingChild => write!(f, "Missing child"), + Self::MissingAttribute => write!(f, "Missing attribute"), + Self::NamespacePrefixAlreadyUsed => write!(f, "Namespace prefix already used"), + Self::WrongToken => write!(f, "Wrong token"), + Self::TagNotFound => write!(f, "Tag not found"), + Self::InvalidValue => write!(f, "Invalid value"), + Self::Utf8Error(_) => write!(f, "Utf8 Error"), + Self::QuickXml(_) => write!(f, "Quick XML error"), + Self::Chrono(_) => write!(f, "Chrono error"), + Self::Int(_) => write!(f, "Number parsing error"), + Self::Eof => write!(f, "Found EOF while expecting data"), + } + } +} +impl std::error::Error for ParsingError {} impl From for ParsingError { fn from(value: AttrError) -> Self { Self::QuickXml(value.into()) diff --git a/aero-dav/src/realization.rs b/aero-dav/src/realization.rs index 5781637..a7bbb16 100644 --- a/aero-dav/src/realization.rs +++ b/aero-dav/src/realization.rs @@ -11,8 +11,8 @@ impl xml::QRead for Disabled { } } impl xml::QWrite for Disabled { - async fn qwrite(&self, _xml: &mut xml::Writer) -> Result<(), quick_xml::Error> { - unreachable!(); + fn qwrite(&self, _xml: &mut xml::Writer) -> impl futures::Future> + Send { + async { unreachable!(); } } } diff --git a/aero-dav/src/xml.rs b/aero-dav/src/xml.rs index 1f8a6b1..e078c6f 100644 --- a/aero-dav/src/xml.rs +++ b/aero-dav/src/xml.rs @@ -12,19 +12,19 @@ pub const CAL_URN: &[u8] = b"urn:ietf:params:xml:ns:caldav"; pub const CARD_URN: &[u8] = b"urn:ietf:params:xml:ns:carddav"; // Async traits -pub trait IWrite = AsyncWrite + Unpin; +pub trait IWrite = AsyncWrite + Unpin + Send; pub trait IRead = AsyncBufRead + Unpin; // Serialization/Deserialization traits pub trait QWrite { - fn qwrite(&self, xml: &mut Writer) -> impl Future>; + fn qwrite(&self, xml: &mut Writer) -> impl Future> + Send; } pub trait QRead { fn qread(xml: &mut Reader) -> impl Future>; } // The representation of an XML node in Rust -pub trait Node = QRead + QWrite + std::fmt::Debug + PartialEq; +pub trait Node = QRead + QWrite + std::fmt::Debug + PartialEq + Sync; // --------------- diff --git a/aero-proto/Cargo.toml b/aero-proto/Cargo.toml index df8c696..e9f28d1 100644 --- a/aero-proto/Cargo.toml +++ b/aero-proto/Cargo.toml @@ -22,6 +22,7 @@ futures.workspace = true tokio.workspace = true tokio-util.workspace = true tokio-rustls.workspace = true +tokio-stream.workspace = true rustls.workspace = true rustls-pemfile.workspace = true imap-codec.workspace = true @@ -33,3 +34,4 @@ duplexify.workspace = true smtp-message.workspace = true smtp-server.workspace = true tracing.workspace = true +quick-xml.workspace = true diff --git a/aero-proto/src/dav.rs b/aero-proto/src/dav.rs index 42fde2c..3981b61 100644 --- a/aero-proto/src/dav.rs +++ b/aero-proto/src/dav.rs @@ -6,6 +6,8 @@ use base64::Engine; use hyper::service::service_fn; use hyper::{Request, Response, body::Bytes}; use 
hyper::server::conn::http1 as http; +use hyper::rt::{Read, Write}; +use hyper::body::Incoming; use hyper_util::rt::TokioIo; use http_body_util::Full; use futures::stream::{FuturesUnordered, StreamExt}; @@ -13,13 +15,16 @@ use tokio::net::TcpListener; use tokio::sync::watch; use tokio_rustls::TlsAcceptor; use tokio::net::TcpStream; -use hyper::rt::{Read, Write}; use tokio::io::{AsyncRead, AsyncWrite}; +use tokio::io::AsyncWriteExt; use rustls_pemfile::{certs, private_key}; use aero_user::config::{DavConfig, DavUnsecureConfig}; use aero_user::login::ArcLoginProvider; use aero_collections::user::User; +use aero_dav::types::{PropFind, Multistatus, PropValue, ResponseDescription}; +use aero_dav::realization::{Core, Calendar}; +use aero_dav::xml as dav; pub struct Server { bind_addr: SocketAddr, @@ -94,13 +99,10 @@ impl Server { //abitrarily bound //@FIXME replace with a handler supporting http2 and TLS - match http::Builder::new().serve_connection(stream, service_fn(|req: Request| { let login = login.clone(); tracing::info!("{:?} {:?}", req.method(), req.uri()); - async move { - auth(login, req).await - } + auth(login, req) })).await { Err(e) => tracing::warn!(err=?e, "connection failed"), Ok(()) => tracing::trace!("connection terminated with success"), @@ -127,11 +129,13 @@ impl Server { } } +use http_body_util::BodyExt; + //@FIXME We should not support only BasicAuth async fn auth( login: ArcLoginProvider, - req: Request, -) -> Result>> { + req: Request, +) -> Result>> { tracing::info!("headers: {:?}", req.headers()); let auth_val = match req.headers().get(hyper::header::AUTHORIZATION) { @@ -141,7 +145,7 @@ async fn auth( return Ok(Response::builder() .status(401) .header("WWW-Authenticate", "Basic realm=\"Aerogramme\"") - .body(Full::new(Bytes::from("Missing Authorization field")))?) + .body(text_body("Missing Authorization field"))?) }, }; @@ -151,7 +155,7 @@ async fn auth( tracing::info!("Unsupported authorization field"); return Ok(Response::builder() .status(400) - .body(Full::new(Bytes::from("Unsupported Authorization field")))?) + .body(text_body("Unsupported Authorization field"))?) }, }; @@ -176,7 +180,7 @@ async fn auth( return Ok(Response::builder() .status(401) .header("WWW-Authenticate", "Basic realm=\"Aerogramme\"") - .body(Full::new(Bytes::from("Wrong credentials")))?) + .body(text_body("Wrong credentials"))?) }, }; @@ -187,26 +191,131 @@ async fn auth( router(user, req).await } -async fn router(user: std::sync::Arc, req: Request) -> Result>> { - let path_segments: Vec<_> = req.uri().path().split("/").filter(|s| *s != "").collect(); - tracing::info!("router"); - match path_segments.as_slice() { - [] => tracing::info!("root"), - [ username, ..] 
if *username != user.username => return Ok(Response::builder() +async fn router(user: std::sync::Arc, req: Request) -> Result>> { + let path = req.uri().path().to_string(); + let path_segments: Vec<_> = path.split("/").filter(|s| *s != "").collect(); + let method = req.method().as_str().to_uppercase(); + + match (method.as_str(), path_segments.as_slice()) { + ("PROPFIND", []) => propfind_root(user, req).await, + (_, [ username, ..]) if *username != user.username => return Ok(Response::builder() .status(403) - .body(Full::new(Bytes::from("Accessing other user ressources is not allowed")))?), - [ _ ] => tracing::info!("user home"), - [ _, "calendar" ] => tracing::info!("user calendars"), - [ _, "calendar", colname ] => tracing::info!(name=colname, "selected calendar"), - [ _, "calendar", colname, member ] => tracing::info!(name=colname, obj=member, "selected event"), + .body(text_body("Accessing other user ressources is not allowed"))?), + ("PROPFIND", [ _ ]) => propfind_home(user, &req).await, + ("PROPFIND", [ _, "calendar" ]) => propfind_all_calendars(user, &req).await, + ("PROPFIND", [ _, "calendar", colname ]) => propfind_this_calendar(user, &req, colname).await, + ("PROPFIND", [ _, "calendar", colname, event ]) => propfind_event(user, req, colname, event).await, _ => return Ok(Response::builder() - .status(404) - .body(Full::new(Bytes::from("Resource not found")))?), + .status(501) + .body(text_body("Not implemented"))?), } - Ok(Response::new(Full::new(Bytes::from("Hello World!")))) } +/// +/// +/// + +async fn propfind_root(user: std::sync::Arc, req: Request) -> Result>> { + tracing::info!("root"); + + let r = deserialize::>(req).await?; + println!("r: {:?}", r); + serialize(Multistatus::> { + responses: vec![], + responsedescription: Some(ResponseDescription("hello world".to_string())), + }) +} + +async fn propfind_home(user: std::sync::Arc, req: &Request) -> Result>> { + tracing::info!("user home"); + Ok(Response::new(text_body("Hello World!"))) +} + +async fn propfind_all_calendars(user: std::sync::Arc, req: &Request) -> Result>> { + tracing::info!("calendar"); + Ok(Response::new(text_body("Hello World!"))) +} + +async fn propfind_this_calendar( + user: std::sync::Arc, + req: &Request, + colname: &str +) -> Result>> { + tracing::info!(name=colname, "selected calendar"); + Ok(Response::new(text_body("Hello World!"))) +} + +async fn propfind_event( + user: std::sync::Arc, + req: Request, + colname: &str, + event: &str, +) -> Result>> { + tracing::info!(name=colname, obj=event, "selected event"); + Ok(Response::new(text_body("Hello World!"))) +} + + #[allow(dead_code)] async fn collections(_user: std::sync::Arc, _req: Request) -> Result>> { unimplemented!(); } + + +use futures::stream::TryStreamExt; +use http_body_util::{BodyStream, Empty}; +use http_body_util::StreamBody; +use http_body_util::combinators::BoxBody; +use hyper::body::Frame; +use tokio_util::sync::PollSender; +use std::io::{Error, ErrorKind}; +use futures::sink::SinkExt; +use tokio_util::io::{SinkWriter, CopyToBytes}; + + +fn text_body(txt: &'static str) -> BoxBody { + BoxBody::new(Full::new(Bytes::from(txt)).map_err(|e| match e {})) +} + +fn serialize(elem: T) -> Result>> { + let (tx, rx) = tokio::sync::mpsc::channel::(1); + + // Build the writer + tokio::task::spawn(async move { + let sink = PollSender::new(tx).sink_map_err(|_| Error::from(ErrorKind::BrokenPipe)); + let mut writer = SinkWriter::new(CopyToBytes::new(sink)); + let q = quick_xml::writer::Writer::new_with_indent(&mut writer, b' ', 4); + let ns_to_apply = 
vec![ ("xmlns:D".into(), "DAV:".into()) ]; + let mut qwriter = dav::Writer { q, ns_to_apply }; + match elem.qwrite(&mut qwriter).await { + Ok(_) => tracing::debug!("fully serialized object"), + Err(e) => tracing::error!(err=?e, "failed to serialize object"), + } + }); + + + // Build the reader + let recv = tokio_stream::wrappers::ReceiverStream::new(rx); + let stream = StreamBody::new(recv.map(|v| Ok(Frame::data(v)))); + let boxed_body = BoxBody::new(stream); + + let response = Response::builder() + .status(hyper::StatusCode::OK) + .body(boxed_body)?; + + Ok(response) +} + + +/// Deserialize a request body to an XML request +async fn deserialize>(req: Request) -> Result { + let stream_of_frames = BodyStream::new(req.into_body()); + let stream_of_bytes = stream_of_frames + .try_filter_map(|frame| async move { Ok(frame.into_data().ok()) }) + .map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err)); + let async_read = tokio_util::io::StreamReader::new(stream_of_bytes); + let async_read = std::pin::pin!(async_read); + let mut rdr = dav::Reader::new(quick_xml::reader::NsReader::from_reader(async_read)).await?; + let parsed = rdr.find::().await?; + Ok(parsed) +} -- cgit v1.2.3 From f372a95b017587bd964ef80fdfdef7c2128bca15 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Sun, 17 Mar 2024 10:31:05 +0100 Subject: basic propfind --- aero-dav/src/caltypes.rs | 72 +++++++++++++++++----------------- aero-dav/src/realization.rs | 6 +-- aero-dav/src/types.rs | 66 +++++++++++++++---------------- aero-dav/src/xml.rs | 2 +- aero-proto/src/dav.rs | 95 ++++++++++++++++++++++++++++++++++++++------- 5 files changed, 153 insertions(+), 88 deletions(-) diff --git a/aero-dav/src/caltypes.rs b/aero-dav/src/caltypes.rs index 628ec4b..cb0a98c 100644 --- a/aero-dav/src/caltypes.rs +++ b/aero-dav/src/caltypes.rs @@ -108,13 +108,13 @@ pub struct CalendarMultiget { pub struct FreeBusyQuery(pub TimeRange); // ----- Hooks ----- -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub enum ResourceType { Calendar, } /// Check the matching Property object for documentation -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub enum PropertyRequest { CalendarDescription, CalendarTimezone, @@ -129,7 +129,7 @@ pub enum PropertyRequest { CalendarData(CalendarDataRequest), } -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub enum Property { /// Name: calendar-description /// @@ -609,7 +609,7 @@ pub enum Property { CalendarData(CalendarDataPayload), } -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub enum Violation { /// (DAV:resource-must-be-null): A resource MUST NOT exist at the /// Request-URI; @@ -780,7 +780,7 @@ pub enum Violation { /// If the client chooses a collation not supported by the server, the /// server MUST respond with a CALDAV:supported-collation precondition /// error response. -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct SupportedCollation(pub Collation); /// @@ -789,7 +789,7 @@ pub struct SupportedCollation(pub Collation); /// when nested in the DAV:prop XML element in a calendaring /// REPORT response to specify the content of a returned /// calendar object resource. 
-#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct CalendarDataPayload { pub mime: Option, pub payload: String, @@ -802,7 +802,7 @@ pub struct CalendarDataPayload { /// when nested in the DAV:prop XML element in a calendaring /// REPORT request to specify which parts of calendar object /// resources should be returned in the response; -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct CalendarDataRequest { pub mime: Option, pub comp: Option, @@ -817,7 +817,7 @@ pub struct CalendarDataRequest { /// when nested in the CALDAV:supported-calendar-data property /// to specify a supported media type for calendar object /// resources; -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct CalendarDataEmpty(pub Option); /// ); /// version value: a version string /// attributes can be used on all three variants of the /// CALDAV:calendar-data XML element. -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct CalendarDataSupport { pub content_type: String, pub version: String, @@ -852,7 +852,7 @@ pub struct CalendarDataSupport { /// However, the CALDAV:prop and CALDAV:allprop elements are defined /// in the "urn:ietf:params:xml:ns:caldav" namespace instead of the /// "DAV:" namespace. -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct Comp { pub name: Component, pub prop_kind: Option, @@ -872,7 +872,7 @@ pub struct Comp { /// /// /// -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct CompSupport(pub Component); /// Name: allcomp @@ -888,7 +888,7 @@ pub struct CompSupport(pub Component); /// Definition: /// /// -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub enum CompKind { AllComp, Comp(Vec), @@ -912,7 +912,7 @@ pub enum CompKind { /// allprop element defined in [RFC2518]. However, the CALDAV:allprop /// element is defined in the "urn:ietf:params:xml:ns:caldav" /// namespace instead of the "DAV:" namespace. -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub enum PropKind { AllProp, Prop(Vec), @@ -942,13 +942,13 @@ pub enum PropKind { /// element defined in [RFC2518]. However, the CALDAV:prop element is /// defined in the "urn:ietf:params:xml:ns:caldav" namespace instead /// of the "DAV:" namespace. 
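// Illustrative sketch, not part of this patch: building the RFC 4791
// "VEVENTs between two dates" query filter out of the types defined in this
// file, mirroring the `rfc_recurring_evt` decoder test earlier in this series.
// Import paths and the chrono helpers are assumed; the time-range attribute
// text in the comment follows the RFC 4791 examples.
use aero_dav::caltypes::{
    CompFilter, CompFilterMatch, CompFilterRules, Component, Filter, TimeRange,
};
use chrono::{TimeZone, Utc};

fn sample_event_filter() -> Filter {
    // Roughly equivalent to:
    //   <C:filter>
    //     <C:comp-filter name="VCALENDAR">
    //       <C:comp-filter name="VEVENT">
    //         <C:time-range start="20060103T000000Z" end="20060105T000000Z"/>
    //       </C:comp-filter>
    //     </C:comp-filter>
    //   </C:filter>
    Filter(CompFilter {
        name: Component::VCalendar,
        additional_rules: Some(CompFilterRules::Matches(CompFilterMatch {
            time_range: None,
            prop_filter: vec![],
            comp_filter: vec![CompFilter {
                name: Component::VEvent,
                additional_rules: Some(CompFilterRules::Matches(CompFilterMatch {
                    time_range: Some(TimeRange::FullRange(
                        Utc.with_ymd_and_hms(2006, 1, 3, 0, 0, 0).unwrap(),
                        Utc.with_ymd_and_hms(2006, 1, 5, 0, 0, 0).unwrap(),
                    )),
                    prop_filter: vec![],
                    comp_filter: vec![],
                })),
            }],
        })),
    })
}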
-#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct CalProp { pub name: ComponentProperty, pub novalue: Option, } -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub enum RecurrenceModifier { Expand(Expand), LimitRecurrenceSet(LimitRecurrenceSet), @@ -994,7 +994,7 @@ pub enum RecurrenceModifier { /// end CDATA #REQUIRED> /// start value: an iCalendar "date with UTC time" /// end value: an iCalendar "date with UTC time" -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct Expand(pub DateTime, pub DateTime); /// CALDAV:limit-recurrence-set XML Element @@ -1042,7 +1042,7 @@ pub struct Expand(pub DateTime, pub DateTime); /// end CDATA #REQUIRED> /// start value: an iCalendar "date with UTC time" /// end value: an iCalendar "date with UTC time" -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct LimitRecurrenceSet(pub DateTime, pub DateTime); /// Name: limit-freebusy-set @@ -1073,11 +1073,11 @@ pub struct LimitRecurrenceSet(pub DateTime, pub DateTime); /// end CDATA #REQUIRED> /// start value: an iCalendar "date with UTC time" /// end value: an iCalendar "date with UTC time" -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct LimitFreebusySet(pub DateTime, pub DateTime); /// Used by CalendarQuery & CalendarMultiget -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub enum CalendarSelector { AllProp, PropName, @@ -1135,20 +1135,20 @@ pub enum CalendarSelector { /// name value: a calendar object or calendar component /// type (e.g., VEVENT) /// ``` -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct CompFilter { pub name: Component, // Option 1 = None, Option 2, 3, 4 = Some pub additional_rules: Option, } -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub enum CompFilterRules { // Option 2 IsNotDefined, // Options 3 & 4 Matches(CompFilterMatch), } -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct CompFilterMatch { pub time_range: Option, pub prop_filter: Vec, @@ -1201,26 +1201,26 @@ pub struct CompFilterMatch { /// /// name value: a calendar property name (e.g., ATTENDEE) /// ``` -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct PropFilter { pub name: ComponentProperty, // None = Option 1, Some() = Option 2, 3 & 4 pub additional_rules: Option, } -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub enum PropFilterRules { // Option 2 IsNotDefined, // Options 3 & 4 Match(PropFilterMatch), } -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct PropFilterMatch { pub time_range: Option, pub time_or_text: Option, pub param_filter: Vec, } -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub enum TimeOrText { Time(TimeRange), Text(TextMatch), @@ -1254,7 +1254,7 @@ pub enum TimeOrText { /// PCDATA value: string /// -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct TextMatch { pub collation: Option, pub negate_condition: Option, @@ -1292,12 +1292,12 @@ pub struct TextMatch { /// /// name value: a property parameter name (e.g., PARTSTAT) /// ``` -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct ParamFilter { pub name: PropertyParameter, pub additional_rules: Option, } -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub enum ParamFilterMatch { IsNotDefined, Match(TextMatch), @@ -1353,7 +1353,7 @@ pub enum ParamFilterMatch { /// /// /// PCDATA value: an iCalendar object 
with exactly one VTIMEZONE -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct TimeZone(pub String); /// Name: filter @@ -1369,7 +1369,7 @@ pub struct TimeZone(pub String); /// /// Definition: /// -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct Filter(pub CompFilter); /// Name: time-range @@ -1381,7 +1381,7 @@ pub struct Filter(pub CompFilter); /// end CDATA #IMPLIED> /// start value: an iCalendar "date with UTC time" /// end value: an iCalendar "date with UTC time" -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub enum TimeRange { OnlyStart(DateTime), OnlyEnd(DateTime), @@ -1391,7 +1391,7 @@ pub enum TimeRange { // ----------------------- ENUM ATTRIBUTES --------------------- /// Known components -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub enum Component { VCalendar, VJournal, @@ -1432,11 +1432,11 @@ impl Component { /// name="VERSION", name="SUMMARY", etc. /// Can be set on different objects: VCalendar, VEvent, etc. /// Might be replaced by an enum later -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct ComponentProperty(pub String); /// like PARSTAT -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct PropertyParameter(pub String); impl PropertyParameter { pub fn as_str<'a>(&'a self) -> &'a str { @@ -1444,7 +1444,7 @@ impl PropertyParameter { } } -#[derive(Default,Debug,PartialEq)] +#[derive(Default,Debug,PartialEq,Clone)] pub enum Collation { #[default] AsciiCaseMap, diff --git a/aero-dav/src/realization.rs b/aero-dav/src/realization.rs index a7bbb16..8c47fad 100644 --- a/aero-dav/src/realization.rs +++ b/aero-dav/src/realization.rs @@ -3,7 +3,7 @@ use super::caltypes as cal; use super::xml; use super::error; -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct Disabled(()); impl xml::QRead for Disabled { async fn qread(_xml: &mut xml::Reader) -> Result { @@ -20,7 +20,7 @@ impl xml::QWrite for Disabled { /// /// Any extension is kooh is disabled through an object we can't build /// due to a private inner element. -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct Core {} impl dav::Extension for Core { type Error = Disabled; @@ -30,7 +30,7 @@ impl dav::Extension for Core { } // WebDAV with the base Calendar implementation (RFC4791) -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct Calendar {} impl dav::Extension for Calendar { diff --git a/aero-dav/src/types.rs b/aero-dav/src/types.rs index 2489c0a..79e98fd 100644 --- a/aero-dav/src/types.rs +++ b/aero-dav/src/types.rs @@ -6,7 +6,7 @@ use super::xml; /// It's how we implement a DAV extension /// (That's the dark magic part...) -pub trait Extension: std::fmt::Debug + PartialEq { +pub trait Extension: std::fmt::Debug + PartialEq + Clone { type Error: xml::Node; type Property: xml::Node; type PropertyRequest: xml::Node; @@ -20,7 +20,7 @@ pub trait Extension: std::fmt::Debug + PartialEq { /// Purpose: Describes a lock on a resource. /// -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct ActiveLock { pub lockscope: LockScope, pub locktype: LockType, @@ -54,7 +54,7 @@ pub struct Collection{} /// Value: "0" | "1" | "infinity" /// /// -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub enum Depth { Zero, One, @@ -77,9 +77,9 @@ pub enum Depth { /// postcondition code. Unrecognized elements MUST be ignored. 
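The hunks above add Clone to every WebDAV/CalDAV type and also to the Extension trait bound (and, further down, to the Node trait alias), presumably so that the generic containers built on top of them can themselves derive Clone: a #[derive(Clone)] on a generic struct only yields a usable impl when its type parameters are Clone as well. The standalone sketch below uses stand-in types, not the aero-dav ones, to show why the bound has to live on the trait itself.

use std::fmt::Debug;

// Stand-in for the Extension trait; the real one also carries Property,
// PropertyRequest, Error and ResourceType associated types.
trait Extension: Debug + PartialEq + Clone {
    type PropertyRequest: Debug + PartialEq + Clone;
}

// A generic wrapper similar in shape to a PropName over an extension.
// The derived Clone impl requires the extension type to be Clone, hence the
// bound on the trait above.
#[derive(Debug, PartialEq, Clone)]
struct PropName<E: Extension>(Vec<E::PropertyRequest>);

// Unit struct playing the role of the Core / Calendar realizations.
#[derive(Debug, PartialEq, Clone)]
struct Core {}
impl Extension for Core {
    type PropertyRequest = String; // placeholder for the real request enum
}

fn main() {
    let names: PropName<Core> = PropName(vec!["displayname".into()]);
    let copy = names.clone(); // only compiles because Core and its request type are Clone
    assert_eq!(names, copy);
}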
/// /// -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct Error(pub Vec>); -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub enum Violation { /// Name: lock-token-matches-request-uri /// @@ -190,7 +190,7 @@ pub struct Exclusive {} /// Value: Simple-ref /// /// -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct Href(pub String); @@ -206,7 +206,7 @@ pub struct Href(pub String); /// standards. This element MUST NOT contain text or mixed content. /// /// -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct Include(pub Vec>); /// 14.9. location XML Element @@ -223,7 +223,7 @@ pub struct Include(pub Vec>); /// that would be used in a Location header. /// /// -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct Location(pub Href); /// 14.10. lockentry XML Element @@ -234,7 +234,7 @@ pub struct Location(pub Href); /// resource. /// /// -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct LockEntry { pub lockscope: LockScope, pub locktype: LockType, @@ -248,7 +248,7 @@ pub struct LockEntry { /// specify the type of lock the client wishes to have created. /// /// -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct LockInfo { pub lockscope: LockScope, pub locktype: LockType, @@ -267,7 +267,7 @@ pub struct LockInfo { /// values and the response to LOCK requests. /// /// -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct LockRoot(pub Href); /// 14.13. lockscope XML Element @@ -277,7 +277,7 @@ pub struct LockRoot(pub Href); /// Purpose: Specifies whether a lock is an exclusive lock, or a shared /// lock. /// -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub enum LockScope { Exclusive, Shared @@ -293,7 +293,7 @@ pub enum LockScope { /// refers to the lock. /// /// -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct LockToken(pub Href); /// 14.15. locktype XML Element @@ -304,7 +304,7 @@ pub struct LockToken(pub Href); /// specification only defines one lock type, the write lock. /// /// -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub enum LockType { /// 14.30. write XML Element /// @@ -330,7 +330,7 @@ pub enum LockType { /// response descriptions contained within the responses. /// /// -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct Multistatus> { pub responses: Vec>, pub responsedescription: Option, @@ -360,7 +360,7 @@ pub struct Multistatus> { /// /// //@FIXME might need support for an extension -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub enum Owner { Txt(String), Href(Href), @@ -381,10 +381,10 @@ pub enum Owner { /// text or mixed content. /// /// -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct PropName(pub Vec>); -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct PropValue(pub Vec>); /// 14.19. propertyupdate XML Element @@ -397,10 +397,10 @@ pub struct PropValue(pub Vec>); /// required to modify the properties on the resource. /// /// -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct PropertyUpdate(pub Vec>); -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub enum PropertyUpdateItem { Remove(Remove), Set(Set), @@ -440,7 +440,7 @@ pub enum PropertyUpdateItem { /// values. 
/// /// -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub enum PropFind { PropName, AllProp(Option>), @@ -462,7 +462,7 @@ pub enum PropFind { /// the properties named in 'prop'. /// /// -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct PropStat> { pub prop: N, pub status: Status, @@ -483,7 +483,7 @@ pub struct PropStat> { /// the names of properties to be removed are required. /// /// -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct Remove(pub PropName); /// 14.24. response XML Element @@ -511,7 +511,7 @@ pub struct Remove(pub PropName); /// /// --- rewritten as --- /// -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub enum StatusOrPropstat> { // One status, multiple hrefs... Status(Vec, Status), @@ -519,7 +519,7 @@ pub enum StatusOrPropstat> { PropStat(Href, Vec>), } -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct Response> { pub status_or_propstat: StatusOrPropstat, pub error: Option>, @@ -538,7 +538,7 @@ pub struct Response> { /// user. /// /// -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct ResponseDescription(pub String); /// 14.26. set XML Element @@ -557,7 +557,7 @@ pub struct ResponseDescription(pub String); /// property, and MUST be subsequently retrievable using PROPFIND. /// /// -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct Set(pub PropValue); /// 14.27. shared XML Element @@ -568,7 +568,7 @@ pub struct Set(pub PropValue); /// /// /// -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct Shared {} @@ -582,7 +582,7 @@ pub struct Shared {} /// /// //@FIXME: Better typing is possible with an enum for example -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct Status(pub http::status::StatusCode); /// 14.29. timeout XML Element @@ -610,7 +610,7 @@ pub struct Status(pub http::status::StatusCode); /// elapse between granting of the lock at the server, and the automatic /// removal of the lock. The timeout value for TimeType "Second" MUST /// NOT be greater than 2^32-1. -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub enum Timeout { Seconds(u32), Infinite, @@ -644,7 +644,7 @@ pub enum Timeout { /// the header value could include LWS as defined in [RFC2616], Section /// 4.2. Server implementors SHOULD strip LWS from these values before /// using as WebDAV property values. -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub enum PropertyRequest { CreationDate, DisplayName, @@ -659,7 +659,7 @@ pub enum PropertyRequest { Extension(E::PropertyRequest), } -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub enum Property { /// 15.1. 
creationdate Property /// @@ -942,7 +942,7 @@ pub enum Property { Extension(E::Property), } -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub enum ResourceType { Collection, Extension(E::ResourceType), diff --git a/aero-dav/src/xml.rs b/aero-dav/src/xml.rs index e078c6f..827e9d0 100644 --- a/aero-dav/src/xml.rs +++ b/aero-dav/src/xml.rs @@ -24,7 +24,7 @@ pub trait QRead { } // The representation of an XML node in Rust -pub trait Node = QRead + QWrite + std::fmt::Debug + PartialEq + Sync; +pub trait Node = QRead + QWrite + std::fmt::Debug + PartialEq + Clone + Sync; // --------------- diff --git a/aero-proto/src/dav.rs b/aero-proto/src/dav.rs index 3981b61..0bbb7f7 100644 --- a/aero-proto/src/dav.rs +++ b/aero-proto/src/dav.rs @@ -22,9 +22,9 @@ use rustls_pemfile::{certs, private_key}; use aero_user::config::{DavConfig, DavUnsecureConfig}; use aero_user::login::ArcLoginProvider; use aero_collections::user::User; -use aero_dav::types::{PropFind, Multistatus, PropValue, ResponseDescription}; -use aero_dav::realization::{Core, Calendar}; -use aero_dav::xml as dav; +use aero_dav::types as dav; +use aero_dav::realization::Calendar; +use aero_dav::xml as dxml; pub struct Server { bind_addr: SocketAddr, @@ -196,7 +196,13 @@ async fn router(user: std::sync::Arc, req: Request) -> Result = path.split("/").filter(|s| *s != "").collect(); let method = req.method().as_str().to_uppercase(); + //@FIXME check depth, handle it + match (method.as_str(), path_segments.as_slice()) { + ("OPTIONS", _) => return Ok(Response::builder() + .status(200) + .header("DAV", "1") + .body(text_body(""))?), ("PROPFIND", []) => propfind_root(user, req).await, (_, [ username, ..]) if *username != user.username => return Ok(Response::builder() .status(403) @@ -216,14 +222,73 @@ async fn router(user: std::sync::Arc, req: Request) -> Result async fn propfind_root(user: std::sync::Arc, req: Request) -> Result>> { - tracing::info!("root"); + let supported_propname = vec![ + dav::PropertyRequest::DisplayName, + dav::PropertyRequest::ResourceType, + ]; + + // A client may choose not to submit a request body. An empty PROPFIND + // request body MUST be treated as if it were an 'allprop' request. + // @FIXME here we handle any invalid data as an allprop, an empty request is thus correctly + // handled, but corrupted requests are also silently handled as allprop. 
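The lenient parsing described in this comment, where anything that fails to deserialize is coerced into an allprop request, can be pictured with the small standalone sketch below; the parse function and types are hypothetical stand-ins for the aero-dav XML deserializer.

#[derive(Debug, PartialEq)]
enum Body {
    AllProp,   // no body, or the explicit <allprop/> form
    PropName,  // <propname/>
}

fn parse(body: &str) -> Result<Body, ()> {
    // Stand-in for the real XML decoding; only recognizes two literal forms.
    match body.trim() {
        "<propfind><propname/></propfind>" => Ok(Body::PropName),
        "<propfind><allprop/></propfind>" => Ok(Body::AllProp),
        _ => Err(()), // empty body or anything unrecognized
    }
}

fn effective_request(body: &str) -> Body {
    // Same shape as deserialize(req).await.unwrap_or_else(|_| PropFind::AllProp(None)).
    parse(body).unwrap_or_else(|_| Body::AllProp)
}

fn main() {
    // RFC 4918 section 9.1: an empty PROPFIND body MUST be treated as allprop.
    assert_eq!(effective_request(""), Body::AllProp);
    // The trade-off noted in the comment above: corrupted bodies are coerced too.
    assert_eq!(effective_request("<not-xml"), Body::AllProp);
    assert_eq!(effective_request("<propfind><propname/></propfind>"), Body::PropName);
}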
+ let propfind = deserialize::>(req).await.unwrap_or_else(|_| dav::PropFind::::AllProp(None)); + tracing::debug!(recv=?propfind, "inferred propfind request"); + + if matches!(propfind, dav::PropFind::PropName) { + return serialize(dav::Multistatus::> { + responses: vec![dav::Response { + status_or_propstat: dav::StatusOrPropstat::PropStat( + dav::Href(format!("./{}/", user.username)), + vec![dav::PropStat { + prop: dav::PropName(supported_propname), + status: dav::Status(hyper::StatusCode::OK), + error: None, + responsedescription: None, + }], + ), + error: None, + location: None, + responsedescription: Some(dav::ResponseDescription("user home directory".into())), + }], + responsedescription: Some(dav::ResponseDescription("propname response".to_string())), + }); + } - let r = deserialize::>(req).await?; - println!("r: {:?}", r); - serialize(Multistatus::> { - responses: vec![], - responsedescription: Some(ResponseDescription("hello world".to_string())), - }) + let propname = match propfind { + dav::PropFind::PropName => unreachable!(), + dav::PropFind::AllProp(None) => supported_propname.clone(), + dav::PropFind::AllProp(Some(dav::Include(mut include))) => { + include.extend_from_slice(supported_propname.as_slice()); + include + }, + dav::PropFind::Prop(dav::PropName(inner)) => inner, + }; + + let values = propname.iter().filter_map(|n| match n { + dav::PropertyRequest::DisplayName => Some(dav::Property::DisplayName(format!("{} home", user.username))), + dav::PropertyRequest::ResourceType => Some(dav::Property::ResourceType(vec![dav::ResourceType::Collection])), + _ => None, + }).collect(); + + let multistatus = dav::Multistatus::> { + responses: vec![ dav::Response { + status_or_propstat: dav::StatusOrPropstat::PropStat( + dav::Href(format!("./{}/", user.username)), + vec![dav::PropStat { + prop: dav::PropValue(values), + status: dav::Status(hyper::StatusCode::OK), + error: None, + responsedescription: None, + }], + ), + error: None, + location: None, + responsedescription: Some(dav::ResponseDescription("Root node".into())), + } ], + responsedescription: Some(dav::ResponseDescription("hello world".to_string())), + }; + + serialize(multistatus) } async fn propfind_home(user: std::sync::Arc, req: &Request) -> Result>> { @@ -263,7 +328,7 @@ async fn collections(_user: std::sync::Arc, _req: Request BoxBody { BoxBody::new(Full::new(Bytes::from(txt)).map_err(|e| match e {})) } -fn serialize(elem: T) -> Result>> { +fn serialize(elem: T) -> Result>> { let (tx, rx) = tokio::sync::mpsc::channel::(1); // Build the writer @@ -286,7 +351,7 @@ fn serialize(elem: T) -> Result tracing::debug!("fully serialized object"), Err(e) => tracing::error!(err=?e, "failed to serialize object"), @@ -308,14 +373,14 @@ fn serialize(elem: T) -> Result>(req: Request) -> Result { +async fn deserialize>(req: Request) -> Result { let stream_of_frames = BodyStream::new(req.into_body()); let stream_of_bytes = stream_of_frames .try_filter_map(|frame| async move { Ok(frame.into_data().ok()) }) .map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err)); let async_read = tokio_util::io::StreamReader::new(stream_of_bytes); let async_read = std::pin::pin!(async_read); - let mut rdr = dav::Reader::new(quick_xml::reader::NsReader::from_reader(async_read)).await?; + let mut rdr = dxml::Reader::new(quick_xml::reader::NsReader::from_reader(async_read)).await?; let parsed = rdr.find::().await?; Ok(parsed) } -- cgit v1.2.3 From 3b57d21e30302be409556711db98e16dc1a9badc Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Mon, 18 
Mar 2024 12:00:40 +0100 Subject: WIP DAV nodes --- aero-proto/src/dav.rs | 273 +++++++++++++++++++++++++++++++++++++++++--------- 1 file changed, 223 insertions(+), 50 deletions(-) diff --git a/aero-proto/src/dav.rs b/aero-proto/src/dav.rs index 0bbb7f7..2bc1247 100644 --- a/aero-proto/src/dav.rs +++ b/aero-proto/src/dav.rs @@ -23,6 +23,7 @@ use aero_user::config::{DavConfig, DavUnsecureConfig}; use aero_user::login::ArcLoginProvider; use aero_collections::user::User; use aero_dav::types as dav; +use aero_dav::caltypes as cal; use aero_dav::realization::Calendar; use aero_dav::xml as dxml; @@ -196,8 +197,6 @@ async fn router(user: std::sync::Arc, req: Request) -> Result = path.split("/").filter(|s| *s != "").collect(); let method = req.method().as_str().to_uppercase(); - //@FIXME check depth, handle it - match (method.as_str(), path_segments.as_slice()) { ("OPTIONS", _) => return Ok(Response::builder() .status(200) @@ -220,12 +219,20 @@ async fn router(user: std::sync::Arc, req: Request) -> Result /// /// +const SUPPORTED_PROPNAME: [dav::PropertyRequest; 2] = [ + dav::PropertyRequest::DisplayName, + dav::PropertyRequest::ResourceType, +]; async fn propfind_root(user: std::sync::Arc, req: Request) -> Result>> { - let supported_propname = vec![ + let node = RootNode {}; + let depth = depth(&req); + + + /*let supported_propname = vec![ dav::PropertyRequest::DisplayName, dav::PropertyRequest::ResourceType, - ]; + ];*/ // A client may choose not to submit a request body. An empty PROPFIND // request body MUST be treated as if it were an 'allprop' request. @@ -235,60 +242,20 @@ async fn propfind_root(user: std::sync::Arc, req: Request) -> Re tracing::debug!(recv=?propfind, "inferred propfind request"); if matches!(propfind, dav::PropFind::PropName) { - return serialize(dav::Multistatus::> { - responses: vec![dav::Response { - status_or_propstat: dav::StatusOrPropstat::PropStat( - dav::Href(format!("./{}/", user.username)), - vec![dav::PropStat { - prop: dav::PropName(supported_propname), - status: dav::Status(hyper::StatusCode::OK), - error: None, - responsedescription: None, - }], - ), - error: None, - location: None, - responsedescription: Some(dav::ResponseDescription("user home directory".into())), - }], - responsedescription: Some(dav::ResponseDescription("propname response".to_string())), - }); + return serialize(node.multistatus_name(&user, depth)); } let propname = match propfind { dav::PropFind::PropName => unreachable!(), - dav::PropFind::AllProp(None) => supported_propname.clone(), + dav::PropFind::AllProp(None) => dav::PropName(SUPPORTED_PROPNAME.to_vec()), dav::PropFind::AllProp(Some(dav::Include(mut include))) => { - include.extend_from_slice(supported_propname.as_slice()); - include + include.extend_from_slice(&SUPPORTED_PROPNAME); + dav::PropName(include) }, - dav::PropFind::Prop(dav::PropName(inner)) => inner, - }; - - let values = propname.iter().filter_map(|n| match n { - dav::PropertyRequest::DisplayName => Some(dav::Property::DisplayName(format!("{} home", user.username))), - dav::PropertyRequest::ResourceType => Some(dav::Property::ResourceType(vec![dav::ResourceType::Collection])), - _ => None, - }).collect(); - - let multistatus = dav::Multistatus::> { - responses: vec![ dav::Response { - status_or_propstat: dav::StatusOrPropstat::PropStat( - dav::Href(format!("./{}/", user.username)), - vec![dav::PropStat { - prop: dav::PropValue(values), - status: dav::Status(hyper::StatusCode::OK), - error: None, - responsedescription: None, - }], - ), - error: None, - location: None, 
- responsedescription: Some(dav::ResponseDescription("Root node".into())), - } ], - responsedescription: Some(dav::ResponseDescription("hello world".to_string())), + dav::PropFind::Prop(inner) => inner, }; - serialize(multistatus) + serialize(node.multistatus_val(&user, &propname, depth)) } async fn propfind_home(user: std::sync::Arc, req: &Request) -> Result>> { @@ -327,6 +294,8 @@ async fn collections(_user: std::sync::Arc, _req: Request) -> dav::Depth { + match req.headers().get("Depth").map(hyper::header::HeaderValue::to_str) { + Some(Ok("0")) => dav::Depth::Zero, + Some(Ok("1")) => dav::Depth::One, + _ => dav::Depth::Infinity, + } +} fn text_body(txt: &'static str) -> BoxBody { BoxBody::new(Full::new(Bytes::from(txt)).map_err(|e| match e {})) @@ -352,6 +328,11 @@ fn serialize(elem: T) -> Result (), + Err(e) => tracing::error!(err=?e, "unable to write XML declaration "), + } match elem.qwrite(&mut qwriter).await { Ok(_) => tracing::debug!("fully serialized object"), Err(e) => tracing::error!(err=?e, "failed to serialize object"), @@ -384,3 +365,195 @@ async fn deserialize>(req: Request) -> Result { let parsed = rdr.find::().await?; Ok(parsed) } + +//--- + +type ArcUser = std::sync::Arc; +trait DavNode { + // recurence + fn children(&self, user: &ArcUser) -> Vec>; + + // node properties + fn name(&self, user: &ArcUser) -> String; + fn supported_properties(&self, user: &ArcUser) -> dav::PropName; + fn properties(&self, user: &ArcUser, props: &dav::PropName) -> dav::PropValue; + + // building DAV responses + fn multistatus_name(&self, user: &ArcUser, depth: dav::Depth) -> dav::Multistatus> { + let mut names = vec![(".".into(), self.supported_properties(user))]; + if matches!(depth, dav::Depth::One | dav::Depth::Infinity) { + names.extend(self.children(user).iter().map(|c| (format!("./{}", c.name(user)), c.supported_properties(user)))); + } + + dav::Multistatus::> { + responses: names.into_iter().map(|(url, names)| dav::Response { + status_or_propstat: dav::StatusOrPropstat::PropStat( + dav::Href(url), + vec![dav::PropStat { + prop: names, + status: dav::Status(hyper::StatusCode::OK), + error: None, + responsedescription: None, + }], + ), + error: None, + location: None, + responsedescription: None, + }).collect(), + responsedescription: None, + } + } + + fn multistatus_val(&self, user: &ArcUser, props: &dav::PropName, depth: dav::Depth) -> dav::Multistatus> { + let mut values = vec![(".".into(), self.properties(user, props))]; + if matches!(depth, dav::Depth::One | dav::Depth::Infinity) { + values.extend(self + .children(user) + .iter() + .map(|c| (format!("./{}", c.name(user)), c.properties(user, props))) + ); + } + + dav::Multistatus::> { + responses: values.into_iter().map(|(url, propval)| dav::Response { + status_or_propstat: dav::StatusOrPropstat::PropStat( + dav::Href(url), + vec![dav::PropStat { + prop: propval, + status: dav::Status(hyper::StatusCode::OK), + error: None, + responsedescription: None, + }], + ), + error: None, + location: None, + responsedescription: None, + }).collect(), + responsedescription: None, + } + } +} + +struct RootNode {} +impl DavNode for RootNode { + fn name(&self, _user: &ArcUser) -> String { + "/".into() + } + fn children(&self, user: &ArcUser) -> Vec> { + vec![Box::new(HomeNode { })] + } + fn supported_properties(&self, user: &ArcUser) -> dav::PropName { + dav::PropName(vec![ + dav::PropertyRequest::DisplayName, + dav::PropertyRequest::ResourceType, + ]) + } + fn properties(&self, user: &ArcUser, prop: &dav::PropName) -> dav::PropValue { + 
dav::PropValue(prop.0.iter().filter_map(|n| match n { + dav::PropertyRequest::DisplayName => Some(dav::Property::DisplayName("DAV Root".to_string())), + dav::PropertyRequest::ResourceType => Some(dav::Property::ResourceType(vec![dav::ResourceType::Collection])), + _ => None, + }).collect()) + } +} + +struct HomeNode {} +impl DavNode for HomeNode { + fn name(&self, user: &ArcUser) -> String { + format!("{}/", user.username) + } + fn children(&self, user: &ArcUser) -> Vec> { + vec![Box::new(CalendarListNode { })] + } + fn supported_properties(&self, user: &ArcUser) -> dav::PropName { + dav::PropName(vec![ + dav::PropertyRequest::DisplayName, + dav::PropertyRequest::ResourceType, + ]) + } + fn properties(&self, user: &ArcUser, prop: &dav::PropName) -> dav::PropValue { + dav::PropValue(prop.0.iter().filter_map(|n| match n { + dav::PropertyRequest::DisplayName => Some(dav::Property::DisplayName(format!("{} home", user.username))), + dav::PropertyRequest::ResourceType => Some(dav::Property::ResourceType(vec![dav::ResourceType::Collection])), + _ => None, + }).collect()) + } +} + +struct CalendarListNode {} +impl DavNode for CalendarListNode { + fn name(&self, _user: &ArcUser) -> String { + "calendar/".into() + } + fn children(&self, user: &ArcUser) -> Vec> { + vec![Box::new(CalendarNode { name: "personal".into() })] + } + fn supported_properties(&self, user: &ArcUser) -> dav::PropName { + dav::PropName(vec![ + dav::PropertyRequest::DisplayName, + dav::PropertyRequest::ResourceType, + ]) + } + fn properties(&self, user: &ArcUser, prop: &dav::PropName) -> dav::PropValue { + dav::PropValue(prop.0.iter().filter_map(|n| match n { + dav::PropertyRequest::DisplayName => Some(dav::Property::DisplayName(format!("{} calendars", user.username))), + dav::PropertyRequest::ResourceType => Some(dav::Property::ResourceType(vec![dav::ResourceType::Collection])), + _ => None, + }).collect()) + } +} + +struct CalendarNode { + name: String, +} +impl DavNode for CalendarNode { + fn name(&self, _user: &ArcUser) -> String { + format!("{}/", self.name) + } + fn children(&self, user: &ArcUser) -> Vec> { + vec![Box::new(EventNode { file: "something.ics".into() })] + } + fn supported_properties(&self, user: &ArcUser) -> dav::PropName { + dav::PropName(vec![ + dav::PropertyRequest::DisplayName, + dav::PropertyRequest::ResourceType, + ]) + } + fn properties(&self, user: &ArcUser, prop: &dav::PropName) -> dav::PropValue { + dav::PropValue(prop.0.iter().filter_map(|n| match n { + dav::PropertyRequest::DisplayName => Some(dav::Property::DisplayName(format!("{} calendar", self.name))), + dav::PropertyRequest::ResourceType => Some(dav::Property::ResourceType(vec![ + dav::ResourceType::Collection, + dav::ResourceType::Extension(cal::ResourceType::Calendar), + ])), + _ => None, + }).collect()) + } +} + +struct EventNode { + file: String, +} +impl DavNode for EventNode { + fn name(&self, _user: &ArcUser) -> String { + self.file.to_string() + } + fn children(&self, user: &ArcUser) -> Vec> { + vec![] + } + fn supported_properties(&self, user: &ArcUser) -> dav::PropName { + dav::PropName(vec![ + dav::PropertyRequest::DisplayName, + dav::PropertyRequest::ResourceType, + ]) + } + fn properties(&self, user: &ArcUser, prop: &dav::PropName) -> dav::PropValue { + dav::PropValue(prop.0.iter().filter_map(|n| match n { + dav::PropertyRequest::DisplayName => Some(dav::Property::DisplayName(format!("{} event", self.file))), + dav::PropertyRequest::ResourceType => Some(dav::Property::ResourceType(vec![])), + _ => None, + }).collect()) + } +} + + 
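The five node types added above form a fixed, hardcoded tree (root, user home, calendar list, one calendar, one event), and multistatus_name / multistatus_val list the current node plus, at Depth 1 or infinity, its direct children under relative "./" hrefs. Below is a trimmed-down, self-contained version of that shape, using plain Strings instead of the dav types; all names here are illustrative only.

trait DavNode {
    fn name(&self) -> String;
    fn children(&self) -> Vec<Box<dyn DavNode>>;
}

struct Root;
struct Home { user: String }
struct Calendar { name: String }

impl DavNode for Root {
    fn name(&self) -> String { "/".into() }
    fn children(&self) -> Vec<Box<dyn DavNode>> {
        vec![Box::new(Home { user: "alice".into() })]
    }
}
impl DavNode for Home {
    fn name(&self) -> String { format!("{}/", self.user) }
    fn children(&self) -> Vec<Box<dyn DavNode>> {
        vec![Box::new(Calendar { name: "personal".into() })]
    }
}
impl DavNode for Calendar {
    fn name(&self) -> String { format!("{}/", self.name) }
    fn children(&self) -> Vec<Box<dyn DavNode>> { vec![] }
}

// Equivalent of multistatus_name: the node itself, plus its children when Depth >= 1.
fn listed_names(node: &dyn DavNode, depth_at_least_one: bool) -> Vec<String> {
    let mut out = vec![".".to_string()];
    if depth_at_least_one {
        out.extend(node.children().iter().map(|c| format!("./{}", c.name())));
    }
    out
}

fn main() {
    let root = Root;
    assert_eq!(listed_names(&root, false), vec!["."]);
    assert_eq!(listed_names(&root, true), vec![".", "./alice/"]);
}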
-- cgit v1.2.3 From 4a5ae87059c41ea3e264a0ffef0d9ff4c55b8b83 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Mon, 18 Mar 2024 15:04:46 +0100 Subject: WIP DAV hierarchy --- aero-dav/src/calencoder.rs | 2 +- aero-dav/src/realization.rs | 2 +- aero-proto/src/dav.rs | 149 +++++++++++++++++++++++++++++++------------- 3 files changed, 106 insertions(+), 47 deletions(-) diff --git a/aero-dav/src/calencoder.rs b/aero-dav/src/calencoder.rs index e00876d..54a35a2 100644 --- a/aero-dav/src/calencoder.rs +++ b/aero-dav/src/calencoder.rs @@ -206,7 +206,7 @@ impl QWrite for ResourceType { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { match self { Self::Calendar => { - let empty_tag = xml.create_dav_element("calendar"); + let empty_tag = xml.create_cal_element("calendar"); xml.q.write_event_async(Event::Empty(empty_tag)).await }, } diff --git a/aero-dav/src/realization.rs b/aero-dav/src/realization.rs index 8c47fad..7bec729 100644 --- a/aero-dav/src/realization.rs +++ b/aero-dav/src/realization.rs @@ -18,7 +18,7 @@ impl xml::QWrite for Disabled { /// The base WebDAV /// -/// Any extension is kooh is disabled through an object we can't build +/// Any extension is disabled through an object we can't build /// due to a private inner element. #[derive(Debug, PartialEq, Clone)] pub struct Core {} diff --git a/aero-proto/src/dav.rs b/aero-proto/src/dav.rs index 2bc1247..7f0f28d 100644 --- a/aero-proto/src/dav.rs +++ b/aero-proto/src/dav.rs @@ -196,20 +196,22 @@ async fn router(user: std::sync::Arc, req: Request) -> Result = path.split("/").filter(|s| *s != "").collect(); let method = req.method().as_str().to_uppercase(); + let node = match Box::new(RootNode {}).fetch(&user, &path_segments) { + Ok(v) => v, + Err(e) => { + tracing::warn!(err=?e, "dav node fetch failed"); + return Ok(Response::builder() + .status(404) + .body(text_body("Resource not found"))?) 
+ } + }; - match (method.as_str(), path_segments.as_slice()) { - ("OPTIONS", _) => return Ok(Response::builder() + match method.as_str() { + "OPTIONS" => return Ok(Response::builder() .status(200) .header("DAV", "1") .body(text_body(""))?), - ("PROPFIND", []) => propfind_root(user, req).await, - (_, [ username, ..]) if *username != user.username => return Ok(Response::builder() - .status(403) - .body(text_body("Accessing other user ressources is not allowed"))?), - ("PROPFIND", [ _ ]) => propfind_home(user, &req).await, - ("PROPFIND", [ _, "calendar" ]) => propfind_all_calendars(user, &req).await, - ("PROPFIND", [ _, "calendar", colname ]) => propfind_this_calendar(user, &req, colname).await, - ("PROPFIND", [ _, "calendar", colname, event ]) => propfind_event(user, req, colname, event).await, + "PROPFIND" => propfind(user, req, node).await, _ => return Ok(Response::builder() .status(501) .body(text_body("Not implemented"))?), @@ -224,8 +226,7 @@ const SUPPORTED_PROPNAME: [dav::PropertyRequest; 2] = [ dav::PropertyRequest::ResourceType, ]; -async fn propfind_root(user: std::sync::Arc, req: Request) -> Result>> { - let node = RootNode {}; +async fn propfind(user: std::sync::Arc, req: Request, node: Box) -> Result>> { let depth = depth(&req); @@ -258,36 +259,6 @@ async fn propfind_root(user: std::sync::Arc, req: Request) -> Re serialize(node.multistatus_val(&user, &propname, depth)) } -async fn propfind_home(user: std::sync::Arc, req: &Request) -> Result>> { - tracing::info!("user home"); - Ok(Response::new(text_body("Hello World!"))) -} - -async fn propfind_all_calendars(user: std::sync::Arc, req: &Request) -> Result>> { - tracing::info!("calendar"); - Ok(Response::new(text_body("Hello World!"))) -} - -async fn propfind_this_calendar( - user: std::sync::Arc, - req: &Request, - colname: &str -) -> Result>> { - tracing::info!(name=colname, "selected calendar"); - Ok(Response::new(text_body("Hello World!"))) -} - -async fn propfind_event( - user: std::sync::Arc, - req: Request, - colname: &str, - event: &str, -) -> Result>> { - tracing::info!(name=colname, obj=event, "selected event"); - Ok(Response::new(text_body("Hello World!"))) -} - - #[allow(dead_code)] async fn collections(_user: std::sync::Arc, _req: Request) -> Result>> { unimplemented!(); @@ -326,7 +297,7 @@ fn serialize(elem: T) -> Result>(req: Request) -> Result { //--- type ArcUser = std::sync::Arc; -trait DavNode { +trait DavNode: Send { + // ------- specialized logic + // recurence fn children(&self, user: &ArcUser) -> Vec>; + fn fetch(self: Box, user: &ArcUser, path: &[&str]) -> Result>; // node properties + fn path(&self, user: &ArcUser) -> String; fn name(&self, user: &ArcUser) -> String; fn supported_properties(&self, user: &ArcUser) -> dav::PropName; fn properties(&self, user: &ArcUser, props: &dav::PropName) -> dav::PropValue; - // building DAV responses + // ----- common + + /// building DAV responses fn multistatus_name(&self, user: &ArcUser, depth: dav::Depth) -> dav::Multistatus> { let mut names = vec![(".".into(), self.supported_properties(user))]; if matches!(depth, dav::Depth::One | dav::Depth::Infinity) { @@ -436,6 +413,23 @@ trait DavNode { struct RootNode {} impl DavNode for RootNode { + fn fetch(self: Box, user: &ArcUser, path: &[&str]) -> Result> { + if path.len() == 0 { + return Ok(self) + } + + if path[0] == user.username { + let child = Box::new(HomeNode {}); + return child.fetch(user, &path[1..]) + } + + Err(anyhow!("Not found")) + } + + fn path(&self, user: &ArcUser) -> String { + todo!(); + } + fn 
name(&self, _user: &ArcUser) -> String { "/".into() } @@ -459,6 +453,23 @@ impl DavNode for RootNode { struct HomeNode {} impl DavNode for HomeNode { + fn fetch(self: Box, user: &ArcUser, path: &[&str]) -> Result> { + if path.len() == 0 { + return Ok(self) + } + + if path[0] == "calendar" { + let child = Box::new(CalendarListNode {}); + return child.fetch(user, &path[1..]) + } + + Err(anyhow!("Not found")) + } + + fn path(&self, user: &ArcUser) -> String { + todo!(); + } + fn name(&self, user: &ArcUser) -> String { format!("{}/", user.username) } @@ -482,6 +493,24 @@ impl DavNode for HomeNode { struct CalendarListNode {} impl DavNode for CalendarListNode { + fn fetch(self: Box, user: &ArcUser, path: &[&str]) -> Result> { + if path.len() == 0 { + return Ok(self) + } + + //@FIXME hardcoded logic + if path[0] == "personal" { + let child = Box::new(CalendarNode { name: "personal".to_string() }); + return child.fetch(user, &path[1..]) + } + + Err(anyhow!("Not found")) + } + + fn path(&self, user: &ArcUser) -> String { + todo!(); + } + fn name(&self, _user: &ArcUser) -> String { "calendar/".into() } @@ -507,6 +536,24 @@ struct CalendarNode { name: String, } impl DavNode for CalendarNode { + fn fetch(self: Box, user: &ArcUser, path: &[&str]) -> Result> { + if path.len() == 0 { + return Ok(self) + } + + //@FIXME hardcoded logic + if path[0] == "something.ics" { + let child = Box::new(EventNode { file: "something.ics".to_string() }); + return child.fetch(user, &path[1..]) + } + + Err(anyhow!("Not found")) + } + + fn path(&self, user: &ArcUser) -> String { + todo!(); + } + fn name(&self, _user: &ArcUser) -> String { format!("{}/", self.name) } @@ -535,6 +582,18 @@ struct EventNode { file: String, } impl DavNode for EventNode { + fn fetch(self: Box, user: &ArcUser, path: &[&str]) -> Result> { + if path.len() == 0 { + return Ok(self) + } + + Err(anyhow!("Not found")) + } + + fn path(&self, user: &ArcUser) -> String { + todo!(); + } + fn name(&self, _user: &ArcUser) -> String { self.file.to_string() } -- cgit v1.2.3 From bb0011dd1745dff888a864d49ec634b7ebee3bfb Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Mon, 18 Mar 2024 15:33:28 +0100 Subject: full dav path --- aero-proto/src/dav.rs | 46 +++++++++++++++++----------------------------- 1 file changed, 17 insertions(+), 29 deletions(-) diff --git a/aero-proto/src/dav.rs b/aero-proto/src/dav.rs index 7f0f28d..b12aea9 100644 --- a/aero-proto/src/dav.rs +++ b/aero-proto/src/dav.rs @@ -349,7 +349,6 @@ trait DavNode: Send { // node properties fn path(&self, user: &ArcUser) -> String; - fn name(&self, user: &ArcUser) -> String; fn supported_properties(&self, user: &ArcUser) -> dav::PropName; fn properties(&self, user: &ArcUser, props: &dav::PropName) -> dav::PropValue; @@ -357,9 +356,9 @@ trait DavNode: Send { /// building DAV responses fn multistatus_name(&self, user: &ArcUser, depth: dav::Depth) -> dav::Multistatus> { - let mut names = vec![(".".into(), self.supported_properties(user))]; + let mut names = vec![(self.path(user), self.supported_properties(user))]; if matches!(depth, dav::Depth::One | dav::Depth::Infinity) { - names.extend(self.children(user).iter().map(|c| (format!("./{}", c.name(user)), c.supported_properties(user)))); + names.extend(self.children(user).iter().map(|c| (c.path(user), c.supported_properties(user)))); } dav::Multistatus::> { @@ -382,12 +381,12 @@ trait DavNode: Send { } fn multistatus_val(&self, user: &ArcUser, props: &dav::PropName, depth: dav::Depth) -> dav::Multistatus> { - let mut values = vec![(".".into(), 
self.properties(user, props))]; + let mut values = vec![(self.path(user), self.properties(user, props))]; if matches!(depth, dav::Depth::One | dav::Depth::Infinity) { values.extend(self .children(user) .iter() - .map(|c| (format!("./{}", c.name(user)), c.properties(user, props))) + .map(|c| (c.path(user), c.properties(user, props))) ); } @@ -427,12 +426,9 @@ impl DavNode for RootNode { } fn path(&self, user: &ArcUser) -> String { - todo!(); - } - - fn name(&self, _user: &ArcUser) -> String { "/".into() } + fn children(&self, user: &ArcUser) -> Vec> { vec![Box::new(HomeNode { })] } @@ -467,12 +463,9 @@ impl DavNode for HomeNode { } fn path(&self, user: &ArcUser) -> String { - todo!(); + format!("/{}/", user.username) } - fn name(&self, user: &ArcUser) -> String { - format!("{}/", user.username) - } fn children(&self, user: &ArcUser) -> Vec> { vec![Box::new(CalendarListNode { })] } @@ -508,12 +501,9 @@ impl DavNode for CalendarListNode { } fn path(&self, user: &ArcUser) -> String { - todo!(); + format!("/{}/calendar/", user.username) } - fn name(&self, _user: &ArcUser) -> String { - "calendar/".into() - } fn children(&self, user: &ArcUser) -> Vec> { vec![Box::new(CalendarNode { name: "personal".into() })] } @@ -543,7 +533,10 @@ impl DavNode for CalendarNode { //@FIXME hardcoded logic if path[0] == "something.ics" { - let child = Box::new(EventNode { file: "something.ics".to_string() }); + let child = Box::new(EventNode { + calendar: self.name.to_string(), + event_file: "something.ics".to_string(), + }); return child.fetch(user, &path[1..]) } @@ -551,14 +544,11 @@ impl DavNode for CalendarNode { } fn path(&self, user: &ArcUser) -> String { - todo!(); + format!("/{}/calendar/{}/", user.username, self.name) } - fn name(&self, _user: &ArcUser) -> String { - format!("{}/", self.name) - } fn children(&self, user: &ArcUser) -> Vec> { - vec![Box::new(EventNode { file: "something.ics".into() })] + vec![Box::new(EventNode { calendar: self.name.to_string(), event_file: "something.ics".into() })] } fn supported_properties(&self, user: &ArcUser) -> dav::PropName { dav::PropName(vec![ @@ -579,7 +569,8 @@ impl DavNode for CalendarNode { } struct EventNode { - file: String, + calendar: String, + event_file: String, } impl DavNode for EventNode { fn fetch(self: Box, user: &ArcUser, path: &[&str]) -> Result> { @@ -591,12 +582,9 @@ impl DavNode for EventNode { } fn path(&self, user: &ArcUser) -> String { - todo!(); + format!("/{}/calendar/{}/{}", user.username, self.calendar, self.event_file) } - fn name(&self, _user: &ArcUser) -> String { - self.file.to_string() - } fn children(&self, user: &ArcUser) -> Vec> { vec![] } @@ -608,7 +596,7 @@ impl DavNode for EventNode { } fn properties(&self, user: &ArcUser, prop: &dav::PropName) -> dav::PropValue { dav::PropValue(prop.0.iter().filter_map(|n| match n { - dav::PropertyRequest::DisplayName => Some(dav::Property::DisplayName(format!("{} event", self.file))), + dav::PropertyRequest::DisplayName => Some(dav::Property::DisplayName(format!("{} event", self.event_file))), dav::PropertyRequest::ResourceType => Some(dav::Property::ResourceType(vec![])), _ => None, }).collect()) -- cgit v1.2.3 From 2e7ffd4f4ca6ba82069290e0a3a70e85a3a79a7b Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Mon, 18 Mar 2024 16:14:38 +0100 Subject: implement content type --- aero-proto/src/dav.rs | 67 ++++++++++++++++++++++++++++++++++++--------------- 1 file changed, 48 insertions(+), 19 deletions(-) diff --git a/aero-proto/src/dav.rs b/aero-proto/src/dav.rs index b12aea9..252cae8 100644 
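Before the content-type changes below, it is worth summarizing the path resolution the previous two patches put in place: fetch() consumes one URL segment per node and hands the remainder to the matching child, and the router maps a failed lookup to a 404. The self-contained sketch below models that walk with simplified stand-in types rather than the aero-proto code.

#[derive(Debug, PartialEq)]
enum Node {
    Root,
    Home,
    CalendarList,
    Calendar(String),
    Event(String),
}

fn fetch(node: Node, user: &str, path: &[&str]) -> Result<Node, String> {
    if path.is_empty() {
        return Ok(node);
    }
    let (head, rest) = (path[0], &path[1..]);
    match node {
        Node::Root if head == user => fetch(Node::Home, user, rest),
        Node::Home if head == "calendar" => fetch(Node::CalendarList, user, rest),
        // Hardcoded children, mirroring the @FIXME placeholders in the patch.
        Node::CalendarList if head == "personal" => fetch(Node::Calendar(head.into()), user, rest),
        Node::Calendar(_) if head == "something.ics" => fetch(Node::Event(head.into()), user, rest),
        _ => Err(format!("not found: {head}")),
    }
}

fn main() {
    let segments: Vec<&str> = "/alice/calendar/personal/"
        .split('/')
        .filter(|s| !s.is_empty())
        .collect();
    match fetch(Node::Root, "alice", &segments) {
        Ok(Node::Calendar(name)) => assert_eq!(name, "personal"),
        other => panic!("expected the personal calendar, got {other:?}"),
    }
    assert!(matches!(
        fetch(Node::Root, "alice", &["alice", "calendar", "personal", "something.ics"]),
        Ok(Node::Event(ref file)) if file == "something.ics"
    ));
    // An unknown segment surfaces as an error, mapped to 404 by the router.
    assert!(fetch(Node::Root, "alice", &["bob"]).is_err());
}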
--- a/aero-proto/src/dav.rs +++ b/aero-proto/src/dav.rs @@ -137,8 +137,6 @@ async fn auth( login: ArcLoginProvider, req: Request, ) -> Result>> { - - tracing::info!("headers: {:?}", req.headers()); let auth_val = match req.headers().get(hyper::header::AUTHORIZATION) { Some(hv) => hv.to_str()?, None => { @@ -210,7 +208,14 @@ async fn router(user: std::sync::Arc, req: Request) -> Result return Ok(Response::builder() .status(200) .header("DAV", "1") + .header("ALLOW", "HEAD,GET,PUT,OPTIONS,DELETE,PROPFIND,PROPPATCH,MKCOL,COPY,MOVE,LOCK,UNLOCK") .body(text_body(""))?), + "HEAD" | "GET" => { + tracing::warn!("HEAD+GET not correctly implemented"); + return Ok(Response::builder() + .status(200) + .body(text_body(""))?) + }, "PROPFIND" => propfind(user, req, node).await, _ => return Ok(Response::builder() .status(501) @@ -219,22 +224,43 @@ async fn router(user: std::sync::Arc, req: Request) -> Result -/// -/// -const SUPPORTED_PROPNAME: [dav::PropertyRequest; 2] = [ +/// +/// +/// +/// +/// +/// +/// + + +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// + +const ALLPROP: [dav::PropertyRequest; 10] = [ + dav::PropertyRequest::CreationDate, dav::PropertyRequest::DisplayName, + dav::PropertyRequest::GetContentLanguage, + dav::PropertyRequest::GetContentLength, + dav::PropertyRequest::GetContentType, + dav::PropertyRequest::GetEtag, + dav::PropertyRequest::GetLastModified, + dav::PropertyRequest::LockDiscovery, dav::PropertyRequest::ResourceType, + dav::PropertyRequest::SupportedLock, ]; async fn propfind(user: std::sync::Arc, req: Request, node: Box) -> Result>> { let depth = depth(&req); - - /*let supported_propname = vec![ - dav::PropertyRequest::DisplayName, - dav::PropertyRequest::ResourceType, - ];*/ - // A client may choose not to submit a request body. An empty PROPFIND // request body MUST be treated as if it were an 'allprop' request. 
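The new ALLPROP table lists the RFC 4918 properties an allprop request is expected to cover, and an <include> body only adds names on top of that set, as the match a few lines below shows. Here is a standalone sketch of that expansion, again with simplified local types rather than the aero-dav ones (the real list also carries creationdate, lockdiscovery, supportedlock and the other entries shown above).

#[derive(Debug, Clone, PartialEq)]
enum PropertyRequest {
    DisplayName,
    ResourceType,
    GetContentType,
    GetEtag,
}

const ALLPROP: [PropertyRequest; 2] =
    [PropertyRequest::DisplayName, PropertyRequest::ResourceType];

enum PropFind {
    PropName,
    AllProp(Option<Vec<PropertyRequest>>),
    Prop(Vec<PropertyRequest>),
}

fn requested(propfind: PropFind) -> Option<Vec<PropertyRequest>> {
    match propfind {
        // Handled separately in the real handler: only names are echoed back.
        PropFind::PropName => None,
        PropFind::AllProp(None) => Some(ALLPROP.to_vec()),
        PropFind::AllProp(Some(mut include)) => {
            // The include list is extended with the allprop set, not replaced by it.
            include.extend_from_slice(&ALLPROP);
            Some(include)
        }
        PropFind::Prop(inner) => Some(inner),
    }
}

fn main() {
    let with_include = PropFind::AllProp(Some(vec![PropertyRequest::GetContentType]));
    assert_eq!(
        requested(with_include),
        Some(vec![
            PropertyRequest::GetContentType,
            PropertyRequest::DisplayName,
            PropertyRequest::ResourceType,
        ])
    );
    assert_eq!(requested(PropFind::PropName), None);
    assert_eq!(
        requested(PropFind::Prop(vec![PropertyRequest::GetEtag])),
        Some(vec![PropertyRequest::GetEtag])
    );
}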
// @FIXME here we handle any invalid data as an allprop, an empty request is thus correctly @@ -248,9 +274,9 @@ async fn propfind(user: std::sync::Arc, req: Request, node: Box< let propname = match propfind { dav::PropFind::PropName => unreachable!(), - dav::PropFind::AllProp(None) => dav::PropName(SUPPORTED_PROPNAME.to_vec()), + dav::PropFind::AllProp(None) => dav::PropName(ALLPROP.to_vec()), dav::PropFind::AllProp(Some(dav::Include(mut include))) => { - include.extend_from_slice(&SUPPORTED_PROPNAME); + include.extend_from_slice(&ALLPROP); dav::PropName(include) }, dav::PropFind::Prop(inner) => inner, @@ -259,12 +285,6 @@ async fn propfind(user: std::sync::Arc, req: Request, node: Box< serialize(node.multistatus_val(&user, &propname, depth)) } -#[allow(dead_code)] -async fn collections(_user: std::sync::Arc, _req: Request) -> Result>> { - unimplemented!(); -} - - // ---- HTTP DAV Binding use futures::stream::TryStreamExt; @@ -436,12 +456,14 @@ impl DavNode for RootNode { dav::PropName(vec![ dav::PropertyRequest::DisplayName, dav::PropertyRequest::ResourceType, + dav::PropertyRequest::GetContentType, ]) } fn properties(&self, user: &ArcUser, prop: &dav::PropName) -> dav::PropValue { dav::PropValue(prop.0.iter().filter_map(|n| match n { dav::PropertyRequest::DisplayName => Some(dav::Property::DisplayName("DAV Root".to_string())), dav::PropertyRequest::ResourceType => Some(dav::Property::ResourceType(vec![dav::ResourceType::Collection])), + dav::PropertyRequest::GetContentType => Some(dav::Property::GetContentType("httpd/unix-directory".into())), _ => None, }).collect()) } @@ -473,12 +495,14 @@ impl DavNode for HomeNode { dav::PropName(vec![ dav::PropertyRequest::DisplayName, dav::PropertyRequest::ResourceType, + dav::PropertyRequest::GetContentType, ]) } fn properties(&self, user: &ArcUser, prop: &dav::PropName) -> dav::PropValue { dav::PropValue(prop.0.iter().filter_map(|n| match n { dav::PropertyRequest::DisplayName => Some(dav::Property::DisplayName(format!("{} home", user.username))), dav::PropertyRequest::ResourceType => Some(dav::Property::ResourceType(vec![dav::ResourceType::Collection])), + dav::PropertyRequest::GetContentType => Some(dav::Property::GetContentType("httpd/unix-directory".into())), _ => None, }).collect()) } @@ -511,12 +535,14 @@ impl DavNode for CalendarListNode { dav::PropName(vec![ dav::PropertyRequest::DisplayName, dav::PropertyRequest::ResourceType, + dav::PropertyRequest::GetContentType, ]) } fn properties(&self, user: &ArcUser, prop: &dav::PropName) -> dav::PropValue { dav::PropValue(prop.0.iter().filter_map(|n| match n { dav::PropertyRequest::DisplayName => Some(dav::Property::DisplayName(format!("{} calendars", user.username))), dav::PropertyRequest::ResourceType => Some(dav::Property::ResourceType(vec![dav::ResourceType::Collection])), + dav::PropertyRequest::GetContentType => Some(dav::Property::GetContentType("httpd/unix-directory".into())), _ => None, }).collect()) } @@ -554,6 +580,7 @@ impl DavNode for CalendarNode { dav::PropName(vec![ dav::PropertyRequest::DisplayName, dav::PropertyRequest::ResourceType, + dav::PropertyRequest::GetContentType, ]) } fn properties(&self, user: &ArcUser, prop: &dav::PropName) -> dav::PropValue { @@ -563,6 +590,7 @@ impl DavNode for CalendarNode { dav::ResourceType::Collection, dav::ResourceType::Extension(cal::ResourceType::Calendar), ])), + dav::PropertyRequest::GetContentType => Some(dav::Property::GetContentType("httpd/unix-directory".into())), _ => None, }).collect()) } @@ -598,6 +626,7 @@ impl DavNode for EventNode { 
dav::PropValue(prop.0.iter().filter_map(|n| match n { dav::PropertyRequest::DisplayName => Some(dav::Property::DisplayName(format!("{} event", self.event_file))), dav::PropertyRequest::ResourceType => Some(dav::Property::ResourceType(vec![])), + dav::PropertyRequest::GetContentType => Some(dav::Property::GetContentType("text/calendar".into())), _ => None, }).collect()) } -- cgit v1.2.3 From d0c47b93fe19a9ebc35d624b9dbed7d1d539ecaa Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Mon, 18 Mar 2024 20:45:30 +0100 Subject: Rework webdav types --- aero-dav/src/caldecoder.rs | 43 ++++++++-------- aero-dav/src/calencoder.rs | 20 ++++---- aero-dav/src/caltypes.rs | 12 ++--- aero-dav/src/decoder.rs | 122 ++++++++++++++++++++++++++------------------- aero-dav/src/encoder.rs | 114 +++++++++++++++++++++++++----------------- aero-dav/src/types.rs | 30 ++++++++--- aero-dav/src/xml.rs | 19 +++++++ aero-proto/src/dav.rs | 12 ++--- 8 files changed, 226 insertions(+), 146 deletions(-) diff --git a/aero-dav/src/caldecoder.rs b/aero-dav/src/caldecoder.rs index d3c68f6..dbc6e18 100644 --- a/aero-dav/src/caldecoder.rs +++ b/aero-dav/src/caldecoder.rs @@ -3,7 +3,7 @@ use chrono::NaiveDateTime; use super::types as dav; use super::caltypes::*; -use super::xml::{QRead, IRead, Reader, Node, DAV_URN, CAL_URN}; +use super::xml::{QRead, IRead, Reader, DAV_URN, CAL_URN}; use super::error::ParsingError; // ---- ROOT ELEMENTS --- @@ -16,7 +16,7 @@ impl QRead> for MkCalendar { } } -impl> QRead> for MkCalendarResponse { +impl QRead> for MkCalendarResponse { async fn qread(xml: &mut Reader) -> Result { xml.open(CAL_URN, "mkcalendar-response").await?; let propstats = xml.collect().await?; @@ -162,57 +162,57 @@ impl QRead for Violation { impl QRead for Property { async fn qread(xml: &mut Reader) -> Result { - if xml.maybe_open(CAL_URN, "calendar-description").await?.is_some() { + if xml.maybe_open_start(CAL_URN, "calendar-description").await?.is_some() { let lang = xml.prev_attr("xml:lang"); let text = xml.tag_string().await?; xml.close().await?; return Ok(Property::CalendarDescription { lang, text }) } - if xml.maybe_open(CAL_URN, "calendar-timezone").await?.is_some() { + if xml.maybe_open_start(CAL_URN, "calendar-timezone").await?.is_some() { let tz = xml.tag_string().await?; xml.close().await?; return Ok(Property::CalendarTimezone(tz)) } - if xml.maybe_open(CAL_URN, "supported-calendar-component-set").await?.is_some() { + if xml.maybe_open_start(CAL_URN, "supported-calendar-component-set").await?.is_some() { let comp = xml.collect().await?; xml.close().await?; return Ok(Property::SupportedCalendarComponentSet(comp)) } - if xml.maybe_open(CAL_URN, "supported-calendar-data").await?.is_some() { + if xml.maybe_open_start(CAL_URN, "supported-calendar-data").await?.is_some() { let mime = xml.collect().await?; xml.close().await?; return Ok(Property::SupportedCalendarData(mime)) } - if xml.maybe_open(CAL_URN, "max-resource-size").await?.is_some() { + if xml.maybe_open_start(CAL_URN, "max-resource-size").await?.is_some() { let sz = xml.tag_string().await?.parse::()?; xml.close().await?; return Ok(Property::MaxResourceSize(sz)) } - if xml.maybe_open(CAL_URN, "max-date-time").await?.is_some() { + if xml.maybe_open_start(CAL_URN, "max-date-time").await?.is_some() { let dtstr = xml.tag_string().await?; let dt = NaiveDateTime::parse_from_str(dtstr.as_str(), ICAL_DATETIME_FMT)?.and_utc(); xml.close().await?; return Ok(Property::MaxDateTime(dt)) } - if xml.maybe_open(CAL_URN, "max-instances").await?.is_some() { + if 
xml.maybe_open_start(CAL_URN, "max-instances").await?.is_some() { let sz = xml.tag_string().await?.parse::()?; xml.close().await?; return Ok(Property::MaxInstances(sz)) } - if xml.maybe_open(CAL_URN, "max-attendees-per-instance").await?.is_some() { + if xml.maybe_open_start(CAL_URN, "max-attendees-per-instance").await?.is_some() { let sz = xml.tag_string().await?.parse::()?; xml.close().await?; return Ok(Property::MaxAttendeesPerInstance(sz)) } - if xml.maybe_open(CAL_URN, "supported-collation-set").await?.is_some() { + if xml.maybe_open_start(CAL_URN, "supported-collation-set").await?.is_some() { let cols = xml.collect().await?; xml.close().await?; return Ok(Property::SupportedCollationSet(cols)) @@ -759,6 +759,7 @@ mod tests { use super::*; use chrono::{Utc, TimeZone}; use crate::realization::Calendar; + use crate::xml::Node; //use quick_reader::NsReader; async fn deserialize>(src: &str) -> T { @@ -933,19 +934,19 @@ END:VCALENDAR]]> #[tokio::test] async fn rfc_calendar_query_res() { - let expected = dav::Multistatus::> { + let expected = dav::Multistatus:: { responses: vec![ dav::Response { status_or_propstat: dav::StatusOrPropstat::PropStat( dav::Href("http://cal.example.com/bernard/work/abcd2.ics".into()), vec![ dav::PropStat { - prop: dav::PropValue(vec![ - dav::Property::GetEtag("\"fffff-abcd2\"".into()), - dav::Property::Extension(Property::CalendarData(CalendarDataPayload { + prop: dav::AnyProp(vec![ + dav::AnyProperty::Value(dav::Property::GetEtag("\"fffff-abcd2\"".into())), + dav::AnyProperty::Value(dav::Property::Extension(Property::CalendarData(CalendarDataPayload { mime: None, payload: "BEGIN:VCALENDAR".into(), - })), + }))), ]), status: dav::Status(http::status::StatusCode::OK), error: None, @@ -962,12 +963,12 @@ END:VCALENDAR]]> dav::Href("http://cal.example.com/bernard/work/abcd3.ics".into()), vec![ dav::PropStat { - prop: dav::PropValue(vec![ - dav::Property::GetEtag("\"fffff-abcd3\"".into()), - dav::Property::Extension(Property::CalendarData(CalendarDataPayload { + prop: dav::AnyProp(vec![ + dav::AnyProperty::Value(dav::Property::GetEtag("\"fffff-abcd3\"".into())), + dav::AnyProperty::Value(dav::Property::Extension(Property::CalendarData(CalendarDataPayload { mime: None, payload: "BEGIN:VCALENDAR".into(), - })), + }))), ]), status: dav::Status(http::status::StatusCode::OK), error: None, @@ -1008,7 +1009,7 @@ END:VCALENDAR]]> "#; - let got = deserialize::>>(src).await; + let got = deserialize::>(src).await; assert_eq!(got, expected) } diff --git a/aero-dav/src/calencoder.rs b/aero-dav/src/calencoder.rs index 54a35a2..5323229 100644 --- a/aero-dav/src/calencoder.rs +++ b/aero-dav/src/calencoder.rs @@ -20,7 +20,7 @@ impl QWrite for MkCalendar { } } -impl> QWrite for MkCalendarResponse { +impl QWrite for MkCalendarResponse { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { let start = xml.create_cal_element("mkcalendar-response"); let end = start.to_end(); @@ -828,18 +828,18 @@ mod tests { #[tokio::test] async fn rfc_calendar_query1_res() { let got = serialize( - &dav::Multistatus::> { + &dav::Multistatus:: { responses: vec![ dav::Response { status_or_propstat: dav::StatusOrPropstat::PropStat( dav::Href("http://cal.example.com/bernard/work/abcd2.ics".into()), vec![dav::PropStat { - prop: dav::PropValue(vec![ - dav::Property::GetEtag("\"fffff-abcd2\"".into()), - dav::Property::Extension(Property::CalendarData(CalendarDataPayload { + prop: dav::AnyProp(vec![ + dav::AnyProperty::Value(dav::Property::GetEtag("\"fffff-abcd2\"".into())), + 
dav::AnyProperty::Value(dav::Property::Extension(Property::CalendarData(CalendarDataPayload { mime: None, payload: "PLACEHOLDER".into() - })), + }))), ]), status: dav::Status(http::status::StatusCode::OK), error: None, @@ -854,12 +854,12 @@ mod tests { status_or_propstat: dav::StatusOrPropstat::PropStat( dav::Href("http://cal.example.com/bernard/work/abcd3.ics".into()), vec![dav::PropStat { - prop: dav::PropValue(vec![ - dav::Property::GetEtag("\"fffff-abcd3\"".into()), - dav::Property::Extension(Property::CalendarData(CalendarDataPayload{ + prop: dav::AnyProp(vec![ + dav::AnyProperty::Value(dav::Property::GetEtag("\"fffff-abcd3\"".into())), + dav::AnyProperty::Value(dav::Property::Extension(Property::CalendarData(CalendarDataPayload{ mime: None, payload: "PLACEHOLDER".into(), - })), + }))), ]), status: dav::Status(http::status::StatusCode::OK), error: None, diff --git a/aero-dav/src/caltypes.rs b/aero-dav/src/caltypes.rs index cb0a98c..5ac50e6 100644 --- a/aero-dav/src/caltypes.rs +++ b/aero-dav/src/caltypes.rs @@ -31,7 +31,7 @@ pub const ICAL_DATETIME_FMT: &str = "%Y%m%dT%H%M%SZ"; /// ```xmlschema /// /// ``` -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct MkCalendar(pub dav::Set); @@ -48,8 +48,8 @@ pub struct MkCalendar(pub dav::Set); /// Definition: /// /// -#[derive(Debug, PartialEq)] -pub struct MkCalendarResponse>(pub Vec>); +#[derive(Debug, PartialEq, Clone)] +pub struct MkCalendarResponse(pub Vec>); // --- (REPORT PART) --- @@ -66,7 +66,7 @@ pub struct MkCalendarResponse>(pub Vec -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct CalendarQuery { pub selector: Option>, pub filter: Filter, @@ -87,7 +87,7 @@ pub struct CalendarQuery { /// -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct CalendarMultiget { pub selector: Option>, pub href: Vec, @@ -104,7 +104,7 @@ pub struct CalendarMultiget { /// /// Definition: /// -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct FreeBusyQuery(pub TimeRange); // ----- Hooks ----- diff --git a/aero-dav/src/decoder.rs b/aero-dav/src/decoder.rs index de04dd4..bb8d9de 100644 --- a/aero-dav/src/decoder.rs +++ b/aero-dav/src/decoder.rs @@ -12,7 +12,7 @@ use super::xml::{Node, QRead, Reader, IRead, DAV_URN}; // (2) Rewrite QRead and replace Result, _> with Result<_, _>, not found being a possible // error. 
// (3) Rewrite vectors with xml.collect() -> Result, _> -// (4) Something for alternatives would be great but no idea yet +// (4) Something for alternatives like xml::choices on some lib would be great but no idea yet // ---- ROOT ---- @@ -61,7 +61,7 @@ impl QRead> for PropertyUpdate { } /// Generic response -impl> QRead> for Multistatus { +impl QRead> for Multistatus { async fn qread(xml: &mut Reader) -> Result { xml.open(DAV_URN, "multistatus").await?; let mut responses = Vec::new(); @@ -113,6 +113,7 @@ impl QRead for LockInfo { // LOCK RESPONSE impl QRead> for PropValue { async fn qread(xml: &mut Reader) -> Result { + println!("---- propvalue"); xml.open(DAV_URN, "prop").await?; let acc = xml.collect::>().await?; xml.close().await?; @@ -134,7 +135,7 @@ impl QRead> for Error { // ---- INNER XML -impl> QRead> for Response { +impl QRead> for Response { async fn qread(xml: &mut Reader) -> Result { xml.open(DAV_URN, "response").await?; let (mut status, mut error, mut responsedescription, mut location) = (None, None, None, None); @@ -145,7 +146,7 @@ impl> QRead> for Response { let mut dirty = false; xml.maybe_read::(&mut status, &mut dirty).await?; xml.maybe_push::(&mut href, &mut dirty).await?; - xml.maybe_push::>(&mut propstat, &mut dirty).await?; + xml.maybe_push::>(&mut propstat, &mut dirty).await?; xml.maybe_read::>(&mut error, &mut dirty).await?; xml.maybe_read::(&mut responsedescription, &mut dirty).await?; xml.maybe_read::(&mut location, &mut dirty).await?; @@ -174,15 +175,15 @@ impl> QRead> for Response { } } -impl> QRead> for PropStat { +impl QRead> for PropStat { async fn qread(xml: &mut Reader) -> Result { xml.open(DAV_URN, "propstat").await?; - let (mut m_prop, mut m_status, mut error, mut responsedescription) = (None, None, None, None); + let (mut m_any_prop, mut m_status, mut error, mut responsedescription) = (None, None, None, None); loop { let mut dirty = false; - xml.maybe_read::(&mut m_prop, &mut dirty).await?; + xml.maybe_read::>(&mut m_any_prop, &mut dirty).await?; xml.maybe_read::(&mut m_status, &mut dirty).await?; xml.maybe_read::>(&mut error, &mut dirty).await?; xml.maybe_read::(&mut responsedescription, &mut dirty).await?; @@ -196,7 +197,7 @@ impl> QRead> for PropStat { } xml.close().await?; - match (m_prop, m_status) { + match (m_any_prop, m_status) { (Some(prop), Some(status)) => Ok(PropStat { prop, status, error, responsedescription }), _ => Err(ParsingError::MissingChild), } @@ -309,6 +310,25 @@ impl QRead> for PropName { } } +impl QRead> for AnyProp { + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "prop").await?; + let acc = xml.collect::>().await?; + xml.close().await?; + Ok(AnyProp(acc)) + } +} + +impl QRead> for AnyProperty { + async fn qread(xml: &mut Reader) -> Result { + match Property::qread(xml).await { + Err(ParsingError::Recoverable) => (), + otherwise => return otherwise.map(Self::Value) + } + PropertyRequest::qread(xml).await.map(Self::Request) + } +} + impl QRead> for PropertyRequest { async fn qread(xml: &mut Reader) -> Result { let maybe = if xml.maybe_open(DAV_URN, "creationdate").await?.is_some() { @@ -348,43 +368,43 @@ impl QRead> for PropertyRequest { impl QRead> for Property { async fn qread(xml: &mut Reader) -> Result { // Core WebDAV properties - if xml.maybe_open(DAV_URN, "creationdate").await?.is_some() { + if xml.maybe_open_start(DAV_URN, "creationdate").await?.is_some() { let datestr = xml.tag_string().await?; xml.close().await?; return Ok(Property::CreationDate(DateTime::parse_from_rfc3339(datestr.as_str())?)) - } 
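The AnyProp / AnyProperty decoders added just below implement a "value first, then bare name" fallback: try to read a property together with its value, and only on a recoverable error re-read the element as a property name. The standalone model below mirrors that control flow; its types and function names are stand-ins, not the aero-dav API, and the last assertion shows the ambiguity noted later in the tests (an empty <resourcetype/> decodes as a name-only request).

#[derive(Debug, PartialEq)]
enum AnyProperty {
    Request(String),        // property name only
    Value(String, String),  // property name with a decoded value
}

#[derive(Debug, PartialEq)]
enum ParsingError {
    // "not something I can decode as a value": hand over to the next decoder
    Recoverable,
}

struct Element<'a> {
    name: &'a str,
    text: Option<&'a str>,
}

fn read_value(el: &Element) -> Result<AnyProperty, ParsingError> {
    match el.text {
        Some(t) => Ok(AnyProperty::Value(el.name.into(), t.into())),
        None => Err(ParsingError::Recoverable),
    }
}

fn read_name(el: &Element) -> Result<AnyProperty, ParsingError> {
    Ok(AnyProperty::Request(el.name.into()))
}

fn read_any(el: &Element) -> Result<AnyProperty, ParsingError> {
    // Mirrors the AnyProperty control flow: only a recoverable error falls
    // through to the name-only decoder; anything else is returned as-is.
    match read_value(el) {
        Err(ParsingError::Recoverable) => (),
        otherwise => return otherwise,
    }
    read_name(el)
}

fn main() {
    let displayname = Element { name: "displayname", text: Some("Example collection") };
    if let Ok(AnyProperty::Value(name, value)) = read_any(&displayname) {
        println!("{name} = {value}");
    }
    assert_eq!(
        read_any(&displayname),
        Ok(AnyProperty::Value("displayname".into(), "Example collection".into()))
    );

    // An element without any value content comes back as a request, which is
    // why an empty resourcetype cannot be told apart from a name-only query.
    let empty_resourcetype = Element { name: "resourcetype", text: None };
    assert_eq!(
        read_any(&empty_resourcetype),
        Ok(AnyProperty::Request("resourcetype".into()))
    );
}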
else if xml.maybe_open(DAV_URN, "displayname").await?.is_some() { + } else if xml.maybe_open_start(DAV_URN, "displayname").await?.is_some() { let name = xml.tag_string().await?; xml.close().await?; return Ok(Property::DisplayName(name)) - } else if xml.maybe_open(DAV_URN, "getcontentlanguage").await?.is_some() { + } else if xml.maybe_open_start(DAV_URN, "getcontentlanguage").await?.is_some() { let lang = xml.tag_string().await?; xml.close().await?; return Ok(Property::GetContentLanguage(lang)) - } else if xml.maybe_open(DAV_URN, "getcontentlength").await?.is_some() { + } else if xml.maybe_open_start(DAV_URN, "getcontentlength").await?.is_some() { let cl = xml.tag_string().await?.parse::()?; xml.close().await?; return Ok(Property::GetContentLength(cl)) - } else if xml.maybe_open(DAV_URN, "getcontenttype").await?.is_some() { + } else if xml.maybe_open_start(DAV_URN, "getcontenttype").await?.is_some() { let ct = xml.tag_string().await?; xml.close().await?; return Ok(Property::GetContentType(ct)) - } else if xml.maybe_open(DAV_URN, "getetag").await?.is_some() { + } else if xml.maybe_open_start(DAV_URN, "getetag").await?.is_some() { let etag = xml.tag_string().await?; xml.close().await?; return Ok(Property::GetEtag(etag)) - } else if xml.maybe_open(DAV_URN, "getlastmodified").await?.is_some() { + } else if xml.maybe_open_start(DAV_URN, "getlastmodified").await?.is_some() { let datestr = xml.tag_string().await?; xml.close().await?; return Ok(Property::GetLastModified(DateTime::parse_from_rfc2822(datestr.as_str())?)) - } else if xml.maybe_open(DAV_URN, "lockdiscovery").await?.is_some() { + } else if xml.maybe_open_start(DAV_URN, "lockdiscovery").await?.is_some() { let acc = xml.collect::().await?; xml.close().await?; return Ok(Property::LockDiscovery(acc)) - } else if xml.maybe_open(DAV_URN, "resourcetype").await?.is_some() { + } else if xml.maybe_open_start(DAV_URN, "resourcetype").await?.is_some() { let acc = xml.collect::>().await?; xml.close().await?; return Ok(Property::ResourceType(acc)) - } else if xml.maybe_open(DAV_URN, "supportedlock").await?.is_some() { + } else if xml.maybe_open_start(DAV_URN, "supportedlock").await?.is_some() { let acc = xml.collect::().await?; xml.close().await?; return Ok(Property::SupportedLock(acc)) @@ -758,7 +778,7 @@ mod tests { "#; let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); - let got = rdr.find::>>().await.unwrap(); + let got = rdr.find::>().await.unwrap(); assert_eq!(got, Multistatus { responses: vec![ @@ -766,11 +786,11 @@ mod tests { status_or_propstat: StatusOrPropstat::PropStat( Href("http://www.example.com/container/".into()), vec![PropStat { - prop: PropName(vec![ - PropertyRequest::CreationDate, - PropertyRequest::DisplayName, - PropertyRequest::ResourceType, - PropertyRequest::SupportedLock, + prop: AnyProp(vec![ + AnyProperty::Request(PropertyRequest::CreationDate), + AnyProperty::Request(PropertyRequest::DisplayName), + AnyProperty::Request(PropertyRequest::ResourceType), + AnyProperty::Request(PropertyRequest::SupportedLock), ]), status: Status(http::status::StatusCode::OK), error: None, @@ -785,15 +805,15 @@ mod tests { status_or_propstat: StatusOrPropstat::PropStat( Href("http://www.example.com/container/front.html".into()), vec![PropStat { - prop: PropName(vec![ - PropertyRequest::CreationDate, - PropertyRequest::DisplayName, - PropertyRequest::GetContentLength, - PropertyRequest::GetContentType, - PropertyRequest::GetEtag, - PropertyRequest::GetLastModified, - PropertyRequest::ResourceType, - 
PropertyRequest::SupportedLock, + prop: AnyProp(vec![ + AnyProperty::Request(PropertyRequest::CreationDate), + AnyProperty::Request(PropertyRequest::DisplayName), + AnyProperty::Request(PropertyRequest::GetContentLength), + AnyProperty::Request(PropertyRequest::GetContentType), + AnyProperty::Request(PropertyRequest::GetEtag), + AnyProperty::Request(PropertyRequest::GetLastModified), + AnyProperty::Request(PropertyRequest::ResourceType), + AnyProperty::Request(PropertyRequest::SupportedLock), ]), status: Status(http::status::StatusCode::OK), error: None, @@ -869,7 +889,7 @@ mod tests { "#; let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); - let got = rdr.find::>>().await.unwrap(); + let got = rdr.find::>().await.unwrap(); assert_eq!(got, Multistatus { responses: vec![ @@ -877,11 +897,11 @@ mod tests { status_or_propstat: StatusOrPropstat::PropStat( Href("/container/".into()), vec![PropStat { - prop: PropValue(vec![ - Property::CreationDate(FixedOffset::west_opt(8 * 3600).unwrap().with_ymd_and_hms(1997, 12, 01, 17, 42, 21).unwrap()), - Property::DisplayName("Example collection".into()), - Property::ResourceType(vec![ResourceType::Collection]), - Property::SupportedLock(vec![ + prop: AnyProp(vec![ + AnyProperty::Value(Property::CreationDate(FixedOffset::west_opt(8 * 3600).unwrap().with_ymd_and_hms(1997, 12, 01, 17, 42, 21).unwrap())), + AnyProperty::Value(Property::DisplayName("Example collection".into())), + AnyProperty::Value(Property::ResourceType(vec![ResourceType::Collection])), + AnyProperty::Value(Property::SupportedLock(vec![ LockEntry { lockscope: LockScope::Exclusive, locktype: LockType::Write, @@ -890,7 +910,7 @@ mod tests { lockscope: LockScope::Shared, locktype: LockType::Write, }, - ]), + ])), ]), status: Status(http::status::StatusCode::OK), error: None, @@ -906,15 +926,17 @@ mod tests { status_or_propstat: StatusOrPropstat::PropStat( Href("/container/front.html".into()), vec![PropStat { - prop: PropValue(vec![ - Property::CreationDate(FixedOffset::west_opt(8 * 3600).unwrap().with_ymd_and_hms(1997, 12, 01, 18, 27, 21).unwrap()), - Property::DisplayName("Example HTML resource".into()), - Property::GetContentLength(4525), - Property::GetContentType("text/html".into()), - Property::GetEtag(r#""zzyzx""#.into()), - Property::GetLastModified(FixedOffset::west_opt(0).unwrap().with_ymd_and_hms(1998, 01, 12, 09, 25, 56).unwrap()), - Property::ResourceType(vec![]), - Property::SupportedLock(vec![ + prop: AnyProp(vec![ + AnyProperty::Value(Property::CreationDate(FixedOffset::west_opt(8 * 3600).unwrap().with_ymd_and_hms(1997, 12, 01, 18, 27, 21).unwrap())), + AnyProperty::Value(Property::DisplayName("Example HTML resource".into())), + AnyProperty::Value(Property::GetContentLength(4525)), + AnyProperty::Value(Property::GetContentType("text/html".into())), + AnyProperty::Value(Property::GetEtag(r#""zzyzx""#.into())), + AnyProperty::Value(Property::GetLastModified(FixedOffset::west_opt(0).unwrap().with_ymd_and_hms(1998, 01, 12, 09, 25, 56).unwrap())), + //@FIXME know bug, can't disambiguate between an empty resource + //type value and a request resource type + AnyProperty::Request(PropertyRequest::ResourceType), + AnyProperty::Value(Property::SupportedLock(vec![ LockEntry { lockscope: LockScope::Exclusive, locktype: LockType::Write, @@ -923,7 +945,7 @@ mod tests { lockscope: LockScope::Shared, locktype: LockType::Write, }, - ]), + ])), ]), status: Status(http::status::StatusCode::OK), error: None, diff --git a/aero-dav/src/encoder.rs b/aero-dav/src/encoder.rs 
index 3b0bfda..1320c8a 100644 --- a/aero-dav/src/encoder.rs +++ b/aero-dav/src/encoder.rs @@ -48,7 +48,7 @@ impl QWrite for PropertyUpdate { /// PROPFIND RESPONSE, PROPPATCH RESPONSE, COPY RESPONSE, MOVE RESPONSE /// DELETE RESPONSE, -impl> QWrite for Multistatus { +impl QWrite for Multistatus { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { let start = xml.create_dav_element("multistatus"); let end = start.to_end(); @@ -154,6 +154,28 @@ impl QWrite for PropName { } } +impl QWrite for AnyProp { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("prop"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + for propname in &self.0 { + propname.qwrite(xml).await?; + } + xml.q.write_event_async(Event::End(end)).await + } +} + +impl QWrite for AnyProperty { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + match self { + Self::Request(v) => v.qwrite(xml).await, + Self::Value(v) => v.qwrite(xml).await, + } + } +} + impl QWrite for Href { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { @@ -166,7 +188,7 @@ impl QWrite for Href { } } -impl> QWrite for Response { +impl QWrite for Response { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { let start = xml.create_dav_element("response"); let end = start.to_end(); @@ -186,7 +208,7 @@ impl> QWrite for Response { } } -impl> QWrite for StatusOrPropstat { +impl QWrite for StatusOrPropstat { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { match self { Self::Status(many_href, status) => { @@ -244,7 +266,7 @@ impl QWrite for Location { } } -impl> QWrite for PropStat { +impl QWrite for PropStat { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { let start = xml.create_dav_element("propstat"); let end = start.to_end(); @@ -672,7 +694,7 @@ mod tests { #[tokio::test] async fn basic_multistatus() { - let orig = Multistatus::> { + let orig = Multistatus:: { responses: vec![], responsedescription: Some(ResponseDescription("Hello world".into())) }; @@ -683,7 +705,7 @@ mod tests { "#; assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); - assert_eq!(deserialize::>>(got.as_str()).await, orig) + assert_eq!(deserialize::>(got.as_str()).await, orig) } @@ -722,17 +744,17 @@ mod tests { #[tokio::test] async fn rfc_propname_res() { - let orig = Multistatus::> { + let orig = Multistatus:: { responses: vec![ Response { status_or_propstat: StatusOrPropstat::PropStat( Href("http://www.example.com/container/".into()), vec![PropStat { - prop: PropName(vec![ - PropertyRequest::CreationDate, - PropertyRequest::DisplayName, - PropertyRequest::ResourceType, - PropertyRequest::SupportedLock, + prop: AnyProp(vec![ + AnyProperty::Request(PropertyRequest::CreationDate), + AnyProperty::Request(PropertyRequest::DisplayName), + AnyProperty::Request(PropertyRequest::ResourceType), + AnyProperty::Request(PropertyRequest::SupportedLock), ]), status: Status(http::status::StatusCode::OK), error: None, @@ -747,15 +769,15 @@ mod tests { status_or_propstat: StatusOrPropstat::PropStat( Href("http://www.example.com/container/front.html".into()), vec![PropStat { - prop: PropName(vec![ - PropertyRequest::CreationDate, - PropertyRequest::DisplayName, - PropertyRequest::GetContentLength, - PropertyRequest::GetContentType, - PropertyRequest::GetEtag, - PropertyRequest::GetLastModified, - PropertyRequest::ResourceType, - PropertyRequest::SupportedLock, + prop: AnyProp(vec![ + 
AnyProperty::Request(PropertyRequest::CreationDate), + AnyProperty::Request(PropertyRequest::DisplayName), + AnyProperty::Request(PropertyRequest::GetContentLength), + AnyProperty::Request(PropertyRequest::GetContentType), + AnyProperty::Request(PropertyRequest::GetEtag), + AnyProperty::Request(PropertyRequest::GetLastModified), + AnyProperty::Request(PropertyRequest::ResourceType), + AnyProperty::Request(PropertyRequest::SupportedLock), ]), status: Status(http::status::StatusCode::OK), error: None, @@ -805,7 +827,7 @@ mod tests { assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); - assert_eq!(deserialize::>>(got.as_str()).await, orig) + assert_eq!(deserialize::>(got.as_str()).await, orig) } #[tokio::test] @@ -825,20 +847,20 @@ mod tests { async fn rfc_allprop_res() { use chrono::{FixedOffset,TimeZone}; - let orig = Multistatus::> { + let orig = Multistatus:: { responses: vec![ Response { status_or_propstat: StatusOrPropstat::PropStat( Href("/container/".into()), vec![PropStat { - prop: PropValue(vec![ - Property::CreationDate(FixedOffset::west_opt(8 * 3600) + prop: AnyProp(vec![ + AnyProperty::Value(Property::CreationDate(FixedOffset::west_opt(8 * 3600) .unwrap() .with_ymd_and_hms(1997, 12, 1, 17, 42, 21) - .unwrap()), - Property::DisplayName("Example collection".into()), - Property::ResourceType(vec![ResourceType::Collection]), - Property::SupportedLock(vec![ + .unwrap())), + AnyProperty::Value(Property::DisplayName("Example collection".into())), + AnyProperty::Value(Property::ResourceType(vec![ResourceType::Collection])), + AnyProperty::Value(Property::SupportedLock(vec![ LockEntry { lockscope: LockScope::Exclusive, locktype: LockType::Write, @@ -847,7 +869,7 @@ mod tests { lockscope: LockScope::Shared, locktype: LockType::Write, }, - ]), + ])), ]), status: Status(http::status::StatusCode::OK), error: None, @@ -862,21 +884,23 @@ mod tests { status_or_propstat: StatusOrPropstat::PropStat( Href("/container/front.html".into()), vec![PropStat { - prop: PropValue(vec![ - Property::CreationDate(FixedOffset::west_opt(8 * 3600) + prop: AnyProp(vec![ + AnyProperty::Value(Property::CreationDate(FixedOffset::west_opt(8 * 3600) .unwrap() .with_ymd_and_hms(1997, 12, 1, 18, 27, 21) - .unwrap()), - Property::DisplayName("Example HTML resource".into()), - Property::GetContentLength(4525), - Property::GetContentType("text/html".into()), - Property::GetEtag(r#""zzyzx""#.into()), - Property::GetLastModified(FixedOffset::east_opt(0) + .unwrap())), + AnyProperty::Value(Property::DisplayName("Example HTML resource".into())), + AnyProperty::Value(Property::GetContentLength(4525)), + AnyProperty::Value(Property::GetContentType("text/html".into())), + AnyProperty::Value(Property::GetEtag(r#""zzyzx""#.into())), + AnyProperty::Value(Property::GetLastModified(FixedOffset::east_opt(0) .unwrap() .with_ymd_and_hms(1998, 1, 12, 9, 25, 56) - .unwrap()), - Property::ResourceType(vec![]), - Property::SupportedLock(vec![ + .unwrap())), + //@FIXME know bug, can't disambiguate between an empty resource + //type value and a request resource type + AnyProperty::Request(PropertyRequest::ResourceType), + AnyProperty::Value(Property::SupportedLock(vec![ LockEntry { lockscope: LockScope::Exclusive, locktype: LockType::Write, @@ -885,7 +909,7 @@ mod tests { lockscope: LockScope::Shared, locktype: LockType::Write, }, - ]), + ])), ]), status: Status(http::status::StatusCode::OK), error: None, @@ -970,7 +994,7 @@ mod tests { "#; assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); - 
assert_eq!(deserialize::>>(got.as_str()).await, orig) + assert_eq!(deserialize::>(got.as_str()).await, orig) } #[tokio::test] @@ -1025,7 +1049,7 @@ mod tests { #[tokio::test] async fn rfc_delete_locked2() { - let orig = Multistatus::> { + let orig = Multistatus:: { responses: vec![Response { status_or_propstat: StatusOrPropstat::Status( vec![Href("http://www.example.com/container/resource3".into())], @@ -1051,7 +1075,7 @@ mod tests { "#; assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); - assert_eq!(deserialize::>>(got.as_str()).await, orig) + assert_eq!(deserialize::>(got.as_str()).await, orig) } #[tokio::test] diff --git a/aero-dav/src/types.rs b/aero-dav/src/types.rs index 79e98fd..9457a8f 100644 --- a/aero-dav/src/types.rs +++ b/aero-dav/src/types.rs @@ -331,8 +331,8 @@ pub enum LockType { /// /// #[derive(Debug, PartialEq, Clone)] -pub struct Multistatus> { - pub responses: Vec>, +pub struct Multistatus { + pub responses: Vec>, pub responsedescription: Option, } @@ -387,6 +387,9 @@ pub struct PropName(pub Vec>); #[derive(Debug, PartialEq, Clone)] pub struct PropValue(pub Vec>); +#[derive(Debug, PartialEq, Clone)] +pub struct AnyProp(pub Vec>); + /// 14.19. propertyupdate XML Element /// /// Name: propertyupdate @@ -462,14 +465,19 @@ pub enum PropFind { /// the properties named in 'prop'. /// /// +/// +/// --- +/// +/// #[derive(Debug, PartialEq, Clone)] -pub struct PropStat> { - pub prop: N, +pub struct PropStat { + pub prop: AnyProp, pub status: Status, pub error: Option>, pub responsedescription: Option, } + /// 14.23. remove XML Element /// /// Name: remove @@ -512,16 +520,16 @@ pub struct Remove(pub PropName); /// --- rewritten as --- /// #[derive(Debug, PartialEq, Clone)] -pub enum StatusOrPropstat> { +pub enum StatusOrPropstat { // One status, multiple hrefs... Status(Vec, Status), // A single href, multiple properties... - PropStat(Href, Vec>), + PropStat(Href, Vec>), } #[derive(Debug, PartialEq, Clone)] -pub struct Response> { - pub status_or_propstat: StatusOrPropstat, +pub struct Response { + pub status_or_propstat: StatusOrPropstat, pub error: Option>, pub responsedescription: Option, pub location: Option, @@ -644,6 +652,12 @@ pub enum Timeout { /// the header value could include LWS as defined in [RFC2616], Section /// 4.2. Server implementors SHOULD strip LWS from these values before /// using as WebDAV property values. 
+#[derive(Debug, PartialEq, Clone)] +pub enum AnyProperty { + Request(PropertyRequest), + Value(Property), +} + #[derive(Debug, PartialEq, Clone)] pub enum PropertyRequest { CreationDate, diff --git a/aero-dav/src/xml.rs b/aero-dav/src/xml.rs index 827e9d0..26f54cc 100644 --- a/aero-dav/src/xml.rs +++ b/aero-dav/src/xml.rs @@ -258,6 +258,17 @@ impl Reader { Ok(evt) } + pub async fn open_start(&mut self, ns: &[u8], key: &str) -> Result, ParsingError> { + let evt = match self.peek() { + Event::Start(_) if self.is_tag(ns, key) => self.next().await?, + _ => return Err(ParsingError::Recoverable), + }; + + //println!("open tag {:?}", evt); + self.parents.push(evt.clone()); + Ok(evt) + } + pub async fn maybe_open(&mut self, ns: &[u8], key: &str) -> Result>, ParsingError> { match self.open(ns, key).await { Ok(v) => Ok(Some(v)), @@ -266,6 +277,14 @@ impl Reader { } } + pub async fn maybe_open_start(&mut self, ns: &[u8], key: &str) -> Result>, ParsingError> { + match self.open_start(ns, key).await { + Ok(v) => Ok(Some(v)), + Err(ParsingError::Recoverable) => Ok(None), + Err(e) => Err(e), + } + } + pub fn prev_attr(&self, attr: &str) -> Option { match &self.prev { Event::Start(bs) | Event::Empty(bs) => match bs.try_get_attribute(attr) { diff --git a/aero-proto/src/dav.rs b/aero-proto/src/dav.rs index 252cae8..480d163 100644 --- a/aero-proto/src/dav.rs +++ b/aero-proto/src/dav.rs @@ -375,18 +375,18 @@ trait DavNode: Send { // ----- common /// building DAV responses - fn multistatus_name(&self, user: &ArcUser, depth: dav::Depth) -> dav::Multistatus> { + fn multistatus_name(&self, user: &ArcUser, depth: dav::Depth) -> dav::Multistatus { let mut names = vec![(self.path(user), self.supported_properties(user))]; if matches!(depth, dav::Depth::One | dav::Depth::Infinity) { names.extend(self.children(user).iter().map(|c| (c.path(user), c.supported_properties(user)))); } - dav::Multistatus::> { + dav::Multistatus:: { responses: names.into_iter().map(|(url, names)| dav::Response { status_or_propstat: dav::StatusOrPropstat::PropStat( dav::Href(url), vec![dav::PropStat { - prop: names, + prop: dav::AnyProp(names.0.into_iter().map(dav::AnyProperty::Request).collect()), status: dav::Status(hyper::StatusCode::OK), error: None, responsedescription: None, @@ -400,7 +400,7 @@ trait DavNode: Send { } } - fn multistatus_val(&self, user: &ArcUser, props: &dav::PropName, depth: dav::Depth) -> dav::Multistatus> { + fn multistatus_val(&self, user: &ArcUser, props: &dav::PropName, depth: dav::Depth) -> dav::Multistatus { let mut values = vec![(self.path(user), self.properties(user, props))]; if matches!(depth, dav::Depth::One | dav::Depth::Infinity) { values.extend(self @@ -410,12 +410,12 @@ trait DavNode: Send { ); } - dav::Multistatus::> { + dav::Multistatus:: { responses: values.into_iter().map(|(url, propval)| dav::Response { status_or_propstat: dav::StatusOrPropstat::PropStat( dav::Href(url), vec![dav::PropStat { - prop: propval, + prop: dav::AnyProp(propval.0.into_iter().map(dav::AnyProperty::Value).collect()), status: dav::Status(hyper::StatusCode::OK), error: None, responsedescription: None, -- cgit v1.2.3 From 2c9ea0f09c88c902c189cedaaeb9ccafcd226977 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Mon, 18 Mar 2024 21:44:44 +0100 Subject: add support for 404 content --- aero-proto/src/dav.rs | 106 ++++++++++++++++++++++++++++---------------------- 1 file changed, 60 insertions(+), 46 deletions(-) diff --git a/aero-proto/src/dav.rs b/aero-proto/src/dav.rs index 480d163..b964760 100644 --- a/aero-proto/src/dav.rs 
+++ b/aero-proto/src/dav.rs @@ -16,7 +16,6 @@ use tokio::sync::watch; use tokio_rustls::TlsAcceptor; use tokio::net::TcpStream; use tokio::io::{AsyncRead, AsyncWrite}; -use tokio::io::AsyncWriteExt; use rustls_pemfile::{certs, private_key}; use aero_user::config::{DavConfig, DavUnsecureConfig}; @@ -282,7 +281,7 @@ async fn propfind(user: std::sync::Arc, req: Request, node: Box< dav::PropFind::Prop(inner) => inner, }; - serialize(node.multistatus_val(&user, &propname, depth)) + serialize(node.multistatus_val(&user, propname, depth)) } // ---- HTTP DAV Binding @@ -370,7 +369,7 @@ trait DavNode: Send { // node properties fn path(&self, user: &ArcUser) -> String; fn supported_properties(&self, user: &ArcUser) -> dav::PropName; - fn properties(&self, user: &ArcUser, props: &dav::PropName) -> dav::PropValue; + fn properties(&self, user: &ArcUser, prop: dav::PropName) -> Vec>; // ----- common @@ -400,26 +399,41 @@ trait DavNode: Send { } } - fn multistatus_val(&self, user: &ArcUser, props: &dav::PropName, depth: dav::Depth) -> dav::Multistatus { - let mut values = vec![(self.path(user), self.properties(user, props))]; + fn multistatus_val(&self, user: &ArcUser, props: dav::PropName, depth: dav::Depth) -> dav::Multistatus { + // Collect properties + let mut values = vec![(self.path(user), self.properties(user, props.clone()))]; if matches!(depth, dav::Depth::One | dav::Depth::Infinity) { values.extend(self .children(user) .iter() - .map(|c| (c.path(user), c.properties(user, props))) + .map(|c| (c.path(user), c.properties(user, props.clone()))) ); } + // Separate FOUND from NOT FOUND + let values: Vec<_> = values.into_iter().map(|(path, anyprop)| { + let mut prop_desc = vec![]; + let (found, not_found): (Vec<_>, Vec<_>) = anyprop.into_iter().partition(|v| matches!(v, dav::AnyProperty::Value(_))); + if !found.is_empty() { + prop_desc.push((hyper::StatusCode::OK, dav::AnyProp(found))) + } + if !not_found.is_empty() { + prop_desc.push((hyper::StatusCode::NOT_FOUND, dav::AnyProp(not_found))) + } + (path, prop_desc) + }).collect(); + + // Build response dav::Multistatus:: { - responses: values.into_iter().map(|(url, propval)| dav::Response { + responses: values.into_iter().map(|(url, propdesc)| dav::Response { status_or_propstat: dav::StatusOrPropstat::PropStat( dav::Href(url), - vec![dav::PropStat { - prop: dav::AnyProp(propval.0.into_iter().map(dav::AnyProperty::Value).collect()), - status: dav::Status(hyper::StatusCode::OK), + propdesc.into_iter().map(|(status, prop)| dav::PropStat { + prop, + status: dav::Status(status), error: None, responsedescription: None, - }], + }).collect(), ), error: None, location: None, @@ -459,13 +473,13 @@ impl DavNode for RootNode { dav::PropertyRequest::GetContentType, ]) } - fn properties(&self, user: &ArcUser, prop: &dav::PropName) -> dav::PropValue { - dav::PropValue(prop.0.iter().filter_map(|n| match n { - dav::PropertyRequest::DisplayName => Some(dav::Property::DisplayName("DAV Root".to_string())), - dav::PropertyRequest::ResourceType => Some(dav::Property::ResourceType(vec![dav::ResourceType::Collection])), - dav::PropertyRequest::GetContentType => Some(dav::Property::GetContentType("httpd/unix-directory".into())), - _ => None, - }).collect()) + fn properties(&self, _user: &ArcUser, prop: dav::PropName) -> Vec> { + prop.0.into_iter().map(|n| match n { + dav::PropertyRequest::DisplayName => dav::AnyProperty::Value(dav::Property::DisplayName("DAV Root".to_string())), + dav::PropertyRequest::ResourceType => 
dav::AnyProperty::Value(dav::Property::ResourceType(vec![dav::ResourceType::Collection])), + dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("httpd/unix-directory".into())), + v => dav::AnyProperty::Request(v), + }).collect() } } @@ -498,13 +512,13 @@ impl DavNode for HomeNode { dav::PropertyRequest::GetContentType, ]) } - fn properties(&self, user: &ArcUser, prop: &dav::PropName) -> dav::PropValue { - dav::PropValue(prop.0.iter().filter_map(|n| match n { - dav::PropertyRequest::DisplayName => Some(dav::Property::DisplayName(format!("{} home", user.username))), - dav::PropertyRequest::ResourceType => Some(dav::Property::ResourceType(vec![dav::ResourceType::Collection])), - dav::PropertyRequest::GetContentType => Some(dav::Property::GetContentType("httpd/unix-directory".into())), - _ => None, - }).collect()) + fn properties(&self, user: &ArcUser, prop: dav::PropName) -> Vec> { + prop.0.into_iter().map(|n| match n { + dav::PropertyRequest::DisplayName => dav::AnyProperty::Value(dav::Property::DisplayName(format!("{} home", user.username))), + dav::PropertyRequest::ResourceType => dav::AnyProperty::Value(dav::Property::ResourceType(vec![dav::ResourceType::Collection])), + dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("httpd/unix-directory".into())), + v => dav::AnyProperty::Request(v), + }).collect() } } @@ -538,13 +552,13 @@ impl DavNode for CalendarListNode { dav::PropertyRequest::GetContentType, ]) } - fn properties(&self, user: &ArcUser, prop: &dav::PropName) -> dav::PropValue { - dav::PropValue(prop.0.iter().filter_map(|n| match n { - dav::PropertyRequest::DisplayName => Some(dav::Property::DisplayName(format!("{} calendars", user.username))), - dav::PropertyRequest::ResourceType => Some(dav::Property::ResourceType(vec![dav::ResourceType::Collection])), - dav::PropertyRequest::GetContentType => Some(dav::Property::GetContentType("httpd/unix-directory".into())), - _ => None, - }).collect()) + fn properties(&self, user: &ArcUser, prop: dav::PropName) -> Vec> { + prop.0.into_iter().map(|n| match n { + dav::PropertyRequest::DisplayName => dav::AnyProperty::Value(dav::Property::DisplayName(format!("{} calendars", user.username))), + dav::PropertyRequest::ResourceType => dav::AnyProperty::Value(dav::Property::ResourceType(vec![dav::ResourceType::Collection])), + dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("httpd/unix-directory".into())), + v => dav::AnyProperty::Request(v), + }).collect() } } @@ -583,16 +597,16 @@ impl DavNode for CalendarNode { dav::PropertyRequest::GetContentType, ]) } - fn properties(&self, user: &ArcUser, prop: &dav::PropName) -> dav::PropValue { - dav::PropValue(prop.0.iter().filter_map(|n| match n { - dav::PropertyRequest::DisplayName => Some(dav::Property::DisplayName(format!("{} calendar", self.name))), - dav::PropertyRequest::ResourceType => Some(dav::Property::ResourceType(vec![ + fn properties(&self, _user: &ArcUser, prop: dav::PropName) -> Vec> { + prop.0.into_iter().map(|n| match n { + dav::PropertyRequest::DisplayName => dav::AnyProperty::Value(dav::Property::DisplayName(format!("{} calendar", self.name))), + dav::PropertyRequest::ResourceType => dav::AnyProperty::Value(dav::Property::ResourceType(vec![ dav::ResourceType::Collection, dav::ResourceType::Extension(cal::ResourceType::Calendar), ])), - dav::PropertyRequest::GetContentType => Some(dav::Property::GetContentType("httpd/unix-directory".into())), - _ => None, - 
}).collect()) + dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("httpd/unix-directory".into())), + v => dav::AnyProperty::Request(v), + }).collect() } } @@ -622,13 +636,13 @@ impl DavNode for EventNode { dav::PropertyRequest::ResourceType, ]) } - fn properties(&self, user: &ArcUser, prop: &dav::PropName) -> dav::PropValue { - dav::PropValue(prop.0.iter().filter_map(|n| match n { - dav::PropertyRequest::DisplayName => Some(dav::Property::DisplayName(format!("{} event", self.event_file))), - dav::PropertyRequest::ResourceType => Some(dav::Property::ResourceType(vec![])), - dav::PropertyRequest::GetContentType => Some(dav::Property::GetContentType("text/calendar".into())), - _ => None, - }).collect()) + fn properties(&self, _user: &ArcUser, prop: dav::PropName) -> Vec> { + prop.0.into_iter().map(|n| match n { + dav::PropertyRequest::DisplayName => dav::AnyProperty::Value(dav::Property::DisplayName(format!("{} event", self.event_file))), + dav::PropertyRequest::ResourceType => dav::AnyProperty::Value(dav::Property::ResourceType(vec![])), + dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("text/calendar".into())), + v => dav::AnyProperty::Request(v), + }).collect() } } -- cgit v1.2.3 From 5bf3517acfea3694fbe586e69a0e02b94c61eb1b Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Mon, 18 Mar 2024 22:56:49 +0100 Subject: Pass thunderbird autodetect... --- aero-dav/src/caldecoder.rs | 9 +++++++++ aero-dav/src/calencoder.rs | 8 ++++++++ aero-dav/src/caltypes.rs | 26 ++++++++++++++++++++++++++ aero-proto/src/dav.rs | 21 ++++++++++++++------- 4 files changed, 57 insertions(+), 7 deletions(-) diff --git a/aero-dav/src/caldecoder.rs b/aero-dav/src/caldecoder.rs index dbc6e18..b124154 100644 --- a/aero-dav/src/caldecoder.rs +++ b/aero-dav/src/caldecoder.rs @@ -162,6 +162,11 @@ impl QRead for Violation { impl QRead for Property { async fn qread(xml: &mut Reader) -> Result { + if xml.maybe_open_start(CAL_URN, "calendar-home-set").await?.is_some() { + let href = xml.find().await?; + xml.close().await?; + return Ok(Property::CalendarHomeSet(href)) + } if xml.maybe_open_start(CAL_URN, "calendar-description").await?.is_some() { let lang = xml.prev_attr("xml:lang"); let text = xml.tag_string().await?; @@ -231,6 +236,10 @@ impl QRead for Property { impl QRead for PropertyRequest { async fn qread(xml: &mut Reader) -> Result { + if xml.maybe_open(CAL_URN, "calendar-home-set").await?.is_some() { + xml.close().await?; + return Ok(Self::CalendarHomeSet) + } if xml.maybe_open(CAL_URN, "calendar-description").await?.is_some() { xml.close().await?; return Ok(Self::CalendarDescription) diff --git a/aero-dav/src/calencoder.rs b/aero-dav/src/calencoder.rs index 5323229..d4e79dc 100644 --- a/aero-dav/src/calencoder.rs +++ b/aero-dav/src/calencoder.rs @@ -88,6 +88,7 @@ impl QWrite for PropertyRequest { }; match self { + Self::CalendarHomeSet => atom("calendar-home-set").await, Self::CalendarDescription => atom("calendar-description").await, Self::CalendarTimezone => atom("calendar-timezone").await, Self::SupportedCalendarComponentSet => atom("supported-calendar-component-set").await, @@ -105,6 +106,13 @@ impl QWrite for PropertyRequest { impl QWrite for Property { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { match self { + Self::CalendarHomeSet(href) => { + let start = xml.create_cal_element("calendar-home-set"); + let end = start.to_end(); + xml.q.write_event_async(Event::Start(start.clone())).await?; + 
href.qwrite(xml).await?; + xml.q.write_event_async(Event::End(end)).await + } Self::CalendarDescription { lang, text } => { let mut start = xml.create_cal_element("calendar-description"); if let Some(the_lang) = lang { diff --git a/aero-dav/src/caltypes.rs b/aero-dav/src/caltypes.rs index 5ac50e6..aa056d4 100644 --- a/aero-dav/src/caltypes.rs +++ b/aero-dav/src/caltypes.rs @@ -116,6 +116,7 @@ pub enum ResourceType { /// Check the matching Property object for documentation #[derive(Debug, PartialEq, Clone)] pub enum PropertyRequest { + CalendarHomeSet, CalendarDescription, CalendarTimezone, SupportedCalendarComponentSet, @@ -131,6 +132,31 @@ pub enum PropertyRequest { #[derive(Debug, PartialEq, Clone)] pub enum Property { + /// Name: calendar-home-set + /// + /// Namespace: urn:ietf:params:xml:ns:caldav + /// + /// Purpose: Identifies the URL of any WebDAV collections that contain + /// calendar collections owned by the associated principal resource. + /// + /// Conformance: This property SHOULD be defined on a principal + /// resource. If defined, it MAY be protected and SHOULD NOT be + /// returned by a PROPFIND DAV:allprop request (as defined in Section + /// 12.14.1 of [RFC2518]). + /// + /// Description: The CALDAV:calendar-home-set property is meant to allow + /// users to easily find the calendar collections owned by the + /// principal. Typically, users will group all the calendar + /// collections that they own under a common collection. This + /// property specifies the URL of collections that are either calendar + /// collections or ordinary collections that have child or descendant + /// calendar collections owned by the principal. + /// + /// Definition: + /// + /// + CalendarHomeSet(dav::Href), + /// Name: calendar-description /// /// Namespace: urn:ietf:params:xml:ns:caldav diff --git a/aero-proto/src/dav.rs b/aero-proto/src/dav.rs index b964760..c8e534e 100644 --- a/aero-proto/src/dav.rs +++ b/aero-proto/src/dav.rs @@ -212,7 +212,7 @@ async fn router(user: std::sync::Arc, req: Request) -> Result { tracing::warn!("HEAD+GET not correctly implemented"); return Ok(Response::builder() - .status(200) + .status(404) .body(text_body(""))?) }, "PROPFIND" => propfind(user, req, node).await, @@ -259,6 +259,7 @@ const ALLPROP: [dav::PropertyRequest; 10] = [ async fn propfind(user: std::sync::Arc, req: Request, node: Box) -> Result>> { let depth = depth(&req); + let status = hyper::StatusCode::from_u16(207)?; // A client may choose not to submit a request body. An empty PROPFIND // request body MUST be treated as if it were an 'allprop' request. 
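A note on the shape this produces: together with the "add support for 404 content" change earlier in this series, every resource listed in the 207 Multi-Status now carries the properties the server could resolve under a 200 propstat and the ones it could not under a 404 propstat. The following is a condensed, hedged sketch of that per-resource construction, using the types defined in aero-dav at this point of the series; the helper name one_resource_response is invented for illustration and is not part of the patches.

    use aero_dav::realization::Calendar;
    use aero_dav::types as dav;

    // Sketch only: mirrors the partition done in multistatus_val for a single
    // resource. Resolved properties go under HTTP 200, unresolved ones under 404.
    fn one_resource_response(
        url: String,
        props: Vec<dav::AnyProperty<Calendar>>,
    ) -> dav::Response<Calendar> {
        let (found, not_found): (Vec<_>, Vec<_>) = props
            .into_iter()
            .partition(|p| matches!(p, dav::AnyProperty::Value(_)));

        let mut propstats = Vec::new();
        if !found.is_empty() {
            propstats.push(dav::PropStat {
                prop: dav::AnyProp(found),
                status: dav::Status(hyper::StatusCode::OK),
                error: None,
                responsedescription: None,
            });
        }
        if !not_found.is_empty() {
            propstats.push(dav::PropStat {
                prop: dav::AnyProp(not_found),
                status: dav::Status(hyper::StatusCode::NOT_FOUND),
                error: None,
                responsedescription: None,
            });
        }

        dav::Response {
            status_or_propstat: dav::StatusOrPropstat::PropStat(dav::Href(url), propstats),
            error: None,
            responsedescription: None,
            location: None,
        }
    }
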
@@ -268,7 +269,7 @@ async fn propfind(user: std::sync::Arc, req: Request, node: Box< tracing::debug!(recv=?propfind, "inferred propfind request"); if matches!(propfind, dav::PropFind::PropName) { - return serialize(node.multistatus_name(&user, depth)); + return serialize(status, node.multistatus_name(&user, depth)); } let propname = match propfind { @@ -281,7 +282,7 @@ async fn propfind(user: std::sync::Arc, req: Request, node: Box< dav::PropFind::Prop(inner) => inner, }; - serialize(node.multistatus_val(&user, propname, depth)) + serialize(status, node.multistatus_val(&user, propname, depth)) } // ---- HTTP DAV Binding @@ -308,7 +309,7 @@ fn text_body(txt: &'static str) -> BoxBody { BoxBody::new(Full::new(Bytes::from(txt)).map_err(|e| match e {})) } -fn serialize(elem: T) -> Result>> { +fn serialize(status_ok: hyper::StatusCode, elem: T) -> Result>> { let (tx, rx) = tokio::sync::mpsc::channel::(1); // Build the writer @@ -318,7 +319,7 @@ fn serialize(elem: T) -> Result (), Err(e) => tracing::error!(err=?e, "unable to write XML declaration "), @@ -336,7 +337,8 @@ fn serialize(elem: T) -> Result) -> Vec> { @@ -517,6 +520,8 @@ impl DavNode for HomeNode { dav::PropertyRequest::DisplayName => dav::AnyProperty::Value(dav::Property::DisplayName(format!("{} home", user.username))), dav::PropertyRequest::ResourceType => dav::AnyProperty::Value(dav::Property::ResourceType(vec![dav::ResourceType::Collection])), dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("httpd/unix-directory".into())), + dav::PropertyRequest::Extension(cal::PropertyRequest::CalendarHomeSet) => + dav::AnyProperty::Value(dav::Property::Extension(cal::Property::CalendarHomeSet(dav::Href(CalendarListNode{}.path(user))))), v => dav::AnyProperty::Request(v), }).collect() } @@ -604,7 +609,9 @@ impl DavNode for CalendarNode { dav::ResourceType::Collection, dav::ResourceType::Extension(cal::ResourceType::Calendar), ])), - dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("httpd/unix-directory".into())), + //dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("httpd/unix-directory".into())), + //@FIXME seems wrong but seems to be what Thunderbird expects... 
+ dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("text/calendar".into())), v => dav::AnyProperty::Request(v), }).collect() } -- cgit v1.2.3 From fb6a379f43ff579dbc224fb52180ba3a6d6cde5c Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Tue, 19 Mar 2024 17:36:32 +0100 Subject: Working thunderbird autodetect --- aero-dav/src/acldecoder.rs | 68 ++++++++++++++++++++++++++++++++ aero-dav/src/aclencoder.rs | 71 ++++++++++++++++++++++++++++++++++ aero-dav/src/acltypes.rs | 38 +++++++++++++++++- aero-dav/src/caltypes.rs | 1 - aero-dav/src/lib.rs | 8 +++- aero-dav/src/realization.rs | 94 ++++++++++++++++++++++++++++++++++++++++++++- aero-dav/src/xml.rs | 2 +- aero-proto/src/dav.rs | 59 ++++++++++++++++------------ 8 files changed, 309 insertions(+), 32 deletions(-) create mode 100644 aero-dav/src/acldecoder.rs create mode 100644 aero-dav/src/aclencoder.rs diff --git a/aero-dav/src/acldecoder.rs b/aero-dav/src/acldecoder.rs new file mode 100644 index 0000000..67dfb0b --- /dev/null +++ b/aero-dav/src/acldecoder.rs @@ -0,0 +1,68 @@ +use super::acltypes::*; +use super::types as dav; +use super::xml::{QRead, Reader, IRead, DAV_URN}; +use super::error::ParsingError; + +impl QRead for Property { + async fn qread(xml: &mut Reader) -> Result { + if xml.maybe_open_start(DAV_URN, "owner").await?.is_some() { + let href = xml.find().await?; + xml.close().await?; + return Ok(Self::Owner(href)) + } + if xml.maybe_open_start(DAV_URN, "current-user-principal").await?.is_some() { + let user = xml.find().await?; + xml.close().await?; + return Ok(Self::CurrentUserPrincipal(user)) + } + if xml.maybe_open_start(DAV_URN, "current-user-privilege-set").await?.is_some() { + xml.close().await?; + return Ok(Self::CurrentUserPrivilegeSet(vec![])) + } + + Err(ParsingError::Recoverable) + } +} + +impl QRead for PropertyRequest { + async fn qread(xml: &mut Reader) -> Result { + if xml.maybe_open(DAV_URN, "owner").await?.is_some() { + xml.close().await?; + return Ok(Self::Owner) + } + + if xml.maybe_open(DAV_URN, "current-user-principal").await?.is_some() { + xml.close().await?; + return Ok(Self::CurrentUserPrincipal) + } + + if xml.maybe_open(DAV_URN, "current-user-privilege-set").await?.is_some() { + xml.close().await?; + return Ok(Self::CurrentUserPrivilegeSet) + } + + Err(ParsingError::Recoverable) + } +} + +impl QRead for ResourceType { + async fn qread(xml: &mut Reader) -> Result { + if xml.maybe_open(DAV_URN, "principal").await?.is_some() { + xml.close().await?; + return Ok(Self::Principal) + } + Err(ParsingError::Recoverable) + } +} + +// ----- +impl QRead for User { + async fn qread(xml: &mut Reader) -> Result { + if xml.maybe_open(DAV_URN, "unauthenticated").await?.is_some() { + xml.close().await?; + return Ok(Self::Unauthenticated) + } + + dav::Href::qread(xml).await.map(Self::Authenticated) + } +} diff --git a/aero-dav/src/aclencoder.rs b/aero-dav/src/aclencoder.rs new file mode 100644 index 0000000..2fa4707 --- /dev/null +++ b/aero-dav/src/aclencoder.rs @@ -0,0 +1,71 @@ +use quick_xml::Error as QError; +use quick_xml::events::Event; + +use super::acltypes::*; +use super::xml::{QWrite, Writer, IWrite}; +use super::error::ParsingError; + +impl QWrite for Property { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + match self { + Self::Owner(href) => { + let start = xml.create_dav_element("owner"); + let end = start.to_end(); + xml.q.write_event_async(Event::Start(start.clone())).await?; + href.qwrite(xml).await?; + 
xml.q.write_event_async(Event::End(end)).await + }, + Self::CurrentUserPrincipal(user) => { + let start = xml.create_dav_element("current-user-principal"); + let end = start.to_end(); + xml.q.write_event_async(Event::Start(start.clone())).await?; + user.qwrite(xml).await?; + xml.q.write_event_async(Event::End(end)).await + }, + Self::CurrentUserPrivilegeSet(_) => { + let empty_tag = xml.create_dav_element("current-user-privilege-set"); + xml.q.write_event_async(Event::Empty(empty_tag)).await + }, + } + } +} + +impl QWrite for PropertyRequest { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let mut atom = async |c| { + let empty_tag = xml.create_dav_element(c); + xml.q.write_event_async(Event::Empty(empty_tag)).await + }; + + match self { + Self::Owner => atom("owner").await, + Self::CurrentUserPrincipal => atom("current-user-principal").await, + Self::CurrentUserPrivilegeSet => atom("current-user-privilege-set").await, + } + } +} + +impl QWrite for ResourceType { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + match self { + Self::Principal => { + let empty_tag = xml.create_dav_element("principal"); + xml.q.write_event_async(Event::Empty(empty_tag)).await + } + } + } +} + +// ----- + +impl QWrite for User { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + match self { + Self::Unauthenticated => { + let tag = xml.create_dav_element("unauthenticated"); + xml.q.write_event_async(Event::Empty(tag)).await + }, + Self::Authenticated(href) => href.qwrite(xml).await, + } + } +} diff --git a/aero-dav/src/acltypes.rs b/aero-dav/src/acltypes.rs index f356813..d5be413 100644 --- a/aero-dav/src/acltypes.rs +++ b/aero-dav/src/acltypes.rs @@ -1,4 +1,40 @@ -//@FIXME required for a full DAV implementation +use super::types as dav; + +//RFC covered: RFC3744 (ACL core) + RFC5397 (ACL Current Principal Extension) + + +//@FIXME required for a full CalDAV implementation // See section 6. of the CalDAV RFC // It seems mainly required for free-busy that I will not implement now. // It can also be used for discovering main calendar, not sure it is used. 
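For orientation, these ACL properties are exactly what a CalDAV client's autodetect walks: a PROPFIND on the DAV root asking for DAV:current-user-principal, then a PROPFIND on the returned principal asking for CALDAV:calendar-home-set, then a Depth: 1 PROPFIND on the home set to enumerate calendars. Below is a value-level sketch of the two answers the server ends up building later in this series; the /alice/ paths and the function name discovery_answers are purely illustrative, not patch code.

    use aero_dav::acltypes as acl;
    use aero_dav::caltypes as cal;
    use aero_dav::types as dav;

    // Value-level sketch of the two autodetect answers (illustrative paths).
    fn discovery_answers() -> (acl::Property, cal::Property) {
        // Step 1: the DAV root names the authenticated principal.
        let who = acl::Property::CurrentUserPrincipal(
            acl::User::Authenticated(dav::Href("/alice/".into())),
        );
        // Step 2: the principal points at its calendar home collection.
        let home = cal::Property::CalendarHomeSet(dav::Href("/alice/calendar/".into()));
        (who, home)
    }
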
+// Note: it is used by Thunderbird + + +#[derive(Debug, PartialEq, Clone)] +pub enum PropertyRequest { + Owner, + CurrentUserPrincipal, + CurrentUserPrivilegeSet, +} + +#[derive(Debug, PartialEq, Clone)] +pub enum Property { + Owner(dav::Href), + CurrentUserPrincipal(User), + CurrentUserPrivilegeSet(Vec), +} + +#[derive(Debug, PartialEq, Clone)] +pub enum ResourceType { + Principal, +} + +/// Not implemented, it's a placeholder +#[derive(Debug, PartialEq, Clone)] +pub struct Privilege(()); + +#[derive(Debug, PartialEq, Clone)] +pub enum User { + Unauthenticated, + Authenticated(dav::Href), +} diff --git a/aero-dav/src/caltypes.rs b/aero-dav/src/caltypes.rs index aa056d4..602498c 100644 --- a/aero-dav/src/caltypes.rs +++ b/aero-dav/src/caltypes.rs @@ -2,7 +2,6 @@ use chrono::{DateTime,Utc}; use super::types as dav; -use super::xml; pub const ICAL_DATETIME_FMT: &str = "%Y%m%dT%H%M%SZ"; diff --git a/aero-dav/src/lib.rs b/aero-dav/src/lib.rs index 0ca8243..009951a 100644 --- a/aero-dav/src/lib.rs +++ b/aero-dav/src/lib.rs @@ -16,8 +16,12 @@ pub mod caltypes; pub mod calencoder; pub mod caldecoder; -// wip -mod acltypes; +// acl (wip) +pub mod acltypes; +pub mod aclencoder; +pub mod acldecoder; + +// versioning (wip) mod versioningtypes; // final type diff --git a/aero-dav/src/realization.rs b/aero-dav/src/realization.rs index 7bec729..bfed4d7 100644 --- a/aero-dav/src/realization.rs +++ b/aero-dav/src/realization.rs @@ -1,5 +1,6 @@ use super::types as dav; use super::caltypes as cal; +use super::acltypes as acl; use super::xml; use super::error; @@ -11,8 +12,8 @@ impl xml::QRead for Disabled { } } impl xml::QWrite for Disabled { - fn qwrite(&self, _xml: &mut xml::Writer) -> impl futures::Future> + Send { - async { unreachable!(); } + async fn qwrite(&self, _xml: &mut xml::Writer) -> Result<(), quick_xml::Error> { + unreachable!() } } @@ -40,3 +41,92 @@ impl dav::Extension for Calendar type ResourceType = cal::ResourceType; } +// ACL +#[derive(Debug, PartialEq, Clone)] +pub struct Acl {} +impl dav::Extension for Acl +{ + type Error = Disabled; + type Property = acl::Property; + type PropertyRequest = acl::PropertyRequest; + type ResourceType = acl::ResourceType; +} + +// All merged +#[derive(Debug, PartialEq, Clone)] +pub struct All {} +impl dav::Extension for All { + type Error = cal::Violation; + type Property = Property; + type PropertyRequest = PropertyRequest; + type ResourceType = ResourceType; +} + +#[derive(Debug, PartialEq, Clone)] +pub enum Property { + Cal(cal::Property), + Acl(acl::Property), +} +impl xml::QRead for Property { + async fn qread(xml: &mut xml::Reader) -> Result { + match cal::Property::qread(xml).await { + Err(error::ParsingError::Recoverable) => (), + otherwise => return otherwise.map(Property::Cal), + } + acl::Property::qread(xml).await.map(Property::Acl) + } +} +impl xml::QWrite for Property { + async fn qwrite(&self, xml: &mut xml::Writer) -> Result<(), quick_xml::Error> { + match self { + Self::Cal(c) => c.qwrite(xml).await, + Self::Acl(a) => a.qwrite(xml).await, + } + } +} + +#[derive(Debug, PartialEq, Clone)] +pub enum PropertyRequest { + Cal(cal::PropertyRequest), + Acl(acl::PropertyRequest), +} +impl xml::QRead for PropertyRequest { + async fn qread(xml: &mut xml::Reader) -> Result { + match cal::PropertyRequest::qread(xml).await { + Err(error::ParsingError::Recoverable) => (), + otherwise => return otherwise.map(PropertyRequest::Cal), + } + acl::PropertyRequest::qread(xml).await.map(PropertyRequest::Acl) + } +} +impl xml::QWrite for PropertyRequest { + 
async fn qwrite(&self, xml: &mut xml::Writer) -> Result<(), quick_xml::Error> { + match self { + Self::Cal(c) => c.qwrite(xml).await, + Self::Acl(a) => a.qwrite(xml).await, + } + } +} + +#[derive(Debug, PartialEq, Clone)] +pub enum ResourceType { + Cal(cal::ResourceType), + Acl(acl::ResourceType), +} +impl xml::QRead for ResourceType { + async fn qread(xml: &mut xml::Reader) -> Result { + match cal::ResourceType::qread(xml).await { + Err(error::ParsingError::Recoverable) => (), + otherwise => return otherwise.map(ResourceType::Cal), + } + acl::ResourceType::qread(xml).await.map(ResourceType::Acl) + } +} +impl xml::QWrite for ResourceType { + async fn qwrite(&self, xml: &mut xml::Writer) -> Result<(), quick_xml::Error> { + match self { + Self::Cal(c) => c.qwrite(xml).await, + Self::Acl(a) => a.qwrite(xml).await, + } + } +} diff --git a/aero-dav/src/xml.rs b/aero-dav/src/xml.rs index 26f54cc..020ee6c 100644 --- a/aero-dav/src/xml.rs +++ b/aero-dav/src/xml.rs @@ -264,7 +264,7 @@ impl Reader { _ => return Err(ParsingError::Recoverable), }; - //println!("open tag {:?}", evt); + //println!("open start tag {:?}", evt); self.parents.push(evt.clone()); Ok(evt) } diff --git a/aero-proto/src/dav.rs b/aero-proto/src/dav.rs index c8e534e..608aef1 100644 --- a/aero-proto/src/dav.rs +++ b/aero-proto/src/dav.rs @@ -23,7 +23,8 @@ use aero_user::login::ArcLoginProvider; use aero_collections::user::User; use aero_dav::types as dav; use aero_dav::caltypes as cal; -use aero_dav::realization::Calendar; +use aero_dav::acltypes as acl; +use aero_dav::realization::{All, self as all}; use aero_dav::xml as dxml; pub struct Server { @@ -244,7 +245,7 @@ async fn router(user: std::sync::Arc, req: Request) -> Result /// -const ALLPROP: [dav::PropertyRequest; 10] = [ +const ALLPROP: [dav::PropertyRequest; 10] = [ dav::PropertyRequest::CreationDate, dav::PropertyRequest::DisplayName, dav::PropertyRequest::GetContentLanguage, @@ -265,7 +266,7 @@ async fn propfind(user: std::sync::Arc, req: Request, node: Box< // request body MUST be treated as if it were an 'allprop' request. // @FIXME here we handle any invalid data as an allprop, an empty request is thus correctly // handled, but corrupted requests are also silently handled as allprop. 
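The switch from the Calendar realization to the merged All realization, which the surrounding hunk performs, leans on the ordered-fallback convention visible in realization.rs above: a sub-parser answers ParsingError::Recoverable when the current element is not in its namespace, and the next extension gets a try. Spelled out as a free function below; the function name read_merged_property is invented, and the module visibility and the generic bounds on Reader are approximated, since the exact signatures are abbreviated in the hunks.

    use aero_dav::acltypes as acl;
    use aero_dav::caltypes as cal;
    use aero_dav::error::ParsingError;
    use aero_dav::realization as all;
    use aero_dav::xml::{IRead, QRead, Reader};

    // Sketch of the fallback chain behind realization::Property's QRead impl:
    // try the CalDAV parser first, and only hand the element to the ACL parser
    // when CalDAV reports it as recoverable ("not one of mine").
    async fn read_merged_property<I: IRead>(
        xml: &mut Reader<I>,
    ) -> Result<all::Property, ParsingError> {
        match cal::Property::qread(xml).await {
            Err(ParsingError::Recoverable) => (),
            otherwise => return otherwise.map(all::Property::Cal),
        }
        acl::Property::qread(xml).await.map(all::Property::Acl)
    }
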
- let propfind = deserialize::>(req).await.unwrap_or_else(|_| dav::PropFind::::AllProp(None)); + let propfind = deserialize::>(req).await.unwrap_or_else(|_| dav::PropFind::::AllProp(None)); tracing::debug!(recv=?propfind, "inferred propfind request"); if matches!(propfind, dav::PropFind::PropName) { @@ -370,19 +371,19 @@ trait DavNode: Send { // node properties fn path(&self, user: &ArcUser) -> String; - fn supported_properties(&self, user: &ArcUser) -> dav::PropName; - fn properties(&self, user: &ArcUser, prop: dav::PropName) -> Vec>; + fn supported_properties(&self, user: &ArcUser) -> dav::PropName; + fn properties(&self, user: &ArcUser, prop: dav::PropName) -> Vec>; // ----- common /// building DAV responses - fn multistatus_name(&self, user: &ArcUser, depth: dav::Depth) -> dav::Multistatus { + fn multistatus_name(&self, user: &ArcUser, depth: dav::Depth) -> dav::Multistatus { let mut names = vec![(self.path(user), self.supported_properties(user))]; if matches!(depth, dav::Depth::One | dav::Depth::Infinity) { names.extend(self.children(user).iter().map(|c| (c.path(user), c.supported_properties(user)))); } - dav::Multistatus:: { + dav::Multistatus:: { responses: names.into_iter().map(|(url, names)| dav::Response { status_or_propstat: dav::StatusOrPropstat::PropStat( dav::Href(url), @@ -401,7 +402,7 @@ trait DavNode: Send { } } - fn multistatus_val(&self, user: &ArcUser, props: dav::PropName, depth: dav::Depth) -> dav::Multistatus { + fn multistatus_val(&self, user: &ArcUser, props: dav::PropName, depth: dav::Depth) -> dav::Multistatus { // Collect properties let mut values = vec![(self.path(user), self.properties(user, props.clone()))]; if matches!(depth, dav::Depth::One | dav::Depth::Infinity) { @@ -426,7 +427,7 @@ trait DavNode: Send { }).collect(); // Build response - dav::Multistatus:: { + dav::Multistatus:: { responses: values.into_iter().map(|(url, propdesc)| dav::Response { status_or_propstat: dav::StatusOrPropstat::PropStat( dav::Href(url), @@ -468,18 +469,23 @@ impl DavNode for RootNode { fn children(&self, user: &ArcUser) -> Vec> { vec![Box::new(HomeNode { })] } - fn supported_properties(&self, user: &ArcUser) -> dav::PropName { + fn supported_properties(&self, user: &ArcUser) -> dav::PropName { dav::PropName(vec![ dav::PropertyRequest::DisplayName, dav::PropertyRequest::ResourceType, dav::PropertyRequest::GetContentType, + dav::PropertyRequest::Extension(all::PropertyRequest::Acl(acl::PropertyRequest::CurrentUserPrincipal)), ]) } - fn properties(&self, _user: &ArcUser, prop: dav::PropName) -> Vec> { + fn properties(&self, user: &ArcUser, prop: dav::PropName) -> Vec> { prop.0.into_iter().map(|n| match n { dav::PropertyRequest::DisplayName => dav::AnyProperty::Value(dav::Property::DisplayName("DAV Root".to_string())), - dav::PropertyRequest::ResourceType => dav::AnyProperty::Value(dav::Property::ResourceType(vec![dav::ResourceType::Collection])), + dav::PropertyRequest::ResourceType => dav::AnyProperty::Value(dav::Property::ResourceType(vec![ + dav::ResourceType::Collection, + ])), dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("httpd/unix-directory".into())), + dav::PropertyRequest::Extension(all::PropertyRequest::Acl(acl::PropertyRequest::CurrentUserPrincipal)) => + dav::AnyProperty::Value(dav::Property::Extension(all::Property::Acl(acl::Property::CurrentUserPrincipal(acl::User::Authenticated(dav::Href(HomeNode{}.path(user))))))), v => dav::AnyProperty::Request(v), }).collect() } @@ -507,21 +513,24 @@ impl DavNode for HomeNode { 
fn children(&self, user: &ArcUser) -> Vec> { vec![Box::new(CalendarListNode { })] } - fn supported_properties(&self, user: &ArcUser) -> dav::PropName { + fn supported_properties(&self, user: &ArcUser) -> dav::PropName { dav::PropName(vec![ dav::PropertyRequest::DisplayName, dav::PropertyRequest::ResourceType, dav::PropertyRequest::GetContentType, - dav::PropertyRequest::Extension(cal::PropertyRequest::CalendarHomeSet), + dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::CalendarHomeSet)), ]) } - fn properties(&self, user: &ArcUser, prop: dav::PropName) -> Vec> { + fn properties(&self, user: &ArcUser, prop: dav::PropName) -> Vec> { prop.0.into_iter().map(|n| match n { dav::PropertyRequest::DisplayName => dav::AnyProperty::Value(dav::Property::DisplayName(format!("{} home", user.username))), - dav::PropertyRequest::ResourceType => dav::AnyProperty::Value(dav::Property::ResourceType(vec![dav::ResourceType::Collection])), + dav::PropertyRequest::ResourceType => dav::AnyProperty::Value(dav::Property::ResourceType(vec![ + dav::ResourceType::Collection, + dav::ResourceType::Extension(all::ResourceType::Acl(acl::ResourceType::Principal)), + ])), dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("httpd/unix-directory".into())), - dav::PropertyRequest::Extension(cal::PropertyRequest::CalendarHomeSet) => - dav::AnyProperty::Value(dav::Property::Extension(cal::Property::CalendarHomeSet(dav::Href(CalendarListNode{}.path(user))))), + dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::CalendarHomeSet)) => + dav::AnyProperty::Value(dav::Property::Extension(all::Property::Cal(cal::Property::CalendarHomeSet(dav::Href(CalendarListNode{}.path(user)))))), v => dav::AnyProperty::Request(v), }).collect() } @@ -550,14 +559,14 @@ impl DavNode for CalendarListNode { fn children(&self, user: &ArcUser) -> Vec> { vec![Box::new(CalendarNode { name: "personal".into() })] } - fn supported_properties(&self, user: &ArcUser) -> dav::PropName { + fn supported_properties(&self, user: &ArcUser) -> dav::PropName { dav::PropName(vec![ dav::PropertyRequest::DisplayName, dav::PropertyRequest::ResourceType, dav::PropertyRequest::GetContentType, ]) } - fn properties(&self, user: &ArcUser, prop: dav::PropName) -> Vec> { + fn properties(&self, user: &ArcUser, prop: dav::PropName) -> Vec> { prop.0.into_iter().map(|n| match n { dav::PropertyRequest::DisplayName => dav::AnyProperty::Value(dav::Property::DisplayName(format!("{} calendars", user.username))), dav::PropertyRequest::ResourceType => dav::AnyProperty::Value(dav::Property::ResourceType(vec![dav::ResourceType::Collection])), @@ -595,19 +604,19 @@ impl DavNode for CalendarNode { fn children(&self, user: &ArcUser) -> Vec> { vec![Box::new(EventNode { calendar: self.name.to_string(), event_file: "something.ics".into() })] } - fn supported_properties(&self, user: &ArcUser) -> dav::PropName { + fn supported_properties(&self, user: &ArcUser) -> dav::PropName { dav::PropName(vec![ dav::PropertyRequest::DisplayName, dav::PropertyRequest::ResourceType, dav::PropertyRequest::GetContentType, ]) } - fn properties(&self, _user: &ArcUser, prop: dav::PropName) -> Vec> { + fn properties(&self, _user: &ArcUser, prop: dav::PropName) -> Vec> { prop.0.into_iter().map(|n| match n { dav::PropertyRequest::DisplayName => dav::AnyProperty::Value(dav::Property::DisplayName(format!("{} calendar", self.name))), dav::PropertyRequest::ResourceType => dav::AnyProperty::Value(dav::Property::ResourceType(vec![ 
dav::ResourceType::Collection, - dav::ResourceType::Extension(cal::ResourceType::Calendar), + dav::ResourceType::Extension(all::ResourceType::Cal(cal::ResourceType::Calendar)), ])), //dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("httpd/unix-directory".into())), //@FIXME seems wrong but seems to be what Thunderbird expects... @@ -637,13 +646,13 @@ impl DavNode for EventNode { fn children(&self, user: &ArcUser) -> Vec> { vec![] } - fn supported_properties(&self, user: &ArcUser) -> dav::PropName { + fn supported_properties(&self, user: &ArcUser) -> dav::PropName { dav::PropName(vec![ dav::PropertyRequest::DisplayName, dav::PropertyRequest::ResourceType, ]) } - fn properties(&self, _user: &ArcUser, prop: dav::PropName) -> Vec> { + fn properties(&self, _user: &ArcUser, prop: dav::PropName) -> Vec> { prop.0.into_iter().map(|n| match n { dav::PropertyRequest::DisplayName => dav::AnyProperty::Value(dav::Property::DisplayName(format!("{} event", self.event_file))), dav::PropertyRequest::ResourceType => dav::AnyProperty::Value(dav::Property::ResourceType(vec![])), -- cgit v1.2.3 From 311bc59c1b35b748c1551a33480aaefd7cd50199 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Tue, 19 Mar 2024 18:30:26 +0100 Subject: Make Thunderbird sufficiently happy to send a REPORT --- aero-proto/src/dav.rs | 63 +++++++++++++++++++++++++++++++++------------------ 1 file changed, 41 insertions(+), 22 deletions(-) diff --git a/aero-proto/src/dav.rs b/aero-proto/src/dav.rs index 608aef1..0e2e01a 100644 --- a/aero-proto/src/dav.rs +++ b/aero-proto/src/dav.rs @@ -208,7 +208,7 @@ async fn router(user: std::sync::Arc, req: Request) -> Result return Ok(Response::builder() .status(200) .header("DAV", "1") - .header("ALLOW", "HEAD,GET,PUT,OPTIONS,DELETE,PROPFIND,PROPPATCH,MKCOL,COPY,MOVE,LOCK,UNLOCK") + .header("Allow", "HEAD,GET,PUT,OPTIONS,DELETE,PROPFIND,PROPPATCH,MKCOL,COPY,MOVE,LOCK,UNLOCK,MKCALENDAR,REPORT") .body(text_body(""))?), "HEAD" | "GET" => { tracing::warn!("HEAD+GET not correctly implemented"); @@ -223,27 +223,39 @@ async fn router(user: std::sync::Arc, req: Request) -> Result -/// -/// -/// -/// -/// -/// -/// - - -/// -/// -/// -/// -/// -/// -/// -/// -/// -/// -/// +// +// +// +// +// +// +// +// + + +// +// +// +// +// +// +// +// +// +// +// + +// +// +// +// +// +// +// +// +// +// +// const ALLPROP: [dav::PropertyRequest; 10] = [ dav::PropertyRequest::CreationDate, @@ -609,6 +621,7 @@ impl DavNode for CalendarNode { dav::PropertyRequest::DisplayName, dav::PropertyRequest::ResourceType, dav::PropertyRequest::GetContentType, + dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::SupportedCalendarComponentSet)), ]) } fn properties(&self, _user: &ArcUser, prop: dav::PropName) -> Vec> { @@ -621,6 +634,10 @@ impl DavNode for CalendarNode { //dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("httpd/unix-directory".into())), //@FIXME seems wrong but seems to be what Thunderbird expects... 
dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("text/calendar".into())), + dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::SupportedCalendarComponentSet)) + => dav::AnyProperty::Value(dav::Property::Extension(all::Property::Cal(cal::Property::SupportedCalendarComponentSet(vec![ + cal::CompSupport(cal::Component::VEvent), + ])))), v => dav::AnyProperty::Request(v), }).collect() } @@ -650,6 +667,7 @@ impl DavNode for EventNode { dav::PropName(vec![ dav::PropertyRequest::DisplayName, dav::PropertyRequest::ResourceType, + dav::PropertyRequest::GetEtag, ]) } fn properties(&self, _user: &ArcUser, prop: dav::PropName) -> Vec> { @@ -657,6 +675,7 @@ impl DavNode for EventNode { dav::PropertyRequest::DisplayName => dav::AnyProperty::Value(dav::Property::DisplayName(format!("{} event", self.event_file))), dav::PropertyRequest::ResourceType => dav::AnyProperty::Value(dav::Property::ResourceType(vec![])), dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("text/calendar".into())), + dav::PropertyRequest::GetEtag => dav::AnyProperty::Value(dav::Property::GetEtag("\"abcdefg\"".into())), v => dav::AnyProperty::Request(v), }).collect() } -- cgit v1.2.3 From 3c2d4e69876f4e8af0e3fbf5efa774a2084be67e Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Wed, 20 Mar 2024 13:15:56 +0100 Subject: Refactor Multistatus builder to better integrate with REPORT --- aero-dav/src/caldecoder.rs | 16 +++ aero-dav/src/calencoder.rs | 15 +++ aero-dav/src/caltypes.rs | 8 +- aero-proto/src/dav.rs | 256 ++++++++++++++++++++++++++++++++------------- 4 files changed, 224 insertions(+), 71 deletions(-) diff --git a/aero-dav/src/caldecoder.rs b/aero-dav/src/caldecoder.rs index b124154..f92cf09 100644 --- a/aero-dav/src/caldecoder.rs +++ b/aero-dav/src/caldecoder.rs @@ -25,6 +25,22 @@ impl QRead> for MkCalendarResponse { } } +impl QRead> for Report { + async fn qread(xml: &mut Reader) -> Result { + match CalendarQuery::::qread(xml).await { + Err(ParsingError::Recoverable) => (), + otherwise => return otherwise.map(Self::Query) + } + + match CalendarMultiget::::qread(xml).await { + Err(ParsingError::Recoverable) => (), + otherwise => return otherwise.map(Self::Multiget), + } + + FreeBusyQuery::qread(xml).await.map(Self::FreeBusy) + } +} + impl QRead> for CalendarQuery { async fn qread(xml: &mut Reader) -> Result { xml.open(CAL_URN, "calendar-query").await?; diff --git a/aero-dav/src/calencoder.rs b/aero-dav/src/calencoder.rs index d4e79dc..d324c7f 100644 --- a/aero-dav/src/calencoder.rs +++ b/aero-dav/src/calencoder.rs @@ -34,6 +34,15 @@ impl QWrite for MkCalendarResponse { } // ----------------------- REPORT METHOD ------------------------------------- +impl QWrite for Report { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + match self { + Self::Query(v) => v.qwrite(xml).await, + Self::Multiget(v) => v.qwrite(xml).await, + Self::FreeBusy(v) => v.qwrite(xml).await, + } + } +} impl QWrite for CalendarQuery { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { @@ -335,6 +344,12 @@ impl QWrite for CalendarDataRequest { start.push_attribute(("content-type", mime.content_type.as_str())); start.push_attribute(("version", mime.version.as_str())); } + + // Empty tag + if self.comp.is_none() && self.recurrence.is_none() && self.limit_freebusy_set.is_none() { + return xml.q.write_event_async(Event::Empty(start.clone())).await + } + let end = start.to_end(); 
xml.q.write_event_async(Event::Start(start.clone())).await?; if let Some(comp) = &self.comp { diff --git a/aero-dav/src/caltypes.rs b/aero-dav/src/caltypes.rs index 602498c..08991a0 100644 --- a/aero-dav/src/caltypes.rs +++ b/aero-dav/src/caltypes.rs @@ -51,6 +51,12 @@ pub struct MkCalendar(pub dav::Set); pub struct MkCalendarResponse(pub Vec>); // --- (REPORT PART) --- +#[derive(Debug, PartialEq, Clone)] +pub enum Report { + Query(CalendarQuery), + Multiget(CalendarMultiget), + FreeBusy(FreeBusyQuery), +} /// Name: calendar-query /// @@ -827,7 +833,7 @@ pub struct CalendarDataPayload { /// when nested in the DAV:prop XML element in a calendaring /// REPORT request to specify which parts of calendar object /// resources should be returned in the response; -#[derive(Debug, PartialEq, Clone)] +#[derive(Debug, PartialEq, Clone, Default)] pub struct CalendarDataRequest { pub mime: Option, pub comp: Option, diff --git a/aero-proto/src/dav.rs b/aero-proto/src/dav.rs index 0e2e01a..faf5c05 100644 --- a/aero-proto/src/dav.rs +++ b/aero-proto/src/dav.rs @@ -190,6 +190,30 @@ async fn auth( router(user, req).await } +/// Path is a voluntarily feature limited +/// compared to the expressiveness of a UNIX path +/// For example getting parent with ../ is not supported, scheme is not supported, etc. +/// More complex support could be added later if needed by clients +enum Path<'a> { + Abs(Vec<&'a str>), + Rel(Vec<&'a str>), +} +impl<'a> Path<'a> { + fn new(path: &'a str) -> Result { + // This check is naive, it does not aim at detecting all fully qualified + // URL or protect from any attack, its only goal is to help debugging. + if path.starts_with("http://") || path.starts_with("https://") { + anyhow::bail!("Full URL are not supported") + } + + let path_segments: Vec<_> = path.split("/").filter(|s| *s != "" && *s != ".").collect(); + if path.starts_with("/") { + return Ok(Path::Abs(path_segments)) + } + Ok(Path::Rel(path_segments)) + } +} + async fn router(user: std::sync::Arc, req: Request) -> Result>> { let path = req.uri().path().to_string(); let path_segments: Vec<_> = path.split("/").filter(|s| *s != "").collect(); @@ -217,9 +241,10 @@ async fn router(user: std::sync::Arc, req: Request) -> Result propfind(user, req, node).await, + "REPORT" => report(user, req, node).await, _ => return Ok(Response::builder() .status(501) - .body(text_body("Not implemented"))?), + .body(text_body("HTTP Method not implemented"))?), } } @@ -257,6 +282,14 @@ async fn router(user: std::sync::Arc, req: Request) -> Result // +// +// +// +// +// +// /alice/calendar/personal/something.ics +// + const ALLPROP: [dav::PropertyRequest; 10] = [ dav::PropertyRequest::CreationDate, dav::PropertyRequest::DisplayName, @@ -270,8 +303,16 @@ const ALLPROP: [dav::PropertyRequest; 10] = [ dav::PropertyRequest::SupportedLock, ]; -async fn propfind(user: std::sync::Arc, req: Request, node: Box) -> Result>> { +// ---------- Building objects + +async fn propfind(user: std::sync::Arc, req: Request, base_node: Box) -> Result>> { let depth = depth(&req); + if matches!(depth, dav::Depth::Infinity) { + return Ok(Response::builder() + .status(501) + .body(text_body("Depth: Infinity not implemented"))?) + } + let status = hyper::StatusCode::from_u16(207)?; // A client may choose not to submit a request body. 
An empty PROPFIND @@ -281,21 +322,78 @@ async fn propfind(user: std::sync::Arc, req: Request, node: Box< let propfind = deserialize::>(req).await.unwrap_or_else(|_| dav::PropFind::::AllProp(None)); tracing::debug!(recv=?propfind, "inferred propfind request"); - if matches!(propfind, dav::PropFind::PropName) { - return serialize(status, node.multistatus_name(&user, depth)); + // Collect nodes as PROPFIND is not limited at the targeted node + let mut nodes = vec![]; + if matches!(depth, dav::Depth::One | dav::Depth::Infinity) { + nodes.extend(base_node.children(&user)); } + nodes.push(base_node); + // Expand properties request let propname = match propfind { - dav::PropFind::PropName => unreachable!(), - dav::PropFind::AllProp(None) => dav::PropName(ALLPROP.to_vec()), + dav::PropFind::PropName => None, + dav::PropFind::AllProp(None) => Some(dav::PropName(ALLPROP.to_vec())), dav::PropFind::AllProp(Some(dav::Include(mut include))) => { include.extend_from_slice(&ALLPROP); - dav::PropName(include) + Some(dav::PropName(include)) }, - dav::PropFind::Prop(inner) => inner, + dav::PropFind::Prop(inner) => Some(inner), }; - serialize(status, node.multistatus_val(&user, propname, depth)) + // Not Found is currently impossible considering the way we designed this function + let not_found = vec![]; + serialize(status, multistatus(&user, nodes, not_found, propname)) +} + + +async fn report(user: std::sync::Arc, req: Request, node: Box) -> Result>> { + let status = hyper::StatusCode::from_u16(207)?; + + let report = deserialize::>(req).await?; + + // Multiget is really like a propfind where Depth: 0|1|Infinity is replaced by an arbitrary + // list of URLs + let multiget = match report { + cal::Report::Multiget(m) => m, + _ => return Ok(Response::builder() + .status(501) + .body(text_body("Not implemented"))?), + }; + + // Getting the list of nodes + /*let nodes = multiget.href.iter().map(|h| match Path::new(h.0.as_str()) { + + });*/ + + todo!(); + //serialize(status, node.multistatus_val(&user, multiget +} + +fn multistatus(user: &ArcUser, nodes: Vec>, not_found: Vec, props: Option>) -> dav::Multistatus { + // Collect properties on existing objects + let mut responses: Vec> = match props { + Some(props) => nodes.into_iter().map(|n| n.response_props(user, props.clone())).collect(), + None => nodes.into_iter().map(|n| n.response_propname(user)).collect(), + }; + + // Register not found objects only if relevant + if !not_found.is_empty() { + responses.push(dav::Response { + status_or_propstat: dav::StatusOrPropstat::Status( + not_found, + dav::Status(hyper::StatusCode::NOT_FOUND), + ), + error: None, + location: None, + responsedescription: None, + }); + } + + // Build response + dav::Multistatus:: { + responses, + responsedescription: None, + } } // ---- HTTP DAV Binding @@ -314,7 +412,8 @@ fn depth(req: &Request) -> dav::Depth { match req.headers().get("Depth").map(hyper::header::HeaderValue::to_str) { Some(Ok("0")) => dav::Depth::Zero, Some(Ok("1")) => dav::Depth::One, - _ => dav::Depth::Infinity, + Some(Ok("Infinity")) => dav::Depth::Infinity, + _ => dav::Depth::Zero, } } @@ -386,75 +485,58 @@ trait DavNode: Send { fn supported_properties(&self, user: &ArcUser) -> dav::PropName; fn properties(&self, user: &ArcUser, prop: dav::PropName) -> Vec>; - // ----- common - - /// building DAV responses - fn multistatus_name(&self, user: &ArcUser, depth: dav::Depth) -> dav::Multistatus { - let mut names = vec![(self.path(user), self.supported_properties(user))]; - if matches!(depth, dav::Depth::One | 
dav::Depth::Infinity) { - names.extend(self.children(user).iter().map(|c| (c.path(user), c.supported_properties(user)))); - } - - dav::Multistatus:: { - responses: names.into_iter().map(|(url, names)| dav::Response { - status_or_propstat: dav::StatusOrPropstat::PropStat( - dav::Href(url), - vec![dav::PropStat { - prop: dav::AnyProp(names.0.into_iter().map(dav::AnyProperty::Request).collect()), - status: dav::Status(hyper::StatusCode::OK), + // --- shared + fn response_propname(&self, user: &ArcUser) -> dav::Response { + dav::Response { + status_or_propstat: dav::StatusOrPropstat::PropStat( + dav::Href(self.path(user)), + vec![ + dav::PropStat { + status: dav::Status(hyper::StatusCode::OK), + prop: dav::AnyProp(self.supported_properties(user).0.into_iter().map(dav::AnyProperty::Request).collect()), error: None, responsedescription: None, - }], - ), - error: None, - location: None, - responsedescription: None, - }).collect(), - responsedescription: None, + } + ], + ), + error: None, + location: None, + responsedescription: None } } - fn multistatus_val(&self, user: &ArcUser, props: dav::PropName, depth: dav::Depth) -> dav::Multistatus { - // Collect properties - let mut values = vec![(self.path(user), self.properties(user, props.clone()))]; - if matches!(depth, dav::Depth::One | dav::Depth::Infinity) { - values.extend(self - .children(user) - .iter() - .map(|c| (c.path(user), c.properties(user, props.clone()))) - ); + fn response_props(&self, user: &ArcUser, props: dav::PropName) -> dav::Response { + let mut prop_desc = vec![]; + let (found, not_found): (Vec<_>, Vec<_>) = self.properties(user, props).into_iter().partition(|v| matches!(v, dav::AnyProperty::Value(_))); + + // If at least one property has been found on this object, adding a HTTP 200 propstat to + // the response + if !found.is_empty() { + prop_desc.push(dav::PropStat { + status: dav::Status(hyper::StatusCode::OK), + prop: dav::AnyProp(found), + error: None, + responsedescription: None, + }); } - // Separate FOUND from NOT FOUND - let values: Vec<_> = values.into_iter().map(|(path, anyprop)| { - let mut prop_desc = vec![]; - let (found, not_found): (Vec<_>, Vec<_>) = anyprop.into_iter().partition(|v| matches!(v, dav::AnyProperty::Value(_))); - if !found.is_empty() { - prop_desc.push((hyper::StatusCode::OK, dav::AnyProp(found))) - } - if !not_found.is_empty() { - prop_desc.push((hyper::StatusCode::NOT_FOUND, dav::AnyProp(not_found))) - } - (path, prop_desc) - }).collect(); - - // Build response - dav::Multistatus:: { - responses: values.into_iter().map(|(url, propdesc)| dav::Response { - status_or_propstat: dav::StatusOrPropstat::PropStat( - dav::Href(url), - propdesc.into_iter().map(|(status, prop)| dav::PropStat { - prop, - status: dav::Status(status), - error: None, - responsedescription: None, - }).collect(), - ), + // If at least one property can't be found on this object, adding a HTTP 404 propstat to + // the response + if !not_found.is_empty() { + prop_desc.push(dav::PropStat { + status: dav::Status(hyper::StatusCode::NOT_FOUND), + prop: dav::AnyProp(not_found), error: None, - location: None, responsedescription: None, - }).collect(), - responsedescription: None, + }) + } + + // Build the finale response + dav::Response { + status_or_propstat: dav::StatusOrPropstat::PropStat(dav::Href(self.path(user)), prop_desc), + error: None, + location: None, + responsedescription: None } } } @@ -643,6 +725,37 @@ impl DavNode for CalendarNode { } } +const FAKE_ICS: &str = r#"BEGIN:VCALENDAR +VERSION:2.0 +PRODID:-//Example 
Corp.//CalDAV Client//EN +BEGIN:VTIMEZONE +LAST-MODIFIED:20040110T032845Z +TZID:US/Eastern +BEGIN:DAYLIGHT +DTSTART:20000404T020000 +RRULE:FREQ=YEARLY;BYDAY=1SU;BYMONTH=4 +TZNAME:EDT +TZOFFSETFROM:-0500 +TZOFFSETTO:-0400 +END:DAYLIGHT +BEGIN:STANDARD +DTSTART:20001026T020000 +RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10 +TZNAME:EST +TZOFFSETFROM:-0400 +TZOFFSETTO:-0500 +END:STANDARD +END:VTIMEZONE +BEGIN:VEVENT +DTSTAMP:20240406T001102Z +DTSTART;TZID=US/Eastern:20240406T100000 +DURATION:PT1H +SUMMARY:Event #1 +Description:Go Steelers! +UID:74855313FA803DA593CD579A@example.com +END:VEVENT +END:VCALENDAR"#; + struct EventNode { calendar: String, event_file: String, @@ -668,6 +781,7 @@ impl DavNode for EventNode { dav::PropertyRequest::DisplayName, dav::PropertyRequest::ResourceType, dav::PropertyRequest::GetEtag, + dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::CalendarData(cal::CalendarDataRequest::default()))), ]) } fn properties(&self, _user: &ArcUser, prop: dav::PropName) -> Vec> { @@ -676,6 +790,8 @@ impl DavNode for EventNode { dav::PropertyRequest::ResourceType => dav::AnyProperty::Value(dav::Property::ResourceType(vec![])), dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("text/calendar".into())), dav::PropertyRequest::GetEtag => dav::AnyProperty::Value(dav::Property::GetEtag("\"abcdefg\"".into())), + dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::CalendarData(req))) => + dav::AnyProperty::Value(dav::Property::Extension(all::Property::Cal(cal::Property::CalendarData(cal::CalendarDataPayload { mime: None, payload: FAKE_ICS.into() })))), v => dav::AnyProperty::Request(v), }).collect() } -- cgit v1.2.3 From 22e4f295556fdd4c25cf43983a56ff74acab7739 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Wed, 20 Mar 2024 14:46:07 +0100 Subject: working report calendar-multiget --- aero-dav/src/caldecoder.rs | 4 +-- aero-dav/src/xml.rs | 1 + aero-proto/src/dav.rs | 61 ++++++++++++++++++++++++++++++++-------------- 3 files changed, 46 insertions(+), 20 deletions(-) diff --git a/aero-dav/src/caldecoder.rs b/aero-dav/src/caldecoder.rs index f92cf09..008668e 100644 --- a/aero-dav/src/caldecoder.rs +++ b/aero-dav/src/caldecoder.rs @@ -69,7 +69,7 @@ impl QRead> for CalendarQuery { impl QRead> for CalendarMultiget { async fn qread(xml: &mut Reader) -> Result { - xml.open(CAL_URN, "free-busy-query").await?; + xml.open(CAL_URN, "calendar-multiget").await?; let mut selector = None; let mut href = Vec::new(); @@ -93,7 +93,7 @@ impl QRead> for CalendarMultiget { impl QRead for FreeBusyQuery { async fn qread(xml: &mut Reader) -> Result { - xml.open(CAL_URN, "calendar-multiple-get").await?; + xml.open(CAL_URN, "free-busy-query").await?; let range = xml.find().await?; xml.close().await?; Ok(FreeBusyQuery(range)) diff --git a/aero-dav/src/xml.rs b/aero-dav/src/xml.rs index 020ee6c..d57093e 100644 --- a/aero-dav/src/xml.rs +++ b/aero-dav/src/xml.rs @@ -259,6 +259,7 @@ impl Reader { } pub async fn open_start(&mut self, ns: &[u8], key: &str) -> Result, ParsingError> { + //println!("try open start tag {:?}, on {:?}", key, self.peek()); let evt = match self.peek() { Event::Start(_) if self.is_tag(ns, key) => self.next().await?, _ => return Err(ParsingError::Recoverable), diff --git a/aero-proto/src/dav.rs b/aero-proto/src/dav.rs index faf5c05..0ef615a 100644 --- a/aero-proto/src/dav.rs +++ b/aero-proto/src/dav.rs @@ -103,7 +103,17 @@ impl Server { match http::Builder::new().serve_connection(stream, 
service_fn(|req: Request| { let login = login.clone(); tracing::info!("{:?} {:?}", req.method(), req.uri()); - auth(login, req) + async { + match auth(login, req).await { + Ok(v) => Ok(v), + Err(e) => { + tracing::error!(err=?e, "internal error"); + Response::builder() + .status(500) + .body(text_body("Internal error")) + }, + } + } })).await { Err(e) => tracing::warn!(err=?e, "connection failed"), Ok(()) => tracing::trace!("connection terminated with success"), @@ -218,7 +228,7 @@ async fn router(user: std::sync::Arc, req: Request) -> Result = path.split("/").filter(|s| *s != "").collect(); let method = req.method().as_str().to_uppercase(); - let node = match Box::new(RootNode {}).fetch(&user, &path_segments) { + let node = match (RootNode {}).fetch(&user, &path_segments) { Ok(v) => v, Err(e) => { tracing::warn!(err=?e, "dav node fetch failed"); @@ -361,12 +371,22 @@ async fn report(user: std::sync::Arc, req: Request, node: Box, Vec<_>) = multiget.href.into_iter().map(|h| match Path::new(h.0.as_str()) { + Ok(Path::Abs(p)) => RootNode{}.fetch(&user, p.as_slice()).or(Err(h)), + Ok(Path::Rel(p)) => node.fetch(&user, p.as_slice()).or(Err(h)), + Err(_) => Err(h), + }).partition(|v| matches!(v, Result::Ok(_))); + let ok_node = ok_node.into_iter().filter_map(|v| v.ok()).collect(); + let not_found = not_found.into_iter().filter_map(|v| v.err()).collect(); + + // Getting props + let props = match multiget.selector { + None | Some(cal::CalendarSelector::AllProp) => Some(dav::PropName(ALLPROP.to_vec())), + Some(cal::CalendarSelector::PropName) => None, + Some(cal::CalendarSelector::Prop(inner)) => Some(inner), + }; - todo!(); - //serialize(status, node.multistatus_val(&user, multiget + serialize(status, multistatus(&user, ok_node, not_found, props)) } fn multistatus(user: &ArcUser, nodes: Vec>, not_found: Vec, props: Option>) -> dav::Multistatus { @@ -478,7 +498,7 @@ trait DavNode: Send { // recurence fn children(&self, user: &ArcUser) -> Vec>; - fn fetch(self: Box, user: &ArcUser, path: &[&str]) -> Result>; + fn fetch(&self, user: &ArcUser, path: &[&str]) -> Result>; // node properties fn path(&self, user: &ArcUser) -> String; @@ -541,11 +561,12 @@ trait DavNode: Send { } } +#[derive(Clone)] struct RootNode {} impl DavNode for RootNode { - fn fetch(self: Box, user: &ArcUser, path: &[&str]) -> Result> { + fn fetch(&self, user: &ArcUser, path: &[&str]) -> Result> { if path.len() == 0 { - return Ok(self) + return Ok(Box::new(self.clone())) } if path[0] == user.username { @@ -585,11 +606,12 @@ impl DavNode for RootNode { } } +#[derive(Clone)] struct HomeNode {} impl DavNode for HomeNode { - fn fetch(self: Box, user: &ArcUser, path: &[&str]) -> Result> { + fn fetch(&self, user: &ArcUser, path: &[&str]) -> Result> { if path.len() == 0 { - return Ok(self) + return Ok(Box::new(self.clone())) } if path[0] == "calendar" { @@ -630,11 +652,12 @@ impl DavNode for HomeNode { } } +#[derive(Clone)] struct CalendarListNode {} impl DavNode for CalendarListNode { - fn fetch(self: Box, user: &ArcUser, path: &[&str]) -> Result> { + fn fetch(&self, user: &ArcUser, path: &[&str]) -> Result> { if path.len() == 0 { - return Ok(self) + return Ok(Box::new(self.clone())) } //@FIXME hardcoded logic @@ -670,13 +693,14 @@ impl DavNode for CalendarListNode { } } +#[derive(Clone)] struct CalendarNode { name: String, } impl DavNode for CalendarNode { - fn fetch(self: Box, user: &ArcUser, path: &[&str]) -> Result> { + fn fetch(&self, user: &ArcUser, path: &[&str]) -> Result> { if path.len() == 0 { - return Ok(self) + return 
Ok(Box::new(self.clone())) } //@FIXME hardcoded logic @@ -756,14 +780,15 @@ UID:74855313FA803DA593CD579A@example.com END:VEVENT END:VCALENDAR"#; +#[derive(Clone)] struct EventNode { calendar: String, event_file: String, } impl DavNode for EventNode { - fn fetch(self: Box, user: &ArcUser, path: &[&str]) -> Result> { + fn fetch(&self, user: &ArcUser, path: &[&str]) -> Result> { if path.len() == 0 { - return Ok(self) + return Ok(Box::new(self.clone())) } Err(anyhow!("Not found")) -- cgit v1.2.3 From ed47855ef1a6c9d10d48080367ff8b280530e362 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Wed, 20 Mar 2024 17:31:54 +0100 Subject: Share UniqueIdent between collections --- aero-collections/src/calendar/mod.rs | 6 +- aero-collections/src/calendar/namespace.rs | 47 ++++++++++++++ aero-collections/src/lib.rs | 1 + aero-collections/src/mail/incoming.rs | 2 +- aero-collections/src/mail/mailbox.rs | 2 +- aero-collections/src/mail/mod.rs | 1 - aero-collections/src/mail/namespace.rs | 2 +- aero-collections/src/mail/query.rs | 2 +- aero-collections/src/mail/snapshot.rs | 2 +- aero-collections/src/mail/uidindex.rs | 2 +- aero-collections/src/mail/unique_ident.rs | 101 ----------------------------- aero-collections/src/unique_ident.rs | 101 +++++++++++++++++++++++++++++ aero-collections/src/user.rs | 19 +++++- aero-proto/src/dav.rs | 13 +++- aero-proto/src/imap/index.rs | 2 +- aero-proto/src/imap/mailbox_view.rs | 2 +- 16 files changed, 189 insertions(+), 116 deletions(-) create mode 100644 aero-collections/src/calendar/namespace.rs delete mode 100644 aero-collections/src/mail/unique_ident.rs create mode 100644 aero-collections/src/unique_ident.rs diff --git a/aero-collections/src/calendar/mod.rs b/aero-collections/src/calendar/mod.rs index 19e3340..708e1f1 100644 --- a/aero-collections/src/calendar/mod.rs +++ b/aero-collections/src/calendar/mod.rs @@ -1 +1,5 @@ -//@FIXME Event Index +pub mod namespace; + +pub struct Calendar { + a: u64, +} diff --git a/aero-collections/src/calendar/namespace.rs b/aero-collections/src/calendar/namespace.rs new file mode 100644 index 0000000..cf8a159 --- /dev/null +++ b/aero-collections/src/calendar/namespace.rs @@ -0,0 +1,47 @@ +use anyhow::Result; +use std::collections::{HashMap, BTreeMap}; +use std::sync::{Weak, Arc}; + +use serde::{Deserialize, Serialize}; + +use aero_user::storage; + +use crate::unique_ident::UniqueIdent; +use crate::user::User; +use super::Calendar; + +pub(crate) const CAL_LIST_PK: &str = "calendars"; +pub(crate) const CAL_LIST_SK: &str = "list"; + +pub(crate) struct CalendarNs(std::sync::Mutex>>); +impl CalendarNs { + pub fn new() -> Self { + Self(std::sync::Mutex::new(HashMap::new())) + } + + pub fn list(&self) { + todo!(); + } +} + +#[derive(Serialize, Deserialize)] +pub(crate) struct CalendarList(BTreeMap); + +#[derive(Serialize, Deserialize, Clone, Copy, Debug)] +pub(crate) struct CalendarListEntry { + id_lww: (u64, Option), +} + +impl CalendarList { + pub(crate) async fn load(user: &Arc) -> Result<(Self, Option)> { + todo!(); + } + + pub(crate) async fn save(user: &Arc, ct: Option) -> Result<()> { + todo!(); + } + + pub(crate) fn new() -> Self { + Self(BTreeMap::new()) + } +} diff --git a/aero-collections/src/lib.rs b/aero-collections/src/lib.rs index adcfc93..269cd13 100644 --- a/aero-collections/src/lib.rs +++ b/aero-collections/src/lib.rs @@ -1,3 +1,4 @@ +pub mod unique_ident; pub mod user; pub mod mail; pub mod calendar; diff --git a/aero-collections/src/mail/incoming.rs b/aero-collections/src/mail/incoming.rs index 8220461..cd2f8fd 100644 
--- a/aero-collections/src/mail/incoming.rs +++ b/aero-collections/src/mail/incoming.rs @@ -15,7 +15,7 @@ use aero_bayou::timestamp::now_msec; use crate::mail::mailbox::Mailbox; use crate::mail::uidindex::ImapUidvalidity; -use crate::mail::unique_ident::*; +use crate::unique_ident::*; use crate::user::User; use crate::mail::IMF; diff --git a/aero-collections/src/mail/mailbox.rs b/aero-collections/src/mail/mailbox.rs index a767678..25aacf5 100644 --- a/aero-collections/src/mail/mailbox.rs +++ b/aero-collections/src/mail/mailbox.rs @@ -9,7 +9,7 @@ use aero_bayou::Bayou; use aero_bayou::timestamp::now_msec; use crate::mail::uidindex::*; -use crate::mail::unique_ident::*; +use crate::unique_ident::*; use crate::mail::IMF; pub struct Mailbox { diff --git a/aero-collections/src/mail/mod.rs b/aero-collections/src/mail/mod.rs index 85361f3..ca9b08b 100644 --- a/aero-collections/src/mail/mod.rs +++ b/aero-collections/src/mail/mod.rs @@ -3,7 +3,6 @@ pub mod mailbox; pub mod query; pub mod snapshot; pub mod uidindex; -pub mod unique_ident; pub mod namespace; // Internet Message Format diff --git a/aero-collections/src/mail/namespace.rs b/aero-collections/src/mail/namespace.rs index 452ac68..b1f6a70 100644 --- a/aero-collections/src/mail/namespace.rs +++ b/aero-collections/src/mail/namespace.rs @@ -6,7 +6,7 @@ use serde::{Deserialize, Serialize}; use aero_bayou::timestamp::now_msec; use crate::mail::uidindex::ImapUidvalidity; -use crate::mail::unique_ident::{gen_ident, UniqueIdent}; +use crate::unique_ident::{gen_ident, UniqueIdent}; pub const MAILBOX_HIERARCHY_DELIMITER: char = '.'; diff --git a/aero-collections/src/mail/query.rs b/aero-collections/src/mail/query.rs index 3e6fe99..7faba41 100644 --- a/aero-collections/src/mail/query.rs +++ b/aero-collections/src/mail/query.rs @@ -1,6 +1,6 @@ use super::mailbox::MailMeta; use super::snapshot::FrozenMailbox; -use super::unique_ident::UniqueIdent; +use crate::unique_ident::UniqueIdent; use anyhow::Result; use futures::future::FutureExt; use futures::stream::{BoxStream, Stream, StreamExt}; diff --git a/aero-collections/src/mail/snapshot.rs b/aero-collections/src/mail/snapshot.rs index ed756b5..9503d4d 100644 --- a/aero-collections/src/mail/snapshot.rs +++ b/aero-collections/src/mail/snapshot.rs @@ -2,10 +2,10 @@ use std::sync::Arc; use anyhow::Result; +use crate::unique_ident::UniqueIdent; use super::mailbox::Mailbox; use super::query::{Query, QueryScope}; use super::uidindex::UidIndex; -use super::unique_ident::UniqueIdent; /// A Frozen Mailbox has a snapshot of the current mailbox /// state that is desynchronized with the real mailbox state. 
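A rough usage sketch for illustration (not part of the patch itself; the function name is made up): after this refactor the identifier type lives at the crate root, so mail and calendar code import it from the same `aero_collections::unique_ident` path shown in the hunks above.

use aero_collections::unique_ident::{gen_ident, UniqueIdent};

fn shared_ident_demo() {
    // One shared identifier type for every collection kind.
    let mail_blob: UniqueIdent = gen_ident();
    let calendar_id: UniqueIdent = gen_ident();

    // Unique without coordination: the first 16 bytes encode
    // (process start timestamp << 64) | random, the last 8 bytes are a
    // per-process sequence counter, so two calls in a process never collide.
    assert_ne!(mail_blob, calendar_id);
}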
diff --git a/aero-collections/src/mail/uidindex.rs b/aero-collections/src/mail/uidindex.rs index 637a1ac..ca975a3 100644 --- a/aero-collections/src/mail/uidindex.rs +++ b/aero-collections/src/mail/uidindex.rs @@ -4,7 +4,7 @@ use im::{HashMap, OrdMap, OrdSet}; use serde::{Deserialize, Deserializer, Serialize, Serializer}; use aero_bayou::*; -use crate::mail::unique_ident::UniqueIdent; +use crate::unique_ident::UniqueIdent; pub type ModSeq = NonZeroU64; pub type ImapUid = NonZeroU32; diff --git a/aero-collections/src/mail/unique_ident.rs b/aero-collections/src/mail/unique_ident.rs deleted file mode 100644 index 0987a2c..0000000 --- a/aero-collections/src/mail/unique_ident.rs +++ /dev/null @@ -1,101 +0,0 @@ -use std::str::FromStr; -use std::sync::atomic::{AtomicU64, Ordering}; - -use lazy_static::lazy_static; -use rand::prelude::*; -use serde::{de::Error, Deserialize, Deserializer, Serialize, Serializer}; - -use aero_bayou::timestamp::now_msec; - -/// An internal Mail Identifier is composed of two components: -/// - a process identifier, 128 bits, itself composed of: -/// - the timestamp of when the process started, 64 bits -/// - a 64-bit random number -/// - a sequence number, 64 bits -/// They are not part of the protocol but an internal representation -/// required by Aerogramme. -/// Their main property is to be unique without having to rely -/// on synchronization between IMAP processes. -#[derive(Clone, Copy, PartialOrd, Ord, PartialEq, Eq, Hash)] -pub struct UniqueIdent(pub [u8; 24]); - -struct IdentGenerator { - pid: u128, - sn: AtomicU64, -} - -impl IdentGenerator { - fn new() -> Self { - let time = now_msec() as u128; - let rand = thread_rng().gen::() as u128; - Self { - pid: (time << 64) | rand, - sn: AtomicU64::new(0), - } - } - - fn gen(&self) -> UniqueIdent { - let sn = self.sn.fetch_add(1, Ordering::Relaxed); - let mut res = [0u8; 24]; - res[0..16].copy_from_slice(&u128::to_be_bytes(self.pid)); - res[16..24].copy_from_slice(&u64::to_be_bytes(sn)); - UniqueIdent(res) - } -} - -lazy_static! 
{ - static ref GENERATOR: IdentGenerator = IdentGenerator::new(); -} - -pub fn gen_ident() -> UniqueIdent { - GENERATOR.gen() -} - -// -- serde -- - -impl<'de> Deserialize<'de> for UniqueIdent { - fn deserialize(d: D) -> Result - where - D: Deserializer<'de>, - { - let v = String::deserialize(d)?; - UniqueIdent::from_str(&v).map_err(D::Error::custom) - } -} - -impl Serialize for UniqueIdent { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - serializer.serialize_str(&self.to_string()) - } -} - -impl std::fmt::Display for UniqueIdent { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", hex::encode(self.0)) - } -} - -impl std::fmt::Debug for UniqueIdent { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", hex::encode(self.0)) - } -} - -impl FromStr for UniqueIdent { - type Err = &'static str; - - fn from_str(s: &str) -> Result { - let bytes = hex::decode(s).map_err(|_| "invalid hex")?; - - if bytes.len() != 24 { - return Err("bad length"); - } - - let mut tmp = [0u8; 24]; - tmp[..].copy_from_slice(&bytes); - Ok(UniqueIdent(tmp)) - } -} diff --git a/aero-collections/src/unique_ident.rs b/aero-collections/src/unique_ident.rs new file mode 100644 index 0000000..e4eea7a --- /dev/null +++ b/aero-collections/src/unique_ident.rs @@ -0,0 +1,101 @@ +use std::str::FromStr; +use std::sync::atomic::{AtomicU64, Ordering}; + +use lazy_static::lazy_static; +use rand::prelude::*; +use serde::{de::Error, Deserialize, Deserializer, Serialize, Serializer}; + +use aero_bayou::timestamp::now_msec; + +/// An internal Aerogramme identifier is composed of two components: +/// - a process identifier, 128 bits, itself composed of: +/// - the timestamp of when the process started, 64 bits +/// - a 64-bit random number +/// - a sequence number, 64 bits +/// They are not part of the protocol but an internal representation +/// required by Aerogramme. +/// Their main property is to be unique without having to rely +/// on synchronization between (IMAP) processes. +#[derive(Clone, Copy, PartialOrd, Ord, PartialEq, Eq, Hash)] +pub struct UniqueIdent(pub [u8; 24]); + +struct IdentGenerator { + pid: u128, + sn: AtomicU64, +} + +impl IdentGenerator { + fn new() -> Self { + let time = now_msec() as u128; + let rand = thread_rng().gen::() as u128; + Self { + pid: (time << 64) | rand, + sn: AtomicU64::new(0), + } + } + + fn gen(&self) -> UniqueIdent { + let sn = self.sn.fetch_add(1, Ordering::Relaxed); + let mut res = [0u8; 24]; + res[0..16].copy_from_slice(&u128::to_be_bytes(self.pid)); + res[16..24].copy_from_slice(&u64::to_be_bytes(sn)); + UniqueIdent(res) + } +} + +lazy_static! 
{ + static ref GENERATOR: IdentGenerator = IdentGenerator::new(); +} + +pub fn gen_ident() -> UniqueIdent { + GENERATOR.gen() +} + +// -- serde -- + +impl<'de> Deserialize<'de> for UniqueIdent { + fn deserialize(d: D) -> Result + where + D: Deserializer<'de>, + { + let v = String::deserialize(d)?; + UniqueIdent::from_str(&v).map_err(D::Error::custom) + } +} + +impl Serialize for UniqueIdent { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + serializer.serialize_str(&self.to_string()) + } +} + +impl std::fmt::Display for UniqueIdent { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", hex::encode(self.0)) + } +} + +impl std::fmt::Debug for UniqueIdent { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", hex::encode(self.0)) + } +} + +impl FromStr for UniqueIdent { + type Err = &'static str; + + fn from_str(s: &str) -> Result { + let bytes = hex::decode(s).map_err(|_| "invalid hex")?; + + if bytes.len() != 24 { + return Err("bad length"); + } + + let mut tmp = [0u8; 24]; + tmp[..].copy_from_slice(&bytes); + Ok(UniqueIdent(tmp)) + } +} diff --git a/aero-collections/src/user.rs b/aero-collections/src/user.rs index 193ce90..0c6b931 100644 --- a/aero-collections/src/user.rs +++ b/aero-collections/src/user.rs @@ -12,19 +12,27 @@ use aero_user::storage; use crate::mail::incoming::incoming_mail_watch_process; use crate::mail::mailbox::Mailbox; use crate::mail::uidindex::ImapUidvalidity; -use crate::mail::unique_ident::UniqueIdent; +use crate::unique_ident::UniqueIdent; use crate::mail::namespace::{MAILBOX_HIERARCHY_DELIMITER, INBOX, DRAFTS, ARCHIVE, SENT, TRASH, MAILBOX_LIST_PK, MAILBOX_LIST_SK,MailboxList,CreatedMailbox}; +use crate::calendar::Calendar; //@FIXME User should be totally rewriten -//to extract the local mailbox list -//to the mail/namespace.rs file (and mailbox list should be reworded as mail namespace) +// to extract the local mailbox list +// to the mail/namespace.rs file (and mailbox list should be reworded as mail namespace) + +//@FIXME User should be run in a LocalSet +// to remove most - if not all - synchronizations types. +// Especially RwLock & co. pub struct User { pub username: String, pub creds: Credentials, pub storage: storage::Store, pub mailboxes: std::sync::Mutex>>, + pub calendars: std::sync::Mutex>>, + // Handle on worker processing received email + // (moving emails from the mailqueue to the user's INBOX) tx_inbox_id: watch::Sender>, } @@ -178,6 +186,7 @@ impl User { storage, tx_inbox_id, mailboxes: std::sync::Mutex::new(HashMap::new()), + calendars: std::sync::Mutex::new(HashMap::new()), }); // Ensure INBOX exists (done inside load_mailbox_list) @@ -204,6 +213,10 @@ impl User { } } + // The idea here is that: + // 1. Opening a mailbox that is not already opened takes a significant amount of time + // 2. We don't want to lock the whole HashMap that contain the mailboxes during this + // operation which is why we droppped the lock above but take it again below. 
let mb = Arc::new(Mailbox::open(&self.creds, id, min_uidvalidity).await?); let mut cache = self.mailboxes.lock().unwrap(); diff --git a/aero-proto/src/dav.rs b/aero-proto/src/dav.rs index 0ef615a..3420f86 100644 --- a/aero-proto/src/dav.rs +++ b/aero-proto/src/dav.rs @@ -27,6 +27,8 @@ use aero_dav::acltypes as acl; use aero_dav::realization::{All, self as all}; use aero_dav::xml as dxml; +type ArcUser = std::sync::Arc; + pub struct Server { bind_addr: SocketAddr, login_provider: ArcLoginProvider, @@ -359,7 +361,15 @@ async fn propfind(user: std::sync::Arc, req: Request, base_node: async fn report(user: std::sync::Arc, req: Request, node: Box) -> Result>> { let status = hyper::StatusCode::from_u16(207)?; - let report = deserialize::>(req).await?; + let report = match deserialize::>(req).await { + Ok(v) => v, + Err(e) => { + tracing::error!(err=?e, "unable to decode REPORT body"); + return Ok(Response::builder() + .status(400) + .body(text_body("Bad request"))?) + } + }; // Multiget is really like a propfind where Depth: 0|1|Infinity is replaced by an arbitrary // list of URLs @@ -492,7 +502,6 @@ async fn deserialize>(req: Request) -> Result { //--- -type ArcUser = std::sync::Arc; trait DavNode: Send { // ------- specialized logic diff --git a/aero-proto/src/imap/index.rs b/aero-proto/src/imap/index.rs index 3de46be..afe6991 100644 --- a/aero-proto/src/imap/index.rs +++ b/aero-proto/src/imap/index.rs @@ -4,7 +4,7 @@ use anyhow::{anyhow, Result}; use imap_codec::imap_types::sequence::{SeqOrUid, Sequence, SequenceSet}; use aero_collections::mail::uidindex::{ImapUid, ModSeq, UidIndex}; -use aero_collections::mail::unique_ident::UniqueIdent; +use aero_collections::unique_ident::UniqueIdent; pub struct Index<'a> { pub imap_index: Vec>, diff --git a/aero-proto/src/imap/mailbox_view.rs b/aero-proto/src/imap/mailbox_view.rs index 5154359..0ef33d6 100644 --- a/aero-proto/src/imap/mailbox_view.rs +++ b/aero-proto/src/imap/mailbox_view.rs @@ -17,7 +17,7 @@ use aero_collections::mail::mailbox::Mailbox; use aero_collections::mail::query::QueryScope; use aero_collections::mail::snapshot::FrozenMailbox; use aero_collections::mail::uidindex::{ImapUid, ImapUidvalidity, ModSeq}; -use aero_collections::mail::unique_ident::UniqueIdent; +use aero_collections::unique_ident::UniqueIdent; use crate::imap::attributes::AttributesProxy; use crate::imap::flags; -- cgit v1.2.3 From bc0f897803cbb9b7537010e9d4714a2a0b2a6872 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Tue, 26 Mar 2024 15:08:04 +0100 Subject: Calendar Namespace --- aero-collections/src/calendar/mod.rs | 15 ++ aero-collections/src/calendar/namespace.rs | 302 +++++++++++++++++++++++++++-- aero-collections/src/mail/mailbox.rs | 2 +- aero-collections/src/user.rs | 6 +- 4 files changed, 310 insertions(+), 15 deletions(-) diff --git a/aero-collections/src/calendar/mod.rs b/aero-collections/src/calendar/mod.rs index 708e1f1..d2217b8 100644 --- a/aero-collections/src/calendar/mod.rs +++ b/aero-collections/src/calendar/mod.rs @@ -1,5 +1,20 @@ pub mod namespace; +use anyhow::Result; + +use aero_user::login::Credentials; + +use crate::unique_ident::*; + pub struct Calendar { a: u64, } + +impl Calendar { + pub(crate) async fn open( + creds: &Credentials, + id: UniqueIdent, + ) -> Result { + todo!(); + } +} diff --git a/aero-collections/src/calendar/namespace.rs b/aero-collections/src/calendar/namespace.rs index cf8a159..2fbc364 100644 --- a/aero-collections/src/calendar/namespace.rs +++ b/aero-collections/src/calendar/namespace.rs @@ -1,47 +1,327 @@ -use 
anyhow::Result; +use anyhow::{bail, Result}; use std::collections::{HashMap, BTreeMap}; use std::sync::{Weak, Arc}; use serde::{Deserialize, Serialize}; +use aero_bayou::timestamp::now_msec; use aero_user::storage; +use aero_user::cryptoblob::{open_deserialize, seal_serialize}; -use crate::unique_ident::UniqueIdent; +use crate::unique_ident::{gen_ident, UniqueIdent}; use crate::user::User; use super::Calendar; pub(crate) const CAL_LIST_PK: &str = "calendars"; pub(crate) const CAL_LIST_SK: &str = "list"; +pub(crate) const MAIN_CAL: &str = "Personal"; +pub(crate) const MAX_CALNAME_CHARS: usize = 32; pub(crate) struct CalendarNs(std::sync::Mutex>>); + impl CalendarNs { + /// Create a new calendar namespace pub fn new() -> Self { Self(std::sync::Mutex::new(HashMap::new())) } - pub fn list(&self) { - todo!(); + /// Open a calendar by name + pub async fn open(&self, user: &Arc, name: &str) -> Result>> { + let (list, _ct) = CalendarList::load(user).await?; + + match list.get(name) { + None => Ok(None), + Some(ident) => Ok(Some(self.open_by_id(user, ident).await?)), + } + } + + /// Open a calendar by unique id + /// Check user.rs::open_mailbox_by_id to understand this function + pub async fn open_by_id(&self, user: &Arc, id: UniqueIdent) -> Result> { + { + let cache = self.0.lock().unwrap(); + if let Some(cal) = cache.get(&id).and_then(Weak::upgrade) { + return Ok(cal); + } + } + + let cal = Arc::new(Calendar::open(&user.creds, id).await?); + + let mut cache = self.0.lock().unwrap(); + if let Some(concurrent_cal) = cache.get(&id).and_then(Weak::upgrade) { + drop(cal); // we worked for nothing but at least we didn't starve someone else + Ok(concurrent_cal) + } else { + cache.insert(id, Arc::downgrade(&cal)); + Ok(cal) + } + } + + /// List calendars + pub async fn list(&self, user: &Arc) -> Result> { + CalendarList::load(user).await.map(|(list, _)| list.names()) + } + + /// Delete a calendar from the index + pub async fn delete(&self, user: &Arc, name: &str) -> Result<()> { + // We currently assume that main cal is a bit specific + if name == MAIN_CAL { + bail!("Cannot delete main calendar"); + } + + let (mut list, ct) = CalendarList::load(user).await?; + if list.has(name) { + //@TODO: actually delete calendar content + list.bind(name, None); + list.save(user, ct).await?; + Ok(()) + } else { + bail!("Calendar {} does not exist", name); + } + } + + /// Rename a calendar in the index + pub async fn rename(&self, user: &Arc, old: &str, new: &str) -> Result<()> { + if old == MAIN_CAL { + bail!("Renaming main calendar is not supported currently"); + } + if !new.chars().all(char::is_alphanumeric) { + bail!("Unsupported characters in new calendar name, only alphanumeric characters are allowed currently"); + } + if new.len() > MAX_CALNAME_CHARS { + bail!("Calendar name can't contain more than 32 characters"); + } + + let (mut list, ct) = CalendarList::load(user).await?; + list.rename(old, new)?; + list.save(user, ct).await?; + + Ok(()) + } + + /// Create calendar + pub async fn create(&self, user: &Arc, name: &str) -> Result<()> { + if name == MAIN_CAL { + bail!("Main calendar is automatically created, can't create it manually"); + } + if !name.chars().all(char::is_alphanumeric) { + bail!("Unsupported characters in new calendar name, only alphanumeric characters are allowed"); + } + if name.len() > MAX_CALNAME_CHARS { + bail!("Calendar name can't contain more than 32 characters"); + } + + let (mut list, ct) = CalendarList::load(user).await?; + match list.create(name) { + CalendarExists::Existed(_) => 
bail!("Calendar {} already exists", name), + CalendarExists::Created(_) => (), + } + list.save(user, ct).await?; + + Ok(()) + } + + /// Has calendar + pub async fn has(&self, user: &Arc, name: &str) -> Result { + CalendarList::load(user).await.map(|(list, _)| list.has(name)) } } +// ------ +// ------ From this point, implementation is hidden from the rest of the crate +// ------ + #[derive(Serialize, Deserialize)] -pub(crate) struct CalendarList(BTreeMap); +struct CalendarList(BTreeMap); #[derive(Serialize, Deserialize, Clone, Copy, Debug)] -pub(crate) struct CalendarListEntry { +struct CalendarListEntry { id_lww: (u64, Option), } impl CalendarList { - pub(crate) async fn load(user: &Arc) -> Result<(Self, Option)> { - todo!(); + // ---- Index persistence related functions + + /// Load from storage + async fn load(user: &Arc) -> Result<(Self, Option)> { + let row_ref = storage::RowRef::new(CAL_LIST_PK, CAL_LIST_SK); + let (mut list, row) = match user + .storage + .row_fetch(&storage::Selector::Single(&row_ref)) + .await + { + Err(storage::StorageError::NotFound) => (Self::new(), None), + Err(e) => return Err(e.into()), + Ok(rv) => { + let mut list = Self::new(); + let (row_ref, row_vals) = match rv.into_iter().next() { + Some(row_val) => (row_val.row_ref, row_val.value), + None => (row_ref, vec![]), + }; + + for v in row_vals { + if let storage::Alternative::Value(vbytes) = v { + let list2 = open_deserialize::(&vbytes, &user.creds.keys.master)?; + list.merge(list2); + } + } + (list, Some(row_ref)) + } + }; + + // Create default calendars (currently only one calendar is created) + let is_default_cal_missing = [MAIN_CAL] + .iter() + .map(|calname| list.create(calname)) + .fold(false, |acc, r| { + acc || matches!(r, CalendarExists::Created(..)) + }); + + // Save the index if we created a new calendar + if is_default_cal_missing { + list.save(user, row.clone()).await?; + } + + Ok((list, row)) } - pub(crate) async fn save(user: &Arc, ct: Option) -> Result<()> { - todo!(); + /// Save an updated index + async fn save(&self, user: &Arc, ct: Option) -> Result<()> { + let list_blob = seal_serialize(self, &user.creds.keys.master)?; + let rref = ct.unwrap_or(storage::RowRef::new(CAL_LIST_PK, CAL_LIST_SK)); + let row_val = storage::RowVal::new(rref, list_blob); + user.storage.row_insert(vec![row_val]).await?; + Ok(()) } - pub(crate) fn new() -> Self { + // ----- Index manipulation functions + + /// Ensure that a given calendar exists + /// (Don't forget to save if it returns CalendarExists::Created) + fn create(&mut self, name: &str) -> CalendarExists { + if let Some(CalendarListEntry { + id_lww: (_, Some(id)) + }) = self.0.get(name) + { + return CalendarExists::Existed(*id); + } + + let id = gen_ident(); + self.bind(name, Some(id)).unwrap(); + CalendarExists::Created(id) + } + + /// Get a list of all calendar names + fn names(&self) -> Vec { + self.0 + .iter() + .filter(|(_, v)| v.id_lww.1.is_some()) + .map(|(k, _)| k.to_string()) + .collect() + } + + /// For a given calendar name, get its Unique Identifier + fn get(&self, name: &str) -> Option { + self.0.get(name).map(|CalendarListEntry { + id_lww: (_, ident), + }| *ident).flatten() + } + + /// Check if a given calendar name exists + fn has(&self, name: &str) -> bool { + self.get(name).is_some() + } + + /// Rename a calendar + fn rename(&mut self, old: &str, new: &str) -> Result<()> { + if self.has(new) { + bail!("Calendar {} already exists", new); + } + let ident = match self.get(old) { + None => bail!("Calendar {} does not exist", old), + 
Some(ident) => ident, + }; + + self.bind(old, None); + self.bind(new, Some(ident)); + + Ok(()) + } + + // ----- Internal logic + + /// New is not publicly exposed, use `load` instead + fn new() -> Self { Self(BTreeMap::new()) } + + /// Low level index updating logic (used to add/rename/delete) an entry + fn bind(&mut self, name: &str, id: Option) -> Option<()> { + let (ts, id) = match self.0.get_mut(name) { + None => { + if id.is_none() { + // User wants to delete entry with given name (passed id is None) + // Entry does not exist (get_mut is None) + // Nothing to do + return None; + } else { + // User wants entry with given name to be present (id is Some) + // Entry does not exist + // Initialize entry + (now_msec(), id) + } + } + Some(CalendarListEntry { + id_lww, + }) => { + if id_lww.1 == id { + // Entry is already equals to the requested id (Option self.id_lww.0 + || (other.id_lww.0 == self.id_lww.0 && other.id_lww.1 > self.id_lww.1) + { + self.id_lww = other.id_lww; + } + } +} + +pub(crate) enum CalendarExists { + Created(UniqueIdent), + Existed(UniqueIdent), } diff --git a/aero-collections/src/mail/mailbox.rs b/aero-collections/src/mail/mailbox.rs index 25aacf5..f797be6 100644 --- a/aero-collections/src/mail/mailbox.rs +++ b/aero-collections/src/mail/mailbox.rs @@ -8,8 +8,8 @@ use aero_user::storage::{self, BlobRef, BlobVal, RowRef, RowVal, Selector, Store use aero_bayou::Bayou; use aero_bayou::timestamp::now_msec; -use crate::mail::uidindex::*; use crate::unique_ident::*; +use crate::mail::uidindex::*; use crate::mail::IMF; pub struct Mailbox { diff --git a/aero-collections/src/user.rs b/aero-collections/src/user.rs index 0c6b931..9ed342f 100644 --- a/aero-collections/src/user.rs +++ b/aero-collections/src/user.rs @@ -14,7 +14,7 @@ use crate::mail::mailbox::Mailbox; use crate::mail::uidindex::ImapUidvalidity; use crate::unique_ident::UniqueIdent; use crate::mail::namespace::{MAILBOX_HIERARCHY_DELIMITER, INBOX, DRAFTS, ARCHIVE, SENT, TRASH, MAILBOX_LIST_PK, MAILBOX_LIST_SK,MailboxList,CreatedMailbox}; -use crate::calendar::Calendar; +use crate::calendar::namespace::CalendarNs; //@FIXME User should be totally rewriten // to extract the local mailbox list @@ -29,7 +29,7 @@ pub struct User { pub creds: Credentials, pub storage: storage::Store, pub mailboxes: std::sync::Mutex>>, - pub calendars: std::sync::Mutex>>, + pub calendars: CalendarNs, // Handle on worker processing received email // (moving emails from the mailqueue to the user's INBOX) @@ -186,7 +186,7 @@ impl User { storage, tx_inbox_id, mailboxes: std::sync::Mutex::new(HashMap::new()), - calendars: std::sync::Mutex::new(HashMap::new()), + calendars: CalendarNs::new(), }); // Ensure INBOX exists (done inside load_mailbox_list) -- cgit v1.2.3 From 0b57200eeb6780e843c5f798bdc53781eb83d51f Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Wed, 27 Mar 2024 10:33:46 +0100 Subject: Dav DAG wip --- aero-collections/src/calendar/mod.rs | 16 ++- aero-collections/src/davdag.rs | 185 +++++++++++++++++++++++++++++++++++ aero-collections/src/lib.rs | 1 + 3 files changed, 201 insertions(+), 1 deletion(-) create mode 100644 aero-collections/src/davdag.rs diff --git a/aero-collections/src/calendar/mod.rs b/aero-collections/src/calendar/mod.rs index d2217b8..6537a4e 100644 --- a/aero-collections/src/calendar/mod.rs +++ b/aero-collections/src/calendar/mod.rs @@ -1,13 +1,19 @@ pub mod namespace; use anyhow::Result; +use tokio::sync::RwLock; +use aero_bayou::Bayou; use aero_user::login::Credentials; +use aero_user::cryptoblob::{self, gen_key, 
open_deserialize, seal_serialize, Key}; +use aero_user::storage::{self, BlobRef, BlobVal, RowRef, RowVal, Selector, Store}; use crate::unique_ident::*; +use crate::davdag::DavDag; pub struct Calendar { - a: u64, + pub(super) id: UniqueIdent, + internal: RwLock, } impl Calendar { @@ -18,3 +24,11 @@ impl Calendar { todo!(); } } + +struct CalendarInternal { + id: UniqueIdent, + cal_path: String, + encryption_key: Key, + storage: Store, + uid_index: Bayou, +} diff --git a/aero-collections/src/davdag.rs b/aero-collections/src/davdag.rs new file mode 100644 index 0000000..696b985 --- /dev/null +++ b/aero-collections/src/davdag.rs @@ -0,0 +1,185 @@ +use serde::{Deserialize, Deserializer, Serialize, Serializer}; +use im::{OrdMap, OrdSet, ordset}; + +use aero_bayou::*; + +use crate::unique_ident::UniqueIdent; + +/// Parents are only persisted in the event log, +/// not in the checkpoints. +pub type Parents = Vec; +pub type Etag = String; +pub type FileName = String; +pub type IndexEntry = (FileName, Etag); + +#[derive(Clone, Default)] +pub struct DavDag { + /// Source of trust + pub table: OrdMap, + + /// Indexes optimized for queries + pub idx_by_filename: OrdMap, + + /// Partial synchronization graph + /// parent -> direct children + pub successors: OrdMap>, + + /// Head nodes + pub heads: OrdSet, +} + +#[derive(Clone, Serialize, Deserialize, Debug)] +pub enum DavDagOp { + /// Merge is a virtual operation run when multiple heads are discovered + Merge(Parents, UniqueIdent), + + /// Add an item to the collection + Put(Parents, UniqueIdent, IndexEntry), + + /// Delete an item from the collection + Delete(Parents, UniqueIdent), +} + +impl DavDag { + pub fn op_merge(&self, ident: UniqueIdent) -> DavDagOp { + DavDagOp::Merge(self.heads_vec(), ident) + } + + pub fn op_put(&self, ident: UniqueIdent, entry: IndexEntry) -> DavDagOp { + DavDagOp::Put(self.heads_vec(), ident, entry) + } + + pub fn op_delete(&self, ident: UniqueIdent) -> DavDagOp { + DavDagOp::Delete(self.heads_vec(), ident) + } + + // HELPER functions + pub fn heads_vec(&self) -> Vec { + self.heads.clone().into_iter().collect() + } + + // INTERNAL functions + fn register(&mut self, ident: UniqueIdent, entry: IndexEntry) { + // Insert item in the source of trust + self.table.insert(ident, entry.clone()); + + // Update the cache + let (filename, _etag) = entry; + self.idx_by_filename.insert(filename, ident); + } + + fn unregister(&mut self, ident: &UniqueIdent) { + // Query the source of truth to get the information we + // need to clean the indexes + let (filename, _etag) = match self.table.get(ident) { + Some(v) => v, + None => return, + }; + self.idx_by_filename.remove(filename); + + // Finally clear item from the source of trust + self.table.remove(ident); + } + + // @FIXME: maybe in case of error we could simply disable the sync graph + // and ask the client to rely on manual sync. For now, we are skipping the event + // which is midly satisfying. 
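A rough sketch of how the operation constructors above are meant to be used (for illustration only, not part of the patch; the demo function name is made up, `gen_ident` comes from `crate::unique_ident`, and the actual state transition happens in the `BayouState::apply` implementation that follows).

use crate::davdag::{DavDag, DavDagOp, IndexEntry};
use crate::unique_ident::gen_ident;

fn davdag_ops_demo() {
    let state = DavDag::default();

    let ident = gen_ident();
    let entry: IndexEntry = ("meeting.ics".to_string(), "\"etag-1\"".to_string());

    // Each constructor captures the current heads as the new event's parents;
    // the state is empty here, so both operations carry an empty parent list.
    let put: DavDagOp = state.op_put(ident, entry);
    let del: DavDagOp = state.op_delete(ident);

    // Nothing is applied yet: ops go through BayouState::apply, which first
    // validates the parents against the sync DAG (sync_dag) and only then
    // touches `table` and `idx_by_filename`.
    let _ = (put, del);
}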
+ fn sync_dag(&mut self, child: &UniqueIdent, parents: &[UniqueIdent]) -> bool { + // All parents must exist in successors otherwise we can't accept item + // do the check + update successors + let mut try_successors = self.successors.clone(); + for par in parents.iter() { + match try_successors.get_mut(par) { + None => { + tracing::warn!("Unable to push a Dav DAG sync op into the graph, an event is missing, it's a bug"); + return false + }, + Some(v) => v.insert(*child), + }; + } + self.successors = try_successors; + + // Remove from HEADS this event's parents + parents.iter().for_each(|par| { self.heads.remove(par); }); + + // This event becomes a new HEAD in turn + self.heads.insert(*child); + + // This event is also a future successor + self.successors.insert(*child, ordset![]); + + true + } +} + +impl BayouState for DavDag { + type Op = DavDagOp; + + fn apply(&self, op: &Self::Op) -> Self { + let mut new = self.clone(); + + match op { + DavDagOp::Put(parents, ident, entry) => { + if new.sync_dag(ident, parents.as_slice()) { + new.register(*ident, entry.clone()); + } + }, + DavDagOp::Delete(parents, ident) => { + if new.sync_dag(ident, parents.as_slice()) { + new.unregister(ident); + } + }, + DavDagOp::Merge(parents, ident) => { + new.sync_dag(ident, parents.as_slice()); + } + } + + new + } +} + +// CUSTOM SERIALIZATION & DESERIALIZATION +#[derive(Serialize, Deserialize)] +struct DavDagSerializedRepr { + items: Vec<(UniqueIdent, IndexEntry)>, + heads: Vec, +} + +impl<'de> Deserialize<'de> for DavDag { + fn deserialize(d: D) -> Result + where + D: Deserializer<'de>, + { + let val: DavDagSerializedRepr = DavDagSerializedRepr::deserialize(d)?; + let mut davdag = DavDag::default(); + + // Build the table + index + val.items.into_iter().for_each(|(ident, entry)| davdag.register(ident, entry)); + + // Initialize the synchronization DAG with its roots + val.heads.into_iter().for_each(|ident| { + davdag.successors.insert(ident, ordset![]); + davdag.heads.insert(ident); + }); + + Ok(davdag) + } +} + +impl Serialize for DavDag { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + // Indexes are rebuilt on the fly, we serialize only the core database + let items = self.table.iter().map(|(ident, entry)| (*ident, entry.clone())).collect(); + + // We keep only the head entries from the sync graph, + // these entries will be used to initialize it back when deserializing + let heads = self.heads_vec(); + + // Finale serialization object + let val = DavDagSerializedRepr { items, heads }; + val.serialize(serializer) + } +} diff --git a/aero-collections/src/lib.rs b/aero-collections/src/lib.rs index 269cd13..ef8b8d8 100644 --- a/aero-collections/src/lib.rs +++ b/aero-collections/src/lib.rs @@ -1,4 +1,5 @@ pub mod unique_ident; +pub mod davdag; pub mod user; pub mod mail; pub mod calendar; -- cgit v1.2.3 From a146a0babc25547f269c784e090e308fa831ab32 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Wed, 27 Mar 2024 15:09:18 +0100 Subject: Sync algorithm --- aero-collections/src/davdag.rs | 82 +++++++++++++++++++++++++++++++++++++++--- 1 file changed, 77 insertions(+), 5 deletions(-) diff --git a/aero-collections/src/davdag.rs b/aero-collections/src/davdag.rs index 696b985..59dcc7b 100644 --- a/aero-collections/src/davdag.rs +++ b/aero-collections/src/davdag.rs @@ -1,3 +1,4 @@ +use anyhow::{bail, Result}; use serde::{Deserialize, Deserializer, Serialize, Serializer}; use im::{OrdMap, OrdSet, ordset}; @@ -23,9 +24,14 @@ pub struct DavDag { /// Partial synchronization graph /// 
parent -> direct children pub successors: OrdMap>, - + pub ancestors: OrdMap>, + + /// All nodes + pub all_nodes: OrdSet, /// Head nodes pub heads: OrdSet, + /// Origin nodes + pub origins: OrdSet, } #[derive(Clone, Serialize, Deserialize, Debug)] @@ -54,11 +60,57 @@ impl DavDag { } // HELPER functions + + /// All HEAD events pub fn heads_vec(&self) -> Vec { self.heads.clone().into_iter().collect() } + /// Resolve a sync token + pub fn resolve(&self, known: UniqueIdent) -> Result> { + let already_known = self.all_ancestors(known); + + // We can't capture all missing events if we are not connected + // to all sinks of the graph, ie. if we don't already know all the sinks. + if !self.origins.is_subset(already_known.clone()) { + bail!("Not enough history to produce a correct diff, a full resync is needed"); + } + + // Missing items are all existing graph items from which + // we removed all items known by the given node. + // In other words, all values in all_nodes that are not in already_known. + Ok(self.all_nodes.clone().relative_complement(already_known)) + } + + /// Find all ancestors of a given + fn all_ancestors(&self, known: UniqueIdent) -> OrdSet { + let mut all_known: OrdSet = OrdSet::new(); + let mut to_collect = vec![known]; + loop { + let cursor = match to_collect.pop() { + // Loop stops here + None => break, + Some(v) => v, + }; + + if all_known.insert(cursor).is_some() { + // Item already processed + continue + } + + // Collect parents + let parents = match self.ancestors.get(&cursor) { + None => continue, + Some(c) => c, + }; + to_collect.extend(parents.iter()); + } + all_known + } + // INTERNAL functions + + /// Register a WebDAV item (put, copy, move) fn register(&mut self, ident: UniqueIdent, entry: IndexEntry) { // Insert item in the source of trust self.table.insert(ident, entry.clone()); @@ -68,6 +120,7 @@ impl DavDag { self.idx_by_filename.insert(filename, ident); } + /// Unregister a WebDAV item (delete, move) fn unregister(&mut self, ident: &UniqueIdent) { // Query the source of truth to get the information we // need to clean the indexes @@ -84,8 +137,11 @@ impl DavDag { // @FIXME: maybe in case of error we could simply disable the sync graph // and ask the client to rely on manual sync. For now, we are skipping the event // which is midly satisfying. 
+ + /// When an event is processed, update the synchronization DAG fn sync_dag(&mut self, child: &UniqueIdent, parents: &[UniqueIdent]) -> bool { - // All parents must exist in successors otherwise we can't accept item + // --- Update SUCCESSORS + // All parents must exist in successors otherwise we can't accept item: // do the check + update successors let mut try_successors = self.successors.clone(); for par in parents.iter() { @@ -99,15 +155,29 @@ impl DavDag { } self.successors = try_successors; + // This event is also a future successor + self.successors.insert(*child, ordset![]); + + // --- Update ANCESTORS + // We register ancestors as it is required for the sync algorithm + self.ancestors.insert(*child, parents.iter().fold(ordset![], |mut acc, p| { + acc.insert(*p); + acc + })); + + // --- Update ORIGINS + // If this event has no parents, it's an origin + if parents.is_empty() { + self.origins.insert(*child); + } + + // --- Update HEADS // Remove from HEADS this event's parents parents.iter().for_each(|par| { self.heads.remove(par); }); // This event becomes a new HEAD in turn self.heads.insert(*child); - // This event is also a future successor - self.successors.insert(*child, ordset![]); - true } } @@ -160,6 +230,8 @@ impl<'de> Deserialize<'de> for DavDag { val.heads.into_iter().for_each(|ident| { davdag.successors.insert(ident, ordset![]); davdag.heads.insert(ident); + davdag.origins.insert(ident); + davdag.all_nodes.insert(ident); }); Ok(davdag) -- cgit v1.2.3 From 9afbfeb42794a71170fe4c46c911446bcc217660 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Wed, 27 Mar 2024 16:16:37 +0100 Subject: Testing DAG sync --- aero-collections/src/davdag.rs | 174 ++++++++++++++++++++++++++--------------- 1 file changed, 113 insertions(+), 61 deletions(-) diff --git a/aero-collections/src/davdag.rs b/aero-collections/src/davdag.rs index 59dcc7b..63a76a8 100644 --- a/aero-collections/src/davdag.rs +++ b/aero-collections/src/davdag.rs @@ -4,14 +4,18 @@ use im::{OrdMap, OrdSet, ordset}; use aero_bayou::*; -use crate::unique_ident::UniqueIdent; +use crate::unique_ident::{gen_ident, UniqueIdent}; /// Parents are only persisted in the event log, /// not in the checkpoints. 
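// A short sketch, assuming the aliases introduced just below: each operation
// now carries a `SyncDesc = (Parents, Token)`, where `Parents` are the HEADs
// observed when the op was built and `Token` is a freshly generated identifier
// naming the event itself. A client remembers the last token it saw and later
// hands it back to `resolve()` to get the set of events it is missing:
//
//   let dag = DavDag::default();
//   let entry: IndexEntry = (gen_ident(), "invite.ics".into(), "\"1234-5678\"".into());
//   let op = dag.op_put(entry);                        // DavDagOp::Put((parents, token), entry)
//   let token = op.token();                            // the client's new sync token
//   let dag = dag.apply(&op);
//   assert_eq!(dag.resolve(token).unwrap().len(), 0);  // up to date: nothing is missing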
-pub type Parents = Vec; +pub type Token = UniqueIdent; +pub type Parents = Vec; +pub type SyncDesc = (Parents, Token); + +pub type BlobId = UniqueIdent; pub type Etag = String; pub type FileName = String; -pub type IndexEntry = (FileName, Etag); +pub type IndexEntry = (BlobId, FileName, Etag); #[derive(Clone, Default)] pub struct DavDag { @@ -22,8 +26,6 @@ pub struct DavDag { pub idx_by_filename: OrdMap, /// Partial synchronization graph - /// parent -> direct children - pub successors: OrdMap>, pub ancestors: OrdMap>, /// All nodes @@ -37,33 +39,46 @@ pub struct DavDag { #[derive(Clone, Serialize, Deserialize, Debug)] pub enum DavDagOp { /// Merge is a virtual operation run when multiple heads are discovered - Merge(Parents, UniqueIdent), + Merge(SyncDesc), /// Add an item to the collection - Put(Parents, UniqueIdent, IndexEntry), + Put(SyncDesc, IndexEntry), /// Delete an item from the collection - Delete(Parents, UniqueIdent), + Delete(SyncDesc, BlobId), +} +impl DavDagOp { + pub fn token(&self) -> Token { + match self { + Self::Merge((_, t)) => *t, + Self::Put((_, t), _) => *t, + Self::Delete((_, t), _) => *t, + } + } } impl DavDag { - pub fn op_merge(&self, ident: UniqueIdent) -> DavDagOp { - DavDagOp::Merge(self.heads_vec(), ident) + pub fn op_merge(&self) -> DavDagOp { + DavDagOp::Merge(self.sync_desc()) } - pub fn op_put(&self, ident: UniqueIdent, entry: IndexEntry) -> DavDagOp { - DavDagOp::Put(self.heads_vec(), ident, entry) + pub fn op_put(&self, entry: IndexEntry) -> DavDagOp { + DavDagOp::Put(self.sync_desc(), entry) } - pub fn op_delete(&self, ident: UniqueIdent) -> DavDagOp { - DavDagOp::Delete(self.heads_vec(), ident) + pub fn op_delete(&self, ident: BlobId) -> DavDagOp { + DavDagOp::Delete(self.sync_desc(), ident) } // HELPER functions - /// All HEAD events - pub fn heads_vec(&self) -> Vec { - self.heads.clone().into_iter().collect() + pub fn heads_vec(&self) -> Vec { + self.heads.clone().into_iter().collect() + } + + /// A sync descriptor + pub fn sync_desc(&self) -> SyncDesc { + (self.heads_vec(), gen_ident()) } /// Resolve a sync token @@ -71,18 +86,21 @@ impl DavDag { let already_known = self.all_ancestors(known); // We can't capture all missing events if we are not connected - // to all sinks of the graph, ie. if we don't already know all the sinks. + // to all sinks of the graph, + // ie. if we don't already know all the sinks, + // ie. if we are missing so much history that + // the event log has been transformed into a checkpoint if !self.origins.is_subset(already_known.clone()) { bail!("Not enough history to produce a correct diff, a full resync is needed"); } - // Missing items are all existing graph items from which - // we removed all items known by the given node. - // In other words, all values in all_nodes that are not in already_known. + // Missing items are *all existing graph items* from which + // we removed *all items known by the given node*. + // In other words, all values in `all_nodes` that are not in `already_known`. 
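// For example (a sketch assuming the `im` set API already used in this file):
// with all_nodes = {t1, t2, t3, t4}, already_known = {t1, t2} and
// origins = {t1}, the subset check above passes, and the expression below
// computes {t1, t2, t3, t4} minus {t1, t2} = {t3, t4}: exactly the events the
// calling client has not seen yet.
//
//   let missing = ordset![1u8, 2, 3, 4].relative_complement(ordset![1, 2]);
//   assert_eq!(missing, ordset![3, 4]);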
Ok(self.all_nodes.clone().relative_complement(already_known)) } - /// Find all ancestors of a given + /// Find all ancestors of a given node fn all_ancestors(&self, known: UniqueIdent) -> OrdSet { let mut all_known: OrdSet = OrdSet::new(); let mut to_collect = vec![known]; @@ -111,21 +129,23 @@ impl DavDag { // INTERNAL functions /// Register a WebDAV item (put, copy, move) - fn register(&mut self, ident: UniqueIdent, entry: IndexEntry) { + fn register(&mut self, entry: IndexEntry) { + let (blob_id, filename, _etag) = entry.clone(); + // Insert item in the source of trust - self.table.insert(ident, entry.clone()); + self.table.insert(blob_id, entry); // Update the cache - let (filename, _etag) = entry; - self.idx_by_filename.insert(filename, ident); + self.idx_by_filename.insert(filename, blob_id); } /// Unregister a WebDAV item (delete, move) fn unregister(&mut self, ident: &UniqueIdent) { // Query the source of truth to get the information we // need to clean the indexes - let (filename, _etag) = match self.table.get(ident) { + let (_blob_id, filename, _etag) = match self.table.get(ident) { Some(v) => v, + // Element does not exist, return early None => return, }; self.idx_by_filename.remove(filename); @@ -134,29 +154,9 @@ impl DavDag { self.table.remove(ident); } - // @FIXME: maybe in case of error we could simply disable the sync graph - // and ask the client to rely on manual sync. For now, we are skipping the event - // which is midly satisfying. - /// When an event is processed, update the synchronization DAG - fn sync_dag(&mut self, child: &UniqueIdent, parents: &[UniqueIdent]) -> bool { - // --- Update SUCCESSORS - // All parents must exist in successors otherwise we can't accept item: - // do the check + update successors - let mut try_successors = self.successors.clone(); - for par in parents.iter() { - match try_successors.get_mut(par) { - None => { - tracing::warn!("Unable to push a Dav DAG sync op into the graph, an event is missing, it's a bug"); - return false - }, - Some(v) => v.insert(*child), - }; - } - self.successors = try_successors; - - // This event is also a future successor - self.successors.insert(*child, ordset![]); + fn sync_dag(&mut self, sync_desc: &SyncDesc) -> bool { + let (parents, child) = sync_desc; // --- Update ANCESTORS // We register ancestors as it is required for the sync algorithm @@ -177,6 +177,9 @@ impl DavDag { // This event becomes a new HEAD in turn self.heads.insert(*child); + + // --- Update ALL NODES + self.all_nodes.insert(*child); true } @@ -189,18 +192,18 @@ impl BayouState for DavDag { let mut new = self.clone(); match op { - DavDagOp::Put(parents, ident, entry) => { - if new.sync_dag(ident, parents.as_slice()) { - new.register(*ident, entry.clone()); + DavDagOp::Put(sync_desc, entry) => { + if new.sync_dag(sync_desc) { + new.register(entry.clone()); } }, - DavDagOp::Delete(parents, ident) => { - if new.sync_dag(ident, parents.as_slice()) { - new.unregister(ident); + DavDagOp::Delete(sync_desc, blob_id) => { + if new.sync_dag(sync_desc) { + new.unregister(blob_id); } }, - DavDagOp::Merge(parents, ident) => { - new.sync_dag(ident, parents.as_slice()); + DavDagOp::Merge(sync_desc) => { + new.sync_dag(sync_desc); } } @@ -211,7 +214,7 @@ impl BayouState for DavDag { // CUSTOM SERIALIZATION & DESERIALIZATION #[derive(Serialize, Deserialize)] struct DavDagSerializedRepr { - items: Vec<(UniqueIdent, IndexEntry)>, + items: Vec, heads: Vec, } @@ -224,11 +227,10 @@ impl<'de> Deserialize<'de> for DavDag { let mut davdag = DavDag::default(); // 
Build the table + index - val.items.into_iter().for_each(|(ident, entry)| davdag.register(ident, entry)); + val.items.into_iter().for_each(|entry| davdag.register(entry)); // Initialize the synchronization DAG with its roots val.heads.into_iter().for_each(|ident| { - davdag.successors.insert(ident, ordset![]); davdag.heads.insert(ident); davdag.origins.insert(ident); davdag.all_nodes.insert(ident); @@ -244,7 +246,7 @@ impl Serialize for DavDag { S: Serializer, { // Indexes are rebuilt on the fly, we serialize only the core database - let items = self.table.iter().map(|(ident, entry)| (*ident, entry.clone())).collect(); + let items = self.table.iter().map(|(_, entry)| entry.clone()).collect(); // We keep only the head entries from the sync graph, // these entries will be used to initialize it back when deserializing @@ -255,3 +257,53 @@ impl Serialize for DavDag { val.serialize(serializer) } } + +// ---- TESTS ---- + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn base() { + let mut state = DavDag::default(); + + // Add item 1 + { + let m = UniqueIdent([0x01; 24]); + let ev = state.op_put((m, "cal.ics".into(), "321-321".into())); + state = state.apply(&ev); + + assert_eq!(state.table.len(), 1); + assert_eq!(state.resolve(ev.token()).unwrap().len(), 0); + } + + // Add 2 concurrent items + let (t1, t2) = { + let blob1 = UniqueIdent([0x02; 24]); + let ev1 = state.op_put((blob1, "cal2.ics".into(), "321-321".into())); + + let blob2 = UniqueIdent([0x01; 24]); + let ev2 = state.op_delete(blob2); + + state = state.apply(&ev1); + state = state.apply(&ev2); + + assert_eq!(state.table.len(), 1); + assert_eq!(state.resolve(ev1.token()).unwrap(), ordset![ev2.token()]); + + (ev1.token(), ev2.token()) + }; + + // Add later a new item + { + let blob3 = UniqueIdent([0x03; 24]); + let ev = state.op_put((blob3, "cal3.ics".into(), "321-321".into())); + + state = state.apply(&ev); + assert_eq!(state.table.len(), 2); + assert_eq!(state.resolve(ev.token()).unwrap().len(), 0); + assert_eq!(state.resolve(t1).unwrap(), ordset![t2, ev.token()]); + } + } +} -- cgit v1.2.3 From f179479c308876f2f41de695cc0375d7fd20b233 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Thu, 4 Apr 2024 11:28:15 +0200 Subject: WIP implem storage --- aero-collections/src/calendar/mod.rs | 79 ++++++++++++++++++++++++++++++++++-- aero-collections/src/davdag.rs | 61 +++++++++++++++++----------- 2 files changed, 113 insertions(+), 27 deletions(-) diff --git a/aero-collections/src/calendar/mod.rs b/aero-collections/src/calendar/mod.rs index 6537a4e..936f8c3 100644 --- a/aero-collections/src/calendar/mod.rs +++ b/aero-collections/src/calendar/mod.rs @@ -9,7 +9,7 @@ use aero_user::cryptoblob::{self, gen_key, open_deserialize, seal_serialize, Key use aero_user::storage::{self, BlobRef, BlobVal, RowRef, RowVal, Selector, Store}; use crate::unique_ident::*; -use crate::davdag::DavDag; +use crate::davdag::{DavDag, IndexEntry, Token, BlobId, SyncChange}; pub struct Calendar { pub(super) id: UniqueIdent, @@ -20,8 +20,49 @@ impl Calendar { pub(crate) async fn open( creds: &Credentials, id: UniqueIdent, - ) -> Result { - todo!(); + ) -> Result { + let bayou_path = format!("calendar/dag/{}", id); + let cal_path = format!("calendar/events/{}", id); + + let mut davdag = Bayou::::new(creds, bayou_path).await?; + davdag.sync().await?; + + let internal = RwLock::new(CalendarInternal { + id, + encryption_key: creds.keys.master.clone(), + storage: creds.storage.build().await?, + davdag, + cal_path, + }); + + Ok(Self { id, internal }) + } + + /// 
Sync data with backing store + pub async fn force_sync(&self) -> Result<()> { + self.internal.write().await.force_sync().await + } + + /// Sync data with backing store only if changes are detected + /// or last sync is too old + pub async fn opportunistic_sync(&self) -> Result<()> { + self.internal.write().await.opportunistic_sync().await + } + + pub async fn get(&self, blob_id: UniqueIdent, message_key: &Key) -> Result> { + self.internal.read().await.get(blob_id, message_key).await + } + + pub async fn diff(&self, sync_token: Token) -> Result<(Token, Vec)> { + self.internal.read().await.diff(sync_token).await + } + + pub async fn put<'a>(&self, entry: IndexEntry, evt: &'a [u8]) -> Result { + self.internal.write().await.put(entry, evt).await + } + + pub async fn delete(&self, blob_id: UniqueIdent) -> Result { + self.internal.write().await.delete(blob_id).await } } @@ -30,5 +71,35 @@ struct CalendarInternal { cal_path: String, encryption_key: Key, storage: Store, - uid_index: Bayou, + davdag: Bayou, +} + +impl CalendarInternal { + async fn force_sync(&mut self) -> Result<()> { + self.davdag.sync().await?; + Ok(()) + } + + async fn opportunistic_sync(&mut self) -> Result<()> { + self.davdag.opportunistic_sync().await?; + Ok(()) + } + + async fn get(&self, blob_id: BlobId, message_key: &Key) -> Result> { + todo!() + } + + async fn put<'a>(&mut self, entry: IndexEntry, evt: &'a [u8]) -> Result { + //@TODO write event to S3 + //@TODO add entry into Bayou + todo!(); + } + + async fn delete(&mut self, blob_id: BlobId) -> Result { + todo!(); + } + + async fn diff(&self, sync_token: Token) -> Result<(Token, Vec)> { + todo!(); + } } diff --git a/aero-collections/src/davdag.rs b/aero-collections/src/davdag.rs index 63a76a8..f668831 100644 --- a/aero-collections/src/davdag.rs +++ b/aero-collections/src/davdag.rs @@ -20,20 +20,31 @@ pub type IndexEntry = (BlobId, FileName, Etag); #[derive(Clone, Default)] pub struct DavDag { /// Source of trust - pub table: OrdMap, + pub table: OrdMap, /// Indexes optimized for queries - pub idx_by_filename: OrdMap, + pub idx_by_filename: OrdMap, + + // ------------ Below this line, data is ephemeral, ie. 
not checkpointed /// Partial synchronization graph - pub ancestors: OrdMap>, + pub ancestors: OrdMap>, /// All nodes - pub all_nodes: OrdSet, + pub all_nodes: OrdSet, /// Head nodes - pub heads: OrdSet, + pub heads: OrdSet, /// Origin nodes - pub origins: OrdSet, + pub origins: OrdSet, + + /// File change token by token + pub change: OrdMap, +} + +#[derive(Clone, Debug)] +pub enum SyncChange { + Ok(FileName), + NotFound(FileName), } #[derive(Clone, Serialize, Deserialize, Debug)] @@ -66,8 +77,8 @@ impl DavDag { DavDagOp::Put(self.sync_desc(), entry) } - pub fn op_delete(&self, ident: BlobId) -> DavDagOp { - DavDagOp::Delete(self.sync_desc(), ident) + pub fn op_delete(&self, blob_id: BlobId) -> DavDagOp { + DavDagOp::Delete(self.sync_desc(), blob_id) } // HELPER functions @@ -129,33 +140,41 @@ impl DavDag { // INTERNAL functions /// Register a WebDAV item (put, copy, move) - fn register(&mut self, entry: IndexEntry) { + fn register(&mut self, sync_token: Option, entry: IndexEntry) { let (blob_id, filename, _etag) = entry.clone(); // Insert item in the source of trust self.table.insert(blob_id, entry); // Update the cache - self.idx_by_filename.insert(filename, blob_id); + self.idx_by_filename.insert(filename.to_string(), blob_id); + + // Record the change in the ephemeral synchronization map + if let Some(sync_token) = sync_token { + self.change.insert(sync_token, SyncChange::Ok(filename)); + } } /// Unregister a WebDAV item (delete, move) - fn unregister(&mut self, ident: &UniqueIdent) { + fn unregister(&mut self, sync_token: Token, blob_id: &BlobId) { // Query the source of truth to get the information we // need to clean the indexes - let (_blob_id, filename, _etag) = match self.table.get(ident) { + let (_blob_id, filename, _etag) = match self.table.get(blob_id) { Some(v) => v, // Element does not exist, return early None => return, }; self.idx_by_filename.remove(filename); + // Record the change in the ephemeral synchronization map + self.change.insert(sync_token, SyncChange::NotFound(filename.to_string())); + // Finally clear item from the source of trust - self.table.remove(ident); + self.table.remove(blob_id); } /// When an event is processed, update the synchronization DAG - fn sync_dag(&mut self, sync_desc: &SyncDesc) -> bool { + fn sync_dag(&mut self, sync_desc: &SyncDesc) { let (parents, child) = sync_desc; // --- Update ANCESTORS @@ -180,8 +199,6 @@ impl DavDag { // --- Update ALL NODES self.all_nodes.insert(*child); - - true } } @@ -193,14 +210,12 @@ impl BayouState for DavDag { match op { DavDagOp::Put(sync_desc, entry) => { - if new.sync_dag(sync_desc) { - new.register(entry.clone()); - } + new.sync_dag(sync_desc); + new.register(Some(sync_desc.1), entry.clone()); }, DavDagOp::Delete(sync_desc, blob_id) => { - if new.sync_dag(sync_desc) { - new.unregister(blob_id); - } + new.sync_dag(sync_desc); + new.unregister(sync_desc.1, blob_id); }, DavDagOp::Merge(sync_desc) => { new.sync_dag(sync_desc); @@ -227,7 +242,7 @@ impl<'de> Deserialize<'de> for DavDag { let mut davdag = DavDag::default(); // Build the table + index - val.items.into_iter().for_each(|entry| davdag.register(entry)); + val.items.into_iter().for_each(|entry| davdag.register(None, entry)); // Initialize the synchronization DAG with its roots val.heads.into_iter().for_each(|ident| { -- cgit v1.2.3 From 2efdd40b8edd83c3fef3d94f7e62b41b86e49959 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Thu, 4 Apr 2024 11:57:32 +0200 Subject: Write PUT --- aero-collections/src/calendar/mod.rs | 53 
+++++++++++++++++++++++++++++++----- 1 file changed, 46 insertions(+), 7 deletions(-) diff --git a/aero-collections/src/calendar/mod.rs b/aero-collections/src/calendar/mod.rs index 936f8c3..7e5a8c1 100644 --- a/aero-collections/src/calendar/mod.rs +++ b/aero-collections/src/calendar/mod.rs @@ -38,6 +38,8 @@ impl Calendar { Ok(Self { id, internal }) } + // ---- DAG sync utilities + /// Sync data with backing store pub async fn force_sync(&self) -> Result<()> { self.internal.write().await.force_sync().await @@ -49,24 +51,40 @@ impl Calendar { self.internal.write().await.opportunistic_sync().await } - pub async fn get(&self, blob_id: UniqueIdent, message_key: &Key) -> Result> { - self.internal.read().await.get(blob_id, message_key).await + // ---- Data API + + /// Access the DAG internal data (you can get the list of files for example) + pub async fn dag(&self) -> DavDag { + // Cloning is cheap + self.internal.read().await.davdag.state().clone() } + /// The diff API is a write API as we might need to push a merge node + /// to get a new sync token pub async fn diff(&self, sync_token: Token) -> Result<(Token, Vec)> { - self.internal.read().await.diff(sync_token).await + self.internal.write().await.diff(sync_token).await } + /// Get a specific event + pub async fn get(&self, evt_id: UniqueIdent, message_key: &Key) -> Result> { + self.internal.read().await.get(evt_id, message_key).await + } + + /// Put a specific event pub async fn put<'a>(&self, entry: IndexEntry, evt: &'a [u8]) -> Result { self.internal.write().await.put(entry, evt).await } + /// Delete a specific event pub async fn delete(&self, blob_id: UniqueIdent) -> Result { self.internal.write().await.delete(blob_id).await } } +use base64::Engine; +const MESSAGE_KEY: &str = "message-key"; struct CalendarInternal { + #[allow(dead_code)] id: UniqueIdent, cal_path: String, encryption_key: Key, @@ -90,16 +108,37 @@ impl CalendarInternal { } async fn put<'a>(&mut self, entry: IndexEntry, evt: &'a [u8]) -> Result { - //@TODO write event to S3 - //@TODO add entry into Bayou - todo!(); + let message_key = gen_key(); + + let encrypted_msg_key = cryptoblob::seal(&message_key.as_ref(), &self.encryption_key)?; + let key_header = base64::engine::general_purpose::STANDARD.encode(&encrypted_msg_key); + + // Write event to S3 + let message_blob = cryptoblob::seal(evt, &message_key)?; + let blob_val = BlobVal::new( + BlobRef(format!("{}/{}", self.cal_path, entry.0)), + message_blob, + ) + .with_meta(MESSAGE_KEY.to_string(), key_header); + + self.storage + .blob_insert(blob_val) + .await?; + + // Add entry to Bayou + let davstate = self.davdag.state(); + let put_op = davstate.op_put(entry); + let token = put_op.token(); + self.davdag.push(put_op).await?; + + Ok(token) } async fn delete(&mut self, blob_id: BlobId) -> Result { todo!(); } - async fn diff(&self, sync_token: Token) -> Result<(Token, Vec)> { + async fn diff(&mut self, sync_token: Token) -> Result<(Token, Vec)> { todo!(); } } -- cgit v1.2.3 From 272b93f04a0640e056fe994f48cb2837eacdad46 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Thu, 4 Apr 2024 14:59:47 +0200 Subject: GET logic --- aero-collections/src/calendar/mod.rs | 26 +++++++++++++++++++++----- 1 file changed, 21 insertions(+), 5 deletions(-) diff --git a/aero-collections/src/calendar/mod.rs b/aero-collections/src/calendar/mod.rs index 7e5a8c1..0e0e65f 100644 --- a/aero-collections/src/calendar/mod.rs +++ b/aero-collections/src/calendar/mod.rs @@ -1,6 +1,6 @@ pub mod namespace; -use anyhow::Result; +use anyhow::{anyhow, Result}; use 
tokio::sync::RwLock; use aero_bayou::Bayou; @@ -66,8 +66,8 @@ impl Calendar { } /// Get a specific event - pub async fn get(&self, evt_id: UniqueIdent, message_key: &Key) -> Result> { - self.internal.read().await.get(evt_id, message_key).await + pub async fn get(&self, evt_id: UniqueIdent) -> Result> { + self.internal.read().await.get(evt_id).await } /// Put a specific event @@ -103,8 +103,24 @@ impl CalendarInternal { Ok(()) } - async fn get(&self, blob_id: BlobId, message_key: &Key) -> Result> { - todo!() + async fn get(&self, blob_id: BlobId) -> Result> { + // Fetch message from S3 + let blob_ref = storage::BlobRef(format!("{}/{}", self.cal_path, blob_id)); + let object = self.storage.blob_fetch(&blob_ref).await?; + + // Decrypt message key from headers + let key_encrypted_b64 = object + .meta + .get(MESSAGE_KEY) + .ok_or(anyhow!("Missing key in metadata"))?; + let key_encrypted = base64::engine::general_purpose::STANDARD.decode(key_encrypted_b64)?; + let message_key_raw = cryptoblob::open(&key_encrypted, &self.encryption_key)?; + let message_key = + cryptoblob::Key::from_slice(&message_key_raw).ok_or(anyhow!("Invalid message key"))?; + + // Decrypt body + let body = object.value; + cryptoblob::open(&body, &message_key) } async fn put<'a>(&mut self, entry: IndexEntry, evt: &'a [u8]) -> Result { -- cgit v1.2.3 From 054bd52279faefd327be092ea7ec13f75f0a6163 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Thu, 4 Apr 2024 15:40:26 +0200 Subject: Implement diff --- aero-collections/src/calendar/mod.rs | 43 +++++++++++++++++++++++++++++++----- aero-collections/src/davdag.rs | 4 ++-- aero-collections/src/mail/mailbox.rs | 2 +- 3 files changed, 41 insertions(+), 8 deletions(-) diff --git a/aero-collections/src/calendar/mod.rs b/aero-collections/src/calendar/mod.rs index 0e0e65f..127f41b 100644 --- a/aero-collections/src/calendar/mod.rs +++ b/aero-collections/src/calendar/mod.rs @@ -1,12 +1,12 @@ pub mod namespace; -use anyhow::{anyhow, Result}; +use anyhow::{anyhow, bail, Result}; use tokio::sync::RwLock; use aero_bayou::Bayou; use aero_user::login::Credentials; -use aero_user::cryptoblob::{self, gen_key, open_deserialize, seal_serialize, Key}; -use aero_user::storage::{self, BlobRef, BlobVal, RowRef, RowVal, Selector, Store}; +use aero_user::cryptoblob::{self, gen_key, Key}; +use aero_user::storage::{self, BlobRef, BlobVal, Store}; use crate::unique_ident::*; use crate::davdag::{DavDag, IndexEntry, Token, BlobId, SyncChange}; @@ -151,10 +151,43 @@ impl CalendarInternal { } async fn delete(&mut self, blob_id: BlobId) -> Result { - todo!(); + let davstate = self.davdag.state(); + + if davstate.table.contains_key(&blob_id) { + bail!("Cannot delete event that doesn't exist"); + } + + let del_op = davstate.op_delete(blob_id); + let token = del_op.token(); + self.davdag.push(del_op).await?; + + let blob_ref = BlobRef(format!("{}/{}", self.cal_path, blob_id)); + self.storage.blob_rm(&blob_ref).await?; + + Ok(token) } async fn diff(&mut self, sync_token: Token) -> Result<(Token, Vec)> { - todo!(); + let davstate = self.davdag.state(); + + let token_changed = davstate.resolve(sync_token)?; + let changes = token_changed + .iter() + .filter_map(|t: &Token| davstate.change.get(t)) + .map(|s| s.clone()) + .collect(); + + let heads = davstate.heads_vec(); + let token = match heads.as_slice() { + [ token ] => *token, + _ => { + let op_mg = davstate.op_merge(); + let token = op_mg.token(); + self.davdag.push(op_mg).await?; + token + } + }; + + Ok((token, changes)) } } diff --git 
a/aero-collections/src/davdag.rs b/aero-collections/src/davdag.rs index f668831..3aaebb8 100644 --- a/aero-collections/src/davdag.rs +++ b/aero-collections/src/davdag.rs @@ -93,7 +93,7 @@ impl DavDag { } /// Resolve a sync token - pub fn resolve(&self, known: UniqueIdent) -> Result> { + pub fn resolve(&self, known: Token) -> Result> { let already_known = self.all_ancestors(known); // We can't capture all missing events if we are not connected @@ -112,7 +112,7 @@ impl DavDag { } /// Find all ancestors of a given node - fn all_ancestors(&self, known: UniqueIdent) -> OrdSet { + fn all_ancestors(&self, known: Token) -> OrdSet { let mut all_known: OrdSet = OrdSet::new(); let mut to_collect = vec![known]; loop { diff --git a/aero-collections/src/mail/mailbox.rs b/aero-collections/src/mail/mailbox.rs index f797be6..fcdb21e 100644 --- a/aero-collections/src/mail/mailbox.rs +++ b/aero-collections/src/mail/mailbox.rs @@ -375,7 +375,7 @@ impl MailboxInternal { async fn delete(&mut self, ident: UniqueIdent) -> Result<()> { if !self.uid_index.state().table.contains_key(&ident) { - bail!("Cannot delete mail that doesn't exit"); + bail!("Cannot delete mail that doesn't exist"); } let del_mail_op = self.uid_index.state().op_mail_del(ident); -- cgit v1.2.3 From a2d2649ef92324ccd314ee787577ed504522824a Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Fri, 5 Apr 2024 10:19:07 +0200 Subject: WIP dav integration --- aero-collections/src/calendar/namespace.rs | 2 +- aero-proto/src/dav.rs | 126 ++++++++++++++++------------- 2 files changed, 70 insertions(+), 58 deletions(-) diff --git a/aero-collections/src/calendar/namespace.rs b/aero-collections/src/calendar/namespace.rs index 2fbc364..9c21d19 100644 --- a/aero-collections/src/calendar/namespace.rs +++ b/aero-collections/src/calendar/namespace.rs @@ -17,7 +17,7 @@ pub(crate) const CAL_LIST_SK: &str = "list"; pub(crate) const MAIN_CAL: &str = "Personal"; pub(crate) const MAX_CALNAME_CHARS: usize = 32; -pub(crate) struct CalendarNs(std::sync::Mutex>>); +pub struct CalendarNs(std::sync::Mutex>>); impl CalendarNs { /// Create a new calendar namespace diff --git a/aero-proto/src/dav.rs b/aero-proto/src/dav.rs index 3420f86..72dbc5d 100644 --- a/aero-proto/src/dav.rs +++ b/aero-proto/src/dav.rs @@ -1,7 +1,7 @@ use std::net::SocketAddr; use std::sync::Arc; -use anyhow::{anyhow, Result}; +use anyhow::{anyhow, bail, Result}; use base64::Engine; use hyper::service::service_fn; use hyper::{Request, Response, body::Bytes}; @@ -20,7 +20,7 @@ use rustls_pemfile::{certs, private_key}; use aero_user::config::{DavConfig, DavUnsecureConfig}; use aero_user::login::ArcLoginProvider; -use aero_collections::user::User; +use aero_collections::{user::User, calendar::Calendar}; use aero_dav::types as dav; use aero_dav::caltypes as cal; use aero_dav::acltypes as acl; @@ -230,6 +230,7 @@ async fn router(user: std::sync::Arc, req: Request) -> Result = path.split("/").filter(|s| *s != "").collect(); let method = req.method().as_str().to_uppercase(); + let node = match (RootNode {}).fetch(&user, &path_segments) { Ok(v) => v, Err(e) => { @@ -252,6 +253,12 @@ async fn router(user: std::sync::Arc, req: Request) -> Result { + todo!(); + }, + "DELETE" => { + todo!(); + }, "PROPFIND" => propfind(user, req, node).await, "REPORT" => report(user, req, node).await, _ => return Ok(Response::builder() @@ -260,48 +267,6 @@ async fn router(user: std::sync::Arc, req: Request) -> Result -// -// -// -// -// -// -// - - -// -// -// -// -// -// -// -// -// -// -// - -// -// -// -// -// -// -// -// -// -// 
-// - -// -// -// -// -// -// /alice/calendar/personal/something.ics -// - const ALLPROP: [dav::PropertyRequest; 10] = [ dav::PropertyRequest::CreationDate, dav::PropertyRequest::DisplayName, @@ -501,13 +466,14 @@ async fn deserialize>(req: Request) -> Result { } //--- - +use futures::{future, future::BoxFuture, future::FutureExt}; trait DavNode: Send { // ------- specialized logic // recurence - fn children(&self, user: &ArcUser) -> Vec>; - fn fetch(&self, user: &ArcUser, path: &[&str]) -> Result>; + // @FIXME not satisfied by BoxFutures but I have no better idea currently + fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec>>; + fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str]) -> BoxFuture<'a, Result>>; // node properties fn path(&self, user: &ArcUser) -> String; @@ -573,17 +539,18 @@ trait DavNode: Send { #[derive(Clone)] struct RootNode {} impl DavNode for RootNode { - fn fetch(&self, user: &ArcUser, path: &[&str]) -> Result> { + fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str]) -> BoxFuture<'a, Result>> { if path.len() == 0 { - return Ok(Box::new(self.clone())) + let this = self.clone(); + return async { Ok(Box::new(this) as Box) }.boxed(); } if path[0] == user.username { let child = Box::new(HomeNode {}); - return child.fetch(user, &path[1..]) + return child.fetch(user, &path[1..]); } - Err(anyhow!("Not found")) + async { Err(anyhow!("Not found")) }.boxed() } fn path(&self, user: &ArcUser) -> String { @@ -664,26 +631,29 @@ impl DavNode for HomeNode { #[derive(Clone)] struct CalendarListNode {} impl DavNode for CalendarListNode { - fn fetch(&self, user: &ArcUser, path: &[&str]) -> Result> { + async fn fetch(&self, user: &ArcUser, path: &[&str]) -> Result> { if path.len() == 0 { return Ok(Box::new(self.clone())) } //@FIXME hardcoded logic - if path[0] == "personal" { + /*if path[0] == "personal" { let child = Box::new(CalendarNode { name: "personal".to_string() }); return child.fetch(user, &path[1..]) + }*/ + if !user.calendars.has(user, path[0]).await? 
{ + bail!("Not found"); } - - Err(anyhow!("Not found")) + let child = Box::new(CalendarNode { name: path[0].to_string() }); + child.fetch(user, &path[1..]) } fn path(&self, user: &ArcUser) -> String { format!("/{}/calendar/", user.username) } - fn children(&self, user: &ArcUser) -> Vec> { - vec![Box::new(CalendarNode { name: "personal".into() })] + async fn children(&self, user: &ArcUser) -> Vec> { + user.calendars.list(user).await.map(|name| Box::new(CalendarNode { name: name.to_string() })).collect() } fn supported_properties(&self, user: &ArcUser) -> dav::PropName { dav::PropName(vec![ @@ -832,3 +802,45 @@ impl DavNode for EventNode { } + +// +// +// +// +// +// +// +// + + +// +// +// +// +// +// +// +// +// +// +// + +// +// +// +// +// +// +// +// +// +// +// + +// +// +// +// +// +// /alice/calendar/personal/something.ics +// -- cgit v1.2.3 From bf7fb559656785dcf65d2ad9a30cb59efb83b9f5 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Fri, 5 Apr 2024 15:30:10 +0200 Subject: It compiles again --- aero-proto/src/dav.rs | 198 +++++++++++++++++++++++++++++++++----------------- 1 file changed, 132 insertions(+), 66 deletions(-) diff --git a/aero-proto/src/dav.rs b/aero-proto/src/dav.rs index 72dbc5d..98767a1 100644 --- a/aero-proto/src/dav.rs +++ b/aero-proto/src/dav.rs @@ -20,7 +20,7 @@ use rustls_pemfile::{certs, private_key}; use aero_user::config::{DavConfig, DavUnsecureConfig}; use aero_user::login::ArcLoginProvider; -use aero_collections::{user::User, calendar::Calendar}; +use aero_collections::{user::User, calendar::Calendar, davdag::BlobId}; use aero_dav::types as dav; use aero_dav::caltypes as cal; use aero_dav::acltypes as acl; @@ -231,7 +231,7 @@ async fn router(user: std::sync::Arc, req: Request) -> Result = path.split("/").filter(|s| *s != "").collect(); let method = req.method().as_str().to_uppercase(); - let node = match (RootNode {}).fetch(&user, &path_segments) { + let node = match (RootNode {}).fetch(&user, &path_segments).await { Ok(v) => v, Err(e) => { tracing::warn!(err=?e, "dav node fetch failed"); @@ -302,7 +302,7 @@ async fn propfind(user: std::sync::Arc, req: Request, base_node: // Collect nodes as PROPFIND is not limited at the targeted node let mut nodes = vec![]; if matches!(depth, dav::Depth::One | dav::Depth::Infinity) { - nodes.extend(base_node.children(&user)); + nodes.extend(base_node.children(&user).await); } nodes.push(base_node); @@ -346,13 +346,19 @@ async fn report(user: std::sync::Arc, req: Request, node: Box, Vec<_>) = multiget.href.into_iter().map(|h| match Path::new(h.0.as_str()) { - Ok(Path::Abs(p)) => RootNode{}.fetch(&user, p.as_slice()).or(Err(h)), - Ok(Path::Rel(p)) => node.fetch(&user, p.as_slice()).or(Err(h)), - Err(_) => Err(h), - }).partition(|v| matches!(v, Result::Ok(_))); - let ok_node = ok_node.into_iter().filter_map(|v| v.ok()).collect(); - let not_found = not_found.into_iter().filter_map(|v| v.err()).collect(); + let (mut ok_node, mut not_found) = (Vec::new(), Vec::new()); + for h in multiget.href.into_iter() { + let maybe_collected_node = match Path::new(h.0.as_str()) { + Ok(Path::Abs(p)) => RootNode{}.fetch(&user, p.as_slice()).await.or(Err(h)), + Ok(Path::Rel(p)) => node.fetch(&user, p.as_slice()).await.or(Err(h)), + Err(_) => Err(h), + }; + + match maybe_collected_node { + Ok(v) => ok_node.push(v), + Err(h) => not_found.push(h), + }; + } // Getting props let props = match multiget.selector { @@ -553,13 +559,14 @@ impl DavNode for RootNode { async { Err(anyhow!("Not found")) }.boxed() } + fn children<'a>(&self, user: &'a 
ArcUser) -> BoxFuture<'a, Vec>> { + async { vec![Box::new(HomeNode { }) as Box] }.boxed() + } + fn path(&self, user: &ArcUser) -> String { "/".into() } - fn children(&self, user: &ArcUser) -> Vec> { - vec![Box::new(HomeNode { })] - } fn supported_properties(&self, user: &ArcUser) -> dav::PropName { dav::PropName(vec![ dav::PropertyRequest::DisplayName, @@ -585,26 +592,34 @@ impl DavNode for RootNode { #[derive(Clone)] struct HomeNode {} impl DavNode for HomeNode { - fn fetch(&self, user: &ArcUser, path: &[&str]) -> Result> { + fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str]) -> BoxFuture<'a, Result>> { if path.len() == 0 { - return Ok(Box::new(self.clone())) + let node = Box::new(self.clone()) as Box; + return async { Ok(node) }.boxed() } if path[0] == "calendar" { - let child = Box::new(CalendarListNode {}); - return child.fetch(user, &path[1..]) + return async { + let child = Box::new(CalendarListNode::new(user).await?); + child.fetch(user, &path[1..]).await + }.boxed(); } - Err(anyhow!("Not found")) + async { Err(anyhow!("Not found")) }.boxed() + } + + fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec>> { + async { + CalendarListNode::new(user).await + .map(|c| vec![Box::new(c) as Box]) + .unwrap_or(vec![]) + }.boxed() } fn path(&self, user: &ArcUser) -> String { format!("/{}/", user.username) } - fn children(&self, user: &ArcUser) -> Vec> { - vec![Box::new(CalendarListNode { })] - } fn supported_properties(&self, user: &ArcUser) -> dav::PropName { dav::PropName(vec![ dav::PropertyRequest::DisplayName, @@ -622,39 +637,63 @@ impl DavNode for HomeNode { ])), dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("httpd/unix-directory".into())), dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::CalendarHomeSet)) => - dav::AnyProperty::Value(dav::Property::Extension(all::Property::Cal(cal::Property::CalendarHomeSet(dav::Href(CalendarListNode{}.path(user)))))), + dav::AnyProperty::Value(dav::Property::Extension(all::Property::Cal(cal::Property::CalendarHomeSet(dav::Href(/*CalendarListNode{}.path(user)*/ todo!()))))), v => dav::AnyProperty::Request(v), }).collect() } } #[derive(Clone)] -struct CalendarListNode {} +struct CalendarListNode { + list: Vec, +} +impl CalendarListNode { + async fn new(user: &ArcUser) -> Result { + let list = user.calendars.list(user).await?; + Ok(Self { list }) + } +} impl DavNode for CalendarListNode { - async fn fetch(&self, user: &ArcUser, path: &[&str]) -> Result> { + fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str]) -> BoxFuture<'a, Result>> { if path.len() == 0 { - return Ok(Box::new(self.clone())) + let node = Box::new(self.clone()) as Box; + return async { Ok(node) }.boxed(); } - //@FIXME hardcoded logic - /*if path[0] == "personal" { - let child = Box::new(CalendarNode { name: "personal".to_string() }); - return child.fetch(user, &path[1..]) - }*/ - if !user.calendars.has(user, path[0]).await? { - bail!("Not found"); - } - let child = Box::new(CalendarNode { name: path[0].to_string() }); - child.fetch(user, &path[1..]) + async { + let cal = user.calendars.open(user, path[0]).await?.ok_or(anyhow!("Not found"))?; + let child = Box::new(CalendarNode { + col: cal, + calname: path[0].to_string() + }); + child.fetch(user, &path[1..]).await + }.boxed() + } + + fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec>> { + let list = self.list.clone(); + async move { + //@FIXME maybe we want to be lazy here?! 
+ futures::stream::iter(list.iter()) + .filter_map(|name| async move { + user.calendars.open(user, name).await + .ok() + .flatten() + .map(|v| (name, v)) + }) + .map(|(name, cal)| Box::new(CalendarNode { + col: cal, + calname: name.to_string(), + }) as Box) + .collect::>>() + .await + }.boxed() } fn path(&self, user: &ArcUser) -> String { format!("/{}/calendar/", user.username) } - async fn children(&self, user: &ArcUser) -> Vec> { - user.calendars.list(user).await.map(|name| Box::new(CalendarNode { name: name.to_string() })).collect() - } fn supported_properties(&self, user: &ArcUser) -> dav::PropName { dav::PropName(vec![ dav::PropertyRequest::DisplayName, @@ -674,33 +713,53 @@ impl DavNode for CalendarListNode { #[derive(Clone)] struct CalendarNode { - name: String, + col: Arc, + calname: String, } impl DavNode for CalendarNode { - fn fetch(&self, user: &ArcUser, path: &[&str]) -> Result> { + fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str]) -> BoxFuture<'a, Result>> { if path.len() == 0 { - return Ok(Box::new(self.clone())) + let node = Box::new(self.clone()) as Box; + return async { Ok(node) }.boxed() } - //@FIXME hardcoded logic - if path[0] == "something.ics" { - let child = Box::new(EventNode { - calendar: self.name.to_string(), - event_file: "something.ics".to_string(), - }); - return child.fetch(user, &path[1..]) - } + let col = self.col.clone(); + let calname = self.calname.clone(); + async move { + if let Some(blob_id) = col.dag().await.idx_by_filename.get(path[0]) { + let child = Box::new(EventNode { + col: col.clone(), + calname, + filename: path[0].to_string(), + blob_id: *blob_id, + }); + return child.fetch(user, &path[1..]).await + } - Err(anyhow!("Not found")) + Err(anyhow!("Not found")) + }.boxed() } - fn path(&self, user: &ArcUser) -> String { - format!("/{}/calendar/{}/", user.username, self.name) + fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec>> { + let col = self.col.clone(); + let calname = self.calname.clone(); + + async move { + col.dag().await.idx_by_filename.iter().map(|(filename, blob_id)| { + Box::new(EventNode { + col: col.clone(), + calname: calname.clone(), + filename: filename.to_string(), + blob_id: *blob_id, + }) as Box + }).collect() + }.boxed() } - fn children(&self, user: &ArcUser) -> Vec> { - vec![Box::new(EventNode { calendar: self.name.to_string(), event_file: "something.ics".into() })] + fn path(&self, user: &ArcUser) -> String { + format!("/{}/calendar/{}/", user.username, self.calname) } + fn supported_properties(&self, user: &ArcUser) -> dav::PropName { dav::PropName(vec![ dav::PropertyRequest::DisplayName, @@ -711,7 +770,7 @@ impl DavNode for CalendarNode { } fn properties(&self, _user: &ArcUser, prop: dav::PropName) -> Vec> { prop.0.into_iter().map(|n| match n { - dav::PropertyRequest::DisplayName => dav::AnyProperty::Value(dav::Property::DisplayName(format!("{} calendar", self.name))), + dav::PropertyRequest::DisplayName => dav::AnyProperty::Value(dav::Property::DisplayName(format!("{} calendar", self.calname))), dav::PropertyRequest::ResourceType => dav::AnyProperty::Value(dav::Property::ResourceType(vec![ dav::ResourceType::Collection, dav::ResourceType::Extension(all::ResourceType::Cal(cal::ResourceType::Calendar)), @@ -761,25 +820,29 @@ END:VCALENDAR"#; #[derive(Clone)] struct EventNode { - calendar: String, - event_file: String, + col: Arc, + calname: String, + filename: String, + blob_id: BlobId, } impl DavNode for EventNode { - fn fetch(&self, user: &ArcUser, path: &[&str]) -> Result> { + fn fetch<'a>(&self, 
user: &'a ArcUser, path: &'a [&str]) -> BoxFuture<'a, Result>> { if path.len() == 0 { - return Ok(Box::new(self.clone())) + let node = Box::new(self.clone()) as Box; + return async { Ok(node) }.boxed() } - Err(anyhow!("Not found")) + async { Err(anyhow!("Not found")) }.boxed() } - fn path(&self, user: &ArcUser) -> String { - format!("/{}/calendar/{}/{}", user.username, self.calendar, self.event_file) + fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec>> { + async { vec![] }.boxed() } - fn children(&self, user: &ArcUser) -> Vec> { - vec![] + fn path(&self, user: &ArcUser) -> String { + format!("/{}/calendar/{}/{}", user.username, self.calname, self.filename) } + fn supported_properties(&self, user: &ArcUser) -> dav::PropName { dav::PropName(vec![ dav::PropertyRequest::DisplayName, @@ -790,12 +853,15 @@ impl DavNode for EventNode { } fn properties(&self, _user: &ArcUser, prop: dav::PropName) -> Vec> { prop.0.into_iter().map(|n| match n { - dav::PropertyRequest::DisplayName => dav::AnyProperty::Value(dav::Property::DisplayName(format!("{} event", self.event_file))), + dav::PropertyRequest::DisplayName => dav::AnyProperty::Value(dav::Property::DisplayName(format!("{} event", self.filename))), dav::PropertyRequest::ResourceType => dav::AnyProperty::Value(dav::Property::ResourceType(vec![])), dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("text/calendar".into())), dav::PropertyRequest::GetEtag => dav::AnyProperty::Value(dav::Property::GetEtag("\"abcdefg\"".into())), dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::CalendarData(req))) => - dav::AnyProperty::Value(dav::Property::Extension(all::Property::Cal(cal::Property::CalendarData(cal::CalendarDataPayload { mime: None, payload: FAKE_ICS.into() })))), + dav::AnyProperty::Value(dav::Property::Extension(all::Property::Cal(cal::Property::CalendarData(cal::CalendarDataPayload { + mime: None, + payload: FAKE_ICS.into() + })))), v => dav::AnyProperty::Request(v), }).collect() } -- cgit v1.2.3 From 66eac8ec7a420f20fa01bb77ccc25d964a404af3 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Sat, 6 Apr 2024 13:05:57 +0200 Subject: Refactor DAV --- aero-proto/src/dav.rs | 269 +++++++++++++++++++++++++++----------------------- 1 file changed, 146 insertions(+), 123 deletions(-) diff --git a/aero-proto/src/dav.rs b/aero-proto/src/dav.rs index 98767a1..424d4be 100644 --- a/aero-proto/src/dav.rs +++ b/aero-proto/src/dav.rs @@ -240,6 +240,7 @@ async fn router(user: std::sync::Arc, req: Request) -> Result return Ok(Response::builder() @@ -259,8 +260,8 @@ async fn router(user: std::sync::Arc, req: Request) -> Result { todo!(); }, - "PROPFIND" => propfind(user, req, node).await, - "REPORT" => report(user, req, node).await, + "PROPFIND" => response.propfind().await, + "REPORT" => response.report().await, _ => return Ok(Response::builder() .status(501) .body(text_body("HTTP Method not implemented"))?), @@ -282,123 +283,7 @@ const ALLPROP: [dav::PropertyRequest; 10] = [ // ---------- Building objects -async fn propfind(user: std::sync::Arc, req: Request, base_node: Box) -> Result>> { - let depth = depth(&req); - if matches!(depth, dav::Depth::Infinity) { - return Ok(Response::builder() - .status(501) - .body(text_body("Depth: Infinity not implemented"))?) - } - - let status = hyper::StatusCode::from_u16(207)?; - - // A client may choose not to submit a request body. An empty PROPFIND - // request body MUST be treated as if it were an 'allprop' request. 
- // @FIXME here we handle any invalid data as an allprop, an empty request is thus correctly - // handled, but corrupted requests are also silently handled as allprop. - let propfind = deserialize::>(req).await.unwrap_or_else(|_| dav::PropFind::::AllProp(None)); - tracing::debug!(recv=?propfind, "inferred propfind request"); - - // Collect nodes as PROPFIND is not limited at the targeted node - let mut nodes = vec![]; - if matches!(depth, dav::Depth::One | dav::Depth::Infinity) { - nodes.extend(base_node.children(&user).await); - } - nodes.push(base_node); - - // Expand properties request - let propname = match propfind { - dav::PropFind::PropName => None, - dav::PropFind::AllProp(None) => Some(dav::PropName(ALLPROP.to_vec())), - dav::PropFind::AllProp(Some(dav::Include(mut include))) => { - include.extend_from_slice(&ALLPROP); - Some(dav::PropName(include)) - }, - dav::PropFind::Prop(inner) => Some(inner), - }; - - // Not Found is currently impossible considering the way we designed this function - let not_found = vec![]; - serialize(status, multistatus(&user, nodes, not_found, propname)) -} - - -async fn report(user: std::sync::Arc, req: Request, node: Box) -> Result>> { - let status = hyper::StatusCode::from_u16(207)?; - - let report = match deserialize::>(req).await { - Ok(v) => v, - Err(e) => { - tracing::error!(err=?e, "unable to decode REPORT body"); - return Ok(Response::builder() - .status(400) - .body(text_body("Bad request"))?) - } - }; - - // Multiget is really like a propfind where Depth: 0|1|Infinity is replaced by an arbitrary - // list of URLs - let multiget = match report { - cal::Report::Multiget(m) => m, - _ => return Ok(Response::builder() - .status(501) - .body(text_body("Not implemented"))?), - }; - - // Getting the list of nodes - let (mut ok_node, mut not_found) = (Vec::new(), Vec::new()); - for h in multiget.href.into_iter() { - let maybe_collected_node = match Path::new(h.0.as_str()) { - Ok(Path::Abs(p)) => RootNode{}.fetch(&user, p.as_slice()).await.or(Err(h)), - Ok(Path::Rel(p)) => node.fetch(&user, p.as_slice()).await.or(Err(h)), - Err(_) => Err(h), - }; - - match maybe_collected_node { - Ok(v) => ok_node.push(v), - Err(h) => not_found.push(h), - }; - } - - // Getting props - let props = match multiget.selector { - None | Some(cal::CalendarSelector::AllProp) => Some(dav::PropName(ALLPROP.to_vec())), - Some(cal::CalendarSelector::PropName) => None, - Some(cal::CalendarSelector::Prop(inner)) => Some(inner), - }; - - serialize(status, multistatus(&user, ok_node, not_found, props)) -} - -fn multistatus(user: &ArcUser, nodes: Vec>, not_found: Vec, props: Option>) -> dav::Multistatus { - // Collect properties on existing objects - let mut responses: Vec> = match props { - Some(props) => nodes.into_iter().map(|n| n.response_props(user, props.clone())).collect(), - None => nodes.into_iter().map(|n| n.response_propname(user)).collect(), - }; - - // Register not found objects only if relevant - if !not_found.is_empty() { - responses.push(dav::Response { - status_or_propstat: dav::StatusOrPropstat::Status( - not_found, - dav::Status(hyper::StatusCode::NOT_FOUND), - ), - error: None, - location: None, - responsedescription: None, - }); - } - - // Build response - dav::Multistatus:: { - responses, - responsedescription: None, - } -} - // ---- HTTP DAV Binding - use futures::stream::TryStreamExt; use http_body_util::BodyStream; use http_body_util::StreamBody; @@ -473,20 +358,27 @@ async fn deserialize>(req: Request) -> Result { //--- use futures::{future, future::BoxFuture, 
future::FutureExt}; -trait DavNode: Send { - // ------- specialized logic - // recurence - // @FIXME not satisfied by BoxFutures but I have no better idea currently +/// A DAV node should implement the following methods +/// @FIXME not satisfied by BoxFutures but I have no better idea currently +trait DavNode: Send { + // recurence, filesystem hierarchy + /// This node direct children fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec>>; + /// Recursively fetch a child (progress inside the filesystem hierarchy) fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str]) -> BoxFuture<'a, Result>>; // node properties + /// Get the path fn path(&self, user: &ArcUser) -> String; + /// Get the supported WebDAV properties fn supported_properties(&self, user: &ArcUser) -> dav::PropName; + /// Get the values for the given properties fn properties(&self, user: &ArcUser, prop: dav::PropName) -> Vec>; - // --- shared + //@FIXME maybe add etag, maybe add a way to set content + + /// Utility function to get a propname response from a node fn response_propname(&self, user: &ArcUser) -> dav::Response { dav::Response { status_or_propstat: dav::StatusOrPropstat::PropStat( @@ -506,6 +398,7 @@ trait DavNode: Send { } } + /// Utility function to get a prop response from a node & a list of propname fn response_props(&self, user: &ArcUser, props: dav::PropName) -> dav::Response { let mut prop_desc = vec![]; let (found, not_found): (Vec<_>, Vec<_>) = self.properties(user, props).into_iter().partition(|v| matches!(v, dav::AnyProperty::Value(_))); @@ -542,6 +435,136 @@ trait DavNode: Send { } } +struct DavResponse { + node: Box, + user: std::sync::Arc, + req: Request, +} +impl DavResponse { + // --- Public API --- + + /// REPORT has been first described in the "Versioning Extension" of WebDAV + /// It allows more complex queries compared to PROPFIND + /// + /// Note: current implementation is not generic at all, it is heavily tied to CalDAV. + /// A rewrite would be required to make it more generic (with the extension system that has + /// been introduced in aero-dav) + async fn report(self) -> Result>> { + let status = hyper::StatusCode::from_u16(207)?; + + let report = match deserialize::>(self.req).await { + Ok(v) => v, + Err(e) => { + tracing::error!(err=?e, "unable to decode REPORT body"); + return Ok(Response::builder() + .status(400) + .body(text_body("Bad request"))?) 
+ } + }; + + // Multiget is really like a propfind where Depth: 0|1|Infinity is replaced by an arbitrary + // list of URLs + let multiget = match report { + cal::Report::Multiget(m) => m, + _ => return Ok(Response::builder() + .status(501) + .body(text_body("Not implemented"))?), + }; + + // Getting the list of nodes + let (mut ok_node, mut not_found) = (Vec::new(), Vec::new()); + for h in multiget.href.into_iter() { + let maybe_collected_node = match Path::new(h.0.as_str()) { + Ok(Path::Abs(p)) => RootNode{}.fetch(&self.user, p.as_slice()).await.or(Err(h)), + Ok(Path::Rel(p)) => self.node.fetch(&self.user, p.as_slice()).await.or(Err(h)), + Err(_) => Err(h), + }; + + match maybe_collected_node { + Ok(v) => ok_node.push(v), + Err(h) => not_found.push(h), + }; + } + + // Getting props + let props = match multiget.selector { + None | Some(cal::CalendarSelector::AllProp) => Some(dav::PropName(ALLPROP.to_vec())), + Some(cal::CalendarSelector::PropName) => None, + Some(cal::CalendarSelector::Prop(inner)) => Some(inner), + }; + + serialize(status, Self::multistatus(&self.user, ok_node, not_found, props)) + } + + /// PROPFIND is the standard way to fetch WebDAV properties + async fn propfind(self) -> Result>> { + let depth = depth(&self.req); + if matches!(depth, dav::Depth::Infinity) { + return Ok(Response::builder() + .status(501) + .body(text_body("Depth: Infinity not implemented"))?) + } + + let status = hyper::StatusCode::from_u16(207)?; + + // A client may choose not to submit a request body. An empty PROPFIND + // request body MUST be treated as if it were an 'allprop' request. + // @FIXME here we handle any invalid data as an allprop, an empty request is thus correctly + // handled, but corrupted requests are also silently handled as allprop. + let propfind = deserialize::>(self.req).await.unwrap_or_else(|_| dav::PropFind::::AllProp(None)); + tracing::debug!(recv=?propfind, "inferred propfind request"); + + // Collect nodes as PROPFIND is not limited to the targeted node + let mut nodes = vec![]; + if matches!(depth, dav::Depth::One | dav::Depth::Infinity) { + nodes.extend(self.node.children(&self.user).await); + } + nodes.push(self.node); + + // Expand properties request + let propname = match propfind { + dav::PropFind::PropName => None, + dav::PropFind::AllProp(None) => Some(dav::PropName(ALLPROP.to_vec())), + dav::PropFind::AllProp(Some(dav::Include(mut include))) => { + include.extend_from_slice(&ALLPROP); + Some(dav::PropName(include)) + }, + dav::PropFind::Prop(inner) => Some(inner), + }; + + // Not Found is currently impossible considering the way we designed this function + let not_found = vec![]; + serialize(status, Self::multistatus(&self.user, nodes, not_found, propname)) + } + + // --- Internal functions --- + /// Utility function to build a multistatus response from + /// a list of DavNodes + fn multistatus(user: &ArcUser, nodes: Vec>, not_found: Vec, props: Option>) -> dav::Multistatus { + // Collect properties on existing objects + let mut responses: Vec> = match props { + Some(props) => nodes.into_iter().map(|n| n.response_props(user, props.clone())).collect(), + None => nodes.into_iter().map(|n| n.response_propname(user)).collect(), + }; + + // Register not found objects only if relevant + if !not_found.is_empty() { + responses.push(dav::Response { + status_or_propstat: dav::StatusOrPropstat::Status(not_found, dav::Status(hyper::StatusCode::NOT_FOUND)), + error: None, + location: None, + responsedescription: None, + }); + } + + // Build response + dav::Multistatus:: { + 
responses, + responsedescription: None, + } + } +} + #[derive(Clone)] struct RootNode {} impl DavNode for RootNode { -- cgit v1.2.3 From 2bda8ef081d9c8f47081845bb4545a12b6ae8a18 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Thu, 18 Apr 2024 13:55:57 +0200 Subject: split dav module in multiple files --- aero-proto/src/dav.rs | 935 --------------------------------------- aero-proto/src/dav/codec.rs | 80 ++++ aero-proto/src/dav/middleware.rs | 70 +++ aero-proto/src/dav/mod.rs | 822 ++++++++++++++++++++++++++++++++++ 4 files changed, 972 insertions(+), 935 deletions(-) delete mode 100644 aero-proto/src/dav.rs create mode 100644 aero-proto/src/dav/codec.rs create mode 100644 aero-proto/src/dav/middleware.rs create mode 100644 aero-proto/src/dav/mod.rs diff --git a/aero-proto/src/dav.rs b/aero-proto/src/dav.rs deleted file mode 100644 index 424d4be..0000000 --- a/aero-proto/src/dav.rs +++ /dev/null @@ -1,935 +0,0 @@ -use std::net::SocketAddr; -use std::sync::Arc; - -use anyhow::{anyhow, bail, Result}; -use base64::Engine; -use hyper::service::service_fn; -use hyper::{Request, Response, body::Bytes}; -use hyper::server::conn::http1 as http; -use hyper::rt::{Read, Write}; -use hyper::body::Incoming; -use hyper_util::rt::TokioIo; -use http_body_util::Full; -use futures::stream::{FuturesUnordered, StreamExt}; -use tokio::net::TcpListener; -use tokio::sync::watch; -use tokio_rustls::TlsAcceptor; -use tokio::net::TcpStream; -use tokio::io::{AsyncRead, AsyncWrite}; -use rustls_pemfile::{certs, private_key}; - -use aero_user::config::{DavConfig, DavUnsecureConfig}; -use aero_user::login::ArcLoginProvider; -use aero_collections::{user::User, calendar::Calendar, davdag::BlobId}; -use aero_dav::types as dav; -use aero_dav::caltypes as cal; -use aero_dav::acltypes as acl; -use aero_dav::realization::{All, self as all}; -use aero_dav::xml as dxml; - -type ArcUser = std::sync::Arc; - -pub struct Server { - bind_addr: SocketAddr, - login_provider: ArcLoginProvider, - tls: Option, -} - -pub fn new_unsecure(config: DavUnsecureConfig, login: ArcLoginProvider) -> Server { - Server { - bind_addr: config.bind_addr, - login_provider: login, - tls: None, - } -} - -pub fn new(config: DavConfig, login: ArcLoginProvider) -> Result { - let loaded_certs = certs(&mut std::io::BufReader::new(std::fs::File::open( - config.certs, - )?)) - .collect::, _>>()?; - let loaded_key = private_key(&mut std::io::BufReader::new(std::fs::File::open( - config.key, - )?))? - .unwrap(); - - let tls_config = rustls::ServerConfig::builder() - .with_no_client_auth() - .with_single_cert(loaded_certs, loaded_key)?; - let acceptor = TlsAcceptor::from(Arc::new(tls_config)); - - Ok(Server { - bind_addr: config.bind_addr, - login_provider: login, - tls: Some(acceptor), - }) -} - -trait Stream: Read + Write + Send + Unpin {} -impl Stream for TokioIo {} - -impl Server { - pub async fn run(self: Self, mut must_exit: watch::Receiver) -> Result<()> { - let tcp = TcpListener::bind(self.bind_addr).await?; - tracing::info!("DAV server listening on {:#}", self.bind_addr); - - let mut connections = FuturesUnordered::new(); - while !*must_exit.borrow() { - let wait_conn_finished = async { - if connections.is_empty() { - futures::future::pending().await - } else { - connections.next().await - } - }; - let (socket, remote_addr) = tokio::select! 
{ - a = tcp.accept() => a?, - _ = wait_conn_finished => continue, - _ = must_exit.changed() => continue, - }; - tracing::info!("Accepted connection from {}", remote_addr); - let stream = match self.build_stream(socket).await { - Ok(v) => v, - Err(e) => { - tracing::error!(err=?e, "TLS acceptor failed"); - continue - } - }; - - let login = self.login_provider.clone(); - let conn = tokio::spawn(async move { - //@FIXME should create a generic "public web" server on which "routers" could be - //abitrarily bound - //@FIXME replace with a handler supporting http2 and TLS - - match http::Builder::new().serve_connection(stream, service_fn(|req: Request| { - let login = login.clone(); - tracing::info!("{:?} {:?}", req.method(), req.uri()); - async { - match auth(login, req).await { - Ok(v) => Ok(v), - Err(e) => { - tracing::error!(err=?e, "internal error"); - Response::builder() - .status(500) - .body(text_body("Internal error")) - }, - } - } - })).await { - Err(e) => tracing::warn!(err=?e, "connection failed"), - Ok(()) => tracing::trace!("connection terminated with success"), - } - }); - connections.push(conn); - } - drop(tcp); - - tracing::info!("Server shutting down, draining remaining connections..."); - while connections.next().await.is_some() {} - - Ok(()) - } - - async fn build_stream(&self, socket: TcpStream) -> Result> { - match self.tls.clone() { - Some(acceptor) => { - let stream = acceptor.accept(socket).await?; - Ok(Box::new(TokioIo::new(stream))) - } - None => Ok(Box::new(TokioIo::new(socket))), - } - } -} - -use http_body_util::BodyExt; - -//@FIXME We should not support only BasicAuth -async fn auth( - login: ArcLoginProvider, - req: Request, -) -> Result>> { - let auth_val = match req.headers().get(hyper::header::AUTHORIZATION) { - Some(hv) => hv.to_str()?, - None => { - tracing::info!("Missing authorization field"); - return Ok(Response::builder() - .status(401) - .header("WWW-Authenticate", "Basic realm=\"Aerogramme\"") - .body(text_body("Missing Authorization field"))?) - }, - }; - - let b64_creds_maybe_padded = match auth_val.split_once(" ") { - Some(("Basic", b64)) => b64, - _ => { - tracing::info!("Unsupported authorization field"); - return Ok(Response::builder() - .status(400) - .body(text_body("Unsupported Authorization field"))?) - }, - }; - - // base64urlencoded may have trailing equals, base64urlsafe has not - // theoretically authorization is padded but "be liberal in what you accept" - let b64_creds_clean = b64_creds_maybe_padded.trim_end_matches('='); - - // Decode base64 - let creds = base64::engine::general_purpose::STANDARD_NO_PAD.decode(b64_creds_clean)?; - let str_creds = std::str::from_utf8(&creds)?; - - // Split username and password - let (username, password) = str_creds - .split_once(':') - .ok_or(anyhow!("Missing colon in Authorization, can't split decoded value into a username/password pair"))?; - - // Call login provider - let creds = match login.login(username, password).await { - Ok(c) => c, - Err(_) => { - tracing::info!(user=username, "Wrong credentials"); - return Ok(Response::builder() - .status(401) - .header("WWW-Authenticate", "Basic realm=\"Aerogramme\"") - .body(text_body("Wrong credentials"))?) - }, - }; - - // Build a user - let user = User::new(username.into(), creds).await?; - - // Call router with user - router(user, req).await -} - -/// Path is a voluntarily feature limited -/// compared to the expressiveness of a UNIX path -/// For example getting parent with ../ is not supported, scheme is not supported, etc. 
-/// More complex support could be added later if needed by clients -enum Path<'a> { - Abs(Vec<&'a str>), - Rel(Vec<&'a str>), -} -impl<'a> Path<'a> { - fn new(path: &'a str) -> Result { - // This check is naive, it does not aim at detecting all fully qualified - // URL or protect from any attack, its only goal is to help debugging. - if path.starts_with("http://") || path.starts_with("https://") { - anyhow::bail!("Full URL are not supported") - } - - let path_segments: Vec<_> = path.split("/").filter(|s| *s != "" && *s != ".").collect(); - if path.starts_with("/") { - return Ok(Path::Abs(path_segments)) - } - Ok(Path::Rel(path_segments)) - } -} - -async fn router(user: std::sync::Arc, req: Request) -> Result>> { - let path = req.uri().path().to_string(); - let path_segments: Vec<_> = path.split("/").filter(|s| *s != "").collect(); - let method = req.method().as_str().to_uppercase(); - - let node = match (RootNode {}).fetch(&user, &path_segments).await { - Ok(v) => v, - Err(e) => { - tracing::warn!(err=?e, "dav node fetch failed"); - return Ok(Response::builder() - .status(404) - .body(text_body("Resource not found"))?) - } - }; - let response = DavResponse { node, user, req }; - - match method.as_str() { - "OPTIONS" => return Ok(Response::builder() - .status(200) - .header("DAV", "1") - .header("Allow", "HEAD,GET,PUT,OPTIONS,DELETE,PROPFIND,PROPPATCH,MKCOL,COPY,MOVE,LOCK,UNLOCK,MKCALENDAR,REPORT") - .body(text_body(""))?), - "HEAD" | "GET" => { - tracing::warn!("HEAD+GET not correctly implemented"); - return Ok(Response::builder() - .status(404) - .body(text_body(""))?) - }, - "PUT" => { - todo!(); - }, - "DELETE" => { - todo!(); - }, - "PROPFIND" => response.propfind().await, - "REPORT" => response.report().await, - _ => return Ok(Response::builder() - .status(501) - .body(text_body("HTTP Method not implemented"))?), - } -} - -const ALLPROP: [dav::PropertyRequest; 10] = [ - dav::PropertyRequest::CreationDate, - dav::PropertyRequest::DisplayName, - dav::PropertyRequest::GetContentLanguage, - dav::PropertyRequest::GetContentLength, - dav::PropertyRequest::GetContentType, - dav::PropertyRequest::GetEtag, - dav::PropertyRequest::GetLastModified, - dav::PropertyRequest::LockDiscovery, - dav::PropertyRequest::ResourceType, - dav::PropertyRequest::SupportedLock, -]; - -// ---------- Building objects - -// ---- HTTP DAV Binding -use futures::stream::TryStreamExt; -use http_body_util::BodyStream; -use http_body_util::StreamBody; -use http_body_util::combinators::BoxBody; -use hyper::body::Frame; -use tokio_util::sync::PollSender; -use std::io::{Error, ErrorKind}; -use futures::sink::SinkExt; -use tokio_util::io::{SinkWriter, CopyToBytes}; - -fn depth(req: &Request) -> dav::Depth { - match req.headers().get("Depth").map(hyper::header::HeaderValue::to_str) { - Some(Ok("0")) => dav::Depth::Zero, - Some(Ok("1")) => dav::Depth::One, - Some(Ok("Infinity")) => dav::Depth::Infinity, - _ => dav::Depth::Zero, - } -} - -fn text_body(txt: &'static str) -> BoxBody { - BoxBody::new(Full::new(Bytes::from(txt)).map_err(|e| match e {})) -} - -fn serialize(status_ok: hyper::StatusCode, elem: T) -> Result>> { - let (tx, rx) = tokio::sync::mpsc::channel::(1); - - // Build the writer - tokio::task::spawn(async move { - let sink = PollSender::new(tx).sink_map_err(|_| Error::from(ErrorKind::BrokenPipe)); - let mut writer = SinkWriter::new(CopyToBytes::new(sink)); - let q = quick_xml::writer::Writer::new_with_indent(&mut writer, b' ', 4); - let ns_to_apply = vec![ ("xmlns:D".into(), "DAV:".into()), ("xmlns:C".into(), 
"urn:ietf:params:xml:ns:caldav".into()) ]; - let mut qwriter = dxml::Writer { q, ns_to_apply }; - let decl = quick_xml::events::BytesDecl::from_start(quick_xml::events::BytesStart::from_content("xml version=\"1.0\" encoding=\"utf-8\"", 0)); - match qwriter.q.write_event_async(quick_xml::events::Event::Decl(decl)).await { - Ok(_) => (), - Err(e) => tracing::error!(err=?e, "unable to write XML declaration "), - } - match elem.qwrite(&mut qwriter).await { - Ok(_) => tracing::debug!("fully serialized object"), - Err(e) => tracing::error!(err=?e, "failed to serialize object"), - } - }); - - - // Build the reader - let recv = tokio_stream::wrappers::ReceiverStream::new(rx); - let stream = StreamBody::new(recv.map(|v| Ok(Frame::data(v)))); - let boxed_body = BoxBody::new(stream); - - let response = Response::builder() - .status(status_ok) - .header("content-type", "application/xml; charset=\"utf-8\"") - .body(boxed_body)?; - - Ok(response) -} - - -/// Deserialize a request body to an XML request -async fn deserialize>(req: Request) -> Result { - let stream_of_frames = BodyStream::new(req.into_body()); - let stream_of_bytes = stream_of_frames - .try_filter_map(|frame| async move { Ok(frame.into_data().ok()) }) - .map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err)); - let async_read = tokio_util::io::StreamReader::new(stream_of_bytes); - let async_read = std::pin::pin!(async_read); - let mut rdr = dxml::Reader::new(quick_xml::reader::NsReader::from_reader(async_read)).await?; - let parsed = rdr.find::().await?; - Ok(parsed) -} - -//--- -use futures::{future, future::BoxFuture, future::FutureExt}; - -/// A DAV node should implement the following methods -/// @FIXME not satisfied by BoxFutures but I have no better idea currently -trait DavNode: Send { - // recurence, filesystem hierarchy - /// This node direct children - fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec>>; - /// Recursively fetch a child (progress inside the filesystem hierarchy) - fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str]) -> BoxFuture<'a, Result>>; - - // node properties - /// Get the path - fn path(&self, user: &ArcUser) -> String; - /// Get the supported WebDAV properties - fn supported_properties(&self, user: &ArcUser) -> dav::PropName; - /// Get the values for the given properties - fn properties(&self, user: &ArcUser, prop: dav::PropName) -> Vec>; - - //@FIXME maybe add etag, maybe add a way to set content - - /// Utility function to get a propname response from a node - fn response_propname(&self, user: &ArcUser) -> dav::Response { - dav::Response { - status_or_propstat: dav::StatusOrPropstat::PropStat( - dav::Href(self.path(user)), - vec![ - dav::PropStat { - status: dav::Status(hyper::StatusCode::OK), - prop: dav::AnyProp(self.supported_properties(user).0.into_iter().map(dav::AnyProperty::Request).collect()), - error: None, - responsedescription: None, - } - ], - ), - error: None, - location: None, - responsedescription: None - } - } - - /// Utility function to get a prop response from a node & a list of propname - fn response_props(&self, user: &ArcUser, props: dav::PropName) -> dav::Response { - let mut prop_desc = vec![]; - let (found, not_found): (Vec<_>, Vec<_>) = self.properties(user, props).into_iter().partition(|v| matches!(v, dav::AnyProperty::Value(_))); - - // If at least one property has been found on this object, adding a HTTP 200 propstat to - // the response - if !found.is_empty() { - prop_desc.push(dav::PropStat { - status: dav::Status(hyper::StatusCode::OK), - prop: 
dav::AnyProp(found), - error: None, - responsedescription: None, - }); - } - - // If at least one property can't be found on this object, adding a HTTP 404 propstat to - // the response - if !not_found.is_empty() { - prop_desc.push(dav::PropStat { - status: dav::Status(hyper::StatusCode::NOT_FOUND), - prop: dav::AnyProp(not_found), - error: None, - responsedescription: None, - }) - } - - // Build the finale response - dav::Response { - status_or_propstat: dav::StatusOrPropstat::PropStat(dav::Href(self.path(user)), prop_desc), - error: None, - location: None, - responsedescription: None - } - } -} - -struct DavResponse { - node: Box, - user: std::sync::Arc, - req: Request, -} -impl DavResponse { - // --- Public API --- - - /// REPORT has been first described in the "Versioning Extension" of WebDAV - /// It allows more complex queries compared to PROPFIND - /// - /// Note: current implementation is not generic at all, it is heavily tied to CalDAV. - /// A rewrite would be required to make it more generic (with the extension system that has - /// been introduced in aero-dav) - async fn report(self) -> Result>> { - let status = hyper::StatusCode::from_u16(207)?; - - let report = match deserialize::>(self.req).await { - Ok(v) => v, - Err(e) => { - tracing::error!(err=?e, "unable to decode REPORT body"); - return Ok(Response::builder() - .status(400) - .body(text_body("Bad request"))?) - } - }; - - // Multiget is really like a propfind where Depth: 0|1|Infinity is replaced by an arbitrary - // list of URLs - let multiget = match report { - cal::Report::Multiget(m) => m, - _ => return Ok(Response::builder() - .status(501) - .body(text_body("Not implemented"))?), - }; - - // Getting the list of nodes - let (mut ok_node, mut not_found) = (Vec::new(), Vec::new()); - for h in multiget.href.into_iter() { - let maybe_collected_node = match Path::new(h.0.as_str()) { - Ok(Path::Abs(p)) => RootNode{}.fetch(&self.user, p.as_slice()).await.or(Err(h)), - Ok(Path::Rel(p)) => self.node.fetch(&self.user, p.as_slice()).await.or(Err(h)), - Err(_) => Err(h), - }; - - match maybe_collected_node { - Ok(v) => ok_node.push(v), - Err(h) => not_found.push(h), - }; - } - - // Getting props - let props = match multiget.selector { - None | Some(cal::CalendarSelector::AllProp) => Some(dav::PropName(ALLPROP.to_vec())), - Some(cal::CalendarSelector::PropName) => None, - Some(cal::CalendarSelector::Prop(inner)) => Some(inner), - }; - - serialize(status, Self::multistatus(&self.user, ok_node, not_found, props)) - } - - /// PROPFIND is the standard way to fetch WebDAV properties - async fn propfind(self) -> Result>> { - let depth = depth(&self.req); - if matches!(depth, dav::Depth::Infinity) { - return Ok(Response::builder() - .status(501) - .body(text_body("Depth: Infinity not implemented"))?) - } - - let status = hyper::StatusCode::from_u16(207)?; - - // A client may choose not to submit a request body. An empty PROPFIND - // request body MUST be treated as if it were an 'allprop' request. - // @FIXME here we handle any invalid data as an allprop, an empty request is thus correctly - // handled, but corrupted requests are also silently handled as allprop. 
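A possible way to address the @FIXME above, sketched here purely as an illustration and not as part of this patch: buffer the body first, map a genuinely empty body to allprop as RFC 4918 §9.1 requires, and let XML errors surface as a 400 Bad Request instead of silently degrading to allprop. The `interpret_propfind` helper and its `parse` argument are hypothetical.

use aero_dav::realization::All;
use aero_dav::types as dav;

/// Hypothetical helper: `raw` is the already-buffered request body and `parse`
/// is whatever XML decoder the server uses (e.g. the codec deserializer).
fn interpret_propfind(
    raw: &[u8],
    parse: impl FnOnce(&[u8]) -> anyhow::Result<dav::PropFind<All>>,
) -> anyhow::Result<dav::PropFind<All>> {
    if raw.iter().all(|b| b.is_ascii_whitespace()) {
        // RFC 4918 §9.1: an empty PROPFIND body MUST be treated as allprop.
        return Ok(dav::PropFind::AllProp(None));
    }
    // A non-empty body that fails to parse is a client error: bubble the error
    // up so the caller can answer 400 Bad Request instead of guessing allprop.
    parse(raw)
}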
- let propfind = deserialize::>(self.req).await.unwrap_or_else(|_| dav::PropFind::::AllProp(None)); - tracing::debug!(recv=?propfind, "inferred propfind request"); - - // Collect nodes as PROPFIND is not limited to the targeted node - let mut nodes = vec![]; - if matches!(depth, dav::Depth::One | dav::Depth::Infinity) { - nodes.extend(self.node.children(&self.user).await); - } - nodes.push(self.node); - - // Expand properties request - let propname = match propfind { - dav::PropFind::PropName => None, - dav::PropFind::AllProp(None) => Some(dav::PropName(ALLPROP.to_vec())), - dav::PropFind::AllProp(Some(dav::Include(mut include))) => { - include.extend_from_slice(&ALLPROP); - Some(dav::PropName(include)) - }, - dav::PropFind::Prop(inner) => Some(inner), - }; - - // Not Found is currently impossible considering the way we designed this function - let not_found = vec![]; - serialize(status, Self::multistatus(&self.user, nodes, not_found, propname)) - } - - // --- Internal functions --- - /// Utility function to build a multistatus response from - /// a list of DavNodes - fn multistatus(user: &ArcUser, nodes: Vec>, not_found: Vec, props: Option>) -> dav::Multistatus { - // Collect properties on existing objects - let mut responses: Vec> = match props { - Some(props) => nodes.into_iter().map(|n| n.response_props(user, props.clone())).collect(), - None => nodes.into_iter().map(|n| n.response_propname(user)).collect(), - }; - - // Register not found objects only if relevant - if !not_found.is_empty() { - responses.push(dav::Response { - status_or_propstat: dav::StatusOrPropstat::Status(not_found, dav::Status(hyper::StatusCode::NOT_FOUND)), - error: None, - location: None, - responsedescription: None, - }); - } - - // Build response - dav::Multistatus:: { - responses, - responsedescription: None, - } - } -} - -#[derive(Clone)] -struct RootNode {} -impl DavNode for RootNode { - fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str]) -> BoxFuture<'a, Result>> { - if path.len() == 0 { - let this = self.clone(); - return async { Ok(Box::new(this) as Box) }.boxed(); - } - - if path[0] == user.username { - let child = Box::new(HomeNode {}); - return child.fetch(user, &path[1..]); - } - - async { Err(anyhow!("Not found")) }.boxed() - } - - fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec>> { - async { vec![Box::new(HomeNode { }) as Box] }.boxed() - } - - fn path(&self, user: &ArcUser) -> String { - "/".into() - } - - fn supported_properties(&self, user: &ArcUser) -> dav::PropName { - dav::PropName(vec![ - dav::PropertyRequest::DisplayName, - dav::PropertyRequest::ResourceType, - dav::PropertyRequest::GetContentType, - dav::PropertyRequest::Extension(all::PropertyRequest::Acl(acl::PropertyRequest::CurrentUserPrincipal)), - ]) - } - fn properties(&self, user: &ArcUser, prop: dav::PropName) -> Vec> { - prop.0.into_iter().map(|n| match n { - dav::PropertyRequest::DisplayName => dav::AnyProperty::Value(dav::Property::DisplayName("DAV Root".to_string())), - dav::PropertyRequest::ResourceType => dav::AnyProperty::Value(dav::Property::ResourceType(vec![ - dav::ResourceType::Collection, - ])), - dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("httpd/unix-directory".into())), - dav::PropertyRequest::Extension(all::PropertyRequest::Acl(acl::PropertyRequest::CurrentUserPrincipal)) => - dav::AnyProperty::Value(dav::Property::Extension(all::Property::Acl(acl::Property::CurrentUserPrincipal(acl::User::Authenticated(dav::Href(HomeNode{}.path(user))))))), - v 
=> dav::AnyProperty::Request(v), - }).collect() - } -} - -#[derive(Clone)] -struct HomeNode {} -impl DavNode for HomeNode { - fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str]) -> BoxFuture<'a, Result>> { - if path.len() == 0 { - let node = Box::new(self.clone()) as Box; - return async { Ok(node) }.boxed() - } - - if path[0] == "calendar" { - return async { - let child = Box::new(CalendarListNode::new(user).await?); - child.fetch(user, &path[1..]).await - }.boxed(); - } - - async { Err(anyhow!("Not found")) }.boxed() - } - - fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec>> { - async { - CalendarListNode::new(user).await - .map(|c| vec![Box::new(c) as Box]) - .unwrap_or(vec![]) - }.boxed() - } - - fn path(&self, user: &ArcUser) -> String { - format!("/{}/", user.username) - } - - fn supported_properties(&self, user: &ArcUser) -> dav::PropName { - dav::PropName(vec![ - dav::PropertyRequest::DisplayName, - dav::PropertyRequest::ResourceType, - dav::PropertyRequest::GetContentType, - dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::CalendarHomeSet)), - ]) - } - fn properties(&self, user: &ArcUser, prop: dav::PropName) -> Vec> { - prop.0.into_iter().map(|n| match n { - dav::PropertyRequest::DisplayName => dav::AnyProperty::Value(dav::Property::DisplayName(format!("{} home", user.username))), - dav::PropertyRequest::ResourceType => dav::AnyProperty::Value(dav::Property::ResourceType(vec![ - dav::ResourceType::Collection, - dav::ResourceType::Extension(all::ResourceType::Acl(acl::ResourceType::Principal)), - ])), - dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("httpd/unix-directory".into())), - dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::CalendarHomeSet)) => - dav::AnyProperty::Value(dav::Property::Extension(all::Property::Cal(cal::Property::CalendarHomeSet(dav::Href(/*CalendarListNode{}.path(user)*/ todo!()))))), - v => dav::AnyProperty::Request(v), - }).collect() - } -} - -#[derive(Clone)] -struct CalendarListNode { - list: Vec, -} -impl CalendarListNode { - async fn new(user: &ArcUser) -> Result { - let list = user.calendars.list(user).await?; - Ok(Self { list }) - } -} -impl DavNode for CalendarListNode { - fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str]) -> BoxFuture<'a, Result>> { - if path.len() == 0 { - let node = Box::new(self.clone()) as Box; - return async { Ok(node) }.boxed(); - } - - async { - let cal = user.calendars.open(user, path[0]).await?.ok_or(anyhow!("Not found"))?; - let child = Box::new(CalendarNode { - col: cal, - calname: path[0].to_string() - }); - child.fetch(user, &path[1..]).await - }.boxed() - } - - fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec>> { - let list = self.list.clone(); - async move { - //@FIXME maybe we want to be lazy here?! 
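The children() implementation that follows eagerly opens every listed calendar and silently drops the ones that fail to open. A minimal, self-contained illustration of that stream combinator pattern, with toy stand-ins that are not part of this patch:

use futures::stream::{self, StreamExt};

// Stand-in for user.calendars.open(...): pretend "broken" cannot be opened.
async fn open(name: &str) -> Option<String> {
    (name != "broken").then(|| format!("calendar:{name}"))
}

#[tokio::main]
async fn main() {
    let names = vec!["personal", "broken", "work"];
    // Walk the list, try to open each entry, and skip the failures.
    let opened: Vec<String> = stream::iter(names)
        .filter_map(|name| async move { open(name).await })
        .collect()
        .await;
    assert_eq!(opened, vec!["calendar:personal", "calendar:work"]);
}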
- futures::stream::iter(list.iter()) - .filter_map(|name| async move { - user.calendars.open(user, name).await - .ok() - .flatten() - .map(|v| (name, v)) - }) - .map(|(name, cal)| Box::new(CalendarNode { - col: cal, - calname: name.to_string(), - }) as Box) - .collect::>>() - .await - }.boxed() - } - - fn path(&self, user: &ArcUser) -> String { - format!("/{}/calendar/", user.username) - } - - fn supported_properties(&self, user: &ArcUser) -> dav::PropName { - dav::PropName(vec![ - dav::PropertyRequest::DisplayName, - dav::PropertyRequest::ResourceType, - dav::PropertyRequest::GetContentType, - ]) - } - fn properties(&self, user: &ArcUser, prop: dav::PropName) -> Vec> { - prop.0.into_iter().map(|n| match n { - dav::PropertyRequest::DisplayName => dav::AnyProperty::Value(dav::Property::DisplayName(format!("{} calendars", user.username))), - dav::PropertyRequest::ResourceType => dav::AnyProperty::Value(dav::Property::ResourceType(vec![dav::ResourceType::Collection])), - dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("httpd/unix-directory".into())), - v => dav::AnyProperty::Request(v), - }).collect() - } -} - -#[derive(Clone)] -struct CalendarNode { - col: Arc, - calname: String, -} -impl DavNode for CalendarNode { - fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str]) -> BoxFuture<'a, Result>> { - if path.len() == 0 { - let node = Box::new(self.clone()) as Box; - return async { Ok(node) }.boxed() - } - - let col = self.col.clone(); - let calname = self.calname.clone(); - async move { - if let Some(blob_id) = col.dag().await.idx_by_filename.get(path[0]) { - let child = Box::new(EventNode { - col: col.clone(), - calname, - filename: path[0].to_string(), - blob_id: *blob_id, - }); - return child.fetch(user, &path[1..]).await - } - - Err(anyhow!("Not found")) - }.boxed() - } - - fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec>> { - let col = self.col.clone(); - let calname = self.calname.clone(); - - async move { - col.dag().await.idx_by_filename.iter().map(|(filename, blob_id)| { - Box::new(EventNode { - col: col.clone(), - calname: calname.clone(), - filename: filename.to_string(), - blob_id: *blob_id, - }) as Box - }).collect() - }.boxed() - } - - fn path(&self, user: &ArcUser) -> String { - format!("/{}/calendar/{}/", user.username, self.calname) - } - - fn supported_properties(&self, user: &ArcUser) -> dav::PropName { - dav::PropName(vec![ - dav::PropertyRequest::DisplayName, - dav::PropertyRequest::ResourceType, - dav::PropertyRequest::GetContentType, - dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::SupportedCalendarComponentSet)), - ]) - } - fn properties(&self, _user: &ArcUser, prop: dav::PropName) -> Vec> { - prop.0.into_iter().map(|n| match n { - dav::PropertyRequest::DisplayName => dav::AnyProperty::Value(dav::Property::DisplayName(format!("{} calendar", self.calname))), - dav::PropertyRequest::ResourceType => dav::AnyProperty::Value(dav::Property::ResourceType(vec![ - dav::ResourceType::Collection, - dav::ResourceType::Extension(all::ResourceType::Cal(cal::ResourceType::Calendar)), - ])), - //dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("httpd/unix-directory".into())), - //@FIXME seems wrong but seems to be what Thunderbird expects... 
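To inspect what a CalDAV client such as Thunderbird actually receives from a collection like this one, a PROPFIND can be issued by hand. The snippet below is only a debugging sketch using the reqwest crate, which is not a dependency of this patch; the listen address, credentials and calendar name are placeholders.

use reqwest::{Client, Method};

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // Ask for the properties the calendar collection advertises.
    let body = r#"<?xml version="1.0" encoding="utf-8"?>
<D:propfind xmlns:D="DAV:">
    <D:prop><D:resourcetype/><D:getcontenttype/><D:displayname/></D:prop>
</D:propfind>"#;

    let resp = Client::new()
        .request(Method::from_bytes(b"PROPFIND")?, "http://127.0.0.1:8087/alice/calendar/personal/")
        .basic_auth("alice", Some("hunter2"))
        .header("Depth", "1")
        .body(body)
        .send()
        .await?;

    // Print the multistatus XML returned by the server.
    println!("{}", resp.text().await?);
    Ok(())
}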
- dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("text/calendar".into())), - dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::SupportedCalendarComponentSet)) - => dav::AnyProperty::Value(dav::Property::Extension(all::Property::Cal(cal::Property::SupportedCalendarComponentSet(vec![ - cal::CompSupport(cal::Component::VEvent), - ])))), - v => dav::AnyProperty::Request(v), - }).collect() - } -} - -const FAKE_ICS: &str = r#"BEGIN:VCALENDAR -VERSION:2.0 -PRODID:-//Example Corp.//CalDAV Client//EN -BEGIN:VTIMEZONE -LAST-MODIFIED:20040110T032845Z -TZID:US/Eastern -BEGIN:DAYLIGHT -DTSTART:20000404T020000 -RRULE:FREQ=YEARLY;BYDAY=1SU;BYMONTH=4 -TZNAME:EDT -TZOFFSETFROM:-0500 -TZOFFSETTO:-0400 -END:DAYLIGHT -BEGIN:STANDARD -DTSTART:20001026T020000 -RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10 -TZNAME:EST -TZOFFSETFROM:-0400 -TZOFFSETTO:-0500 -END:STANDARD -END:VTIMEZONE -BEGIN:VEVENT -DTSTAMP:20240406T001102Z -DTSTART;TZID=US/Eastern:20240406T100000 -DURATION:PT1H -SUMMARY:Event #1 -Description:Go Steelers! -UID:74855313FA803DA593CD579A@example.com -END:VEVENT -END:VCALENDAR"#; - -#[derive(Clone)] -struct EventNode { - col: Arc, - calname: String, - filename: String, - blob_id: BlobId, -} -impl DavNode for EventNode { - fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str]) -> BoxFuture<'a, Result>> { - if path.len() == 0 { - let node = Box::new(self.clone()) as Box; - return async { Ok(node) }.boxed() - } - - async { Err(anyhow!("Not found")) }.boxed() - } - - fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec>> { - async { vec![] }.boxed() - } - - fn path(&self, user: &ArcUser) -> String { - format!("/{}/calendar/{}/{}", user.username, self.calname, self.filename) - } - - fn supported_properties(&self, user: &ArcUser) -> dav::PropName { - dav::PropName(vec![ - dav::PropertyRequest::DisplayName, - dav::PropertyRequest::ResourceType, - dav::PropertyRequest::GetEtag, - dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::CalendarData(cal::CalendarDataRequest::default()))), - ]) - } - fn properties(&self, _user: &ArcUser, prop: dav::PropName) -> Vec> { - prop.0.into_iter().map(|n| match n { - dav::PropertyRequest::DisplayName => dav::AnyProperty::Value(dav::Property::DisplayName(format!("{} event", self.filename))), - dav::PropertyRequest::ResourceType => dav::AnyProperty::Value(dav::Property::ResourceType(vec![])), - dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("text/calendar".into())), - dav::PropertyRequest::GetEtag => dav::AnyProperty::Value(dav::Property::GetEtag("\"abcdefg\"".into())), - dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::CalendarData(req))) => - dav::AnyProperty::Value(dav::Property::Extension(all::Property::Cal(cal::Property::CalendarData(cal::CalendarDataPayload { - mime: None, - payload: FAKE_ICS.into() - })))), - v => dav::AnyProperty::Request(v), - }).collect() - } -} - - - -// -// -// -// -// -// -// -// - - -// -// -// -// -// -// -// -// -// -// -// - -// -// -// -// -// -// -// -// -// -// -// - -// -// -// -// -// -// /alice/calendar/personal/something.ics -// diff --git a/aero-proto/src/dav/codec.rs b/aero-proto/src/dav/codec.rs new file mode 100644 index 0000000..08af2fe --- /dev/null +++ b/aero-proto/src/dav/codec.rs @@ -0,0 +1,80 @@ +use anyhow::Result; +use hyper::{Request, Response, body::Bytes}; +use hyper::body::Incoming; +use http_body_util::Full; +use 
futures::stream::StreamExt; +use futures::stream::TryStreamExt; +use http_body_util::BodyStream; +use http_body_util::StreamBody; +use http_body_util::combinators::BoxBody; +use hyper::body::Frame; +use tokio_util::sync::PollSender; +use std::io::{Error, ErrorKind}; +use futures::sink::SinkExt; +use tokio_util::io::{SinkWriter, CopyToBytes}; +use http_body_util::BodyExt; + +use aero_dav::types as dav; +use aero_dav::xml as dxml; + +pub(crate) fn depth(req: &Request) -> dav::Depth { + match req.headers().get("Depth").map(hyper::header::HeaderValue::to_str) { + Some(Ok("0")) => dav::Depth::Zero, + Some(Ok("1")) => dav::Depth::One, + Some(Ok("Infinity")) => dav::Depth::Infinity, + _ => dav::Depth::Zero, + } +} + +pub(crate) fn text_body(txt: &'static str) -> BoxBody { + BoxBody::new(Full::new(Bytes::from(txt)).map_err(|e| match e {})) +} + +pub(crate) fn serialize(status_ok: hyper::StatusCode, elem: T) -> Result>> { + let (tx, rx) = tokio::sync::mpsc::channel::(1); + + // Build the writer + tokio::task::spawn(async move { + let sink = PollSender::new(tx).sink_map_err(|_| Error::from(ErrorKind::BrokenPipe)); + let mut writer = SinkWriter::new(CopyToBytes::new(sink)); + let q = quick_xml::writer::Writer::new_with_indent(&mut writer, b' ', 4); + let ns_to_apply = vec![ ("xmlns:D".into(), "DAV:".into()), ("xmlns:C".into(), "urn:ietf:params:xml:ns:caldav".into()) ]; + let mut qwriter = dxml::Writer { q, ns_to_apply }; + let decl = quick_xml::events::BytesDecl::from_start(quick_xml::events::BytesStart::from_content("xml version=\"1.0\" encoding=\"utf-8\"", 0)); + match qwriter.q.write_event_async(quick_xml::events::Event::Decl(decl)).await { + Ok(_) => (), + Err(e) => tracing::error!(err=?e, "unable to write XML declaration "), + } + match elem.qwrite(&mut qwriter).await { + Ok(_) => tracing::debug!("fully serialized object"), + Err(e) => tracing::error!(err=?e, "failed to serialize object"), + } + }); + + + // Build the reader + let recv = tokio_stream::wrappers::ReceiverStream::new(rx); + let stream = StreamBody::new(recv.map(|v| Ok(Frame::data(v)))); + let boxed_body = BoxBody::new(stream); + + let response = Response::builder() + .status(status_ok) + .header("content-type", "application/xml; charset=\"utf-8\"") + .body(boxed_body)?; + + Ok(response) +} + + +/// Deserialize a request body to an XML request +pub(crate) async fn deserialize>(req: Request) -> Result { + let stream_of_frames = BodyStream::new(req.into_body()); + let stream_of_bytes = stream_of_frames + .try_filter_map(|frame| async move { Ok(frame.into_data().ok()) }) + .map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err)); + let async_read = tokio_util::io::StreamReader::new(stream_of_bytes); + let async_read = std::pin::pin!(async_read); + let mut rdr = dxml::Reader::new(quick_xml::reader::NsReader::from_reader(async_read)).await?; + let parsed = rdr.find::().await?; + Ok(parsed) +} diff --git a/aero-proto/src/dav/middleware.rs b/aero-proto/src/dav/middleware.rs new file mode 100644 index 0000000..c4edbd8 --- /dev/null +++ b/aero-proto/src/dav/middleware.rs @@ -0,0 +1,70 @@ +use anyhow::{anyhow, Result}; +use base64::Engine; +use hyper::{Request, Response, body::Bytes}; +use hyper::body::Incoming; +use http_body_util::combinators::BoxBody; + +use aero_user::login::ArcLoginProvider; +use aero_collections::user::User; + +use super::codec::text_body; + +type ArcUser = std::sync::Arc; + +pub(super) async fn auth<'a>( + login: ArcLoginProvider, + req: Request, + next: impl Fn(ArcUser, Request) -> 
futures::future::BoxFuture<'a, Result>>>, +) -> Result>> { + let auth_val = match req.headers().get(hyper::header::AUTHORIZATION) { + Some(hv) => hv.to_str()?, + None => { + tracing::info!("Missing authorization field"); + return Ok(Response::builder() + .status(401) + .header("WWW-Authenticate", "Basic realm=\"Aerogramme\"") + .body(text_body("Missing Authorization field"))?) + }, + }; + + let b64_creds_maybe_padded = match auth_val.split_once(" ") { + Some(("Basic", b64)) => b64, + _ => { + tracing::info!("Unsupported authorization field"); + return Ok(Response::builder() + .status(400) + .body(text_body("Unsupported Authorization field"))?) + }, + }; + + // base64urlencoded may have trailing equals, base64urlsafe has not + // theoretically authorization is padded but "be liberal in what you accept" + let b64_creds_clean = b64_creds_maybe_padded.trim_end_matches('='); + + // Decode base64 + let creds = base64::engine::general_purpose::STANDARD_NO_PAD.decode(b64_creds_clean)?; + let str_creds = std::str::from_utf8(&creds)?; + + // Split username and password + let (username, password) = str_creds + .split_once(':') + .ok_or(anyhow!("Missing colon in Authorization, can't split decoded value into a username/password pair"))?; + + // Call login provider + let creds = match login.login(username, password).await { + Ok(c) => c, + Err(_) => { + tracing::info!(user=username, "Wrong credentials"); + return Ok(Response::builder() + .status(401) + .header("WWW-Authenticate", "Basic realm=\"Aerogramme\"") + .body(text_body("Wrong credentials"))?) + }, + }; + + // Build a user + let user = User::new(username.into(), creds).await?; + + // Call router with user + next(user, req).await +} diff --git a/aero-proto/src/dav/mod.rs b/aero-proto/src/dav/mod.rs new file mode 100644 index 0000000..379e210 --- /dev/null +++ b/aero-proto/src/dav/mod.rs @@ -0,0 +1,822 @@ +mod middleware; +mod codec; + +use std::net::SocketAddr; +use std::sync::Arc; + +use anyhow::{anyhow, bail, Result}; +use hyper::service::service_fn; +use hyper::{Request, Response, body::Bytes}; +use hyper::server::conn::http1 as http; +use hyper::rt::{Read, Write}; +use hyper::body::Incoming; +use hyper_util::rt::TokioIo; +use futures::stream::{FuturesUnordered, StreamExt}; +use tokio::net::TcpListener; +use tokio::sync::watch; +use tokio_rustls::TlsAcceptor; +use tokio::net::TcpStream; +use tokio::io::{AsyncRead, AsyncWrite}; +use rustls_pemfile::{certs, private_key}; + +use aero_user::config::{DavConfig, DavUnsecureConfig}; +use aero_user::login::ArcLoginProvider; +use aero_collections::{user::User, calendar::Calendar, davdag::BlobId}; +use aero_dav::types as dav; +use aero_dav::caltypes as cal; +use aero_dav::acltypes as acl; +use aero_dav::realization::{All, self as all}; + +use crate::dav::codec::{serialize, deserialize, depth, text_body}; + +type ArcUser = std::sync::Arc; + +pub struct Server { + bind_addr: SocketAddr, + login_provider: ArcLoginProvider, + tls: Option, +} + +pub fn new_unsecure(config: DavUnsecureConfig, login: ArcLoginProvider) -> Server { + Server { + bind_addr: config.bind_addr, + login_provider: login, + tls: None, + } +} + +pub fn new(config: DavConfig, login: ArcLoginProvider) -> Result { + let loaded_certs = certs(&mut std::io::BufReader::new(std::fs::File::open( + config.certs, + )?)) + .collect::, _>>()?; + let loaded_key = private_key(&mut std::io::BufReader::new(std::fs::File::open( + config.key, + )?))? 
+ .unwrap(); + + let tls_config = rustls::ServerConfig::builder() + .with_no_client_auth() + .with_single_cert(loaded_certs, loaded_key)?; + let acceptor = TlsAcceptor::from(Arc::new(tls_config)); + + Ok(Server { + bind_addr: config.bind_addr, + login_provider: login, + tls: Some(acceptor), + }) +} + +trait Stream: Read + Write + Send + Unpin {} +impl Stream for TokioIo {} + +impl Server { + pub async fn run(self: Self, mut must_exit: watch::Receiver) -> Result<()> { + let tcp = TcpListener::bind(self.bind_addr).await?; + tracing::info!("DAV server listening on {:#}", self.bind_addr); + + let mut connections = FuturesUnordered::new(); + while !*must_exit.borrow() { + let wait_conn_finished = async { + if connections.is_empty() { + futures::future::pending().await + } else { + connections.next().await + } + }; + let (socket, remote_addr) = tokio::select! { + a = tcp.accept() => a?, + _ = wait_conn_finished => continue, + _ = must_exit.changed() => continue, + }; + tracing::info!("Accepted connection from {}", remote_addr); + let stream = match self.build_stream(socket).await { + Ok(v) => v, + Err(e) => { + tracing::error!(err=?e, "TLS acceptor failed"); + continue + } + }; + + let login = self.login_provider.clone(); + let conn = tokio::spawn(async move { + //@FIXME should create a generic "public web" server on which "routers" could be + //abitrarily bound + //@FIXME replace with a handler supporting http2 and TLS + + match http::Builder::new().serve_connection(stream, service_fn(|req: Request| { + let login = login.clone(); + tracing::info!("{:?} {:?}", req.method(), req.uri()); + async { + match middleware::auth(login, req, |user, request| async { router(user, request).await }.boxed()).await { + Ok(v) => Ok(v), + Err(e) => { + tracing::error!(err=?e, "internal error"); + Response::builder() + .status(500) + .body(codec::text_body("Internal error")) + }, + } + } + })).await { + Err(e) => tracing::warn!(err=?e, "connection failed"), + Ok(()) => tracing::trace!("connection terminated with success"), + } + }); + connections.push(conn); + } + drop(tcp); + + tracing::info!("Server shutting down, draining remaining connections..."); + while connections.next().await.is_some() {} + + Ok(()) + } + + async fn build_stream(&self, socket: TcpStream) -> Result> { + match self.tls.clone() { + Some(acceptor) => { + let stream = acceptor.accept(socket).await?; + Ok(Box::new(TokioIo::new(stream))) + } + None => Ok(Box::new(TokioIo::new(socket))), + } + } +} + +use http_body_util::BodyExt; + +//@FIXME We should not support only BasicAuth + + +/// Path is a voluntarily feature limited +/// compared to the expressiveness of a UNIX path +/// For example getting parent with ../ is not supported, scheme is not supported, etc. +/// More complex support could be added later if needed by clients +enum Path<'a> { + Abs(Vec<&'a str>), + Rel(Vec<&'a str>), +} +impl<'a> Path<'a> { + fn new(path: &'a str) -> Result { + // This check is naive, it does not aim at detecting all fully qualified + // URL or protect from any attack, its only goal is to help debugging. 
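The behaviour of the Path::new parser defined here can be pinned down with a small test; the sketch below is illustrative only and is not part of this patch.

#[cfg(test)]
mod path_spec {
    use super::Path;

    #[test]
    fn splits_hrefs_like_the_dav_router_expects() {
        // A leading "/" selects Abs; empty segments and "." are filtered out.
        match Path::new("/alice/calendar/").unwrap() {
            Path::Abs(segments) => assert_eq!(segments, vec!["alice", "calendar"]),
            _ => panic!("expected an absolute path"),
        }
        // Anything else is Rel, resolved against the current node.
        match Path::new("personal/event.ics").unwrap() {
            Path::Rel(segments) => assert_eq!(segments, vec!["personal", "event.ics"]),
            _ => panic!("expected a relative path"),
        }
        // Fully qualified URLs are rejected on purpose, to ease debugging.
        assert!(Path::new("https://example.com/alice/").is_err());
    }
}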
+ if path.starts_with("http://") || path.starts_with("https://") { + anyhow::bail!("Full URL are not supported") + } + + let path_segments: Vec<_> = path.split("/").filter(|s| *s != "" && *s != ".").collect(); + if path.starts_with("/") { + return Ok(Path::Abs(path_segments)) + } + Ok(Path::Rel(path_segments)) + } +} + +async fn router(user: std::sync::Arc, req: Request) -> Result>> { + let path = req.uri().path().to_string(); + let path_segments: Vec<_> = path.split("/").filter(|s| *s != "").collect(); + let method = req.method().as_str().to_uppercase(); + + let node = match (RootNode {}).fetch(&user, &path_segments).await { + Ok(v) => v, + Err(e) => { + tracing::warn!(err=?e, "dav node fetch failed"); + return Ok(Response::builder() + .status(404) + .body(codec::text_body("Resource not found"))?) + } + }; + let response = DavResponse { node, user, req }; + + match method.as_str() { + "OPTIONS" => return Ok(Response::builder() + .status(200) + .header("DAV", "1") + .header("Allow", "HEAD,GET,PUT,OPTIONS,DELETE,PROPFIND,PROPPATCH,MKCOL,COPY,MOVE,LOCK,UNLOCK,MKCALENDAR,REPORT") + .body(codec::text_body(""))?), + "HEAD" | "GET" => { + tracing::warn!("HEAD+GET not correctly implemented"); + return Ok(Response::builder() + .status(404) + .body(codec::text_body(""))?) + }, + "PUT" => { + todo!(); + }, + "DELETE" => { + todo!(); + }, + "PROPFIND" => response.propfind().await, + "REPORT" => response.report().await, + _ => return Ok(Response::builder() + .status(501) + .body(codec::text_body("HTTP Method not implemented"))?), + } +} + +const ALLPROP: [dav::PropertyRequest; 10] = [ + dav::PropertyRequest::CreationDate, + dav::PropertyRequest::DisplayName, + dav::PropertyRequest::GetContentLanguage, + dav::PropertyRequest::GetContentLength, + dav::PropertyRequest::GetContentType, + dav::PropertyRequest::GetEtag, + dav::PropertyRequest::GetLastModified, + dav::PropertyRequest::LockDiscovery, + dav::PropertyRequest::ResourceType, + dav::PropertyRequest::SupportedLock, +]; + +// ---------- Building objects + +// ---- HTTP DAV Binding +use futures::stream::TryStreamExt; +use http_body_util::BodyStream; +use http_body_util::StreamBody; +use http_body_util::combinators::BoxBody; +use hyper::body::Frame; +use tokio_util::sync::PollSender; +use std::io::{Error, ErrorKind}; +use futures::sink::SinkExt; +use tokio_util::io::{SinkWriter, CopyToBytes}; + + + +//--- +use futures::{future, future::BoxFuture, future::FutureExt}; + +/// A DAV node should implement the following methods +/// @FIXME not satisfied by BoxFutures but I have no better idea currently +trait DavNode: Send { + // recurence, filesystem hierarchy + /// This node direct children + fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec>>; + /// Recursively fetch a child (progress inside the filesystem hierarchy) + fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str]) -> BoxFuture<'a, Result>>; + + // node properties + /// Get the path + fn path(&self, user: &ArcUser) -> String; + /// Get the supported WebDAV properties + fn supported_properties(&self, user: &ArcUser) -> dav::PropName; + /// Get the values for the given properties + fn properties(&self, user: &ArcUser, prop: dav::PropName) -> Vec>; + + //@FIXME maybe add etag, maybe add a way to set content + + /// Utility function to get a propname response from a node + fn response_propname(&self, user: &ArcUser) -> dav::Response { + dav::Response { + status_or_propstat: dav::StatusOrPropstat::PropStat( + dav::Href(self.path(user)), + vec![ + dav::PropStat { + status: 
dav::Status(hyper::StatusCode::OK), + prop: dav::AnyProp(self.supported_properties(user).0.into_iter().map(dav::AnyProperty::Request).collect()), + error: None, + responsedescription: None, + } + ], + ), + error: None, + location: None, + responsedescription: None + } + } + + /// Utility function to get a prop response from a node & a list of propname + fn response_props(&self, user: &ArcUser, props: dav::PropName) -> dav::Response { + let mut prop_desc = vec![]; + let (found, not_found): (Vec<_>, Vec<_>) = self.properties(user, props).into_iter().partition(|v| matches!(v, dav::AnyProperty::Value(_))); + + // If at least one property has been found on this object, adding a HTTP 200 propstat to + // the response + if !found.is_empty() { + prop_desc.push(dav::PropStat { + status: dav::Status(hyper::StatusCode::OK), + prop: dav::AnyProp(found), + error: None, + responsedescription: None, + }); + } + + // If at least one property can't be found on this object, adding a HTTP 404 propstat to + // the response + if !not_found.is_empty() { + prop_desc.push(dav::PropStat { + status: dav::Status(hyper::StatusCode::NOT_FOUND), + prop: dav::AnyProp(not_found), + error: None, + responsedescription: None, + }) + } + + // Build the finale response + dav::Response { + status_or_propstat: dav::StatusOrPropstat::PropStat(dav::Href(self.path(user)), prop_desc), + error: None, + location: None, + responsedescription: None + } + } +} + +struct DavResponse { + node: Box, + user: std::sync::Arc, + req: Request, +} +impl DavResponse { + // --- Public API --- + + /// REPORT has been first described in the "Versioning Extension" of WebDAV + /// It allows more complex queries compared to PROPFIND + /// + /// Note: current implementation is not generic at all, it is heavily tied to CalDAV. + /// A rewrite would be required to make it more generic (with the extension system that has + /// been introduced in aero-dav) + async fn report(self) -> Result>> { + let status = hyper::StatusCode::from_u16(207)?; + + let report = match deserialize::>(self.req).await { + Ok(v) => v, + Err(e) => { + tracing::error!(err=?e, "unable to decode REPORT body"); + return Ok(Response::builder() + .status(400) + .body(text_body("Bad request"))?) 
+ } + }; + + // Multiget is really like a propfind where Depth: 0|1|Infinity is replaced by an arbitrary + // list of URLs + let multiget = match report { + cal::Report::Multiget(m) => m, + _ => return Ok(Response::builder() + .status(501) + .body(text_body("Not implemented"))?), + }; + + // Getting the list of nodes + let (mut ok_node, mut not_found) = (Vec::new(), Vec::new()); + for h in multiget.href.into_iter() { + let maybe_collected_node = match Path::new(h.0.as_str()) { + Ok(Path::Abs(p)) => RootNode{}.fetch(&self.user, p.as_slice()).await.or(Err(h)), + Ok(Path::Rel(p)) => self.node.fetch(&self.user, p.as_slice()).await.or(Err(h)), + Err(_) => Err(h), + }; + + match maybe_collected_node { + Ok(v) => ok_node.push(v), + Err(h) => not_found.push(h), + }; + } + + // Getting props + let props = match multiget.selector { + None | Some(cal::CalendarSelector::AllProp) => Some(dav::PropName(ALLPROP.to_vec())), + Some(cal::CalendarSelector::PropName) => None, + Some(cal::CalendarSelector::Prop(inner)) => Some(inner), + }; + + serialize(status, Self::multistatus(&self.user, ok_node, not_found, props)) + } + + /// PROPFIND is the standard way to fetch WebDAV properties + async fn propfind(self) -> Result>> { + let depth = depth(&self.req); + if matches!(depth, dav::Depth::Infinity) { + return Ok(Response::builder() + .status(501) + .body(text_body("Depth: Infinity not implemented"))?) + } + + let status = hyper::StatusCode::from_u16(207)?; + + // A client may choose not to submit a request body. An empty PROPFIND + // request body MUST be treated as if it were an 'allprop' request. + // @FIXME here we handle any invalid data as an allprop, an empty request is thus correctly + // handled, but corrupted requests are also silently handled as allprop. + let propfind = deserialize::>(self.req).await.unwrap_or_else(|_| dav::PropFind::::AllProp(None)); + tracing::debug!(recv=?propfind, "inferred propfind request"); + + // Collect nodes as PROPFIND is not limited to the targeted node + let mut nodes = vec![]; + if matches!(depth, dav::Depth::One | dav::Depth::Infinity) { + nodes.extend(self.node.children(&self.user).await); + } + nodes.push(self.node); + + // Expand properties request + let propname = match propfind { + dav::PropFind::PropName => None, + dav::PropFind::AllProp(None) => Some(dav::PropName(ALLPROP.to_vec())), + dav::PropFind::AllProp(Some(dav::Include(mut include))) => { + include.extend_from_slice(&ALLPROP); + Some(dav::PropName(include)) + }, + dav::PropFind::Prop(inner) => Some(inner), + }; + + // Not Found is currently impossible considering the way we designed this function + let not_found = vec![]; + serialize(status, Self::multistatus(&self.user, nodes, not_found, propname)) + } + + // --- Internal functions --- + /// Utility function to build a multistatus response from + /// a list of DavNodes + fn multistatus(user: &ArcUser, nodes: Vec>, not_found: Vec, props: Option>) -> dav::Multistatus { + // Collect properties on existing objects + let mut responses: Vec> = match props { + Some(props) => nodes.into_iter().map(|n| n.response_props(user, props.clone())).collect(), + None => nodes.into_iter().map(|n| n.response_propname(user)).collect(), + }; + + // Register not found objects only if relevant + if !not_found.is_empty() { + responses.push(dav::Response { + status_or_propstat: dav::StatusOrPropstat::Status(not_found, dav::Status(hyper::StatusCode::NOT_FOUND)), + error: None, + location: None, + responsedescription: None, + }); + } + + // Build response + dav::Multistatus:: { + 
responses, + responsedescription: None, + } + } +} + +#[derive(Clone)] +struct RootNode {} +impl DavNode for RootNode { + fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str]) -> BoxFuture<'a, Result>> { + if path.len() == 0 { + let this = self.clone(); + return async { Ok(Box::new(this) as Box) }.boxed(); + } + + if path[0] == user.username { + let child = Box::new(HomeNode {}); + return child.fetch(user, &path[1..]); + } + + async { Err(anyhow!("Not found")) }.boxed() + } + + fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec>> { + async { vec![Box::new(HomeNode { }) as Box] }.boxed() + } + + fn path(&self, user: &ArcUser) -> String { + "/".into() + } + + fn supported_properties(&self, user: &ArcUser) -> dav::PropName { + dav::PropName(vec![ + dav::PropertyRequest::DisplayName, + dav::PropertyRequest::ResourceType, + dav::PropertyRequest::GetContentType, + dav::PropertyRequest::Extension(all::PropertyRequest::Acl(acl::PropertyRequest::CurrentUserPrincipal)), + ]) + } + fn properties(&self, user: &ArcUser, prop: dav::PropName) -> Vec> { + prop.0.into_iter().map(|n| match n { + dav::PropertyRequest::DisplayName => dav::AnyProperty::Value(dav::Property::DisplayName("DAV Root".to_string())), + dav::PropertyRequest::ResourceType => dav::AnyProperty::Value(dav::Property::ResourceType(vec![ + dav::ResourceType::Collection, + ])), + dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("httpd/unix-directory".into())), + dav::PropertyRequest::Extension(all::PropertyRequest::Acl(acl::PropertyRequest::CurrentUserPrincipal)) => + dav::AnyProperty::Value(dav::Property::Extension(all::Property::Acl(acl::Property::CurrentUserPrincipal(acl::User::Authenticated(dav::Href(HomeNode{}.path(user))))))), + v => dav::AnyProperty::Request(v), + }).collect() + } +} + +#[derive(Clone)] +struct HomeNode {} +impl DavNode for HomeNode { + fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str]) -> BoxFuture<'a, Result>> { + if path.len() == 0 { + let node = Box::new(self.clone()) as Box; + return async { Ok(node) }.boxed() + } + + if path[0] == "calendar" { + return async { + let child = Box::new(CalendarListNode::new(user).await?); + child.fetch(user, &path[1..]).await + }.boxed(); + } + + async { Err(anyhow!("Not found")) }.boxed() + } + + fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec>> { + async { + CalendarListNode::new(user).await + .map(|c| vec![Box::new(c) as Box]) + .unwrap_or(vec![]) + }.boxed() + } + + fn path(&self, user: &ArcUser) -> String { + format!("/{}/", user.username) + } + + fn supported_properties(&self, user: &ArcUser) -> dav::PropName { + dav::PropName(vec![ + dav::PropertyRequest::DisplayName, + dav::PropertyRequest::ResourceType, + dav::PropertyRequest::GetContentType, + dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::CalendarHomeSet)), + ]) + } + fn properties(&self, user: &ArcUser, prop: dav::PropName) -> Vec> { + prop.0.into_iter().map(|n| match n { + dav::PropertyRequest::DisplayName => dav::AnyProperty::Value(dav::Property::DisplayName(format!("{} home", user.username))), + dav::PropertyRequest::ResourceType => dav::AnyProperty::Value(dav::Property::ResourceType(vec![ + dav::ResourceType::Collection, + dav::ResourceType::Extension(all::ResourceType::Acl(acl::ResourceType::Principal)), + ])), + dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("httpd/unix-directory".into())), + 
dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::CalendarHomeSet)) => + dav::AnyProperty::Value(dav::Property::Extension(all::Property::Cal(cal::Property::CalendarHomeSet(dav::Href(/*CalendarListNode{}.path(user)*/ todo!()))))), + v => dav::AnyProperty::Request(v), + }).collect() + } +} + +#[derive(Clone)] +struct CalendarListNode { + list: Vec, +} +impl CalendarListNode { + async fn new(user: &ArcUser) -> Result { + let list = user.calendars.list(user).await?; + Ok(Self { list }) + } +} +impl DavNode for CalendarListNode { + fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str]) -> BoxFuture<'a, Result>> { + if path.len() == 0 { + let node = Box::new(self.clone()) as Box; + return async { Ok(node) }.boxed(); + } + + async { + let cal = user.calendars.open(user, path[0]).await?.ok_or(anyhow!("Not found"))?; + let child = Box::new(CalendarNode { + col: cal, + calname: path[0].to_string() + }); + child.fetch(user, &path[1..]).await + }.boxed() + } + + fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec>> { + let list = self.list.clone(); + async move { + //@FIXME maybe we want to be lazy here?! + futures::stream::iter(list.iter()) + .filter_map(|name| async move { + user.calendars.open(user, name).await + .ok() + .flatten() + .map(|v| (name, v)) + }) + .map(|(name, cal)| Box::new(CalendarNode { + col: cal, + calname: name.to_string(), + }) as Box) + .collect::>>() + .await + }.boxed() + } + + fn path(&self, user: &ArcUser) -> String { + format!("/{}/calendar/", user.username) + } + + fn supported_properties(&self, user: &ArcUser) -> dav::PropName { + dav::PropName(vec![ + dav::PropertyRequest::DisplayName, + dav::PropertyRequest::ResourceType, + dav::PropertyRequest::GetContentType, + ]) + } + fn properties(&self, user: &ArcUser, prop: dav::PropName) -> Vec> { + prop.0.into_iter().map(|n| match n { + dav::PropertyRequest::DisplayName => dav::AnyProperty::Value(dav::Property::DisplayName(format!("{} calendars", user.username))), + dav::PropertyRequest::ResourceType => dav::AnyProperty::Value(dav::Property::ResourceType(vec![dav::ResourceType::Collection])), + dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("httpd/unix-directory".into())), + v => dav::AnyProperty::Request(v), + }).collect() + } +} + +#[derive(Clone)] +struct CalendarNode { + col: Arc, + calname: String, +} +impl DavNode for CalendarNode { + fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str]) -> BoxFuture<'a, Result>> { + if path.len() == 0 { + let node = Box::new(self.clone()) as Box; + return async { Ok(node) }.boxed() + } + + let col = self.col.clone(); + let calname = self.calname.clone(); + async move { + if let Some(blob_id) = col.dag().await.idx_by_filename.get(path[0]) { + let child = Box::new(EventNode { + col: col.clone(), + calname, + filename: path[0].to_string(), + blob_id: *blob_id, + }); + return child.fetch(user, &path[1..]).await + } + + Err(anyhow!("Not found")) + }.boxed() + } + + fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec>> { + let col = self.col.clone(); + let calname = self.calname.clone(); + + async move { + col.dag().await.idx_by_filename.iter().map(|(filename, blob_id)| { + Box::new(EventNode { + col: col.clone(), + calname: calname.clone(), + filename: filename.to_string(), + blob_id: *blob_id, + }) as Box + }).collect() + }.boxed() + } + + fn path(&self, user: &ArcUser) -> String { + format!("/{}/calendar/{}/", user.username, self.calname) + } + + fn supported_properties(&self, user: 
&ArcUser) -> dav::PropName { + dav::PropName(vec![ + dav::PropertyRequest::DisplayName, + dav::PropertyRequest::ResourceType, + dav::PropertyRequest::GetContentType, + dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::SupportedCalendarComponentSet)), + ]) + } + fn properties(&self, _user: &ArcUser, prop: dav::PropName) -> Vec> { + prop.0.into_iter().map(|n| match n { + dav::PropertyRequest::DisplayName => dav::AnyProperty::Value(dav::Property::DisplayName(format!("{} calendar", self.calname))), + dav::PropertyRequest::ResourceType => dav::AnyProperty::Value(dav::Property::ResourceType(vec![ + dav::ResourceType::Collection, + dav::ResourceType::Extension(all::ResourceType::Cal(cal::ResourceType::Calendar)), + ])), + //dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("httpd/unix-directory".into())), + //@FIXME seems wrong but seems to be what Thunderbird expects... + dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("text/calendar".into())), + dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::SupportedCalendarComponentSet)) + => dav::AnyProperty::Value(dav::Property::Extension(all::Property::Cal(cal::Property::SupportedCalendarComponentSet(vec![ + cal::CompSupport(cal::Component::VEvent), + ])))), + v => dav::AnyProperty::Request(v), + }).collect() + } +} + +const FAKE_ICS: &str = r#"BEGIN:VCALENDAR +VERSION:2.0 +PRODID:-//Example Corp.//CalDAV Client//EN +BEGIN:VTIMEZONE +LAST-MODIFIED:20040110T032845Z +TZID:US/Eastern +BEGIN:DAYLIGHT +DTSTART:20000404T020000 +RRULE:FREQ=YEARLY;BYDAY=1SU;BYMONTH=4 +TZNAME:EDT +TZOFFSETFROM:-0500 +TZOFFSETTO:-0400 +END:DAYLIGHT +BEGIN:STANDARD +DTSTART:20001026T020000 +RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10 +TZNAME:EST +TZOFFSETFROM:-0400 +TZOFFSETTO:-0500 +END:STANDARD +END:VTIMEZONE +BEGIN:VEVENT +DTSTAMP:20240406T001102Z +DTSTART;TZID=US/Eastern:20240406T100000 +DURATION:PT1H +SUMMARY:Event #1 +Description:Go Steelers! 
+UID:74855313FA803DA593CD579A@example.com +END:VEVENT +END:VCALENDAR"#; + +#[derive(Clone)] +struct EventNode { + col: Arc, + calname: String, + filename: String, + blob_id: BlobId, +} +impl DavNode for EventNode { + fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str]) -> BoxFuture<'a, Result>> { + if path.len() == 0 { + let node = Box::new(self.clone()) as Box; + return async { Ok(node) }.boxed() + } + + async { Err(anyhow!("Not found")) }.boxed() + } + + fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec>> { + async { vec![] }.boxed() + } + + fn path(&self, user: &ArcUser) -> String { + format!("/{}/calendar/{}/{}", user.username, self.calname, self.filename) + } + + fn supported_properties(&self, user: &ArcUser) -> dav::PropName { + dav::PropName(vec![ + dav::PropertyRequest::DisplayName, + dav::PropertyRequest::ResourceType, + dav::PropertyRequest::GetEtag, + dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::CalendarData(cal::CalendarDataRequest::default()))), + ]) + } + fn properties(&self, _user: &ArcUser, prop: dav::PropName) -> Vec> { + prop.0.into_iter().map(|n| match n { + dav::PropertyRequest::DisplayName => dav::AnyProperty::Value(dav::Property::DisplayName(format!("{} event", self.filename))), + dav::PropertyRequest::ResourceType => dav::AnyProperty::Value(dav::Property::ResourceType(vec![])), + dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("text/calendar".into())), + dav::PropertyRequest::GetEtag => dav::AnyProperty::Value(dav::Property::GetEtag("\"abcdefg\"".into())), + dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::CalendarData(req))) => + dav::AnyProperty::Value(dav::Property::Extension(all::Property::Cal(cal::Property::CalendarData(cal::CalendarDataPayload { + mime: None, + payload: FAKE_ICS.into() + })))), + v => dav::AnyProperty::Request(v), + }).collect() + } +} + + + +// +// +// +// +// +// +// +// + + +// +// +// +// +// +// +// +// +// +// +// + +// +// +// +// +// +// +// +// +// +// +// + +// +// +// +// +// +// /alice/calendar/personal/something.ics +// -- cgit v1.2.3 From e2bf412337fbbfc70f67ac84fc2ee9268c0c337e Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Thu, 18 Apr 2024 16:08:10 +0200 Subject: Finalize refactor --- aero-proto/src/dav/controller.rs | 227 ++++++++++++++ aero-proto/src/dav/mod.rs | 655 +-------------------------------------- aero-proto/src/dav/node.rs | 85 +++++ aero-proto/src/dav/resource.rs | 340 ++++++++++++++++++++ 4 files changed, 660 insertions(+), 647 deletions(-) create mode 100644 aero-proto/src/dav/controller.rs create mode 100644 aero-proto/src/dav/node.rs create mode 100644 aero-proto/src/dav/resource.rs diff --git a/aero-proto/src/dav/controller.rs b/aero-proto/src/dav/controller.rs new file mode 100644 index 0000000..79ead0a --- /dev/null +++ b/aero-proto/src/dav/controller.rs @@ -0,0 +1,227 @@ +use anyhow::Result; +use http_body_util::combinators::BoxBody; +use hyper::body::Incoming; +use hyper::{Request, Response, body::Bytes}; + +use aero_collections::user::User; +use aero_dav::types as dav; +use aero_dav::realization::All; +use aero_dav::caltypes as cal; + +use crate::dav::codec::{serialize, deserialize, depth, text_body}; +use crate::dav::node::DavNode; +use crate::dav::resource::RootNode; +use crate::dav::codec; + +type ArcUser = std::sync::Arc; + +const ALLPROP: [dav::PropertyRequest; 10] = [ + dav::PropertyRequest::CreationDate, + dav::PropertyRequest::DisplayName, + 
dav::PropertyRequest::GetContentLanguage, + dav::PropertyRequest::GetContentLength, + dav::PropertyRequest::GetContentType, + dav::PropertyRequest::GetEtag, + dav::PropertyRequest::GetLastModified, + dav::PropertyRequest::LockDiscovery, + dav::PropertyRequest::ResourceType, + dav::PropertyRequest::SupportedLock, +]; + +pub(crate) struct Controller { + node: Box, + user: std::sync::Arc, + req: Request, +} +impl Controller { + pub(crate) async fn route(user: std::sync::Arc, req: Request) -> Result>> { + let path = req.uri().path().to_string(); + let path_segments: Vec<_> = path.split("/").filter(|s| *s != "").collect(); + let method = req.method().as_str().to_uppercase(); + + let node = match (RootNode {}).fetch(&user, &path_segments).await { + Ok(v) => v, + Err(e) => { + tracing::warn!(err=?e, "dav node fetch failed"); + return Ok(Response::builder() + .status(404) + .body(codec::text_body("Resource not found"))?) + } + }; + let ctrl = Self { node, user, req }; + + match method.as_str() { + "OPTIONS" => Ok(Response::builder() + .status(200) + .header("DAV", "1") + .header("Allow", "HEAD,GET,PUT,OPTIONS,DELETE,PROPFIND,PROPPATCH,MKCOL,COPY,MOVE,LOCK,UNLOCK,MKCALENDAR,REPORT") + .body(codec::text_body(""))?), + "HEAD" | "GET" => { + tracing::warn!("HEAD+GET not correctly implemented"); + Ok(Response::builder() + .status(404) + .body(codec::text_body(""))?) + }, + "PUT" => { + todo!(); + }, + "DELETE" => { + todo!(); + }, + "PROPFIND" => ctrl.propfind().await, + "REPORT" => ctrl.report().await, + _ => Ok(Response::builder() + .status(501) + .body(codec::text_body("HTTP Method not implemented"))?), + } + } + + + // --- Public API --- + + /// REPORT has been first described in the "Versioning Extension" of WebDAV + /// It allows more complex queries compared to PROPFIND + /// + /// Note: current implementation is not generic at all, it is heavily tied to CalDAV. + /// A rewrite would be required to make it more generic (with the extension system that has + /// been introduced in aero-dav) + async fn report(self) -> Result>> { + let status = hyper::StatusCode::from_u16(207)?; + + let report = match deserialize::>(self.req).await { + Ok(v) => v, + Err(e) => { + tracing::error!(err=?e, "unable to decode REPORT body"); + return Ok(Response::builder() + .status(400) + .body(text_body("Bad request"))?) 
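// --- [editorial aside: illustrative sketch, not part of this patch] ----------
// For context: the REPORT body deserialized above is, in practice, a CalDAV
// calendar-multiget (RFC 4791, section 7.9). A representative payload, reusing
// the /alice/calendar/personal/something.ics path that appears elsewhere in
// this series, could look like this:
const _EXAMPLE_CALENDAR_MULTIGET: &str = r#"<?xml version="1.0" encoding="utf-8" ?>
<C:calendar-multiget xmlns:D="DAV:" xmlns:C="urn:ietf:params:xml:ns:caldav">
  <D:prop>
    <D:getetag/>
    <C:calendar-data/>
  </D:prop>
  <D:href>/alice/calendar/personal/something.ics</D:href>
</C:calendar-multiget>"#;
// A body of that shape decodes to cal::Report::Multiget, whose href list is
// what the node-collection loop below resolves into ok_node and not_found.
// --- [end editorial aside] ----------------------------------------------------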
+ } + }; + + // Multiget is really like a propfind where Depth: 0|1|Infinity is replaced by an arbitrary + // list of URLs + let multiget = match report { + cal::Report::Multiget(m) => m, + _ => return Ok(Response::builder() + .status(501) + .body(text_body("Not implemented"))?), + }; + + // Getting the list of nodes + let (mut ok_node, mut not_found) = (Vec::new(), Vec::new()); + for h in multiget.href.into_iter() { + let maybe_collected_node = match Path::new(h.0.as_str()) { + Ok(Path::Abs(p)) => RootNode{}.fetch(&self.user, p.as_slice()).await.or(Err(h)), + Ok(Path::Rel(p)) => self.node.fetch(&self.user, p.as_slice()).await.or(Err(h)), + Err(_) => Err(h), + }; + + match maybe_collected_node { + Ok(v) => ok_node.push(v), + Err(h) => not_found.push(h), + }; + } + + // Getting props + let props = match multiget.selector { + None | Some(cal::CalendarSelector::AllProp) => Some(dav::PropName(ALLPROP.to_vec())), + Some(cal::CalendarSelector::PropName) => None, + Some(cal::CalendarSelector::Prop(inner)) => Some(inner), + }; + + serialize(status, Self::multistatus(&self.user, ok_node, not_found, props)) + } + + /// PROPFIND is the standard way to fetch WebDAV properties + async fn propfind(self) -> Result>> { + let depth = depth(&self.req); + if matches!(depth, dav::Depth::Infinity) { + return Ok(Response::builder() + .status(501) + .body(text_body("Depth: Infinity not implemented"))?) + } + + let status = hyper::StatusCode::from_u16(207)?; + + // A client may choose not to submit a request body. An empty PROPFIND + // request body MUST be treated as if it were an 'allprop' request. + // @FIXME here we handle any invalid data as an allprop, an empty request is thus correctly + // handled, but corrupted requests are also silently handled as allprop. + let propfind = deserialize::>(self.req).await.unwrap_or_else(|_| dav::PropFind::::AllProp(None)); + tracing::debug!(recv=?propfind, "inferred propfind request"); + + // Collect nodes as PROPFIND is not limited to the targeted node + let mut nodes = vec![]; + if matches!(depth, dav::Depth::One | dav::Depth::Infinity) { + nodes.extend(self.node.children(&self.user).await); + } + nodes.push(self.node); + + // Expand properties request + let propname = match propfind { + dav::PropFind::PropName => None, + dav::PropFind::AllProp(None) => Some(dav::PropName(ALLPROP.to_vec())), + dav::PropFind::AllProp(Some(dav::Include(mut include))) => { + include.extend_from_slice(&ALLPROP); + Some(dav::PropName(include)) + }, + dav::PropFind::Prop(inner) => Some(inner), + }; + + // Not Found is currently impossible considering the way we designed this function + let not_found = vec![]; + serialize(status, Self::multistatus(&self.user, nodes, not_found, propname)) + } + + // --- Internal functions --- + /// Utility function to build a multistatus response from + /// a list of DavNodes + fn multistatus(user: &ArcUser, nodes: Vec>, not_found: Vec, props: Option>) -> dav::Multistatus { + // Collect properties on existing objects + let mut responses: Vec> = match props { + Some(props) => nodes.into_iter().map(|n| n.response_props(user, props.clone())).collect(), + None => nodes.into_iter().map(|n| n.response_propname(user)).collect(), + }; + + // Register not found objects only if relevant + if !not_found.is_empty() { + responses.push(dav::Response { + status_or_propstat: dav::StatusOrPropstat::Status(not_found, dav::Status(hyper::StatusCode::NOT_FOUND)), + error: None, + location: None, + responsedescription: None, + }); + } + + // Build response + dav::Multistatus:: { + 
responses, + responsedescription: None, + } + } +} + + +/// Path is a voluntarily feature limited +/// compared to the expressiveness of a UNIX path +/// For example getting parent with ../ is not supported, scheme is not supported, etc. +/// More complex support could be added later if needed by clients +enum Path<'a> { + Abs(Vec<&'a str>), + Rel(Vec<&'a str>), +} +impl<'a> Path<'a> { + fn new(path: &'a str) -> Result { + // This check is naive, it does not aim at detecting all fully qualified + // URL or protect from any attack, its only goal is to help debugging. + if path.starts_with("http://") || path.starts_with("https://") { + anyhow::bail!("Full URL are not supported") + } + + let path_segments: Vec<_> = path.split("/").filter(|s| *s != "" && *s != ".").collect(); + if path.starts_with("/") { + return Ok(Path::Abs(path_segments)) + } + Ok(Path::Rel(path_segments)) + } +} diff --git a/aero-proto/src/dav/mod.rs b/aero-proto/src/dav/mod.rs index 379e210..de2e690 100644 --- a/aero-proto/src/dav/mod.rs +++ b/aero-proto/src/dav/mod.rs @@ -1,35 +1,31 @@ mod middleware; +mod controller; mod codec; +mod node; +mod resource; use std::net::SocketAddr; use std::sync::Arc; -use anyhow::{anyhow, bail, Result}; +use anyhow::Result; use hyper::service::service_fn; -use hyper::{Request, Response, body::Bytes}; +use hyper::{Request, Response}; use hyper::server::conn::http1 as http; use hyper::rt::{Read, Write}; -use hyper::body::Incoming; use hyper_util::rt::TokioIo; use futures::stream::{FuturesUnordered, StreamExt}; use tokio::net::TcpListener; use tokio::sync::watch; use tokio_rustls::TlsAcceptor; use tokio::net::TcpStream; +use futures::future::FutureExt; use tokio::io::{AsyncRead, AsyncWrite}; use rustls_pemfile::{certs, private_key}; use aero_user::config::{DavConfig, DavUnsecureConfig}; use aero_user::login::ArcLoginProvider; -use aero_collections::{user::User, calendar::Calendar, davdag::BlobId}; -use aero_dav::types as dav; -use aero_dav::caltypes as cal; -use aero_dav::acltypes as acl; -use aero_dav::realization::{All, self as all}; -use crate::dav::codec::{serialize, deserialize, depth, text_body}; - -type ArcUser = std::sync::Arc; +use crate::dav::controller::Controller; pub struct Server { bind_addr: SocketAddr, @@ -108,7 +104,7 @@ impl Server { let login = login.clone(); tracing::info!("{:?} {:?}", req.method(), req.uri()); async { - match middleware::auth(login, req, |user, request| async { router(user, request).await }.boxed()).await { + match middleware::auth(login, req, |user, request| async { Controller::route(user, request).await }.boxed()).await { Ok(v) => Ok(v), Err(e) => { tracing::error!(err=?e, "internal error"); @@ -144,641 +140,6 @@ impl Server { } } -use http_body_util::BodyExt; - -//@FIXME We should not support only BasicAuth - - -/// Path is a voluntarily feature limited -/// compared to the expressiveness of a UNIX path -/// For example getting parent with ../ is not supported, scheme is not supported, etc. -/// More complex support could be added later if needed by clients -enum Path<'a> { - Abs(Vec<&'a str>), - Rel(Vec<&'a str>), -} -impl<'a> Path<'a> { - fn new(path: &'a str) -> Result { - // This check is naive, it does not aim at detecting all fully qualified - // URL or protect from any attack, its only goal is to help debugging. 
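// --- [editorial aside: illustrative sketch, not part of this patch] ----------
// The Path helper added to controller.rs above (and removed from mod.rs in the
// hunk around this point) only splits segments and rejects fully qualified
// URLs. A hypothetical unit test, not present in the series, shows the intent:
#[test]
fn path_splitting_examples() {
    assert!(matches!(Path::new("/alice/calendar/personal/"),
                     Ok(Path::Abs(v)) if v == ["alice", "calendar", "personal"]));
    assert!(matches!(Path::new("personal/something.ics"),
                     Ok(Path::Rel(v)) if v == ["personal", "something.ics"]));
    // empty and "." segments are dropped; "../" is deliberately not interpreted
    assert!(matches!(Path::new("./personal//something.ics"),
                     Ok(Path::Rel(v)) if v == ["personal", "something.ics"]));
    // fully qualified URLs are rejected outright
    assert!(Path::new("https://example.com/alice/").is_err());
}
// --- [end editorial aside] ----------------------------------------------------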
- if path.starts_with("http://") || path.starts_with("https://") { - anyhow::bail!("Full URL are not supported") - } - - let path_segments: Vec<_> = path.split("/").filter(|s| *s != "" && *s != ".").collect(); - if path.starts_with("/") { - return Ok(Path::Abs(path_segments)) - } - Ok(Path::Rel(path_segments)) - } -} - -async fn router(user: std::sync::Arc, req: Request) -> Result>> { - let path = req.uri().path().to_string(); - let path_segments: Vec<_> = path.split("/").filter(|s| *s != "").collect(); - let method = req.method().as_str().to_uppercase(); - - let node = match (RootNode {}).fetch(&user, &path_segments).await { - Ok(v) => v, - Err(e) => { - tracing::warn!(err=?e, "dav node fetch failed"); - return Ok(Response::builder() - .status(404) - .body(codec::text_body("Resource not found"))?) - } - }; - let response = DavResponse { node, user, req }; - - match method.as_str() { - "OPTIONS" => return Ok(Response::builder() - .status(200) - .header("DAV", "1") - .header("Allow", "HEAD,GET,PUT,OPTIONS,DELETE,PROPFIND,PROPPATCH,MKCOL,COPY,MOVE,LOCK,UNLOCK,MKCALENDAR,REPORT") - .body(codec::text_body(""))?), - "HEAD" | "GET" => { - tracing::warn!("HEAD+GET not correctly implemented"); - return Ok(Response::builder() - .status(404) - .body(codec::text_body(""))?) - }, - "PUT" => { - todo!(); - }, - "DELETE" => { - todo!(); - }, - "PROPFIND" => response.propfind().await, - "REPORT" => response.report().await, - _ => return Ok(Response::builder() - .status(501) - .body(codec::text_body("HTTP Method not implemented"))?), - } -} - -const ALLPROP: [dav::PropertyRequest; 10] = [ - dav::PropertyRequest::CreationDate, - dav::PropertyRequest::DisplayName, - dav::PropertyRequest::GetContentLanguage, - dav::PropertyRequest::GetContentLength, - dav::PropertyRequest::GetContentType, - dav::PropertyRequest::GetEtag, - dav::PropertyRequest::GetLastModified, - dav::PropertyRequest::LockDiscovery, - dav::PropertyRequest::ResourceType, - dav::PropertyRequest::SupportedLock, -]; - -// ---------- Building objects - -// ---- HTTP DAV Binding -use futures::stream::TryStreamExt; -use http_body_util::BodyStream; -use http_body_util::StreamBody; -use http_body_util::combinators::BoxBody; -use hyper::body::Frame; -use tokio_util::sync::PollSender; -use std::io::{Error, ErrorKind}; -use futures::sink::SinkExt; -use tokio_util::io::{SinkWriter, CopyToBytes}; - - - -//--- -use futures::{future, future::BoxFuture, future::FutureExt}; - -/// A DAV node should implement the following methods -/// @FIXME not satisfied by BoxFutures but I have no better idea currently -trait DavNode: Send { - // recurence, filesystem hierarchy - /// This node direct children - fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec>>; - /// Recursively fetch a child (progress inside the filesystem hierarchy) - fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str]) -> BoxFuture<'a, Result>>; - - // node properties - /// Get the path - fn path(&self, user: &ArcUser) -> String; - /// Get the supported WebDAV properties - fn supported_properties(&self, user: &ArcUser) -> dav::PropName; - /// Get the values for the given properties - fn properties(&self, user: &ArcUser, prop: dav::PropName) -> Vec>; - - //@FIXME maybe add etag, maybe add a way to set content - - /// Utility function to get a propname response from a node - fn response_propname(&self, user: &ArcUser) -> dav::Response { - dav::Response { - status_or_propstat: dav::StatusOrPropstat::PropStat( - dav::Href(self.path(user)), - vec![ - dav::PropStat { - status: 
dav::Status(hyper::StatusCode::OK), - prop: dav::AnyProp(self.supported_properties(user).0.into_iter().map(dav::AnyProperty::Request).collect()), - error: None, - responsedescription: None, - } - ], - ), - error: None, - location: None, - responsedescription: None - } - } - - /// Utility function to get a prop response from a node & a list of propname - fn response_props(&self, user: &ArcUser, props: dav::PropName) -> dav::Response { - let mut prop_desc = vec![]; - let (found, not_found): (Vec<_>, Vec<_>) = self.properties(user, props).into_iter().partition(|v| matches!(v, dav::AnyProperty::Value(_))); - - // If at least one property has been found on this object, adding a HTTP 200 propstat to - // the response - if !found.is_empty() { - prop_desc.push(dav::PropStat { - status: dav::Status(hyper::StatusCode::OK), - prop: dav::AnyProp(found), - error: None, - responsedescription: None, - }); - } - - // If at least one property can't be found on this object, adding a HTTP 404 propstat to - // the response - if !not_found.is_empty() { - prop_desc.push(dav::PropStat { - status: dav::Status(hyper::StatusCode::NOT_FOUND), - prop: dav::AnyProp(not_found), - error: None, - responsedescription: None, - }) - } - - // Build the finale response - dav::Response { - status_or_propstat: dav::StatusOrPropstat::PropStat(dav::Href(self.path(user)), prop_desc), - error: None, - location: None, - responsedescription: None - } - } -} - -struct DavResponse { - node: Box, - user: std::sync::Arc, - req: Request, -} -impl DavResponse { - // --- Public API --- - - /// REPORT has been first described in the "Versioning Extension" of WebDAV - /// It allows more complex queries compared to PROPFIND - /// - /// Note: current implementation is not generic at all, it is heavily tied to CalDAV. - /// A rewrite would be required to make it more generic (with the extension system that has - /// been introduced in aero-dav) - async fn report(self) -> Result>> { - let status = hyper::StatusCode::from_u16(207)?; - - let report = match deserialize::>(self.req).await { - Ok(v) => v, - Err(e) => { - tracing::error!(err=?e, "unable to decode REPORT body"); - return Ok(Response::builder() - .status(400) - .body(text_body("Bad request"))?) 
- } - }; - - // Multiget is really like a propfind where Depth: 0|1|Infinity is replaced by an arbitrary - // list of URLs - let multiget = match report { - cal::Report::Multiget(m) => m, - _ => return Ok(Response::builder() - .status(501) - .body(text_body("Not implemented"))?), - }; - - // Getting the list of nodes - let (mut ok_node, mut not_found) = (Vec::new(), Vec::new()); - for h in multiget.href.into_iter() { - let maybe_collected_node = match Path::new(h.0.as_str()) { - Ok(Path::Abs(p)) => RootNode{}.fetch(&self.user, p.as_slice()).await.or(Err(h)), - Ok(Path::Rel(p)) => self.node.fetch(&self.user, p.as_slice()).await.or(Err(h)), - Err(_) => Err(h), - }; - - match maybe_collected_node { - Ok(v) => ok_node.push(v), - Err(h) => not_found.push(h), - }; - } - - // Getting props - let props = match multiget.selector { - None | Some(cal::CalendarSelector::AllProp) => Some(dav::PropName(ALLPROP.to_vec())), - Some(cal::CalendarSelector::PropName) => None, - Some(cal::CalendarSelector::Prop(inner)) => Some(inner), - }; - - serialize(status, Self::multistatus(&self.user, ok_node, not_found, props)) - } - - /// PROPFIND is the standard way to fetch WebDAV properties - async fn propfind(self) -> Result>> { - let depth = depth(&self.req); - if matches!(depth, dav::Depth::Infinity) { - return Ok(Response::builder() - .status(501) - .body(text_body("Depth: Infinity not implemented"))?) - } - - let status = hyper::StatusCode::from_u16(207)?; - - // A client may choose not to submit a request body. An empty PROPFIND - // request body MUST be treated as if it were an 'allprop' request. - // @FIXME here we handle any invalid data as an allprop, an empty request is thus correctly - // handled, but corrupted requests are also silently handled as allprop. - let propfind = deserialize::>(self.req).await.unwrap_or_else(|_| dav::PropFind::::AllProp(None)); - tracing::debug!(recv=?propfind, "inferred propfind request"); - - // Collect nodes as PROPFIND is not limited to the targeted node - let mut nodes = vec![]; - if matches!(depth, dav::Depth::One | dav::Depth::Infinity) { - nodes.extend(self.node.children(&self.user).await); - } - nodes.push(self.node); - - // Expand properties request - let propname = match propfind { - dav::PropFind::PropName => None, - dav::PropFind::AllProp(None) => Some(dav::PropName(ALLPROP.to_vec())), - dav::PropFind::AllProp(Some(dav::Include(mut include))) => { - include.extend_from_slice(&ALLPROP); - Some(dav::PropName(include)) - }, - dav::PropFind::Prop(inner) => Some(inner), - }; - - // Not Found is currently impossible considering the way we designed this function - let not_found = vec![]; - serialize(status, Self::multistatus(&self.user, nodes, not_found, propname)) - } - - // --- Internal functions --- - /// Utility function to build a multistatus response from - /// a list of DavNodes - fn multistatus(user: &ArcUser, nodes: Vec>, not_found: Vec, props: Option>) -> dav::Multistatus { - // Collect properties on existing objects - let mut responses: Vec> = match props { - Some(props) => nodes.into_iter().map(|n| n.response_props(user, props.clone())).collect(), - None => nodes.into_iter().map(|n| n.response_propname(user)).collect(), - }; - - // Register not found objects only if relevant - if !not_found.is_empty() { - responses.push(dav::Response { - status_or_propstat: dav::StatusOrPropstat::Status(not_found, dav::Status(hyper::StatusCode::NOT_FOUND)), - error: None, - location: None, - responsedescription: None, - }); - } - - // Build response - dav::Multistatus:: { - 
responses, - responsedescription: None, - } - } -} - -#[derive(Clone)] -struct RootNode {} -impl DavNode for RootNode { - fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str]) -> BoxFuture<'a, Result>> { - if path.len() == 0 { - let this = self.clone(); - return async { Ok(Box::new(this) as Box) }.boxed(); - } - - if path[0] == user.username { - let child = Box::new(HomeNode {}); - return child.fetch(user, &path[1..]); - } - - async { Err(anyhow!("Not found")) }.boxed() - } - - fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec>> { - async { vec![Box::new(HomeNode { }) as Box] }.boxed() - } - - fn path(&self, user: &ArcUser) -> String { - "/".into() - } - - fn supported_properties(&self, user: &ArcUser) -> dav::PropName { - dav::PropName(vec![ - dav::PropertyRequest::DisplayName, - dav::PropertyRequest::ResourceType, - dav::PropertyRequest::GetContentType, - dav::PropertyRequest::Extension(all::PropertyRequest::Acl(acl::PropertyRequest::CurrentUserPrincipal)), - ]) - } - fn properties(&self, user: &ArcUser, prop: dav::PropName) -> Vec> { - prop.0.into_iter().map(|n| match n { - dav::PropertyRequest::DisplayName => dav::AnyProperty::Value(dav::Property::DisplayName("DAV Root".to_string())), - dav::PropertyRequest::ResourceType => dav::AnyProperty::Value(dav::Property::ResourceType(vec![ - dav::ResourceType::Collection, - ])), - dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("httpd/unix-directory".into())), - dav::PropertyRequest::Extension(all::PropertyRequest::Acl(acl::PropertyRequest::CurrentUserPrincipal)) => - dav::AnyProperty::Value(dav::Property::Extension(all::Property::Acl(acl::Property::CurrentUserPrincipal(acl::User::Authenticated(dav::Href(HomeNode{}.path(user))))))), - v => dav::AnyProperty::Request(v), - }).collect() - } -} - -#[derive(Clone)] -struct HomeNode {} -impl DavNode for HomeNode { - fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str]) -> BoxFuture<'a, Result>> { - if path.len() == 0 { - let node = Box::new(self.clone()) as Box; - return async { Ok(node) }.boxed() - } - - if path[0] == "calendar" { - return async { - let child = Box::new(CalendarListNode::new(user).await?); - child.fetch(user, &path[1..]).await - }.boxed(); - } - - async { Err(anyhow!("Not found")) }.boxed() - } - - fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec>> { - async { - CalendarListNode::new(user).await - .map(|c| vec![Box::new(c) as Box]) - .unwrap_or(vec![]) - }.boxed() - } - - fn path(&self, user: &ArcUser) -> String { - format!("/{}/", user.username) - } - - fn supported_properties(&self, user: &ArcUser) -> dav::PropName { - dav::PropName(vec![ - dav::PropertyRequest::DisplayName, - dav::PropertyRequest::ResourceType, - dav::PropertyRequest::GetContentType, - dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::CalendarHomeSet)), - ]) - } - fn properties(&self, user: &ArcUser, prop: dav::PropName) -> Vec> { - prop.0.into_iter().map(|n| match n { - dav::PropertyRequest::DisplayName => dav::AnyProperty::Value(dav::Property::DisplayName(format!("{} home", user.username))), - dav::PropertyRequest::ResourceType => dav::AnyProperty::Value(dav::Property::ResourceType(vec![ - dav::ResourceType::Collection, - dav::ResourceType::Extension(all::ResourceType::Acl(acl::ResourceType::Principal)), - ])), - dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("httpd/unix-directory".into())), - 
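// --- [editorial aside: illustrative sketch, not part of this patch] ----------
// The calendar-home-set property handled just below (its href is still a
// todo!() at this point, and is hardcoded to /{username}/calendar/ later in
// the series) is defined in RFC 4791, section 6.2.1. On the wire it looks
// roughly like:
const _EXAMPLE_CALENDAR_HOME_SET: &str =
    r#"<C:calendar-home-set xmlns:D="DAV:" xmlns:C="urn:ietf:params:xml:ns:caldav">
  <D:href>/alice/calendar/</D:href>
</C:calendar-home-set>"#;
// --- [end editorial aside] ----------------------------------------------------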
dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::CalendarHomeSet)) => - dav::AnyProperty::Value(dav::Property::Extension(all::Property::Cal(cal::Property::CalendarHomeSet(dav::Href(/*CalendarListNode{}.path(user)*/ todo!()))))), - v => dav::AnyProperty::Request(v), - }).collect() - } -} - -#[derive(Clone)] -struct CalendarListNode { - list: Vec, -} -impl CalendarListNode { - async fn new(user: &ArcUser) -> Result { - let list = user.calendars.list(user).await?; - Ok(Self { list }) - } -} -impl DavNode for CalendarListNode { - fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str]) -> BoxFuture<'a, Result>> { - if path.len() == 0 { - let node = Box::new(self.clone()) as Box; - return async { Ok(node) }.boxed(); - } - - async { - let cal = user.calendars.open(user, path[0]).await?.ok_or(anyhow!("Not found"))?; - let child = Box::new(CalendarNode { - col: cal, - calname: path[0].to_string() - }); - child.fetch(user, &path[1..]).await - }.boxed() - } - - fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec>> { - let list = self.list.clone(); - async move { - //@FIXME maybe we want to be lazy here?! - futures::stream::iter(list.iter()) - .filter_map(|name| async move { - user.calendars.open(user, name).await - .ok() - .flatten() - .map(|v| (name, v)) - }) - .map(|(name, cal)| Box::new(CalendarNode { - col: cal, - calname: name.to_string(), - }) as Box) - .collect::>>() - .await - }.boxed() - } - - fn path(&self, user: &ArcUser) -> String { - format!("/{}/calendar/", user.username) - } - - fn supported_properties(&self, user: &ArcUser) -> dav::PropName { - dav::PropName(vec![ - dav::PropertyRequest::DisplayName, - dav::PropertyRequest::ResourceType, - dav::PropertyRequest::GetContentType, - ]) - } - fn properties(&self, user: &ArcUser, prop: dav::PropName) -> Vec> { - prop.0.into_iter().map(|n| match n { - dav::PropertyRequest::DisplayName => dav::AnyProperty::Value(dav::Property::DisplayName(format!("{} calendars", user.username))), - dav::PropertyRequest::ResourceType => dav::AnyProperty::Value(dav::Property::ResourceType(vec![dav::ResourceType::Collection])), - dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("httpd/unix-directory".into())), - v => dav::AnyProperty::Request(v), - }).collect() - } -} - -#[derive(Clone)] -struct CalendarNode { - col: Arc, - calname: String, -} -impl DavNode for CalendarNode { - fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str]) -> BoxFuture<'a, Result>> { - if path.len() == 0 { - let node = Box::new(self.clone()) as Box; - return async { Ok(node) }.boxed() - } - - let col = self.col.clone(); - let calname = self.calname.clone(); - async move { - if let Some(blob_id) = col.dag().await.idx_by_filename.get(path[0]) { - let child = Box::new(EventNode { - col: col.clone(), - calname, - filename: path[0].to_string(), - blob_id: *blob_id, - }); - return child.fetch(user, &path[1..]).await - } - - Err(anyhow!("Not found")) - }.boxed() - } - - fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec>> { - let col = self.col.clone(); - let calname = self.calname.clone(); - - async move { - col.dag().await.idx_by_filename.iter().map(|(filename, blob_id)| { - Box::new(EventNode { - col: col.clone(), - calname: calname.clone(), - filename: filename.to_string(), - blob_id: *blob_id, - }) as Box - }).collect() - }.boxed() - } - - fn path(&self, user: &ArcUser) -> String { - format!("/{}/calendar/{}/", user.username, self.calname) - } - - fn supported_properties(&self, user: 
&ArcUser) -> dav::PropName { - dav::PropName(vec![ - dav::PropertyRequest::DisplayName, - dav::PropertyRequest::ResourceType, - dav::PropertyRequest::GetContentType, - dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::SupportedCalendarComponentSet)), - ]) - } - fn properties(&self, _user: &ArcUser, prop: dav::PropName) -> Vec> { - prop.0.into_iter().map(|n| match n { - dav::PropertyRequest::DisplayName => dav::AnyProperty::Value(dav::Property::DisplayName(format!("{} calendar", self.calname))), - dav::PropertyRequest::ResourceType => dav::AnyProperty::Value(dav::Property::ResourceType(vec![ - dav::ResourceType::Collection, - dav::ResourceType::Extension(all::ResourceType::Cal(cal::ResourceType::Calendar)), - ])), - //dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("httpd/unix-directory".into())), - //@FIXME seems wrong but seems to be what Thunderbird expects... - dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("text/calendar".into())), - dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::SupportedCalendarComponentSet)) - => dav::AnyProperty::Value(dav::Property::Extension(all::Property::Cal(cal::Property::SupportedCalendarComponentSet(vec![ - cal::CompSupport(cal::Component::VEvent), - ])))), - v => dav::AnyProperty::Request(v), - }).collect() - } -} - -const FAKE_ICS: &str = r#"BEGIN:VCALENDAR -VERSION:2.0 -PRODID:-//Example Corp.//CalDAV Client//EN -BEGIN:VTIMEZONE -LAST-MODIFIED:20040110T032845Z -TZID:US/Eastern -BEGIN:DAYLIGHT -DTSTART:20000404T020000 -RRULE:FREQ=YEARLY;BYDAY=1SU;BYMONTH=4 -TZNAME:EDT -TZOFFSETFROM:-0500 -TZOFFSETTO:-0400 -END:DAYLIGHT -BEGIN:STANDARD -DTSTART:20001026T020000 -RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10 -TZNAME:EST -TZOFFSETFROM:-0400 -TZOFFSETTO:-0500 -END:STANDARD -END:VTIMEZONE -BEGIN:VEVENT -DTSTAMP:20240406T001102Z -DTSTART;TZID=US/Eastern:20240406T100000 -DURATION:PT1H -SUMMARY:Event #1 -Description:Go Steelers! 
-UID:74855313FA803DA593CD579A@example.com -END:VEVENT -END:VCALENDAR"#; - -#[derive(Clone)] -struct EventNode { - col: Arc, - calname: String, - filename: String, - blob_id: BlobId, -} -impl DavNode for EventNode { - fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str]) -> BoxFuture<'a, Result>> { - if path.len() == 0 { - let node = Box::new(self.clone()) as Box; - return async { Ok(node) }.boxed() - } - - async { Err(anyhow!("Not found")) }.boxed() - } - - fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec>> { - async { vec![] }.boxed() - } - - fn path(&self, user: &ArcUser) -> String { - format!("/{}/calendar/{}/{}", user.username, self.calname, self.filename) - } - - fn supported_properties(&self, user: &ArcUser) -> dav::PropName { - dav::PropName(vec![ - dav::PropertyRequest::DisplayName, - dav::PropertyRequest::ResourceType, - dav::PropertyRequest::GetEtag, - dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::CalendarData(cal::CalendarDataRequest::default()))), - ]) - } - fn properties(&self, _user: &ArcUser, prop: dav::PropName) -> Vec> { - prop.0.into_iter().map(|n| match n { - dav::PropertyRequest::DisplayName => dav::AnyProperty::Value(dav::Property::DisplayName(format!("{} event", self.filename))), - dav::PropertyRequest::ResourceType => dav::AnyProperty::Value(dav::Property::ResourceType(vec![])), - dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("text/calendar".into())), - dav::PropertyRequest::GetEtag => dav::AnyProperty::Value(dav::Property::GetEtag("\"abcdefg\"".into())), - dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::CalendarData(req))) => - dav::AnyProperty::Value(dav::Property::Extension(all::Property::Cal(cal::Property::CalendarData(cal::CalendarDataPayload { - mime: None, - payload: FAKE_ICS.into() - })))), - v => dav::AnyProperty::Request(v), - }).collect() - } -} - - - // // // diff --git a/aero-proto/src/dav/node.rs b/aero-proto/src/dav/node.rs new file mode 100644 index 0000000..afeeeff --- /dev/null +++ b/aero-proto/src/dav/node.rs @@ -0,0 +1,85 @@ +use anyhow::Result; +use futures::future::BoxFuture; + +use aero_dav::types as dav; +use aero_dav::realization::All; +use aero_collections::user::User; + +type ArcUser = std::sync::Arc; + +/// A DAV node should implement the following methods +/// @FIXME not satisfied by BoxFutures but I have no better idea currently +pub(crate) trait DavNode: Send { + // recurence, filesystem hierarchy + /// This node direct children + fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec>>; + /// Recursively fetch a child (progress inside the filesystem hierarchy) + fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str]) -> BoxFuture<'a, Result>>; + + // node properties + /// Get the path + fn path(&self, user: &ArcUser) -> String; + /// Get the supported WebDAV properties + fn supported_properties(&self, user: &ArcUser) -> dav::PropName; + /// Get the values for the given properties + fn properties(&self, user: &ArcUser, prop: dav::PropName) -> Vec>; + + //@FIXME maybe add etag, maybe add a way to set content + + /// Utility function to get a propname response from a node + fn response_propname(&self, user: &ArcUser) -> dav::Response { + dav::Response { + status_or_propstat: dav::StatusOrPropstat::PropStat( + dav::Href(self.path(user)), + vec![ + dav::PropStat { + status: dav::Status(hyper::StatusCode::OK), + prop: 
dav::AnyProp(self.supported_properties(user).0.into_iter().map(dav::AnyProperty::Request).collect()), + error: None, + responsedescription: None, + } + ], + ), + error: None, + location: None, + responsedescription: None + } + } + + /// Utility function to get a prop response from a node & a list of propname + fn response_props(&self, user: &ArcUser, props: dav::PropName) -> dav::Response { + let mut prop_desc = vec![]; + let (found, not_found): (Vec<_>, Vec<_>) = self.properties(user, props).into_iter().partition(|v| matches!(v, dav::AnyProperty::Value(_))); + + // If at least one property has been found on this object, adding a HTTP 200 propstat to + // the response + if !found.is_empty() { + prop_desc.push(dav::PropStat { + status: dav::Status(hyper::StatusCode::OK), + prop: dav::AnyProp(found), + error: None, + responsedescription: None, + }); + } + + // If at least one property can't be found on this object, adding a HTTP 404 propstat to + // the response + if !not_found.is_empty() { + prop_desc.push(dav::PropStat { + status: dav::Status(hyper::StatusCode::NOT_FOUND), + prop: dav::AnyProp(not_found), + error: None, + responsedescription: None, + }) + } + + // Build the finale response + dav::Response { + status_or_propstat: dav::StatusOrPropstat::PropStat(dav::Href(self.path(user)), prop_desc), + error: None, + location: None, + responsedescription: None + } + } +} + diff --git a/aero-proto/src/dav/resource.rs b/aero-proto/src/dav/resource.rs new file mode 100644 index 0000000..9ad662a --- /dev/null +++ b/aero-proto/src/dav/resource.rs @@ -0,0 +1,340 @@ +use std::sync::Arc; +type ArcUser = std::sync::Arc; + +use anyhow::{anyhow, Result}; +use futures::stream::StreamExt; +use futures::{future::BoxFuture, future::FutureExt}; + +use aero_collections::{user::User, calendar::Calendar, davdag::BlobId}; +use aero_dav::types as dav; +use aero_dav::caltypes as cal; +use aero_dav::acltypes as acl; +use aero_dav::realization::{All, self as all}; + + +use crate::dav::node::DavNode; + +#[derive(Clone)] +pub(crate) struct RootNode {} +impl DavNode for RootNode { + fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str]) -> BoxFuture<'a, Result>> { + if path.len() == 0 { + let this = self.clone(); + return async { Ok(Box::new(this) as Box) }.boxed(); + } + + if path[0] == user.username { + let child = Box::new(HomeNode {}); + return child.fetch(user, &path[1..]); + } + + async { Err(anyhow!("Not found")) }.boxed() + } + + fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec>> { + async { vec![Box::new(HomeNode { }) as Box] }.boxed() + } + + fn path(&self, user: &ArcUser) -> String { + "/".into() + } + + fn supported_properties(&self, user: &ArcUser) -> dav::PropName { + dav::PropName(vec![ + dav::PropertyRequest::DisplayName, + dav::PropertyRequest::ResourceType, + dav::PropertyRequest::GetContentType, + dav::PropertyRequest::Extension(all::PropertyRequest::Acl(acl::PropertyRequest::CurrentUserPrincipal)), + ]) + } + fn properties(&self, user: &ArcUser, prop: dav::PropName) -> Vec> { + prop.0.into_iter().map(|n| match n { + dav::PropertyRequest::DisplayName => dav::AnyProperty::Value(dav::Property::DisplayName("DAV Root".to_string())), + dav::PropertyRequest::ResourceType => dav::AnyProperty::Value(dav::Property::ResourceType(vec![ + dav::ResourceType::Collection, + ])), + dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("httpd/unix-directory".into())), + 
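// --- [editorial aside: illustrative sketch, not part of this patch] ----------
// The found / not-found partition implemented in response_props (node.rs,
// above) produces 207 Multi-Status bodies with, per resource, a 200 propstat
// for the properties that resolved and, when needed, a 404 propstat for the
// ones that did not. Roughly (property values invented for illustration):
const _EXAMPLE_MULTISTATUS: &str = r#"<?xml version="1.0" encoding="utf-8" ?>
<D:multistatus xmlns:D="DAV:">
  <D:response>
    <D:href>/alice/calendar/personal/</D:href>
    <D:propstat>
      <D:prop><D:displayname>personal calendar</D:displayname></D:prop>
      <D:status>HTTP/1.1 200 OK</D:status>
    </D:propstat>
    <D:propstat>
      <D:prop><D:getcontentlength/></D:prop>
      <D:status>HTTP/1.1 404 Not Found</D:status>
    </D:propstat>
  </D:response>
</D:multistatus>"#;
// --- [end editorial aside] ----------------------------------------------------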
dav::PropertyRequest::Extension(all::PropertyRequest::Acl(acl::PropertyRequest::CurrentUserPrincipal)) => + dav::AnyProperty::Value(dav::Property::Extension(all::Property::Acl(acl::Property::CurrentUserPrincipal(acl::User::Authenticated(dav::Href(HomeNode{}.path(user))))))), + v => dav::AnyProperty::Request(v), + }).collect() + } +} + +#[derive(Clone)] +pub(crate) struct HomeNode {} +impl DavNode for HomeNode { + fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str]) -> BoxFuture<'a, Result>> { + if path.len() == 0 { + let node = Box::new(self.clone()) as Box; + return async { Ok(node) }.boxed() + } + + if path[0] == "calendar" { + return async { + let child = Box::new(CalendarListNode::new(user).await?); + child.fetch(user, &path[1..]).await + }.boxed(); + } + + async { Err(anyhow!("Not found")) }.boxed() + } + + fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec>> { + async { + CalendarListNode::new(user).await + .map(|c| vec![Box::new(c) as Box]) + .unwrap_or(vec![]) + }.boxed() + } + + fn path(&self, user: &ArcUser) -> String { + format!("/{}/", user.username) + } + + fn supported_properties(&self, user: &ArcUser) -> dav::PropName { + dav::PropName(vec![ + dav::PropertyRequest::DisplayName, + dav::PropertyRequest::ResourceType, + dav::PropertyRequest::GetContentType, + dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::CalendarHomeSet)), + ]) + } + fn properties(&self, user: &ArcUser, prop: dav::PropName) -> Vec> { + prop.0.into_iter().map(|n| match n { + dav::PropertyRequest::DisplayName => dav::AnyProperty::Value(dav::Property::DisplayName(format!("{} home", user.username))), + dav::PropertyRequest::ResourceType => dav::AnyProperty::Value(dav::Property::ResourceType(vec![ + dav::ResourceType::Collection, + dav::ResourceType::Extension(all::ResourceType::Acl(acl::ResourceType::Principal)), + ])), + dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("httpd/unix-directory".into())), + dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::CalendarHomeSet)) => + dav::AnyProperty::Value(dav::Property::Extension(all::Property::Cal(cal::Property::CalendarHomeSet(dav::Href(/*CalendarListNode{}.path(user)*/ todo!()))))), + v => dav::AnyProperty::Request(v), + }).collect() + } +} + +#[derive(Clone)] +pub(crate) struct CalendarListNode { + list: Vec, +} +impl CalendarListNode { + async fn new(user: &ArcUser) -> Result { + let list = user.calendars.list(user).await?; + Ok(Self { list }) + } +} +impl DavNode for CalendarListNode { + fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str]) -> BoxFuture<'a, Result>> { + if path.len() == 0 { + let node = Box::new(self.clone()) as Box; + return async { Ok(node) }.boxed(); + } + + async { + let cal = user.calendars.open(user, path[0]).await?.ok_or(anyhow!("Not found"))?; + let child = Box::new(CalendarNode { + col: cal, + calname: path[0].to_string() + }); + child.fetch(user, &path[1..]).await + }.boxed() + } + + fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec>> { + let list = self.list.clone(); + async move { + //@FIXME maybe we want to be lazy here?! 
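// --- [editorial aside: illustrative sketch, not part of this patch] ----------
// The @FIXME above refers to the stream pipeline that follows: every calendar
// returned by list() is opened eagerly to build the children vector. A
// self-contained toy version of the same futures combinator chain, with
// invented names standing in for user.calendars.open():
use futures::stream::{self, StreamExt};

async fn open_all_demo(names: Vec<&str>) -> Vec<String> {
    stream::iter(names)
        .filter_map(|name| async move {
            // stand-in for a fallible async open(): pretend "broken" cannot be opened
            (name != "broken").then(|| format!("calendar:{}", name))
        })
        .collect()
        .await
}
// --- [end editorial aside] ----------------------------------------------------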
+ futures::stream::iter(list.iter()) + .filter_map(|name| async move { + user.calendars.open(user, name).await + .ok() + .flatten() + .map(|v| (name, v)) + }) + .map(|(name, cal)| Box::new(CalendarNode { + col: cal, + calname: name.to_string(), + }) as Box) + .collect::>>() + .await + }.boxed() + } + + fn path(&self, user: &ArcUser) -> String { + format!("/{}/calendar/", user.username) + } + + fn supported_properties(&self, user: &ArcUser) -> dav::PropName { + dav::PropName(vec![ + dav::PropertyRequest::DisplayName, + dav::PropertyRequest::ResourceType, + dav::PropertyRequest::GetContentType, + ]) + } + fn properties(&self, user: &ArcUser, prop: dav::PropName) -> Vec> { + prop.0.into_iter().map(|n| match n { + dav::PropertyRequest::DisplayName => dav::AnyProperty::Value(dav::Property::DisplayName(format!("{} calendars", user.username))), + dav::PropertyRequest::ResourceType => dav::AnyProperty::Value(dav::Property::ResourceType(vec![dav::ResourceType::Collection])), + dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("httpd/unix-directory".into())), + v => dav::AnyProperty::Request(v), + }).collect() + } +} + +#[derive(Clone)] +pub(crate) struct CalendarNode { + col: Arc, + calname: String, +} +impl DavNode for CalendarNode { + fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str]) -> BoxFuture<'a, Result>> { + if path.len() == 0 { + let node = Box::new(self.clone()) as Box; + return async { Ok(node) }.boxed() + } + + let col = self.col.clone(); + let calname = self.calname.clone(); + async move { + if let Some(blob_id) = col.dag().await.idx_by_filename.get(path[0]) { + let child = Box::new(EventNode { + col: col.clone(), + calname, + filename: path[0].to_string(), + blob_id: *blob_id, + }); + return child.fetch(user, &path[1..]).await + } + + Err(anyhow!("Not found")) + }.boxed() + } + + fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec>> { + let col = self.col.clone(); + let calname = self.calname.clone(); + + async move { + col.dag().await.idx_by_filename.iter().map(|(filename, blob_id)| { + Box::new(EventNode { + col: col.clone(), + calname: calname.clone(), + filename: filename.to_string(), + blob_id: *blob_id, + }) as Box + }).collect() + }.boxed() + } + + fn path(&self, user: &ArcUser) -> String { + format!("/{}/calendar/{}/", user.username, self.calname) + } + + fn supported_properties(&self, user: &ArcUser) -> dav::PropName { + dav::PropName(vec![ + dav::PropertyRequest::DisplayName, + dav::PropertyRequest::ResourceType, + dav::PropertyRequest::GetContentType, + dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::SupportedCalendarComponentSet)), + ]) + } + fn properties(&self, _user: &ArcUser, prop: dav::PropName) -> Vec> { + prop.0.into_iter().map(|n| match n { + dav::PropertyRequest::DisplayName => dav::AnyProperty::Value(dav::Property::DisplayName(format!("{} calendar", self.calname))), + dav::PropertyRequest::ResourceType => dav::AnyProperty::Value(dav::Property::ResourceType(vec![ + dav::ResourceType::Collection, + dav::ResourceType::Extension(all::ResourceType::Cal(cal::ResourceType::Calendar)), + ])), + //dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("httpd/unix-directory".into())), + //@FIXME seems wrong but seems to be what Thunderbird expects... 
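// --- [editorial aside: illustrative sketch, not part of this patch] ----------
// The @FIXME above concerns the getcontenttype value returned for the calendar
// collection in the arm that follows (text/calendar rather than
// httpd/unix-directory). The supported-calendar-component-set advertised a
// little further down limits the collection to VEVENT; per RFC 4791,
// section 5.2.3, that property serializes roughly to:
const _EXAMPLE_SUPPORTED_COMPONENT_SET: &str =
    r#"<C:supported-calendar-component-set xmlns:C="urn:ietf:params:xml:ns:caldav">
  <C:comp name="VEVENT"/>
</C:supported-calendar-component-set>"#;
// --- [end editorial aside] ----------------------------------------------------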
+ dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("text/calendar".into())), + dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::SupportedCalendarComponentSet)) + => dav::AnyProperty::Value(dav::Property::Extension(all::Property::Cal(cal::Property::SupportedCalendarComponentSet(vec![ + cal::CompSupport(cal::Component::VEvent), + ])))), + v => dav::AnyProperty::Request(v), + }).collect() + } +} + +const FAKE_ICS: &str = r#"BEGIN:VCALENDAR +VERSION:2.0 +PRODID:-//Example Corp.//CalDAV Client//EN +BEGIN:VTIMEZONE +LAST-MODIFIED:20040110T032845Z +TZID:US/Eastern +BEGIN:DAYLIGHT +DTSTART:20000404T020000 +RRULE:FREQ=YEARLY;BYDAY=1SU;BYMONTH=4 +TZNAME:EDT +TZOFFSETFROM:-0500 +TZOFFSETTO:-0400 +END:DAYLIGHT +BEGIN:STANDARD +DTSTART:20001026T020000 +RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10 +TZNAME:EST +TZOFFSETFROM:-0400 +TZOFFSETTO:-0500 +END:STANDARD +END:VTIMEZONE +BEGIN:VEVENT +DTSTAMP:20240406T001102Z +DTSTART;TZID=US/Eastern:20240406T100000 +DURATION:PT1H +SUMMARY:Event #1 +Description:Go Steelers! +UID:74855313FA803DA593CD579A@example.com +END:VEVENT +END:VCALENDAR"#; + +#[derive(Clone)] +pub(crate) struct EventNode { + col: Arc, + calname: String, + filename: String, + blob_id: BlobId, +} +impl DavNode for EventNode { + fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str]) -> BoxFuture<'a, Result>> { + if path.len() == 0 { + let node = Box::new(self.clone()) as Box; + return async { Ok(node) }.boxed() + } + + async { Err(anyhow!("Not found")) }.boxed() + } + + fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec>> { + async { vec![] }.boxed() + } + + fn path(&self, user: &ArcUser) -> String { + format!("/{}/calendar/{}/{}", user.username, self.calname, self.filename) + } + + fn supported_properties(&self, user: &ArcUser) -> dav::PropName { + dav::PropName(vec![ + dav::PropertyRequest::DisplayName, + dav::PropertyRequest::ResourceType, + dav::PropertyRequest::GetEtag, + dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::CalendarData(cal::CalendarDataRequest::default()))), + ]) + } + fn properties(&self, _user: &ArcUser, prop: dav::PropName) -> Vec> { + prop.0.into_iter().map(|n| match n { + dav::PropertyRequest::DisplayName => dav::AnyProperty::Value(dav::Property::DisplayName(format!("{} event", self.filename))), + dav::PropertyRequest::ResourceType => dav::AnyProperty::Value(dav::Property::ResourceType(vec![])), + dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("text/calendar".into())), + dav::PropertyRequest::GetEtag => dav::AnyProperty::Value(dav::Property::GetEtag("\"abcdefg\"".into())), + dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::CalendarData(req))) => + dav::AnyProperty::Value(dav::Property::Extension(all::Property::Cal(cal::Property::CalendarData(cal::CalendarDataPayload { + mime: None, + payload: FAKE_ICS.into() + })))), + v => dav::AnyProperty::Request(v), + }).collect() + } +} -- cgit v1.2.3 From b6c656de8f8e8caf75dfe3bea9096576f3263cf4 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Sat, 20 Apr 2024 20:01:07 +0200 Subject: Add a virtual node CreateEventNode --- aero-proto/src/dav/controller.rs | 36 ++++++++++++---- aero-proto/src/dav/node.rs | 11 ++++- aero-proto/src/dav/resource.rs | 88 ++++++++++++++++++++++++++++++---------- 3 files changed, 105 insertions(+), 30 deletions(-) diff --git a/aero-proto/src/dav/controller.rs b/aero-proto/src/dav/controller.rs index 
79ead0a..243a455 100644 --- a/aero-proto/src/dav/controller.rs +++ b/aero-proto/src/dav/controller.rs @@ -2,6 +2,7 @@ use anyhow::Result; use http_body_util::combinators::BoxBody; use hyper::body::Incoming; use hyper::{Request, Response, body::Bytes}; +use http_body_util::StreamBody; use aero_collections::user::User; use aero_dav::types as dav; @@ -39,7 +40,8 @@ impl Controller { let path_segments: Vec<_> = path.split("/").filter(|s| *s != "").collect(); let method = req.method().as_str().to_uppercase(); - let node = match (RootNode {}).fetch(&user, &path_segments).await { + let can_create = matches!(method.as_str(), "PUT" | "MKCOL" | "MKCALENDAR"); + let node = match (RootNode {}).fetch(&user, &path_segments, can_create).await{ Ok(v) => v, Err(e) => { tracing::warn!(err=?e, "dav node fetch failed"); @@ -48,6 +50,7 @@ impl Controller { .body(codec::text_body("Resource not found"))?) } }; + let ctrl = Self { node, user, req }; match method.as_str() { @@ -63,7 +66,8 @@ impl Controller { .body(codec::text_body(""))?) }, "PUT" => { - todo!(); + let to_create = path_segments.last().expect("Bound checked earlier in this fx"); + ctrl.put(to_create).await }, "DELETE" => { todo!(); @@ -77,7 +81,7 @@ impl Controller { } - // --- Public API --- + // --- Per-method functions --- /// REPORT has been first described in the "Versioning Extension" of WebDAV /// It allows more complex queries compared to PROPFIND @@ -111,8 +115,8 @@ impl Controller { let (mut ok_node, mut not_found) = (Vec::new(), Vec::new()); for h in multiget.href.into_iter() { let maybe_collected_node = match Path::new(h.0.as_str()) { - Ok(Path::Abs(p)) => RootNode{}.fetch(&self.user, p.as_slice()).await.or(Err(h)), - Ok(Path::Rel(p)) => self.node.fetch(&self.user, p.as_slice()).await.or(Err(h)), + Ok(Path::Abs(p)) => RootNode{}.fetch(&self.user, p.as_slice(), false).await.or(Err(h)), + Ok(Path::Rel(p)) => self.node.fetch(&self.user, p.as_slice(), false).await.or(Err(h)), Err(_) => Err(h), }; @@ -173,9 +177,25 @@ impl Controller { serialize(status, Self::multistatus(&self.user, nodes, not_found, propname)) } - // --- Internal functions --- - /// Utility function to build a multistatus response from - /// a list of DavNodes + async fn put(self, child: &str) -> Result>> { + todo!() + } + + async fn get(self) -> Result>> { + todo!() + /*let stream = StreamBody::new(self.node.get().map(|v| Ok(Frame::data(v)))); + let boxed_body = BoxBody::new(stream); + + let response = Response::builder() + .status(200) + //.header("content-type", "application/xml; charset=\"utf-8\"") + .body(boxed_body)?; + + Ok(response)*/ + } + + // --- Common utulity functions --- + /// Build a multistatus response from a list of DavNodes fn multistatus(user: &ArcUser, nodes: Vec>, not_found: Vec, props: Option>) -> dav::Multistatus { // Collect properties on existing objects let mut responses: Vec> = match props { diff --git a/aero-proto/src/dav/node.rs b/aero-proto/src/dav/node.rs index afeeeff..b0f97a5 100644 --- a/aero-proto/src/dav/node.rs +++ b/aero-proto/src/dav/node.rs @@ -7,6 +7,11 @@ use aero_collections::user::User; type ArcUser = std::sync::Arc; +pub(crate) enum PutPolicy { + CreateOnly, + ReplaceEtags(String), +} + /// A DAV node should implement the following methods /// @FIXME not satisfied by BoxFutures but I have no better idea currently pub(crate) trait DavNode: Send { @@ -14,7 +19,7 @@ pub(crate) trait DavNode: Send { /// This node direct children fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec>>; /// Recursively fetch a child 
(progress inside the filesystem hierarchy) - fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str]) -> BoxFuture<'a, Result>>; + fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str], create: bool) -> BoxFuture<'a, Result>>; // node properties /// Get the path @@ -23,6 +28,10 @@ pub(crate) trait DavNode: Send { fn supported_properties(&self, user: &ArcUser) -> dav::PropName; /// Get the values for the given properties fn properties(&self, user: &ArcUser, prop: dav::PropName) -> Vec>; + /// Put a child + //fn put(&self, policy: PutPolicy, stream: TryStream) -> BoxFuture>; + /// Get content + //fn content(&self) -> TryStream; //@FIXME maybe add etag, maybe add a way to set content diff --git a/aero-proto/src/dav/resource.rs b/aero-proto/src/dav/resource.rs index 9ad662a..fec8bcb 100644 --- a/aero-proto/src/dav/resource.rs +++ b/aero-proto/src/dav/resource.rs @@ -17,7 +17,7 @@ use crate::dav::node::DavNode; #[derive(Clone)] pub(crate) struct RootNode {} impl DavNode for RootNode { - fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str]) -> BoxFuture<'a, Result>> { + fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str], create: bool) -> BoxFuture<'a, Result>> { if path.len() == 0 { let this = self.clone(); return async { Ok(Box::new(this) as Box) }.boxed(); @@ -25,9 +25,10 @@ impl DavNode for RootNode { if path[0] == user.username { let child = Box::new(HomeNode {}); - return child.fetch(user, &path[1..]); + return child.fetch(user, &path[1..], create); } + //@NOTE: We can't create a node at this level async { Err(anyhow!("Not found")) }.boxed() } @@ -64,19 +65,20 @@ impl DavNode for RootNode { #[derive(Clone)] pub(crate) struct HomeNode {} impl DavNode for HomeNode { - fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str]) -> BoxFuture<'a, Result>> { + fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str], create: bool) -> BoxFuture<'a, Result>> { if path.len() == 0 { let node = Box::new(self.clone()) as Box; return async { Ok(node) }.boxed() } if path[0] == "calendar" { - return async { + return async move { let child = Box::new(CalendarListNode::new(user).await?); - child.fetch(user, &path[1..]).await + child.fetch(user, &path[1..], create).await }.boxed(); } - + + //@NOTE: we can't create a node at this level async { Err(anyhow!("Not found")) }.boxed() } @@ -126,19 +128,20 @@ impl CalendarListNode { } } impl DavNode for CalendarListNode { - fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str]) -> BoxFuture<'a, Result>> { + fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str], create: bool) -> BoxFuture<'a, Result>> { if path.len() == 0 { let node = Box::new(self.clone()) as Box; return async { Ok(node) }.boxed(); } - async { + async move { + //@FIXME: we should create a node if the open returns a "not found". 
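// --- [editorial aside: illustrative sketch, not part of this patch] ----------
// node.rs above introduces PutPolicy { CreateOnly, ReplaceEtags(String) }, and
// a later commit in this series leaves a "@FIXME temporary, look at
// If-None-Match & If-Match headers" note in Controller::put. One plausible way
// to derive the policy from those conditional headers; Request::headers() and
// the HeaderValue accessors are real hyper/http API, everything else is a
// hypothetical, unvalidated sketch:
fn put_policy_from_request(req: &hyper::Request<hyper::body::Incoming>) -> anyhow::Result<PutPolicy> {
    if let Some(etag) = req.headers().get("If-Match") {
        // replace only if the stored representation still carries this etag
        return Ok(PutPolicy::ReplaceEtags(etag.to_str()?.to_string()));
    }
    if req.headers().get("If-None-Match").map(|v| v.as_bytes()) == Some(b"*".as_slice()) {
        // create only if the resource does not exist yet
        return Ok(PutPolicy::CreateOnly);
    }
    // no condition supplied: default to create-only, mirroring the current code
    Ok(PutPolicy::CreateOnly)
}
// --- [end editorial aside] ----------------------------------------------------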
let cal = user.calendars.open(user, path[0]).await?.ok_or(anyhow!("Not found"))?; let child = Box::new(CalendarNode { col: cal, calname: path[0].to_string() }); - child.fetch(user, &path[1..]).await + child.fetch(user, &path[1..], create).await }.boxed() } @@ -189,7 +192,7 @@ pub(crate) struct CalendarNode { calname: String, } impl DavNode for CalendarNode { - fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str]) -> BoxFuture<'a, Result>> { + fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str], create: bool) -> BoxFuture<'a, Result>> { if path.len() == 0 { let node = Box::new(self.clone()) as Box; return async { Ok(node) }.boxed() @@ -198,17 +201,27 @@ impl DavNode for CalendarNode { let col = self.col.clone(); let calname = self.calname.clone(); async move { - if let Some(blob_id) = col.dag().await.idx_by_filename.get(path[0]) { - let child = Box::new(EventNode { - col: col.clone(), - calname, - filename: path[0].to_string(), - blob_id: *blob_id, - }); - return child.fetch(user, &path[1..]).await + match (col.dag().await.idx_by_filename.get(path[0]), create) { + (Some(blob_id), _) => { + let child = Box::new(EventNode { + col: col.clone(), + calname, + filename: path[0].to_string(), + blob_id: *blob_id, + }); + child.fetch(user, &path[1..], create).await + }, + (None, true) => { + let child = Box::new(CreateEventNode { + col: col.clone(), + calname, + filename: path[0].to_string(), + }); + child.fetch(user, &path[1..], create).await + }, + _ => Err(anyhow!("Not found")), } - Err(anyhow!("Not found")) }.boxed() } @@ -298,13 +311,13 @@ pub(crate) struct EventNode { blob_id: BlobId, } impl DavNode for EventNode { - fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str]) -> BoxFuture<'a, Result>> { + fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str], create: bool) -> BoxFuture<'a, Result>> { if path.len() == 0 { let node = Box::new(self.clone()) as Box; return async { Ok(node) }.boxed() } - async { Err(anyhow!("Not found")) }.boxed() + async { Err(anyhow!("Not supported: can't create a child on an event node")) }.boxed() } fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec>> { @@ -338,3 +351,36 @@ impl DavNode for EventNode { }).collect() } } + +#[derive(Clone)] +pub(crate) struct CreateEventNode { + col: Arc, + calname: String, + filename: String, +} +impl DavNode for CreateEventNode { + fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str], create: bool) -> BoxFuture<'a, Result>> { + if path.len() == 0 { + let node = Box::new(self.clone()) as Box; + return async { Ok(node) }.boxed() + } + + async { Err(anyhow!("Not supported: can't create a child on an event node")) }.boxed() + } + + fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec>> { + async { vec![] }.boxed() + } + + fn path(&self, user: &ArcUser) -> String { + format!("/{}/calendar/{}/{}", user.username, self.calname, self.filename) + } + + fn supported_properties(&self, user: &ArcUser) -> dav::PropName { + dav::PropName(vec![]) + } + + fn properties(&self, _user: &ArcUser, prop: dav::PropName) -> Vec> { + vec![] + } +} -- cgit v1.2.3 From 936f851fdb120dd0b46c4effeabe0dbb508d4d3d Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Sun, 21 Apr 2024 13:47:45 +0200 Subject: Do not silently drop an invalid frame --- aero-proto/src/dav/codec.rs | 8 ++++++-- aero-proto/src/dav/node.rs | 6 ++++-- aero-proto/src/dav/resource.rs | 35 ++++++++++++++++++++++++++++++++--- 3 files changed, 42 insertions(+), 7 deletions(-) diff --git a/aero-proto/src/dav/codec.rs b/aero-proto/src/dav/codec.rs index 
08af2fe..e317a03 100644 --- a/aero-proto/src/dav/codec.rs +++ b/aero-proto/src/dav/codec.rs @@ -70,8 +70,12 @@ pub(crate) fn serialize(status_ok: hyper::Stat pub(crate) async fn deserialize>(req: Request) -> Result { let stream_of_frames = BodyStream::new(req.into_body()); let stream_of_bytes = stream_of_frames - .try_filter_map(|frame| async move { Ok(frame.into_data().ok()) }) - .map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err)); + .map_ok(|frame| frame.into_data()) + .map(|obj| match obj { + Ok(Ok(v)) => Ok(v), + Ok(Err(_)) => Err(std::io::Error::new(std::io::ErrorKind::Other, "conversion error")), + Err(err) => Err(std::io::Error::new(std::io::ErrorKind::Other, err)), + }); let async_read = tokio_util::io::StreamReader::new(stream_of_bytes); let async_read = std::pin::pin!(async_read); let mut rdr = dxml::Reader::new(quick_xml::reader::NsReader::from_reader(async_read)).await?; diff --git a/aero-proto/src/dav/node.rs b/aero-proto/src/dav/node.rs index b0f97a5..96bd52b 100644 --- a/aero-proto/src/dav/node.rs +++ b/aero-proto/src/dav/node.rs @@ -1,4 +1,5 @@ use anyhow::Result; +use futures::Stream; use futures::future::BoxFuture; use aero_dav::types as dav; @@ -6,6 +7,7 @@ use aero_dav::realization::All; use aero_collections::user::User; type ArcUser = std::sync::Arc; +pub(crate) type Content = Box>>; pub(crate) enum PutPolicy { CreateOnly, @@ -28,8 +30,8 @@ pub(crate) trait DavNode: Send { fn supported_properties(&self, user: &ArcUser) -> dav::PropName; /// Get the values for the given properties fn properties(&self, user: &ArcUser, prop: dav::PropName) -> Vec>; - /// Put a child - //fn put(&self, policy: PutPolicy, stream: TryStream) -> BoxFuture>; + /// Put an element (create or update) + fn put(&self, policy: PutPolicy, stream: Content) -> BoxFuture>; /// Get content //fn content(&self) -> TryStream; diff --git a/aero-proto/src/dav/resource.rs b/aero-proto/src/dav/resource.rs index fec8bcb..2269de2 100644 --- a/aero-proto/src/dav/resource.rs +++ b/aero-proto/src/dav/resource.rs @@ -2,7 +2,7 @@ use std::sync::Arc; type ArcUser = std::sync::Arc; use anyhow::{anyhow, Result}; -use futures::stream::StreamExt; +use futures::stream::{TryStream, StreamExt}; use futures::{future::BoxFuture, future::FutureExt}; use aero_collections::{user::User, calendar::Calendar, davdag::BlobId}; @@ -12,7 +12,7 @@ use aero_dav::acltypes as acl; use aero_dav::realization::{All, self as all}; -use crate::dav::node::DavNode; +use crate::dav::node::{DavNode, PutPolicy, Content}; #[derive(Clone)] pub(crate) struct RootNode {} @@ -60,6 +60,10 @@ impl DavNode for RootNode { v => dav::AnyProperty::Request(v), }).collect() } + + fn put(&self, policy: PutPolicy, stream: Content) -> BoxFuture> { + todo!() + } } #[derive(Clone)] @@ -111,10 +115,18 @@ impl DavNode for HomeNode { ])), dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("httpd/unix-directory".into())), dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::CalendarHomeSet)) => - dav::AnyProperty::Value(dav::Property::Extension(all::Property::Cal(cal::Property::CalendarHomeSet(dav::Href(/*CalendarListNode{}.path(user)*/ todo!()))))), + dav::AnyProperty::Value(dav::Property::Extension(all::Property::Cal(cal::Property::CalendarHomeSet(dav::Href( + //@FIXME we are hardcoding the calendar path, instead we would want to use + //objects + format!("/{}/calendar/", user.username) + ))))), v => dav::AnyProperty::Request(v), }).collect() } + + fn put(&self, policy: PutPolicy, stream: 
Content) -> BoxFuture> { + todo!() + } } #[derive(Clone)] @@ -184,6 +196,10 @@ impl DavNode for CalendarListNode { v => dav::AnyProperty::Request(v), }).collect() } + + fn put(&self, policy: PutPolicy, stream: Content) -> BoxFuture> { + todo!() + } } #[derive(Clone)] @@ -270,6 +286,10 @@ impl DavNode for CalendarNode { v => dav::AnyProperty::Request(v), }).collect() } + + fn put(&self, policy: PutPolicy, stream: Content) -> BoxFuture> { + todo!() + } } const FAKE_ICS: &str = r#"BEGIN:VCALENDAR @@ -350,6 +370,10 @@ impl DavNode for EventNode { v => dav::AnyProperty::Request(v), }).collect() } + + fn put(&self, policy: PutPolicy, stream: Content) -> BoxFuture> { + todo!() + } } #[derive(Clone)] @@ -383,4 +407,9 @@ impl DavNode for CreateEventNode { fn properties(&self, _user: &ArcUser, prop: dav::PropName) -> Vec> { vec![] } + + fn put(&self, policy: PutPolicy, stream: Content) -> BoxFuture> { + //@TODO write file + todo!() + } } -- cgit v1.2.3 From 4594e068dbba3d3d704728449fc6ccaaadaa82f1 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Tue, 23 Apr 2024 10:35:43 +0200 Subject: PUT seems to work --- aero-collections/src/calendar/mod.rs | 16 ++++++----- aero-proto/src/dav/controller.rs | 34 ++++++++++++++++------ aero-proto/src/dav/node.rs | 11 +++---- aero-proto/src/dav/resource.rs | 56 +++++++++++++++++++++++++++--------- aero-user/src/storage/garage.rs | 14 +++++---- aero-user/src/storage/in_memory.rs | 16 +++++++++-- aero-user/src/storage/mod.rs | 2 +- 7 files changed, 106 insertions(+), 43 deletions(-) diff --git a/aero-collections/src/calendar/mod.rs b/aero-collections/src/calendar/mod.rs index 127f41b..feae73e 100644 --- a/aero-collections/src/calendar/mod.rs +++ b/aero-collections/src/calendar/mod.rs @@ -71,8 +71,8 @@ impl Calendar { } /// Put a specific event - pub async fn put<'a>(&self, entry: IndexEntry, evt: &'a [u8]) -> Result { - self.internal.write().await.put(entry, evt).await + pub async fn put<'a>(&self, name: &str, evt: &'a [u8]) -> Result<(Token, IndexEntry)> { + self.internal.write().await.put(name, evt).await } /// Delete a specific event @@ -123,8 +123,9 @@ impl CalendarInternal { cryptoblob::open(&body, &message_key) } - async fn put<'a>(&mut self, entry: IndexEntry, evt: &'a [u8]) -> Result { + async fn put<'a>(&mut self, name: &str, evt: &'a [u8]) -> Result<(Token, IndexEntry)> { let message_key = gen_key(); + let blob_id = gen_ident(); let encrypted_msg_key = cryptoblob::seal(&message_key.as_ref(), &self.encryption_key)?; let key_header = base64::engine::general_purpose::STANDARD.encode(&encrypted_msg_key); @@ -132,22 +133,23 @@ impl CalendarInternal { // Write event to S3 let message_blob = cryptoblob::seal(evt, &message_key)?; let blob_val = BlobVal::new( - BlobRef(format!("{}/{}", self.cal_path, entry.0)), + BlobRef(format!("{}/{}", self.cal_path, blob_id)), message_blob, ) .with_meta(MESSAGE_KEY.to_string(), key_header); - self.storage + let etag = self.storage .blob_insert(blob_val) .await?; // Add entry to Bayou + let entry: IndexEntry = (blob_id, name.to_string(), etag); let davstate = self.davdag.state(); - let put_op = davstate.op_put(entry); + let put_op = davstate.op_put(entry.clone()); let token = put_op.token(); self.davdag.push(put_op).await?; - Ok(token) + Ok((token, entry)) } async fn delete(&mut self, blob_id: BlobId) -> Result { diff --git a/aero-proto/src/dav/controller.rs b/aero-proto/src/dav/controller.rs index 243a455..c8432dd 100644 --- a/aero-proto/src/dav/controller.rs +++ b/aero-proto/src/dav/controller.rs @@ -2,7 +2,8 @@ use 
anyhow::Result; use http_body_util::combinators::BoxBody; use hyper::body::Incoming; use hyper::{Request, Response, body::Bytes}; -use http_body_util::StreamBody; +use http_body_util::BodyStream; +use futures::stream::{StreamExt, TryStreamExt}; use aero_collections::user::User; use aero_dav::types as dav; @@ -10,7 +11,7 @@ use aero_dav::realization::All; use aero_dav::caltypes as cal; use crate::dav::codec::{serialize, deserialize, depth, text_body}; -use crate::dav::node::DavNode; +use crate::dav::node::{DavNode, PutPolicy}; use crate::dav::resource::RootNode; use crate::dav::codec; @@ -65,10 +66,7 @@ impl Controller { .status(404) .body(codec::text_body(""))?) }, - "PUT" => { - let to_create = path_segments.last().expect("Bound checked earlier in this fx"); - ctrl.put(to_create).await - }, + "PUT" => ctrl.put().await, "DELETE" => { todo!(); }, @@ -177,8 +175,28 @@ impl Controller { serialize(status, Self::multistatus(&self.user, nodes, not_found, propname)) } - async fn put(self, child: &str) -> Result>> { - todo!() + async fn put(self) -> Result>> { + //@FIXME temporary, look at If-None-Match & If-Match headers + let put_policy = PutPolicy::CreateOnly; + + let stream_of_frames = BodyStream::new(self.req.into_body()); + let stream_of_bytes = stream_of_frames + .map_ok(|frame| frame.into_data()) + .map(|obj| match obj { + Ok(Ok(v)) => Ok(v), + Ok(Err(_)) => Err(std::io::Error::new(std::io::ErrorKind::Other, "conversion error")), + Err(err) => Err(std::io::Error::new(std::io::ErrorKind::Other, err)), + }).boxed(); + + let etag = self.node.put(put_policy, stream_of_bytes).await?; + + let response = Response::builder() + .status(201) + .header("ETag", etag) + //.header("content-type", "application/xml; charset=\"utf-8\"") + .body(text_body(""))?; + + Ok(response) } async fn get(self) -> Result>> { diff --git a/aero-proto/src/dav/node.rs b/aero-proto/src/dav/node.rs index 96bd52b..96586ad 100644 --- a/aero-proto/src/dav/node.rs +++ b/aero-proto/src/dav/node.rs @@ -1,17 +1,18 @@ use anyhow::Result; -use futures::Stream; +use futures::stream::{BoxStream, Stream}; use futures::future::BoxFuture; +use hyper::body::Bytes; use aero_dav::types as dav; use aero_dav::realization::All; -use aero_collections::user::User; +use aero_collections::{user::User, davdag::Etag}; type ArcUser = std::sync::Arc; -pub(crate) type Content = Box>>; +pub(crate) type Content<'a> = BoxStream<'a, std::result::Result>; pub(crate) enum PutPolicy { CreateOnly, - ReplaceEtags(String), + ReplaceEtag(String), } /// A DAV node should implement the following methods @@ -31,7 +32,7 @@ pub(crate) trait DavNode: Send { /// Get the values for the given properties fn properties(&self, user: &ArcUser, prop: dav::PropName) -> Vec>; /// Put an element (create or update) - fn put(&self, policy: PutPolicy, stream: Content) -> BoxFuture>; + fn put<'a>(&'a self, policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, Result>; /// Get content //fn content(&self) -> TryStream; diff --git a/aero-proto/src/dav/resource.rs b/aero-proto/src/dav/resource.rs index 2269de2..02a246e 100644 --- a/aero-proto/src/dav/resource.rs +++ b/aero-proto/src/dav/resource.rs @@ -1,17 +1,17 @@ use std::sync::Arc; type ArcUser = std::sync::Arc; -use anyhow::{anyhow, Result}; -use futures::stream::{TryStream, StreamExt}; +use anyhow::{anyhow, bail, Result}; +use futures::stream::{TryStream, TryStreamExt, StreamExt}; +use futures::io::AsyncReadExt; use futures::{future::BoxFuture, future::FutureExt}; -use aero_collections::{user::User, calendar::Calendar, 
davdag::BlobId}; +use aero_collections::{user::User, calendar::Calendar, davdag::{BlobId, IndexEntry, Etag}}; use aero_dav::types as dav; use aero_dav::caltypes as cal; use aero_dav::acltypes as acl; use aero_dav::realization::{All, self as all}; - use crate::dav::node::{DavNode, PutPolicy, Content}; #[derive(Clone)] @@ -61,7 +61,7 @@ impl DavNode for RootNode { }).collect() } - fn put(&self, policy: PutPolicy, stream: Content) -> BoxFuture> { + fn put<'a>(&'a self, _policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, Result> { todo!() } } @@ -124,7 +124,7 @@ impl DavNode for HomeNode { }).collect() } - fn put(&self, policy: PutPolicy, stream: Content) -> BoxFuture> { + fn put<'a>(&'a self, _policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, Result> { todo!() } } @@ -197,7 +197,7 @@ impl DavNode for CalendarListNode { }).collect() } - fn put(&self, policy: PutPolicy, stream: Content) -> BoxFuture> { + fn put<'a>(&'a self, _policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, Result> { todo!() } } @@ -287,7 +287,7 @@ impl DavNode for CalendarNode { }).collect() } - fn put(&self, policy: PutPolicy, stream: Content) -> BoxFuture> { + fn put<'a>(&'a self, _policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, Result> { todo!() } } @@ -330,6 +330,12 @@ pub(crate) struct EventNode { filename: String, blob_id: BlobId, } +impl EventNode { + async fn etag(&self) -> Result { + self.col.dag().await.table.get(&self.blob_id).map(|(_, _, etag)| etag.to_string()).ok_or(anyhow!("Missing blob id in index")) + } +} + impl DavNode for EventNode { fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str], create: bool) -> BoxFuture<'a, Result>> { if path.len() == 0 { @@ -362,7 +368,7 @@ impl DavNode for EventNode { dav::PropertyRequest::ResourceType => dav::AnyProperty::Value(dav::Property::ResourceType(vec![])), dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("text/calendar".into())), dav::PropertyRequest::GetEtag => dav::AnyProperty::Value(dav::Property::GetEtag("\"abcdefg\"".into())), - dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::CalendarData(req))) => + dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::CalendarData(_req))) => dav::AnyProperty::Value(dav::Property::Extension(all::Property::Cal(cal::Property::CalendarData(cal::CalendarDataPayload { mime: None, payload: FAKE_ICS.into() @@ -371,8 +377,22 @@ impl DavNode for EventNode { }).collect() } - fn put(&self, policy: PutPolicy, stream: Content) -> BoxFuture> { - todo!() + fn put<'a>(&'a self, policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, Result> { + async { + let existing_etag = self.etag().await?; + match policy { + PutPolicy::CreateOnly => bail!("Already existing"), + PutPolicy::ReplaceEtag(etag) if etag != existing_etag.as_str() => bail!("Would overwrite something we don't know"), + _ => () + }; + + //@FIXME for now, our storage interface does not allow for streaming + let mut evt = Vec::new(); + let mut reader = stream.into_async_read(); + reader.read_to_end(&mut evt).await.unwrap(); + let (_token, entry) = self.col.put(self.filename.as_str(), evt.as_ref()).await?; + Ok(entry.2) + }.boxed() } } @@ -408,8 +428,16 @@ impl DavNode for CreateEventNode { vec![] } - fn put(&self, policy: PutPolicy, stream: Content) -> BoxFuture> { - //@TODO write file - todo!() + fn put<'a>(&'a self, _policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, Result> { + //@NOTE: policy might not be needed here: whatever we put, there 
is no known entries here + + async { + //@FIXME for now, our storage interface does not allow for streaming + let mut evt = Vec::new(); + let mut reader = stream.into_async_read(); + reader.read_to_end(&mut evt).await.unwrap(); + let (_token, entry) = self.col.put(self.filename.as_str(), evt.as_ref()).await?; + Ok(entry.2) + }.boxed() } } diff --git a/aero-user/src/storage/garage.rs b/aero-user/src/storage/garage.rs index 7e930c3..1164839 100644 --- a/aero-user/src/storage/garage.rs +++ b/aero-user/src/storage/garage.rs @@ -426,15 +426,16 @@ impl IStore for GarageStore { tracing::debug!("Fetched {}/{}", self.bucket, blob_ref.0); Ok(bv) } - async fn blob_insert(&self, blob_val: BlobVal) -> Result<(), StorageError> { + async fn blob_insert(&self, blob_val: BlobVal) -> Result { tracing::trace!(entry=%blob_val.blob_ref, command="blob_insert"); let streamable_value = s3::primitives::ByteStream::from(blob_val.value); + let obj_key = blob_val.blob_ref.0; let maybe_send = self .s3 .put_object() .bucket(self.bucket.to_string()) - .key(blob_val.blob_ref.0.to_string()) + .key(obj_key.to_string()) .set_metadata(Some(blob_val.meta)) .body(streamable_value) .send() @@ -445,9 +446,12 @@ impl IStore for GarageStore { tracing::error!("unable to send object: {}", e); Err(StorageError::Internal) } - Ok(_) => { - tracing::debug!("Inserted {}/{}", self.bucket, blob_val.blob_ref.0); - Ok(()) + Ok(put_output) => { + tracing::debug!("Inserted {}/{}", self.bucket, obj_key); + Ok(put_output + .e_tag() + .map(|v| format!("\"{}\"", v)) + .unwrap_or(format!("W/\"{}\"", obj_key))) } } } diff --git a/aero-user/src/storage/in_memory.rs b/aero-user/src/storage/in_memory.rs index a676797..9ef2721 100644 --- a/aero-user/src/storage/in_memory.rs +++ b/aero-user/src/storage/in_memory.rs @@ -1,9 +1,12 @@ -use crate::storage::*; use std::collections::BTreeMap; use std::ops::Bound::{self, Excluded, Included, Unbounded}; use std::sync::RwLock; + +use sodiumoxide::{hex, crypto::hash}; use tokio::sync::Notify; +use crate::storage::*; + /// This implementation is very inneficient, and not completely correct /// Indeed, when the connector is dropped, the memory is freed. /// It means that when a user disconnects, its data are lost. 
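The PUT handlers above buffer the entire request body in memory before calling Calendar::put, because the storage interface cannot stream yet, and blob_insert now hands back an ETag for the stored object. Below is a minimal sketch of that buffering step, not taken from the patch itself: it assumes futures 0.3, tokio and the bytes/hyper types already in the dependency tree, and collect_body is a hypothetical helper name.

    use futures::io::AsyncReadExt;
    use futures::stream::{self, BoxStream, StreamExt, TryStreamExt};
    use hyper::body::Bytes;

    // Drain a fallible byte stream into a Vec<u8>, the same buffering that
    // EventNode::put and CreateEventNode::put perform before Calendar::put.
    async fn collect_body(stream: BoxStream<'_, std::io::Result<Bytes>>) -> std::io::Result<Vec<u8>> {
        let mut buf = Vec::new();
        let mut reader = stream.into_async_read();
        reader.read_to_end(&mut buf).await?;
        Ok(buf)
    }

    #[tokio::main]
    async fn main() -> std::io::Result<()> {
        let body: BoxStream<'static, std::io::Result<Bytes>> =
            stream::iter(vec![Ok(Bytes::from_static(b"BEGIN:VCALENDAR\r\n"))]).boxed();
        assert!(collect_body(body).await?.starts_with(b"BEGIN:"));
        Ok(())
    }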
@@ -80,6 +83,12 @@ impl InternalBlobVal { value: self.data.clone(), } } + fn etag(&self) -> String { + let digest = hash::hash(self.data.as_ref()); + let buff = digest.as_ref(); + let hexstr = hex::encode(buff); + format!("\"{}\"", hexstr) + } } type ArcRow = Arc>>>; @@ -300,13 +309,14 @@ impl IStore for MemStore { .ok_or(StorageError::NotFound) .map(|v| v.to_blob_val(blob_ref)) } - async fn blob_insert(&self, blob_val: BlobVal) -> Result<(), StorageError> { + async fn blob_insert(&self, blob_val: BlobVal) -> Result { tracing::trace!(entry=%blob_val.blob_ref, command="blob_insert"); let mut store = self.blob.write().or(Err(StorageError::Internal))?; let entry = store.entry(blob_val.blob_ref.0.clone()).or_default(); entry.data = blob_val.value.clone(); entry.metadata = blob_val.meta.clone(); - Ok(()) + + Ok(entry.etag()) } async fn blob_copy(&self, src: &BlobRef, dst: &BlobRef) -> Result<(), StorageError> { tracing::trace!(src=%src, dst=%dst, command="blob_copy"); diff --git a/aero-user/src/storage/mod.rs b/aero-user/src/storage/mod.rs index f5eb8d3..527765f 100644 --- a/aero-user/src/storage/mod.rs +++ b/aero-user/src/storage/mod.rs @@ -159,7 +159,7 @@ pub trait IStore { async fn row_poll(&self, value: &RowRef) -> Result; async fn blob_fetch(&self, blob_ref: &BlobRef) -> Result; - async fn blob_insert(&self, blob_val: BlobVal) -> Result<(), StorageError>; + async fn blob_insert(&self, blob_val: BlobVal) -> Result; async fn blob_copy(&self, src: &BlobRef, dst: &BlobRef) -> Result<(), StorageError>; async fn blob_list(&self, prefix: &str) -> Result, StorageError>; async fn blob_rm(&self, blob_ref: &BlobRef) -> Result<(), StorageError>; -- cgit v1.2.3 From 50ce8621c2eaf91c46be0a2a9c2b82b19e66880b Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Tue, 23 Apr 2024 15:20:29 +0200 Subject: GET implementation --- aero-proto/src/dav/codec.rs | 11 +++++----- aero-proto/src/dav/controller.rs | 31 +++++++++++++++------------ aero-proto/src/dav/middleware.rs | 8 +++---- aero-proto/src/dav/node.rs | 7 +++--- aero-proto/src/dav/resource.rs | 46 +++++++++++++++++++++++++++++++++++++++- 5 files changed, 76 insertions(+), 27 deletions(-) diff --git a/aero-proto/src/dav/codec.rs b/aero-proto/src/dav/codec.rs index e317a03..9082d0a 100644 --- a/aero-proto/src/dav/codec.rs +++ b/aero-proto/src/dav/codec.rs @@ -6,7 +6,7 @@ use futures::stream::StreamExt; use futures::stream::TryStreamExt; use http_body_util::BodyStream; use http_body_util::StreamBody; -use http_body_util::combinators::BoxBody; +use http_body_util::combinators::UnsyncBoxBody; use hyper::body::Frame; use tokio_util::sync::PollSender; use std::io::{Error, ErrorKind}; @@ -16,6 +16,7 @@ use http_body_util::BodyExt; use aero_dav::types as dav; use aero_dav::xml as dxml; +use super::controller::HttpResponse; pub(crate) fn depth(req: &Request) -> dav::Depth { match req.headers().get("Depth").map(hyper::header::HeaderValue::to_str) { @@ -26,11 +27,11 @@ pub(crate) fn depth(req: &Request) -> dav::Depth { } } -pub(crate) fn text_body(txt: &'static str) -> BoxBody { - BoxBody::new(Full::new(Bytes::from(txt)).map_err(|e| match e {})) +pub(crate) fn text_body(txt: &'static str) -> UnsyncBoxBody { + UnsyncBoxBody::new(Full::new(Bytes::from(txt)).map_err(|e| match e {})) } -pub(crate) fn serialize(status_ok: hyper::StatusCode, elem: T) -> Result>> { +pub(crate) fn serialize(status_ok: hyper::StatusCode, elem: T) -> Result { let (tx, rx) = tokio::sync::mpsc::channel::(1); // Build the writer @@ -55,7 +56,7 @@ pub(crate) fn serialize(status_ok: hyper::Stat // 
Build the reader let recv = tokio_stream::wrappers::ReceiverStream::new(rx); let stream = StreamBody::new(recv.map(|v| Ok(Frame::data(v)))); - let boxed_body = BoxBody::new(stream); + let boxed_body = UnsyncBoxBody::new(stream); let response = Response::builder() .status(status_ok) diff --git a/aero-proto/src/dav/controller.rs b/aero-proto/src/dav/controller.rs index c8432dd..de6403e 100644 --- a/aero-proto/src/dav/controller.rs +++ b/aero-proto/src/dav/controller.rs @@ -1,8 +1,10 @@ use anyhow::Result; -use http_body_util::combinators::BoxBody; +use http_body_util::combinators::{UnsyncBoxBody, BoxBody}; use hyper::body::Incoming; use hyper::{Request, Response, body::Bytes}; use http_body_util::BodyStream; +use http_body_util::StreamBody; +use hyper::body::Frame; use futures::stream::{StreamExt, TryStreamExt}; use aero_collections::user::User; @@ -15,7 +17,8 @@ use crate::dav::node::{DavNode, PutPolicy}; use crate::dav::resource::RootNode; use crate::dav::codec; -type ArcUser = std::sync::Arc; +pub(super) type ArcUser = std::sync::Arc; +pub(super) type HttpResponse = Response>; const ALLPROP: [dav::PropertyRequest; 10] = [ dav::PropertyRequest::CreationDate, @@ -36,7 +39,7 @@ pub(crate) struct Controller { req: Request, } impl Controller { - pub(crate) async fn route(user: std::sync::Arc, req: Request) -> Result>> { + pub(crate) async fn route(user: std::sync::Arc, req: Request) -> Result { let path = req.uri().path().to_string(); let path_segments: Vec<_> = path.split("/").filter(|s| *s != "").collect(); let method = req.method().as_str().to_uppercase(); @@ -60,12 +63,13 @@ impl Controller { .header("DAV", "1") .header("Allow", "HEAD,GET,PUT,OPTIONS,DELETE,PROPFIND,PROPPATCH,MKCOL,COPY,MOVE,LOCK,UNLOCK,MKCALENDAR,REPORT") .body(codec::text_body(""))?), - "HEAD" | "GET" => { - tracing::warn!("HEAD+GET not correctly implemented"); + "HEAD" => { + tracing::warn!("HEAD not correctly implemented"); Ok(Response::builder() .status(404) .body(codec::text_body(""))?) }, + "GET" => ctrl.get().await, "PUT" => ctrl.put().await, "DELETE" => { todo!(); @@ -87,7 +91,7 @@ impl Controller { /// Note: current implementation is not generic at all, it is heavily tied to CalDAV. 
/// A rewrite would be required to make it more generic (with the extension system that has /// been introduced in aero-dav) - async fn report(self) -> Result>> { + async fn report(self) -> Result { let status = hyper::StatusCode::from_u16(207)?; let report = match deserialize::>(self.req).await { @@ -135,7 +139,7 @@ impl Controller { } /// PROPFIND is the standard way to fetch WebDAV properties - async fn propfind(self) -> Result>> { + async fn propfind(self) -> Result { let depth = depth(&self.req); if matches!(depth, dav::Depth::Infinity) { return Ok(Response::builder() @@ -175,7 +179,7 @@ impl Controller { serialize(status, Self::multistatus(&self.user, nodes, not_found, propname)) } - async fn put(self) -> Result>> { + async fn put(self) -> Result { //@FIXME temporary, look at If-None-Match & If-Match headers let put_policy = PutPolicy::CreateOnly; @@ -199,20 +203,19 @@ impl Controller { Ok(response) } - async fn get(self) -> Result>> { - todo!() - /*let stream = StreamBody::new(self.node.get().map(|v| Ok(Frame::data(v)))); - let boxed_body = BoxBody::new(stream); + async fn get(self) -> Result { + let stream_body = StreamBody::new(self.node.content().await.map_ok(|v| Frame::data(v))); + let boxed_body = UnsyncBoxBody::new(stream_body); let response = Response::builder() .status(200) //.header("content-type", "application/xml; charset=\"utf-8\"") .body(boxed_body)?; - Ok(response)*/ + Ok(response) } - // --- Common utulity functions --- + // --- Common utility functions --- /// Build a multistatus response from a list of DavNodes fn multistatus(user: &ArcUser, nodes: Vec>, not_found: Vec, props: Option>) -> dav::Multistatus { // Collect properties on existing objects diff --git a/aero-proto/src/dav/middleware.rs b/aero-proto/src/dav/middleware.rs index c4edbd8..e19ce14 100644 --- a/aero-proto/src/dav/middleware.rs +++ b/aero-proto/src/dav/middleware.rs @@ -1,21 +1,21 @@ use anyhow::{anyhow, Result}; use base64::Engine; -use hyper::{Request, Response, body::Bytes}; +use hyper::{Request, Response}; use hyper::body::Incoming; -use http_body_util::combinators::BoxBody; use aero_user::login::ArcLoginProvider; use aero_collections::user::User; use super::codec::text_body; +use super::controller::HttpResponse; type ArcUser = std::sync::Arc; pub(super) async fn auth<'a>( login: ArcLoginProvider, req: Request, - next: impl Fn(ArcUser, Request) -> futures::future::BoxFuture<'a, Result>>>, -) -> Result>> { + next: impl Fn(ArcUser, Request) -> futures::future::BoxFuture<'a, Result>, +) -> Result { let auth_val = match req.headers().get(hyper::header::AUTHORIZATION) { Some(hv) => hv.to_str()?, None => { diff --git a/aero-proto/src/dav/node.rs b/aero-proto/src/dav/node.rs index 96586ad..e2835e9 100644 --- a/aero-proto/src/dav/node.rs +++ b/aero-proto/src/dav/node.rs @@ -5,9 +5,10 @@ use hyper::body::Bytes; use aero_dav::types as dav; use aero_dav::realization::All; -use aero_collections::{user::User, davdag::Etag}; +use aero_collections::davdag::Etag; + +use super::controller::ArcUser; -type ArcUser = std::sync::Arc; pub(crate) type Content<'a> = BoxStream<'a, std::result::Result>; pub(crate) enum PutPolicy { @@ -34,7 +35,7 @@ pub(crate) trait DavNode: Send { /// Put an element (create or update) fn put<'a>(&'a self, policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, Result>; /// Get content - //fn content(&self) -> TryStream; + fn content<'a>(&'a self) -> BoxFuture<'a, Content<'static>>; //@FIXME maybe add etag, maybe add a way to set content diff --git a/aero-proto/src/dav/resource.rs 
b/aero-proto/src/dav/resource.rs index 02a246e..bd377fb 100644 --- a/aero-proto/src/dav/resource.rs +++ b/aero-proto/src/dav/resource.rs @@ -64,6 +64,12 @@ impl DavNode for RootNode { fn put<'a>(&'a self, _policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, Result> { todo!() } + + fn content<'a>(&'a self) -> BoxFuture<'a, Content<'static>> { + async { + futures::stream::once(futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported))).boxed() + }.boxed() + } } #[derive(Clone)] @@ -127,6 +133,12 @@ impl DavNode for HomeNode { fn put<'a>(&'a self, _policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, Result> { todo!() } + + fn content<'a>(&'a self) -> BoxFuture<'a, Content<'static>> { + async { + futures::stream::once(futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported))).boxed() + }.boxed() + } } #[derive(Clone)] @@ -200,6 +212,12 @@ impl DavNode for CalendarListNode { fn put<'a>(&'a self, _policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, Result> { todo!() } + + fn content<'a>(&'a self) -> BoxFuture<'a, Content<'static>> { + async { + futures::stream::once(futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported))).boxed() + }.boxed() + } } #[derive(Clone)] @@ -290,6 +308,12 @@ impl DavNode for CalendarNode { fn put<'a>(&'a self, _policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, Result> { todo!() } + + fn content<'a>(&'a self) -> BoxFuture<'a, Content<'static>> { + async { + futures::stream::once(futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported))).boxed() + }.boxed() + } } const FAKE_ICS: &str = r#"BEGIN:VCALENDAR @@ -386,7 +410,8 @@ impl DavNode for EventNode { _ => () }; - //@FIXME for now, our storage interface does not allow for streaming + //@FIXME for now, our storage interface does not allow streaming, + // so we load everything in memory let mut evt = Vec::new(); let mut reader = stream.into_async_read(); reader.read_to_end(&mut evt).await.unwrap(); @@ -394,6 +419,19 @@ impl DavNode for EventNode { Ok(entry.2) }.boxed() } + + fn content<'a>(&'a self) -> BoxFuture<'a, Content<'static>> { + async { + //@FIXME for now, our storage interface does not allow streaming, + // so we load everything in memory + let content = self.col.get(self.blob_id).await.or(Err(std::io::Error::from(std::io::ErrorKind::Interrupted))); + let r = async { + Ok(hyper::body::Bytes::from(content?)) + }; + //tokio::pin!(r); + futures::stream::once(Box::pin(r)).boxed() + }.boxed() + } } #[derive(Clone)] @@ -440,4 +478,10 @@ impl DavNode for CreateEventNode { Ok(entry.2) }.boxed() } + + fn content<'a>(&'a self) -> BoxFuture<'a, Content<'static>> { + async { + futures::stream::once(futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported))).boxed() + }.boxed() + } } -- cgit v1.2.3 From adbccd88348f472751373a2e1d536e818be8fa67 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Tue, 23 Apr 2024 15:43:48 +0200 Subject: Add support for content type --- aero-proto/src/dav/controller.rs | 2 +- aero-proto/src/dav/node.rs | 2 ++ aero-proto/src/dav/resource.rs | 25 +++++++++++++++++++++++++ 3 files changed, 28 insertions(+), 1 deletion(-) diff --git a/aero-proto/src/dav/controller.rs b/aero-proto/src/dav/controller.rs index de6403e..5762581 100644 --- a/aero-proto/src/dav/controller.rs +++ b/aero-proto/src/dav/controller.rs @@ -209,7 +209,7 @@ impl Controller { let response = Response::builder() .status(200) - //.header("content-type", "application/xml; charset=\"utf-8\"") + .header("content-type", 
self.node.content_type()) .body(boxed_body)?; Ok(response) diff --git a/aero-proto/src/dav/node.rs b/aero-proto/src/dav/node.rs index e2835e9..0b63900 100644 --- a/aero-proto/src/dav/node.rs +++ b/aero-proto/src/dav/node.rs @@ -34,6 +34,8 @@ pub(crate) trait DavNode: Send { fn properties(&self, user: &ArcUser, prop: dav::PropName) -> Vec>; /// Put an element (create or update) fn put<'a>(&'a self, policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, Result>; + /// Content type of the element + fn content_type(&self) -> &str; /// Get content fn content<'a>(&'a self) -> BoxFuture<'a, Content<'static>>; diff --git a/aero-proto/src/dav/resource.rs b/aero-proto/src/dav/resource.rs index bd377fb..f13fb0c 100644 --- a/aero-proto/src/dav/resource.rs +++ b/aero-proto/src/dav/resource.rs @@ -70,6 +70,10 @@ impl DavNode for RootNode { futures::stream::once(futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported))).boxed() }.boxed() } + + fn content_type(&self) -> &str { + "text/plain" + } } #[derive(Clone)] @@ -139,6 +143,11 @@ impl DavNode for HomeNode { futures::stream::once(futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported))).boxed() }.boxed() } + + + fn content_type(&self) -> &str { + "text/plain" + } } #[derive(Clone)] @@ -218,6 +227,10 @@ impl DavNode for CalendarListNode { futures::stream::once(futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported))).boxed() }.boxed() } + + fn content_type(&self) -> &str { + "text/plain" + } } #[derive(Clone)] @@ -314,6 +327,10 @@ impl DavNode for CalendarNode { futures::stream::once(futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported))).boxed() }.boxed() } + + fn content_type(&self) -> &str { + "text/plain" + } } const FAKE_ICS: &str = r#"BEGIN:VCALENDAR @@ -432,6 +449,10 @@ impl DavNode for EventNode { futures::stream::once(Box::pin(r)).boxed() }.boxed() } + + fn content_type(&self) -> &str { + "text/calendar" + } } #[derive(Clone)] @@ -484,4 +505,8 @@ impl DavNode for CreateEventNode { futures::stream::once(futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported))).boxed() }.boxed() } + + fn content_type(&self) -> &str { + "text/plain" + } } -- cgit v1.2.3 From 6de63055a239be05053424460d019cea8b8495a2 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Tue, 23 Apr 2024 18:07:00 +0200 Subject: successfully return ICS in REPORT queries --- aero-proto/src/dav/controller.rs | 10 +- aero-proto/src/dav/node.rs | 82 +++++++------ aero-proto/src/dav/resource.rs | 245 +++++++++++++++++++-------------------- 3 files changed, 171 insertions(+), 166 deletions(-) diff --git a/aero-proto/src/dav/controller.rs b/aero-proto/src/dav/controller.rs index 5762581..2dcc7bc 100644 --- a/aero-proto/src/dav/controller.rs +++ b/aero-proto/src/dav/controller.rs @@ -135,7 +135,7 @@ impl Controller { Some(cal::CalendarSelector::Prop(inner)) => Some(inner), }; - serialize(status, Self::multistatus(&self.user, ok_node, not_found, props)) + serialize(status, Self::multistatus(&self.user, ok_node, not_found, props).await) } /// PROPFIND is the standard way to fetch WebDAV properties @@ -176,7 +176,7 @@ impl Controller { // Not Found is currently impossible considering the way we designed this function let not_found = vec![]; - serialize(status, Self::multistatus(&self.user, nodes, not_found, propname)) + serialize(status, Self::multistatus(&self.user, nodes, not_found, propname).await) } async fn put(self) -> Result { @@ -204,7 +204,7 @@ impl Controller { } async fn get(self) -> 
Result { - let stream_body = StreamBody::new(self.node.content().await.map_ok(|v| Frame::data(v))); + let stream_body = StreamBody::new(self.node.content().map_ok(|v| Frame::data(v))); let boxed_body = UnsyncBoxBody::new(stream_body); let response = Response::builder() @@ -217,10 +217,10 @@ impl Controller { // --- Common utility functions --- /// Build a multistatus response from a list of DavNodes - fn multistatus(user: &ArcUser, nodes: Vec>, not_found: Vec, props: Option>) -> dav::Multistatus { + async fn multistatus(user: &ArcUser, nodes: Vec>, not_found: Vec, props: Option>) -> dav::Multistatus { // Collect properties on existing objects let mut responses: Vec> = match props { - Some(props) => nodes.into_iter().map(|n| n.response_props(user, props.clone())).collect(), + Some(props) => futures::stream::iter(nodes).then(|n| n.response_props(user, props.clone())).collect().await, None => nodes.into_iter().map(|n| n.response_propname(user)).collect(), }; diff --git a/aero-proto/src/dav/node.rs b/aero-proto/src/dav/node.rs index 0b63900..00dabce 100644 --- a/aero-proto/src/dav/node.rs +++ b/aero-proto/src/dav/node.rs @@ -1,6 +1,6 @@ use anyhow::Result; -use futures::stream::{BoxStream, Stream}; -use futures::future::BoxFuture; +use futures::stream::{BoxStream, Stream, StreamExt}; +use futures::future::{BoxFuture, FutureExt}; use hyper::body::Bytes; use aero_dav::types as dav; @@ -10,6 +10,7 @@ use aero_collections::davdag::Etag; use super::controller::ArcUser; pub(crate) type Content<'a> = BoxStream<'a, std::result::Result>; +pub(crate) type PropertyStream<'a> = BoxStream<'a, std::result::Result, dav::PropertyRequest>>; pub(crate) enum PutPolicy { CreateOnly, @@ -31,13 +32,14 @@ pub(crate) trait DavNode: Send { /// Get the supported WebDAV properties fn supported_properties(&self, user: &ArcUser) -> dav::PropName; /// Get the values for the given properties - fn properties(&self, user: &ArcUser, prop: dav::PropName) -> Vec>; + fn properties(&self, user: &ArcUser, prop: dav::PropName) -> PropertyStream<'static>; + //fn properties(&self, user: &ArcUser, prop: dav::PropName) -> Vec>; /// Put an element (create or update) fn put<'a>(&'a self, policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, Result>; /// Content type of the element fn content_type(&self) -> &str; /// Get content - fn content<'a>(&'a self) -> BoxFuture<'a, Content<'static>>; + fn content(&self) -> Content<'static>; //@FIXME maybe add etag, maybe add a way to set content @@ -62,39 +64,51 @@ pub(crate) trait DavNode: Send { } /// Utility function to get a prop response from a node & a list of propname - fn response_props(&self, user: &ArcUser, props: dav::PropName) -> dav::Response { - let mut prop_desc = vec![]; - let (found, not_found): (Vec<_>, Vec<_>) = self.properties(user, props).into_iter().partition(|v| matches!(v, dav::AnyProperty::Value(_))); + fn response_props(&self, user: &ArcUser, props: dav::PropName) -> BoxFuture<'static, dav::Response> { + //@FIXME we should make the DAV parsed object a stream... 
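The reworked get handler above no longer buffers the event: node.content() yields a byte stream that is framed with Frame::data, wrapped in a StreamBody, and boxed. A rough standalone equivalent of that wiring is sketched below; stream_response is an illustrative name rather than the crate's API, and hyper 1.x plus http-body-util 0.1 are assumed.

    use futures::stream::{BoxStream, TryStreamExt};
    use http_body_util::{combinators::UnsyncBoxBody, StreamBody};
    use hyper::body::{Bytes, Frame};
    use hyper::Response;

    // Turn a stream of Bytes into a streaming HTTP response body without
    // collecting it in memory first.
    fn stream_response(
        content: BoxStream<'static, std::io::Result<Bytes>>,
        content_type: &str,
    ) -> anyhow::Result<Response<UnsyncBoxBody<Bytes, std::io::Error>>> {
        let body = StreamBody::new(content.map_ok(Frame::data));
        Ok(Response::builder()
            .status(200)
            .header("content-type", content_type)
            .body(UnsyncBoxBody::new(body))?)
    }

UnsyncBoxBody is used here rather than BoxBody presumably because node.content() hands back a BoxStream, which is Send but not Sync, so the stricter BoxBody wrapper cannot hold it.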
+ let mut result_stream = self.properties(user, props); + let path = self.path(user); - // If at least one property has been found on this object, adding a HTTP 200 propstat to - // the response - if !found.is_empty() { - prop_desc.push(dav::PropStat { - status: dav::Status(hyper::StatusCode::OK), - prop: dav::AnyProp(found), - error: None, - responsedescription: None, - }); - } + async move { + let mut prop_desc = vec![]; + let (mut found, mut not_found) = (vec![], vec![]); + while let Some(maybe_prop) = result_stream.next().await { + match maybe_prop { + Ok(v) => found.push(dav::AnyProperty::Value(v)), + Err(v) => not_found.push(dav::AnyProperty::Request(v)), + } + } - // If at least one property can't be found on this object, adding a HTTP 404 propstat to - // the response - if !not_found.is_empty() { - prop_desc.push(dav::PropStat { - status: dav::Status(hyper::StatusCode::NOT_FOUND), - prop: dav::AnyProp(not_found), - error: None, - responsedescription: None, - }) - } + // If at least one property has been found on this object, adding a HTTP 200 propstat to + // the response + if !found.is_empty() { + prop_desc.push(dav::PropStat { + status: dav::Status(hyper::StatusCode::OK), + prop: dav::AnyProp(found), + error: None, + responsedescription: None, + }); + } - // Build the finale response - dav::Response { - status_or_propstat: dav::StatusOrPropstat::PropStat(dav::Href(self.path(user)), prop_desc), - error: None, - location: None, - responsedescription: None - } + // If at least one property can't be found on this object, adding a HTTP 404 propstat to + // the response + if !not_found.is_empty() { + prop_desc.push(dav::PropStat { + status: dav::Status(hyper::StatusCode::NOT_FOUND), + prop: dav::AnyProp(not_found), + error: None, + responsedescription: None, + }) + } + + // Build the finale response + dav::Response { + status_or_propstat: dav::StatusOrPropstat::PropStat(dav::Href(path), prop_desc), + error: None, + location: None, + responsedescription: None + } + }.boxed() } } diff --git a/aero-proto/src/dav/resource.rs b/aero-proto/src/dav/resource.rs index f13fb0c..7477ba9 100644 --- a/aero-proto/src/dav/resource.rs +++ b/aero-proto/src/dav/resource.rs @@ -13,6 +13,7 @@ use aero_dav::acltypes as acl; use aero_dav::realization::{All, self as all}; use crate::dav::node::{DavNode, PutPolicy, Content}; +use super::node::PropertyStream; #[derive(Clone)] pub(crate) struct RootNode {} @@ -48,27 +49,30 @@ impl DavNode for RootNode { dav::PropertyRequest::Extension(all::PropertyRequest::Acl(acl::PropertyRequest::CurrentUserPrincipal)), ]) } - fn properties(&self, user: &ArcUser, prop: dav::PropName) -> Vec> { - prop.0.into_iter().map(|n| match n { - dav::PropertyRequest::DisplayName => dav::AnyProperty::Value(dav::Property::DisplayName("DAV Root".to_string())), - dav::PropertyRequest::ResourceType => dav::AnyProperty::Value(dav::Property::ResourceType(vec![ - dav::ResourceType::Collection, - ])), - dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("httpd/unix-directory".into())), - dav::PropertyRequest::Extension(all::PropertyRequest::Acl(acl::PropertyRequest::CurrentUserPrincipal)) => - dav::AnyProperty::Value(dav::Property::Extension(all::Property::Acl(acl::Property::CurrentUserPrincipal(acl::User::Authenticated(dav::Href(HomeNode{}.path(user))))))), - v => dav::AnyProperty::Request(v), - }).collect() + + fn properties(&self, user: &ArcUser, prop: dav::PropName) -> PropertyStream<'static> { + let user = user.clone(); + 
futures::stream::iter(prop.0).map(move |n| { + let prop = match n { + dav::PropertyRequest::DisplayName => dav::Property::DisplayName("DAV Root".to_string()), + dav::PropertyRequest::ResourceType => dav::Property::ResourceType(vec![ + dav::ResourceType::Collection, + ]), + dav::PropertyRequest::GetContentType => dav::Property::GetContentType("httpd/unix-directory".into()), + dav::PropertyRequest::Extension(all::PropertyRequest::Acl(acl::PropertyRequest::CurrentUserPrincipal)) => + dav::Property::Extension(all::Property::Acl(acl::Property::CurrentUserPrincipal(acl::User::Authenticated(dav::Href(HomeNode{}.path(&user)))))), + v => return Err(v), + }; + Ok(prop) + }).boxed() } fn put<'a>(&'a self, _policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, Result> { todo!() } - fn content<'a>(&'a self) -> BoxFuture<'a, Content<'static>> { - async { - futures::stream::once(futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported))).boxed() - }.boxed() + fn content(&self) -> Content<'static> { + futures::stream::once(futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported))).boxed() } fn content_type(&self) -> &str { @@ -116,32 +120,35 @@ impl DavNode for HomeNode { dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::CalendarHomeSet)), ]) } - fn properties(&self, user: &ArcUser, prop: dav::PropName) -> Vec> { - prop.0.into_iter().map(|n| match n { - dav::PropertyRequest::DisplayName => dav::AnyProperty::Value(dav::Property::DisplayName(format!("{} home", user.username))), - dav::PropertyRequest::ResourceType => dav::AnyProperty::Value(dav::Property::ResourceType(vec![ - dav::ResourceType::Collection, - dav::ResourceType::Extension(all::ResourceType::Acl(acl::ResourceType::Principal)), - ])), - dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("httpd/unix-directory".into())), - dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::CalendarHomeSet)) => - dav::AnyProperty::Value(dav::Property::Extension(all::Property::Cal(cal::Property::CalendarHomeSet(dav::Href( - //@FIXME we are hardcoding the calendar path, instead we would want to use - //objects - format!("/{}/calendar/", user.username) - ))))), - v => dav::AnyProperty::Request(v), - }).collect() + fn properties(&self, user: &ArcUser, prop: dav::PropName) -> PropertyStream<'static> { + let user = user.clone(); + + futures::stream::iter(prop.0).map(move |n| { + let prop = match n { + dav::PropertyRequest::DisplayName => dav::Property::DisplayName(format!("{} home", user.username)), + dav::PropertyRequest::ResourceType => dav::Property::ResourceType(vec![ + dav::ResourceType::Collection, + dav::ResourceType::Extension(all::ResourceType::Acl(acl::ResourceType::Principal)), + ]), + dav::PropertyRequest::GetContentType => dav::Property::GetContentType("httpd/unix-directory".into()), + dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::CalendarHomeSet)) => + dav::Property::Extension(all::Property::Cal(cal::Property::CalendarHomeSet(dav::Href( + //@FIXME we are hardcoding the calendar path, instead we would want to use + //objects + format!("/{}/calendar/", user.username) + )))), + v => return Err(v), + }; + Ok(prop) + }).boxed() } fn put<'a>(&'a self, _policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, Result> { todo!() } - fn content<'a>(&'a self) -> BoxFuture<'a, Content<'static>> { - async { - 
futures::stream::once(futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported))).boxed() - }.boxed() + fn content(&self) -> Content<'static> { + futures::stream::once(futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported))).boxed() } @@ -209,23 +216,26 @@ impl DavNode for CalendarListNode { dav::PropertyRequest::GetContentType, ]) } - fn properties(&self, user: &ArcUser, prop: dav::PropName) -> Vec> { - prop.0.into_iter().map(|n| match n { - dav::PropertyRequest::DisplayName => dav::AnyProperty::Value(dav::Property::DisplayName(format!("{} calendars", user.username))), - dav::PropertyRequest::ResourceType => dav::AnyProperty::Value(dav::Property::ResourceType(vec![dav::ResourceType::Collection])), - dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("httpd/unix-directory".into())), - v => dav::AnyProperty::Request(v), - }).collect() + fn properties(&self, user: &ArcUser, prop: dav::PropName) -> PropertyStream<'static> { + let user = user.clone(); + + futures::stream::iter(prop.0).map(move |n| { + let prop = match n { + dav::PropertyRequest::DisplayName => dav::Property::DisplayName(format!("{} calendars", user.username)), + dav::PropertyRequest::ResourceType => dav::Property::ResourceType(vec![dav::ResourceType::Collection]), + dav::PropertyRequest::GetContentType => dav::Property::GetContentType("httpd/unix-directory".into()), + v => return Err(v), + }; + Ok(prop) + }).boxed() } fn put<'a>(&'a self, _policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, Result> { todo!() } - fn content<'a>(&'a self) -> BoxFuture<'a, Content<'static>> { - async { - futures::stream::once(futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported))).boxed() - }.boxed() + fn content(&self) -> Content<'static> { + futures::stream::once(futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported))).boxed() } fn content_type(&self) -> &str { @@ -300,32 +310,35 @@ impl DavNode for CalendarNode { dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::SupportedCalendarComponentSet)), ]) } - fn properties(&self, _user: &ArcUser, prop: dav::PropName) -> Vec> { - prop.0.into_iter().map(|n| match n { - dav::PropertyRequest::DisplayName => dav::AnyProperty::Value(dav::Property::DisplayName(format!("{} calendar", self.calname))), - dav::PropertyRequest::ResourceType => dav::AnyProperty::Value(dav::Property::ResourceType(vec![ - dav::ResourceType::Collection, - dav::ResourceType::Extension(all::ResourceType::Cal(cal::ResourceType::Calendar)), - ])), - //dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("httpd/unix-directory".into())), - //@FIXME seems wrong but seems to be what Thunderbird expects... 
- dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("text/calendar".into())), - dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::SupportedCalendarComponentSet)) - => dav::AnyProperty::Value(dav::Property::Extension(all::Property::Cal(cal::Property::SupportedCalendarComponentSet(vec![ - cal::CompSupport(cal::Component::VEvent), - ])))), - v => dav::AnyProperty::Request(v), - }).collect() + fn properties(&self, _user: &ArcUser, prop: dav::PropName) -> PropertyStream<'static> { + let calname = self.calname.to_string(); + + futures::stream::iter(prop.0).map(move |n| { + let prop = match n { + dav::PropertyRequest::DisplayName => dav::Property::DisplayName(format!("{} calendar", calname)), + dav::PropertyRequest::ResourceType => dav::Property::ResourceType(vec![ + dav::ResourceType::Collection, + dav::ResourceType::Extension(all::ResourceType::Cal(cal::ResourceType::Calendar)), + ]), + //dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("httpd/unix-directory".into())), + //@FIXME seems wrong but seems to be what Thunderbird expects... + dav::PropertyRequest::GetContentType => dav::Property::GetContentType("text/calendar".into()), + dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::SupportedCalendarComponentSet)) + => dav::Property::Extension(all::Property::Cal(cal::Property::SupportedCalendarComponentSet(vec![ + cal::CompSupport(cal::Component::VEvent), + ]))), + v => return Err(v), + }; + Ok(prop) + }).boxed() } fn put<'a>(&'a self, _policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, Result> { todo!() } - fn content<'a>(&'a self) -> BoxFuture<'a, Content<'static>> { - async { - futures::stream::once(futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported))).boxed() - }.boxed() + fn content<'a>(&'a self) -> Content<'static> { + futures::stream::once(futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported))).boxed() } fn content_type(&self) -> &str { @@ -333,37 +346,6 @@ impl DavNode for CalendarNode { } } -const FAKE_ICS: &str = r#"BEGIN:VCALENDAR -VERSION:2.0 -PRODID:-//Example Corp.//CalDAV Client//EN -BEGIN:VTIMEZONE -LAST-MODIFIED:20040110T032845Z -TZID:US/Eastern -BEGIN:DAYLIGHT -DTSTART:20000404T020000 -RRULE:FREQ=YEARLY;BYDAY=1SU;BYMONTH=4 -TZNAME:EDT -TZOFFSETFROM:-0500 -TZOFFSETTO:-0400 -END:DAYLIGHT -BEGIN:STANDARD -DTSTART:20001026T020000 -RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10 -TZNAME:EST -TZOFFSETFROM:-0400 -TZOFFSETTO:-0500 -END:STANDARD -END:VTIMEZONE -BEGIN:VEVENT -DTSTAMP:20240406T001102Z -DTSTART;TZID=US/Eastern:20240406T100000 -DURATION:PT1H -SUMMARY:Event #1 -Description:Go Steelers! 
-UID:74855313FA803DA593CD579A@example.com -END:VEVENT -END:VCALENDAR"#; - #[derive(Clone)] pub(crate) struct EventNode { col: Arc, @@ -403,19 +385,31 @@ impl DavNode for EventNode { dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::CalendarData(cal::CalendarDataRequest::default()))), ]) } - fn properties(&self, _user: &ArcUser, prop: dav::PropName) -> Vec> { - prop.0.into_iter().map(|n| match n { - dav::PropertyRequest::DisplayName => dav::AnyProperty::Value(dav::Property::DisplayName(format!("{} event", self.filename))), - dav::PropertyRequest::ResourceType => dav::AnyProperty::Value(dav::Property::ResourceType(vec![])), - dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("text/calendar".into())), - dav::PropertyRequest::GetEtag => dav::AnyProperty::Value(dav::Property::GetEtag("\"abcdefg\"".into())), - dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::CalendarData(_req))) => - dav::AnyProperty::Value(dav::Property::Extension(all::Property::Cal(cal::Property::CalendarData(cal::CalendarDataPayload { - mime: None, - payload: FAKE_ICS.into() - })))), - v => dav::AnyProperty::Request(v), - }).collect() + fn properties(&self, _user: &ArcUser, prop: dav::PropName) -> PropertyStream<'static> { + let this = self.clone(); + + futures::stream::iter(prop.0).then(move |n| { + let this = this.clone(); + + async move { + let prop = match &n { + dav::PropertyRequest::DisplayName => dav::Property::DisplayName(format!("{} event", this.filename)), + dav::PropertyRequest::ResourceType => dav::Property::ResourceType(vec![]), + dav::PropertyRequest::GetContentType => dav::Property::GetContentType("text/calendar".into()), + dav::PropertyRequest::GetEtag => dav::Property::GetEtag("\"abcdefg\"".into()), + dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::CalendarData(_req))) => { + let ics = String::from_utf8(this.col.get(this.blob_id).await.or(Err(n.clone()))?).or(Err(n.clone()))?; + + dav::Property::Extension(all::Property::Cal(cal::Property::CalendarData(cal::CalendarDataPayload { + mime: None, + payload: ics, + }))) + }, + _ => return Err(n), + }; + Ok(prop) + } + }).boxed() } fn put<'a>(&'a self, policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, Result> { @@ -437,17 +431,16 @@ impl DavNode for EventNode { }.boxed() } - fn content<'a>(&'a self) -> BoxFuture<'a, Content<'static>> { - async { - //@FIXME for now, our storage interface does not allow streaming, - // so we load everything in memory - let content = self.col.get(self.blob_id).await.or(Err(std::io::Error::from(std::io::ErrorKind::Interrupted))); - let r = async { - Ok(hyper::body::Bytes::from(content?)) - }; - //tokio::pin!(r); - futures::stream::once(Box::pin(r)).boxed() - }.boxed() + fn content<'a>(&'a self) -> Content<'static> { + //@FIXME for now, our storage interface does not allow streaming, + // so we load everything in memory + let calendar = self.col.clone(); + let blob_id = self.blob_id.clone(); + let r = async move { + let content = calendar.get(blob_id).await.or(Err(std::io::Error::from(std::io::ErrorKind::Interrupted))); + Ok(hyper::body::Bytes::from(content?)) + }; + futures::stream::once(Box::pin(r)).boxed() } fn content_type(&self) -> &str { @@ -483,8 +476,8 @@ impl DavNode for CreateEventNode { dav::PropName(vec![]) } - fn properties(&self, _user: &ArcUser, prop: dav::PropName) -> Vec> { - vec![] + fn properties(&self, _user: &ArcUser, prop: dav::PropName) -> PropertyStream<'static> { + 
futures::stream::iter(vec![]).boxed() } fn put<'a>(&'a self, _policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, Result> { @@ -500,10 +493,8 @@ impl DavNode for CreateEventNode { }.boxed() } - fn content<'a>(&'a self) -> BoxFuture<'a, Content<'static>> { - async { - futures::stream::once(futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported))).boxed() - }.boxed() + fn content(&self) -> Content<'static> { + futures::stream::once(futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported))).boxed() } fn content_type(&self) -> &str { -- cgit v1.2.3 From 5d85fd16f2625b6efb7ed70254a275237dfab1eb Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Tue, 23 Apr 2024 18:19:07 +0200 Subject: basic thunderbird event is working! --- aero-proto/src/dav/node.rs | 2 +- aero-proto/src/dav/resource.rs | 32 ++++++++++++++++---------------- 2 files changed, 17 insertions(+), 17 deletions(-) diff --git a/aero-proto/src/dav/node.rs b/aero-proto/src/dav/node.rs index 00dabce..4d5dd1a 100644 --- a/aero-proto/src/dav/node.rs +++ b/aero-proto/src/dav/node.rs @@ -35,7 +35,7 @@ pub(crate) trait DavNode: Send { fn properties(&self, user: &ArcUser, prop: dav::PropName) -> PropertyStream<'static>; //fn properties(&self, user: &ArcUser, prop: dav::PropName) -> Vec>; /// Put an element (create or update) - fn put<'a>(&'a self, policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, Result>; + fn put<'a>(&'a self, policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, std::result::Result>; /// Content type of the element fn content_type(&self) -> &str; /// Get content diff --git a/aero-proto/src/dav/resource.rs b/aero-proto/src/dav/resource.rs index 7477ba9..cb63b71 100644 --- a/aero-proto/src/dav/resource.rs +++ b/aero-proto/src/dav/resource.rs @@ -67,8 +67,8 @@ impl DavNode for RootNode { }).boxed() } - fn put<'a>(&'a self, _policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, Result> { - todo!() + fn put<'a>(&'a self, _policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, std::result::Result> { + futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported)).boxed() } fn content(&self) -> Content<'static> { @@ -143,8 +143,8 @@ impl DavNode for HomeNode { }).boxed() } - fn put<'a>(&'a self, _policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, Result> { - todo!() + fn put<'a>(&'a self, _policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, std::result::Result> { + futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported)).boxed() } fn content(&self) -> Content<'static> { @@ -230,8 +230,8 @@ impl DavNode for CalendarListNode { }).boxed() } - fn put<'a>(&'a self, _policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, Result> { - todo!() + fn put<'a>(&'a self, _policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, std::result::Result> { + futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported)).boxed() } fn content(&self) -> Content<'static> { @@ -333,8 +333,8 @@ impl DavNode for CalendarNode { }).boxed() } - fn put<'a>(&'a self, _policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, Result> { - todo!() + fn put<'a>(&'a self, _policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, std::result::Result> { + futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported)).boxed() } fn content<'a>(&'a self) -> Content<'static> { @@ -412,12 +412,12 @@ impl DavNode for EventNode { }).boxed() } - fn put<'a>(&'a self, policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, Result> { + fn put<'a>(&'a 
self, policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, std::result::Result> { async { - let existing_etag = self.etag().await?; + let existing_etag = self.etag().await.or(Err(std::io::Error::new(std::io::ErrorKind::Other, "Etag error")))?; match policy { - PutPolicy::CreateOnly => bail!("Already existing"), - PutPolicy::ReplaceEtag(etag) if etag != existing_etag.as_str() => bail!("Would overwrite something we don't know"), + PutPolicy::CreateOnly => return Err(std::io::Error::from(std::io::ErrorKind::AlreadyExists)), + PutPolicy::ReplaceEtag(etag) if etag != existing_etag.as_str() => return Err(std::io::Error::from(std::io::ErrorKind::AlreadyExists)), _ => () }; @@ -425,8 +425,8 @@ impl DavNode for EventNode { // so we load everything in memory let mut evt = Vec::new(); let mut reader = stream.into_async_read(); - reader.read_to_end(&mut evt).await.unwrap(); - let (_token, entry) = self.col.put(self.filename.as_str(), evt.as_ref()).await?; + reader.read_to_end(&mut evt).await.or(Err(std::io::Error::from(std::io::ErrorKind::BrokenPipe)))?; + let (_token, entry) = self.col.put(self.filename.as_str(), evt.as_ref()).await.or(Err(std::io::ErrorKind::Interrupted))?; Ok(entry.2) }.boxed() } @@ -480,7 +480,7 @@ impl DavNode for CreateEventNode { futures::stream::iter(vec![]).boxed() } - fn put<'a>(&'a self, _policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, Result> { + fn put<'a>(&'a self, _policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, std::result::Result> { //@NOTE: policy might not be needed here: whatever we put, there is no known entries here async { @@ -488,7 +488,7 @@ impl DavNode for CreateEventNode { let mut evt = Vec::new(); let mut reader = stream.into_async_read(); reader.read_to_end(&mut evt).await.unwrap(); - let (_token, entry) = self.col.put(self.filename.as_str(), evt.as_ref()).await?; + let (_token, entry) = self.col.put(self.filename.as_str(), evt.as_ref()).await.or(Err(std::io::ErrorKind::Interrupted))?; Ok(entry.2) }.boxed() } -- cgit v1.2.3 From 52d767edae38cc0d3effd216152ff2dcf6d19239 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Wed, 24 Apr 2024 11:43:57 +0200 Subject: Parse If-{None-}Match headers --- aero-proto/src/dav/codec.rs | 25 ++++++++++++++++++++++++- aero-proto/src/dav/controller.rs | 3 +-- aero-proto/src/dav/node.rs | 3 ++- aero-proto/src/dav/resource.rs | 6 +++--- 4 files changed, 30 insertions(+), 7 deletions(-) diff --git a/aero-proto/src/dav/codec.rs b/aero-proto/src/dav/codec.rs index 9082d0a..57c3808 100644 --- a/aero-proto/src/dav/codec.rs +++ b/aero-proto/src/dav/codec.rs @@ -1,4 +1,4 @@ -use anyhow::Result; +use anyhow::{bail, Result}; use hyper::{Request, Response, body::Bytes}; use hyper::body::Incoming; use http_body_util::Full; @@ -17,6 +17,7 @@ use http_body_util::BodyExt; use aero_dav::types as dav; use aero_dav::xml as dxml; use super::controller::HttpResponse; +use super::node::PutPolicy; pub(crate) fn depth(req: &Request) -> dav::Depth { match req.headers().get("Depth").map(hyper::header::HeaderValue::to_str) { @@ -27,6 +28,28 @@ pub(crate) fn depth(req: &Request) -> dav::Depth { } } +pub(crate) fn put_policy(req: &Request) -> Result { + if let Some(maybe_txt_etag) = req.headers().get("If-Match").map(hyper::header::HeaderValue::to_str) { + let etag = maybe_txt_etag?; + let dquote_count = etag.chars().filter(|c| *c == '"').count(); + if dquote_count != 2 { + bail!("Either If-Match value is invalid or it's not supported (only single etag is supported)"); + } + + return Ok(PutPolicy::ReplaceEtag(etag.into())) + } + + 
if let Some(maybe_txt_etag) = req.headers().get("If-None-Match").map(hyper::header::HeaderValue::to_str) { + let etag = maybe_txt_etag?; + if etag == "*" { + return Ok(PutPolicy::CreateOnly) + } + bail!("Either If-None-Match value is invalid or it's not supported (only asterisk is supported)") + } + + Ok(PutPolicy::OverwriteAll) +} + pub(crate) fn text_body(txt: &'static str) -> UnsyncBoxBody { UnsyncBoxBody::new(Full::new(Bytes::from(txt)).map_err(|e| match e {})) } diff --git a/aero-proto/src/dav/controller.rs b/aero-proto/src/dav/controller.rs index 2dcc7bc..aee86fa 100644 --- a/aero-proto/src/dav/controller.rs +++ b/aero-proto/src/dav/controller.rs @@ -180,8 +180,7 @@ impl Controller { } async fn put(self) -> Result { - //@FIXME temporary, look at If-None-Match & If-Match headers - let put_policy = PutPolicy::CreateOnly; + let put_policy = codec::put_policy(&self.req)?; let stream_of_frames = BodyStream::new(self.req.into_body()); let stream_of_bytes = stream_of_frames diff --git a/aero-proto/src/dav/node.rs b/aero-proto/src/dav/node.rs index 4d5dd1a..1ed4b0a 100644 --- a/aero-proto/src/dav/node.rs +++ b/aero-proto/src/dav/node.rs @@ -1,5 +1,5 @@ use anyhow::Result; -use futures::stream::{BoxStream, Stream, StreamExt}; +use futures::stream::{BoxStream, StreamExt}; use futures::future::{BoxFuture, FutureExt}; use hyper::body::Bytes; @@ -13,6 +13,7 @@ pub(crate) type Content<'a> = BoxStream<'a, std::result::Result = BoxStream<'a, std::result::Result, dav::PropertyRequest>>; pub(crate) enum PutPolicy { + OverwriteAll, CreateOnly, ReplaceEtag(String), } diff --git a/aero-proto/src/dav/resource.rs b/aero-proto/src/dav/resource.rs index cb63b71..9e2ce3d 100644 --- a/aero-proto/src/dav/resource.rs +++ b/aero-proto/src/dav/resource.rs @@ -1,12 +1,12 @@ use std::sync::Arc; type ArcUser = std::sync::Arc; -use anyhow::{anyhow, bail, Result}; -use futures::stream::{TryStream, TryStreamExt, StreamExt}; +use anyhow::{anyhow, Result}; +use futures::stream::{TryStreamExt, StreamExt}; use futures::io::AsyncReadExt; use futures::{future::BoxFuture, future::FutureExt}; -use aero_collections::{user::User, calendar::Calendar, davdag::{BlobId, IndexEntry, Etag}}; +use aero_collections::{user::User, calendar::Calendar, davdag::{BlobId, Etag}}; use aero_dav::types as dav; use aero_dav::caltypes as cal; use aero_dav::acltypes as acl; -- cgit v1.2.3 From e1d7cf88afd9baab67d53823e95cb1b7f240802f Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Wed, 24 Apr 2024 17:35:00 +0200 Subject: Working ICS GET/PUT/DELETE --- aero-collections/src/calendar/mod.rs | 2 +- aero-collections/src/davdag.rs | 10 +++++ aero-proto/src/dav/controller.rs | 23 +++++++--- aero-proto/src/dav/node.rs | 4 ++ aero-proto/src/dav/resource.rs | 83 ++++++++++++++++++++++++++++++++---- 5 files changed, 105 insertions(+), 17 deletions(-) diff --git a/aero-collections/src/calendar/mod.rs b/aero-collections/src/calendar/mod.rs index feae73e..028cf87 100644 --- a/aero-collections/src/calendar/mod.rs +++ b/aero-collections/src/calendar/mod.rs @@ -155,7 +155,7 @@ impl CalendarInternal { async fn delete(&mut self, blob_id: BlobId) -> Result { let davstate = self.davdag.state(); - if davstate.table.contains_key(&blob_id) { + if !davstate.table.contains_key(&blob_id) { bail!("Cannot delete event that doesn't exist"); } diff --git a/aero-collections/src/davdag.rs b/aero-collections/src/davdag.rs index 3aaebb8..7335bdc 100644 --- a/aero-collections/src/davdag.rs +++ b/aero-collections/src/davdag.rs @@ -202,6 +202,16 @@ impl DavDag { } } +impl 
std::fmt::Debug for DavDag { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str("DavDag\n")?; + for elem in self.table.iter() { + f.write_fmt(format_args!("\t{:?} => {:?}", elem.0, elem.1))?; + } + Ok(()) + } +} + impl BayouState for DavDag { type Op = DavDagOp; diff --git a/aero-proto/src/dav/controller.rs b/aero-proto/src/dav/controller.rs index aee86fa..f3b5496 100644 --- a/aero-proto/src/dav/controller.rs +++ b/aero-proto/src/dav/controller.rs @@ -71,9 +71,7 @@ impl Controller { }, "GET" => ctrl.get().await, "PUT" => ctrl.put().await, - "DELETE" => { - todo!(); - }, + "DELETE" => ctrl.delete().await, "PROPFIND" => ctrl.propfind().await, "REPORT" => ctrl.report().await, _ => Ok(Response::builder() @@ -206,14 +204,25 @@ impl Controller { let stream_body = StreamBody::new(self.node.content().map_ok(|v| Frame::data(v))); let boxed_body = UnsyncBoxBody::new(stream_body); - let response = Response::builder() - .status(200) - .header("content-type", self.node.content_type()) - .body(boxed_body)?; + let mut builder = Response::builder().status(200); + builder = builder.header("content-type", self.node.content_type()); + if let Some(etag) = self.node.etag().await { + builder = builder.header("etag", etag); + } + let response = builder.body(boxed_body)?; Ok(response) } + async fn delete(self) -> Result { + self.node.delete().await?; + let response = Response::builder() + .status(204) + //.header("content-type", "application/xml; charset=\"utf-8\"") + .body(text_body(""))?; + Ok(response) + } + // --- Common utility functions --- /// Build a multistatus response from a list of DavNodes async fn multistatus(user: &ArcUser, nodes: Vec>, not_found: Vec, props: Option>) -> dav::Multistatus { diff --git a/aero-proto/src/dav/node.rs b/aero-proto/src/dav/node.rs index 1ed4b0a..d246280 100644 --- a/aero-proto/src/dav/node.rs +++ b/aero-proto/src/dav/node.rs @@ -39,8 +39,12 @@ pub(crate) trait DavNode: Send { fn put<'a>(&'a self, policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, std::result::Result>; /// Content type of the element fn content_type(&self) -> &str; + /// Get ETag + fn etag(&self) -> BoxFuture>; /// Get content fn content(&self) -> Content<'static>; + /// Delete + fn delete(&self) -> BoxFuture>; //@FIXME maybe add etag, maybe add a way to set content diff --git a/aero-proto/src/dav/resource.rs b/aero-proto/src/dav/resource.rs index 9e2ce3d..944c6c8 100644 --- a/aero-proto/src/dav/resource.rs +++ b/aero-proto/src/dav/resource.rs @@ -78,6 +78,14 @@ impl DavNode for RootNode { fn content_type(&self) -> &str { "text/plain" } + + fn etag(&self) -> BoxFuture> { + async { None }.boxed() + } + + fn delete(&self) -> BoxFuture> { + async { Err(std::io::Error::from(std::io::ErrorKind::PermissionDenied)) }.boxed() + } } #[derive(Clone)] @@ -151,10 +159,17 @@ impl DavNode for HomeNode { futures::stream::once(futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported))).boxed() } - fn content_type(&self) -> &str { "text/plain" } + + fn etag(&self) -> BoxFuture> { + async { None }.boxed() + } + + fn delete(&self) -> BoxFuture> { + async { Err(std::io::Error::from(std::io::ErrorKind::PermissionDenied)) }.boxed() + } } #[derive(Clone)] @@ -241,6 +256,14 @@ impl DavNode for CalendarListNode { fn content_type(&self) -> &str { "text/plain" } + + fn etag(&self) -> BoxFuture> { + async { None }.boxed() + } + + fn delete(&self) -> BoxFuture> { + async { Err(std::io::Error::from(std::io::ErrorKind::PermissionDenied)) }.boxed() + } } #[derive(Clone)] @@ -333,7 
+356,7 @@ impl DavNode for CalendarNode { }).boxed() } - fn put<'a>(&'a self, _policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, std::result::Result> { + fn put<'a>(&'a self, _policy: PutPolicy, _stream: Content<'a>) -> BoxFuture<'a, std::result::Result> { futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported)).boxed() } @@ -344,6 +367,14 @@ impl DavNode for CalendarNode { fn content_type(&self) -> &str { "text/plain" } + + fn etag(&self) -> BoxFuture> { + async { None }.boxed() + } + + fn delete(&self) -> BoxFuture> { + async { Err(std::io::Error::from(std::io::ErrorKind::PermissionDenied)) }.boxed() + } } #[derive(Clone)] @@ -353,11 +384,6 @@ pub(crate) struct EventNode { filename: String, blob_id: BlobId, } -impl EventNode { - async fn etag(&self) -> Result { - self.col.dag().await.table.get(&self.blob_id).map(|(_, _, etag)| etag.to_string()).ok_or(anyhow!("Missing blob id in index")) - } -} impl DavNode for EventNode { fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str], create: bool) -> BoxFuture<'a, Result>> { @@ -396,7 +422,10 @@ impl DavNode for EventNode { dav::PropertyRequest::DisplayName => dav::Property::DisplayName(format!("{} event", this.filename)), dav::PropertyRequest::ResourceType => dav::Property::ResourceType(vec![]), dav::PropertyRequest::GetContentType => dav::Property::GetContentType("text/calendar".into()), - dav::PropertyRequest::GetEtag => dav::Property::GetEtag("\"abcdefg\"".into()), + dav::PropertyRequest::GetEtag => { + let etag = this.etag().await.ok_or(n.clone())?; + dav::Property::GetEtag(etag) + }, dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::CalendarData(_req))) => { let ics = String::from_utf8(this.col.get(this.blob_id).await.or(Err(n.clone()))?).or(Err(n.clone()))?; @@ -414,7 +443,7 @@ impl DavNode for EventNode { fn put<'a>(&'a self, policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, std::result::Result> { async { - let existing_etag = self.etag().await.or(Err(std::io::Error::new(std::io::ErrorKind::Other, "Etag error")))?; + let existing_etag = self.etag().await.ok_or(std::io::Error::new(std::io::ErrorKind::Other, "Etag error"))?; match policy { PutPolicy::CreateOnly => return Err(std::io::Error::from(std::io::ErrorKind::AlreadyExists)), PutPolicy::ReplaceEtag(etag) if etag != existing_etag.as_str() => return Err(std::io::Error::from(std::io::ErrorKind::AlreadyExists)), @@ -427,6 +456,7 @@ impl DavNode for EventNode { let mut reader = stream.into_async_read(); reader.read_to_end(&mut evt).await.or(Err(std::io::Error::from(std::io::ErrorKind::BrokenPipe)))?; let (_token, entry) = self.col.put(self.filename.as_str(), evt.as_ref()).await.or(Err(std::io::ErrorKind::Interrupted))?; + self.col.opportunistic_sync().await.or(Err(std::io::ErrorKind::ConnectionReset))?; Ok(entry.2) }.boxed() } @@ -446,6 +476,31 @@ impl DavNode for EventNode { fn content_type(&self) -> &str { "text/calendar" } + + fn etag(&self) -> BoxFuture> { + let calendar = self.col.clone(); + + async move { + calendar.dag().await.table.get(&self.blob_id).map(|(_, _, etag)| etag.to_string()) + }.boxed() + } + + fn delete(&self) -> BoxFuture> { + let calendar = self.col.clone(); + let blob_id = self.blob_id.clone(); + + async move { + let _token = match calendar.delete(blob_id).await { + Ok(v) => v, + Err(e) => { + tracing::error!(err=?e, "delete event node"); + return Err(std::io::Error::from(std::io::ErrorKind::Interrupted)) + }, + }; + calendar.opportunistic_sync().await.or(Err(std::io::ErrorKind::ConnectionReset))?; + 
Ok(()) + }.boxed() + } } #[derive(Clone)] @@ -489,6 +544,7 @@ impl DavNode for CreateEventNode { let mut reader = stream.into_async_read(); reader.read_to_end(&mut evt).await.unwrap(); let (_token, entry) = self.col.put(self.filename.as_str(), evt.as_ref()).await.or(Err(std::io::ErrorKind::Interrupted))?; + self.col.opportunistic_sync().await.or(Err(std::io::ErrorKind::ConnectionReset))?; Ok(entry.2) }.boxed() } @@ -500,4 +556,13 @@ impl DavNode for CreateEventNode { fn content_type(&self) -> &str { "text/plain" } + + fn etag(&self) -> BoxFuture> { + async { None }.boxed() + } + + fn delete(&self) -> BoxFuture> { + // Nothing to delete + async { Ok(()) }.boxed() + } } -- cgit v1.2.3 From 6b9542088cd1b66af46e95b787493b601accb495 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Tue, 30 Apr 2024 13:02:59 +0200 Subject: Add icalendar dependency --- Cargo.lock | 28 ++++++++++++++++++++++++++++ Cargo.toml | 3 +++ aero-collections/Cargo.toml | 1 + aero-proto/src/dav/controller.rs | 4 +++- 4 files changed, 35 insertions(+), 1 deletion(-) diff --git a/Cargo.lock b/Cargo.lock index c6228af..484d96e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -51,6 +51,7 @@ dependencies = [ "eml-codec", "futures", "hex", + "icalendar", "im", "lazy_static", "rand", @@ -1662,8 +1663,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "190092ea657667030ac6a35e305e62fc4dd69fd98ac98631e5d3a2b1575a12b5" dependencies = [ "cfg-if", + "js-sys", "libc", "wasi", + "wasm-bindgen", ] [[package]] @@ -1972,6 +1975,18 @@ dependencies = [ "cc", ] +[[package]] +name = "icalendar" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd83e81e8a329918d84e49032f8e596f4f079380942d172724cea3599a80807e" +dependencies = [ + "chrono", + "iso8601", + "nom 7.1.3", + "uuid", +] + [[package]] name = "idna" version = "0.2.3" @@ -2087,6 +2102,15 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "iso8601" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "924e5d73ea28f59011fec52a0d12185d496a9b075d360657aed2a5707f701153" +dependencies = [ + "nom 7.1.3", +] + [[package]] name = "itoa" version = "1.0.10" @@ -3636,6 +3660,10 @@ name = "uuid" version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f00cc9702ca12d3c81455259621e676d0f7251cec66a21e98fe2e9a37db93b2a" +dependencies = [ + "getrandom", + "wasm-bindgen", +] [[package]] name = "valuable" diff --git a/Cargo.toml b/Cargo.toml index d4bc543..68b1eae 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -57,6 +57,9 @@ smtp-server = { git = "http://github.com/Alexis211/kannader", branch = "feature/ imap-codec = { version = "2.0.0", features = ["bounded-static", "ext_condstore_qresync"] } imap-flow = { git = "https://github.com/duesee/imap-flow.git", branch = "main" } +# dav protocols +icalendar = "0.16" + # http & web http = "1.1" http-body-util = "0.1.1" diff --git a/aero-collections/Cargo.toml b/aero-collections/Cargo.toml index 90d285e..95ab142 100644 --- a/aero-collections/Cargo.toml +++ b/aero-collections/Cargo.toml @@ -22,3 +22,4 @@ rand.workspace = true im.workspace = true sodiumoxide.workspace = true eml-codec.workspace = true +icalendar.workspace = true diff --git a/aero-proto/src/dav/controller.rs b/aero-proto/src/dav/controller.rs index f3b5496..885828f 100644 --- a/aero-proto/src/dav/controller.rs +++ b/aero-proto/src/dav/controller.rs @@ -104,9 +104,11 @@ impl Controller { // Multiget is really like a propfind where Depth: 
0|1|Infinity is replaced by an arbitrary // list of URLs + // @FIXME let multiget = match report { cal::Report::Multiget(m) => m, - _ => return Ok(Response::builder() + cal::Report::Query(q) => todo!(), + cal::Report::FreeBusy(_) => return Ok(Response::builder() .status(501) .body(text_body("Not implemented"))?), }; -- cgit v1.2.3 From 32dfd25f570b7a55bf43752684d286be0f6b2dc2 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Thu, 16 May 2024 17:38:34 +0200 Subject: format + WIP calendar-query --- Cargo.lock | 1 + aero-collections/src/calendar/mod.rs | 17 +- aero-collections/src/calendar/namespace.rs | 43 +- aero-collections/src/davdag.rs | 39 +- aero-collections/src/lib.rs | 6 +- aero-collections/src/mail/incoming.rs | 4 +- aero-collections/src/mail/mailbox.rs | 6 +- aero-collections/src/mail/mod.rs | 2 +- aero-collections/src/mail/namespace.rs | 6 +- aero-collections/src/mail/snapshot.rs | 2 +- aero-collections/src/mail/uidindex.rs | 2 +- aero-collections/src/user.rs | 7 +- aero-dav/fuzz/fuzz_targets/dav.rs | 157 +++--- aero-dav/src/acldecoder.rs | 44 +- aero-dav/src/aclencoder.rs | 12 +- aero-dav/src/acltypes.rs | 2 - aero-dav/src/caldecoder.rs | 696 ++++++++++++++++++--------- aero-dav/src/calencoder.rs | 431 ++++++++++------- aero-dav/src/caltypes.rs | 53 +- aero-dav/src/decoder.rs | 674 ++++++++++++++++---------- aero-dav/src/encoder.rs | 435 ++++++++++------- aero-dav/src/error.rs | 4 +- aero-dav/src/lib.rs | 12 +- aero-dav/src/realization.rs | 38 +- aero-dav/src/types.rs | 29 +- aero-dav/src/xml.rs | 124 +++-- aero-proto/Cargo.toml | 1 + aero-proto/src/dav/codec.rs | 71 ++- aero-proto/src/dav/controller.rs | 200 +++++--- aero-proto/src/dav/middleware.rs | 28 +- aero-proto/src/dav/mod.rs | 61 ++- aero-proto/src/dav/node.rs | 68 ++- aero-proto/src/dav/resource.rs | 565 +++++++++++++++------- aero-proto/src/imap/command/anonymous.rs | 2 +- aero-proto/src/imap/command/authenticated.rs | 4 +- aero-proto/src/imap/mod.rs | 6 +- aero-proto/src/lmtp.rs | 6 +- aero-proto/src/sasl.rs | 2 +- aero-sasl/src/flow.rs | 18 +- aero-sasl/src/lib.rs | 6 +- aero-sasl/src/types.rs | 2 - aero-user/src/config.rs | 1 - aero-user/src/login/ldap_provider.rs | 2 +- aero-user/src/storage/in_memory.rs | 2 +- aerogramme/src/main.rs | 2 +- aerogramme/src/server.rs | 8 +- 46 files changed, 2454 insertions(+), 1447 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 484d96e..c6602ab 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -90,6 +90,7 @@ dependencies = [ "http-body-util", "hyper 1.2.0", "hyper-util", + "icalendar", "imap-codec", "imap-flow", "quick-xml", diff --git a/aero-collections/src/calendar/mod.rs b/aero-collections/src/calendar/mod.rs index 028cf87..cd05328 100644 --- a/aero-collections/src/calendar/mod.rs +++ b/aero-collections/src/calendar/mod.rs @@ -4,12 +4,12 @@ use anyhow::{anyhow, bail, Result}; use tokio::sync::RwLock; use aero_bayou::Bayou; -use aero_user::login::Credentials; use aero_user::cryptoblob::{self, gen_key, Key}; +use aero_user::login::Credentials; use aero_user::storage::{self, BlobRef, BlobVal, Store}; +use crate::davdag::{BlobId, DavDag, IndexEntry, SyncChange, Token}; use crate::unique_ident::*; -use crate::davdag::{DavDag, IndexEntry, Token, BlobId, SyncChange}; pub struct Calendar { pub(super) id: UniqueIdent, @@ -17,10 +17,7 @@ pub struct Calendar { } impl Calendar { - pub(crate) async fn open( - creds: &Credentials, - id: UniqueIdent, - ) -> Result { + pub(crate) async fn open(creds: &Credentials, id: UniqueIdent) -> Result { let bayou_path = format!("calendar/dag/{}", id); 
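// The Bayou DAG log and the encrypted event blobs are kept under separate key prefixes.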
let cal_path = format!("calendar/events/{}", id); @@ -126,7 +123,7 @@ impl CalendarInternal { async fn put<'a>(&mut self, name: &str, evt: &'a [u8]) -> Result<(Token, IndexEntry)> { let message_key = gen_key(); let blob_id = gen_ident(); - + let encrypted_msg_key = cryptoblob::seal(&message_key.as_ref(), &self.encryption_key)?; let key_header = base64::engine::general_purpose::STANDARD.encode(&encrypted_msg_key); @@ -138,9 +135,7 @@ impl CalendarInternal { ) .with_meta(MESSAGE_KEY.to_string(), key_header); - let etag = self.storage - .blob_insert(blob_val) - .await?; + let etag = self.storage.blob_insert(blob_val).await?; // Add entry to Bayou let entry: IndexEntry = (blob_id, name.to_string(), etag); @@ -181,7 +176,7 @@ impl CalendarInternal { let heads = davstate.heads_vec(); let token = match heads.as_slice() { - [ token ] => *token, + [token] => *token, _ => { let op_mg = davstate.op_merge(); let token = op_mg.token(); diff --git a/aero-collections/src/calendar/namespace.rs b/aero-collections/src/calendar/namespace.rs index 9c21d19..db65703 100644 --- a/aero-collections/src/calendar/namespace.rs +++ b/aero-collections/src/calendar/namespace.rs @@ -1,16 +1,16 @@ use anyhow::{bail, Result}; -use std::collections::{HashMap, BTreeMap}; -use std::sync::{Weak, Arc}; +use std::collections::{BTreeMap, HashMap}; +use std::sync::{Arc, Weak}; use serde::{Deserialize, Serialize}; use aero_bayou::timestamp::now_msec; -use aero_user::storage; use aero_user::cryptoblob::{open_deserialize, seal_serialize}; +use aero_user::storage; +use super::Calendar; use crate::unique_ident::{gen_ident, UniqueIdent}; use crate::user::User; -use super::Calendar; pub(crate) const CAL_LIST_PK: &str = "calendars"; pub(crate) const CAL_LIST_SK: &str = "list"; @@ -46,7 +46,7 @@ impl CalendarNs { } let cal = Arc::new(Calendar::open(&user.creds, id).await?); - + let mut cache = self.0.lock().unwrap(); if let Some(concurrent_cal) = cache.get(&id).and_then(Weak::upgrade) { drop(cal); // we worked for nothing but at least we didn't starve someone else @@ -117,13 +117,15 @@ impl CalendarNs { CalendarExists::Created(_) => (), } list.save(user, ct).await?; - + Ok(()) } /// Has calendar pub async fn has(&self, user: &Arc, name: &str) -> Result { - CalendarList::load(user).await.map(|(list, _)| list.has(name)) + CalendarList::load(user) + .await + .map(|(list, _)| list.has(name)) } } @@ -161,7 +163,8 @@ impl CalendarList { for v in row_vals { if let storage::Alternative::Value(vbytes) = v { - let list2 = open_deserialize::(&vbytes, &user.creds.keys.master)?; + let list2 = + open_deserialize::(&vbytes, &user.creds.keys.master)?; list.merge(list2); } } @@ -200,7 +203,7 @@ impl CalendarList { /// (Don't forget to save if it returns CalendarExists::Created) fn create(&mut self, name: &str) -> CalendarExists { if let Some(CalendarListEntry { - id_lww: (_, Some(id)) + id_lww: (_, Some(id)), }) = self.0.get(name) { return CalendarExists::Existed(*id); @@ -222,9 +225,10 @@ impl CalendarList { /// For a given calendar name, get its Unique Identifier fn get(&self, name: &str) -> Option { - self.0.get(name).map(|CalendarListEntry { - id_lww: (_, ident), - }| *ident).flatten() + self.0 + .get(name) + .map(|CalendarListEntry { id_lww: (_, ident) }| *ident) + .flatten() } /// Check if a given calendar name exists @@ -271,9 +275,7 @@ impl CalendarList { (now_msec(), id) } } - Some(CalendarListEntry { - id_lww, - }) => { + Some(CalendarListEntry { id_lww }) => { if id_lww.1 == id { // Entry is already equals to the requested id (Option, // 
------------ Below this line, data is ephemeral, ie. not checkpointed - /// Partial synchronization graph pub ancestors: OrdMap>, @@ -84,7 +83,7 @@ impl DavDag { // HELPER functions pub fn heads_vec(&self) -> Vec { - self.heads.clone().into_iter().collect() + self.heads.clone().into_iter().collect() } /// A sync descriptor @@ -99,7 +98,7 @@ impl DavDag { // We can't capture all missing events if we are not connected // to all sinks of the graph, // ie. if we don't already know all the sinks, - // ie. if we are missing so much history that + // ie. if we are missing so much history that // the event log has been transformed into a checkpoint if !self.origins.is_subset(already_known.clone()) { bail!("Not enough history to produce a correct diff, a full resync is needed"); @@ -124,7 +123,7 @@ impl DavDag { if all_known.insert(cursor).is_some() { // Item already processed - continue + continue; } // Collect parents @@ -167,7 +166,8 @@ impl DavDag { self.idx_by_filename.remove(filename); // Record the change in the ephemeral synchronization map - self.change.insert(sync_token, SyncChange::NotFound(filename.to_string())); + self.change + .insert(sync_token, SyncChange::NotFound(filename.to_string())); // Finally clear item from the source of trust self.table.remove(blob_id); @@ -179,10 +179,13 @@ impl DavDag { // --- Update ANCESTORS // We register ancestors as it is required for the sync algorithm - self.ancestors.insert(*child, parents.iter().fold(ordset![], |mut acc, p| { - acc.insert(*p); - acc - })); + self.ancestors.insert( + *child, + parents.iter().fold(ordset![], |mut acc, p| { + acc.insert(*p); + acc + }), + ); // --- Update ORIGINS // If this event has no parents, it's an origin @@ -192,11 +195,13 @@ impl DavDag { // --- Update HEADS // Remove from HEADS this event's parents - parents.iter().for_each(|par| { self.heads.remove(par); }); + parents.iter().for_each(|par| { + self.heads.remove(par); + }); // This event becomes a new HEAD in turn self.heads.insert(*child); - + // --- Update ALL NODES self.all_nodes.insert(*child); } @@ -217,16 +222,16 @@ impl BayouState for DavDag { fn apply(&self, op: &Self::Op) -> Self { let mut new = self.clone(); - + match op { DavDagOp::Put(sync_desc, entry) => { new.sync_dag(sync_desc); new.register(Some(sync_desc.1), entry.clone()); - }, + } DavDagOp::Delete(sync_desc, blob_id) => { new.sync_dag(sync_desc); new.unregister(sync_desc.1, blob_id); - }, + } DavDagOp::Merge(sync_desc) => { new.sync_dag(sync_desc); } @@ -252,7 +257,9 @@ impl<'de> Deserialize<'de> for DavDag { let mut davdag = DavDag::default(); // Build the table + index - val.items.into_iter().for_each(|entry| davdag.register(None, entry)); + val.items + .into_iter() + .for_each(|entry| davdag.register(None, entry)); // Initialize the synchronization DAG with its roots val.heads.into_iter().for_each(|ident| { diff --git a/aero-collections/src/lib.rs b/aero-collections/src/lib.rs index ef8b8d8..eabf61c 100644 --- a/aero-collections/src/lib.rs +++ b/aero-collections/src/lib.rs @@ -1,5 +1,5 @@ -pub mod unique_ident; +pub mod calendar; pub mod davdag; -pub mod user; pub mod mail; -pub mod calendar; +pub mod unique_ident; +pub mod user; diff --git a/aero-collections/src/mail/incoming.rs b/aero-collections/src/mail/incoming.rs index cd2f8fd..55c2515 100644 --- a/aero-collections/src/mail/incoming.rs +++ b/aero-collections/src/mail/incoming.rs @@ -8,16 +8,16 @@ use futures::{future::BoxFuture, FutureExt}; use tokio::sync::watch; use tracing::{debug, error, info, warn}; +use 
aero_bayou::timestamp::now_msec; use aero_user::cryptoblob; use aero_user::login::{Credentials, PublicCredentials}; use aero_user::storage; -use aero_bayou::timestamp::now_msec; use crate::mail::mailbox::Mailbox; use crate::mail::uidindex::ImapUidvalidity; +use crate::mail::IMF; use crate::unique_ident::*; use crate::user::User; -use crate::mail::IMF; const INCOMING_PK: &str = "incoming"; const INCOMING_LOCK_SK: &str = "lock"; diff --git a/aero-collections/src/mail/mailbox.rs b/aero-collections/src/mail/mailbox.rs index fcdb21e..bec9669 100644 --- a/aero-collections/src/mail/mailbox.rs +++ b/aero-collections/src/mail/mailbox.rs @@ -2,15 +2,15 @@ use anyhow::{anyhow, bail, Result}; use serde::{Deserialize, Serialize}; use tokio::sync::RwLock; +use aero_bayou::timestamp::now_msec; +use aero_bayou::Bayou; use aero_user::cryptoblob::{self, gen_key, open_deserialize, seal_serialize, Key}; use aero_user::login::Credentials; use aero_user::storage::{self, BlobRef, BlobVal, RowRef, RowVal, Selector, Store}; -use aero_bayou::Bayou; -use aero_bayou::timestamp::now_msec; -use crate::unique_ident::*; use crate::mail::uidindex::*; use crate::mail::IMF; +use crate::unique_ident::*; pub struct Mailbox { pub(super) id: UniqueIdent, diff --git a/aero-collections/src/mail/mod.rs b/aero-collections/src/mail/mod.rs index ca9b08b..584a9eb 100644 --- a/aero-collections/src/mail/mod.rs +++ b/aero-collections/src/mail/mod.rs @@ -1,9 +1,9 @@ pub mod incoming; pub mod mailbox; +pub mod namespace; pub mod query; pub mod snapshot; pub mod uidindex; -pub mod namespace; // Internet Message Format // aka RFC 822 - RFC 2822 - RFC 5322 diff --git a/aero-collections/src/mail/namespace.rs b/aero-collections/src/mail/namespace.rs index b1f6a70..0f1db7d 100644 --- a/aero-collections/src/mail/namespace.rs +++ b/aero-collections/src/mail/namespace.rs @@ -104,7 +104,11 @@ impl MailboxList { /// Ensures mailbox `name` maps to id `id`. /// If it already mapped to that, returns None. /// If a change had to be done, returns Some(new uidvalidity in mailbox). - pub(crate) fn set_mailbox(&mut self, name: &str, id: Option) -> Option { + pub(crate) fn set_mailbox( + &mut self, + name: &str, + id: Option, + ) -> Option { let (ts, id, uidvalidity) = match self.0.get_mut(name) { None => { if id.is_none() { diff --git a/aero-collections/src/mail/snapshot.rs b/aero-collections/src/mail/snapshot.rs index 9503d4d..6f8a8a8 100644 --- a/aero-collections/src/mail/snapshot.rs +++ b/aero-collections/src/mail/snapshot.rs @@ -2,10 +2,10 @@ use std::sync::Arc; use anyhow::Result; -use crate::unique_ident::UniqueIdent; use super::mailbox::Mailbox; use super::query::{Query, QueryScope}; use super::uidindex::UidIndex; +use crate::unique_ident::UniqueIdent; /// A Frozen Mailbox has a snapshot of the current mailbox /// state that is desynchronized with the real mailbox state. 
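Taken together, the If-Match/If-None-Match parsing in codec.rs and the put() changes on EventNode/CreateEventNode earlier in this series implement a small precondition table for conditional PUT. The standalone sketch below restates that table outside the server; the helper function, its simplified error type and the main() checks are hypothetical, while the PutPolicy variants and the AlreadyExists outcome come from the patches above.

    use std::io::ErrorKind;

    #[derive(Debug)]
    enum PutPolicy {
        OverwriteAll,
        CreateOnly,
        ReplaceEtag(String),
    }

    // `existing` is the etag of the stored resource, or None when the target
    // does not exist yet (the CreateEventNode case, where the policy is ignored).
    fn check_put_precondition(policy: &PutPolicy, existing: Option<&str>) -> Result<(), ErrorKind> {
        match (policy, existing) {
            // If-None-Match: * -- only valid when nothing is stored yet
            (PutPolicy::CreateOnly, Some(_)) => Err(ErrorKind::AlreadyExists),
            // If-Match: "etag" -- only valid when the stored etag still matches
            (PutPolicy::ReplaceEtag(wanted), Some(current)) if wanted.as_str() != current => {
                Err(ErrorKind::AlreadyExists)
            }
            // No precondition, or precondition satisfied
            _ => Ok(()),
        }
    }

    fn main() {
        assert!(check_put_precondition(&PutPolicy::CreateOnly, None).is_ok());
        assert!(check_put_precondition(&PutPolicy::CreateOnly, Some("\"abc\"")).is_err());
        assert!(check_put_precondition(&PutPolicy::ReplaceEtag("\"abc\"".into()), Some("\"abc\"")).is_ok());
        assert!(check_put_precondition(&PutPolicy::OverwriteAll, Some("\"abc\"")).is_ok());
    }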
diff --git a/aero-collections/src/mail/uidindex.rs b/aero-collections/src/mail/uidindex.rs index ca975a3..6df3206 100644 --- a/aero-collections/src/mail/uidindex.rs +++ b/aero-collections/src/mail/uidindex.rs @@ -3,8 +3,8 @@ use std::num::{NonZeroU32, NonZeroU64}; use im::{HashMap, OrdMap, OrdSet}; use serde::{Deserialize, Deserializer, Serialize, Serializer}; -use aero_bayou::*; use crate::unique_ident::UniqueIdent; +use aero_bayou::*; pub type ModSeq = NonZeroU64; pub type ImapUid = NonZeroU32; diff --git a/aero-collections/src/user.rs b/aero-collections/src/user.rs index 9ed342f..f125c46 100644 --- a/aero-collections/src/user.rs +++ b/aero-collections/src/user.rs @@ -9,12 +9,15 @@ use aero_user::cryptoblob::{open_deserialize, seal_serialize}; use aero_user::login::Credentials; use aero_user::storage; +use crate::calendar::namespace::CalendarNs; use crate::mail::incoming::incoming_mail_watch_process; use crate::mail::mailbox::Mailbox; +use crate::mail::namespace::{ + CreatedMailbox, MailboxList, ARCHIVE, DRAFTS, INBOX, MAILBOX_HIERARCHY_DELIMITER, + MAILBOX_LIST_PK, MAILBOX_LIST_SK, SENT, TRASH, +}; use crate::mail::uidindex::ImapUidvalidity; use crate::unique_ident::UniqueIdent; -use crate::mail::namespace::{MAILBOX_HIERARCHY_DELIMITER, INBOX, DRAFTS, ARCHIVE, SENT, TRASH, MAILBOX_LIST_PK, MAILBOX_LIST_SK,MailboxList,CreatedMailbox}; -use crate::calendar::namespace::CalendarNs; //@FIXME User should be totally rewriten // to extract the local mailbox list diff --git a/aero-dav/fuzz/fuzz_targets/dav.rs b/aero-dav/fuzz/fuzz_targets/dav.rs index 5bd28bc..a303401 100644 --- a/aero-dav/fuzz/fuzz_targets/dav.rs +++ b/aero-dav/fuzz/fuzz_targets/dav.rs @@ -1,79 +1,79 @@ #![no_main] -use libfuzzer_sys::fuzz_target; use libfuzzer_sys::arbitrary; use libfuzzer_sys::arbitrary::Arbitrary; +use libfuzzer_sys::fuzz_target; -use aero_dav::{types, realization, xml}; +use aero_dav::{realization, types, xml}; use quick_xml::reader::NsReader; -use tokio::runtime::Runtime; use tokio::io::AsyncWriteExt; +use tokio::runtime::Runtime; // Split this file const tokens: [&str; 63] = [ -"0", -"1", -"activelock", -"allprop", -"encoding", -"utf-8", -"http://ns.example.com/boxschema/", -"HTTP/1.1 200 OK", -"1997-12-01T18:27:21-08:00", -"Mon, 12 Jan 1998 09:25:56 GMT", -"\"abcdef\"", -"cannot-modify-protected-property", -"collection", -"creationdate", -"DAV:", -"D", -"C", -"xmlns:D", -"depth", -"displayname", -"error", -"exclusive", -"getcontentlanguage", -"getcontentlength", -"getcontenttype", -"getetag", -"getlastmodified", -"href", -"include", -"Infinite", -"infinity", -"location", -"lockdiscovery", -"lockentry", -"lockinfo", -"lockroot", -"lockscope", -"locktoken", -"lock-token-matches-request-uri", -"lock-token-submitted", -"locktype", -"multistatus", -"no-conflicting-lock", -"no-external-entities", -"owner", -"preserved-live-properties", -"prop", -"propertyupdate", -"propfind", -"propfind-finite-depth", -"propname", -"propstat", -"remove", -"resourcetype", -"response", -"responsedescription", -"set", -"shared", -"status", -"supportedlock", -"text/html", -"timeout", -"write", + "0", + "1", + "activelock", + "allprop", + "encoding", + "utf-8", + "http://ns.example.com/boxschema/", + "HTTP/1.1 200 OK", + "1997-12-01T18:27:21-08:00", + "Mon, 12 Jan 1998 09:25:56 GMT", + "\"abcdef\"", + "cannot-modify-protected-property", + "collection", + "creationdate", + "DAV:", + "D", + "C", + "xmlns:D", + "depth", + "displayname", + "error", + "exclusive", + "getcontentlanguage", + "getcontentlength", + "getcontenttype", + 
"getetag", + "getlastmodified", + "href", + "include", + "Infinite", + "infinity", + "location", + "lockdiscovery", + "lockentry", + "lockinfo", + "lockroot", + "lockscope", + "locktoken", + "lock-token-matches-request-uri", + "lock-token-submitted", + "locktype", + "multistatus", + "no-conflicting-lock", + "no-external-entities", + "owner", + "preserved-live-properties", + "prop", + "propertyupdate", + "propfind", + "propfind-finite-depth", + "propname", + "propstat", + "remove", + "resourcetype", + "response", + "responsedescription", + "set", + "shared", + "status", + "supportedlock", + "text/html", + "timeout", + "write", ]; #[derive(Arbitrary)] @@ -106,7 +106,7 @@ impl Tag { acc.push_str("D:"); acc.push_str(self.name.serialize().as_str()); - if let Some((k,v)) = &self.attr { + if let Some((k, v)) = &self.attr { acc.push_str(" "); acc.push_str(k.serialize().as_str()); acc.push_str("=\""); @@ -123,7 +123,6 @@ impl Tag { } } - #[derive(Arbitrary)] enum XmlNode { //@FIXME: build RFC3339 and RFC822 Dates with chrono based on timestamps @@ -145,9 +144,14 @@ impl XmlNode { let stag = tag.start(); match children.is_empty() { true => format!("<{}/>", stag), - false => format!("<{}>{}", stag, children.iter().map(|v| v.serialize()).collect::(), tag.end()), + false => format!( + "<{}>{}", + stag, + children.iter().map(|v| v.serialize()).collect::(), + tag.end() + ), } - }, + } Self::Number(v) => format!("{}", v), Self::Text(v) => v.serialize(), } @@ -158,19 +162,22 @@ async fn serialize(elem: &impl xml::QWrite) -> Vec { let mut buffer = Vec::new(); let mut tokio_buffer = tokio::io::BufWriter::new(&mut buffer); let q = quick_xml::writer::Writer::new_with_indent(&mut tokio_buffer, b' ', 4); - let ns_to_apply = vec![ ("xmlns:D".into(), "DAV:".into()) ]; + let ns_to_apply = vec![("xmlns:D".into(), "DAV:".into())]; let mut writer = xml::Writer { q, ns_to_apply }; elem.qwrite(&mut writer).await.expect("xml serialization"); tokio_buffer.flush().await.expect("tokio buffer flush"); - return buffer + return buffer; } type Object = types::Multistatus>; fuzz_target!(|nodes: XmlNode| { - let gen = format!("{}", nodes.serialize()); + let gen = format!( + "{}", + nodes.serialize() + ); //println!("--------\n{}", gen); let data = gen.as_bytes(); @@ -191,7 +198,9 @@ fuzz_target!(|nodes: XmlNode| { let my_serialization = serialize(&reference).await; // 3. De-serialize my serialization - let mut rdr2 = xml::Reader::new(NsReader::from_reader(my_serialization.as_slice())).await.expect("XML Reader init"); + let mut rdr2 = xml::Reader::new(NsReader::from_reader(my_serialization.as_slice())) + .await + .expect("XML Reader init"); let comparison = rdr2.find::().await.expect("Deserialize again"); // 4. 
Both the first decoding and last decoding must be identical diff --git a/aero-dav/src/acldecoder.rs b/aero-dav/src/acldecoder.rs index 67dfb0b..405286e 100644 --- a/aero-dav/src/acldecoder.rs +++ b/aero-dav/src/acldecoder.rs @@ -1,23 +1,31 @@ use super::acltypes::*; -use super::types as dav; -use super::xml::{QRead, Reader, IRead, DAV_URN}; use super::error::ParsingError; +use super::types as dav; +use super::xml::{IRead, QRead, Reader, DAV_URN}; impl QRead for Property { async fn qread(xml: &mut Reader) -> Result { if xml.maybe_open_start(DAV_URN, "owner").await?.is_some() { let href = xml.find().await?; xml.close().await?; - return Ok(Self::Owner(href)) + return Ok(Self::Owner(href)); } - if xml.maybe_open_start(DAV_URN, "current-user-principal").await?.is_some() { + if xml + .maybe_open_start(DAV_URN, "current-user-principal") + .await? + .is_some() + { let user = xml.find().await?; xml.close().await?; - return Ok(Self::CurrentUserPrincipal(user)) + return Ok(Self::CurrentUserPrincipal(user)); } - if xml.maybe_open_start(DAV_URN, "current-user-privilege-set").await?.is_some() { + if xml + .maybe_open_start(DAV_URN, "current-user-privilege-set") + .await? + .is_some() + { xml.close().await?; - return Ok(Self::CurrentUserPrivilegeSet(vec![])) + return Ok(Self::CurrentUserPrivilegeSet(vec![])); } Err(ParsingError::Recoverable) @@ -28,17 +36,25 @@ impl QRead for PropertyRequest { async fn qread(xml: &mut Reader) -> Result { if xml.maybe_open(DAV_URN, "owner").await?.is_some() { xml.close().await?; - return Ok(Self::Owner) + return Ok(Self::Owner); } - if xml.maybe_open(DAV_URN, "current-user-principal").await?.is_some() { + if xml + .maybe_open(DAV_URN, "current-user-principal") + .await? + .is_some() + { xml.close().await?; - return Ok(Self::CurrentUserPrincipal) + return Ok(Self::CurrentUserPrincipal); } - if xml.maybe_open(DAV_URN, "current-user-privilege-set").await?.is_some() { + if xml + .maybe_open(DAV_URN, "current-user-privilege-set") + .await? 
+ .is_some() + { xml.close().await?; - return Ok(Self::CurrentUserPrivilegeSet) + return Ok(Self::CurrentUserPrivilegeSet); } Err(ParsingError::Recoverable) @@ -49,7 +65,7 @@ impl QRead for ResourceType { async fn qread(xml: &mut Reader) -> Result { if xml.maybe_open(DAV_URN, "principal").await?.is_some() { xml.close().await?; - return Ok(Self::Principal) + return Ok(Self::Principal); } Err(ParsingError::Recoverable) } @@ -60,7 +76,7 @@ impl QRead for User { async fn qread(xml: &mut Reader) -> Result { if xml.maybe_open(DAV_URN, "unauthenticated").await?.is_some() { xml.close().await?; - return Ok(Self::Unauthenticated) + return Ok(Self::Unauthenticated); } dav::Href::qread(xml).await.map(Self::Authenticated) diff --git a/aero-dav/src/aclencoder.rs b/aero-dav/src/aclencoder.rs index 2fa4707..28c01a7 100644 --- a/aero-dav/src/aclencoder.rs +++ b/aero-dav/src/aclencoder.rs @@ -1,9 +1,9 @@ -use quick_xml::Error as QError; use quick_xml::events::Event; +use quick_xml::Error as QError; use super::acltypes::*; -use super::xml::{QWrite, Writer, IWrite}; use super::error::ParsingError; +use super::xml::{IWrite, QWrite, Writer}; impl QWrite for Property { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { @@ -14,18 +14,18 @@ impl QWrite for Property { xml.q.write_event_async(Event::Start(start.clone())).await?; href.qwrite(xml).await?; xml.q.write_event_async(Event::End(end)).await - }, + } Self::CurrentUserPrincipal(user) => { let start = xml.create_dav_element("current-user-principal"); let end = start.to_end(); xml.q.write_event_async(Event::Start(start.clone())).await?; user.qwrite(xml).await?; xml.q.write_event_async(Event::End(end)).await - }, + } Self::CurrentUserPrivilegeSet(_) => { let empty_tag = xml.create_dav_element("current-user-privilege-set"); xml.q.write_event_async(Event::Empty(empty_tag)).await - }, + } } } } @@ -64,7 +64,7 @@ impl QWrite for User { Self::Unauthenticated => { let tag = xml.create_dav_element("unauthenticated"); xml.q.write_event_async(Event::Empty(tag)).await - }, + } Self::Authenticated(href) => href.qwrite(xml).await, } } diff --git a/aero-dav/src/acltypes.rs b/aero-dav/src/acltypes.rs index d5be413..0af3c8a 100644 --- a/aero-dav/src/acltypes.rs +++ b/aero-dav/src/acltypes.rs @@ -2,14 +2,12 @@ use super::types as dav; //RFC covered: RFC3744 (ACL core) + RFC5397 (ACL Current Principal Extension) - //@FIXME required for a full CalDAV implementation // See section 6. of the CalDAV RFC // It seems mainly required for free-busy that I will not implement now. // It can also be used for discovering main calendar, not sure it is used. 
// Note: it is used by Thunderbird - #[derive(Debug, PartialEq, Clone)] pub enum PropertyRequest { Owner, diff --git a/aero-dav/src/caldecoder.rs b/aero-dav/src/caldecoder.rs index 008668e..16c9c6c 100644 --- a/aero-dav/src/caldecoder.rs +++ b/aero-dav/src/caldecoder.rs @@ -1,10 +1,10 @@ -use quick_xml::events::Event; use chrono::NaiveDateTime; +use quick_xml::events::Event; -use super::types as dav; use super::caltypes::*; -use super::xml::{QRead, IRead, Reader, DAV_URN, CAL_URN}; use super::error::ParsingError; +use super::types as dav; +use super::xml::{IRead, QRead, Reader, CAL_URN, DAV_URN}; // ---- ROOT ELEMENTS --- impl QRead> for MkCalendar { @@ -29,7 +29,7 @@ impl QRead> for Report { async fn qread(xml: &mut Reader) -> Result { match CalendarQuery::::qread(xml).await { Err(ParsingError::Recoverable) => (), - otherwise => return otherwise.map(Self::Query) + otherwise => return otherwise.map(Self::Query), } match CalendarMultiget::::qread(xml).await { @@ -61,7 +61,11 @@ impl QRead> for CalendarQuery { xml.close().await?; match filter { - Some(filter) => Ok(CalendarQuery { selector, filter, timezone }), + Some(filter) => Ok(CalendarQuery { + selector, + filter, + timezone, + }), _ => Err(ParsingError::MissingChild), } } @@ -100,39 +104,70 @@ impl QRead for FreeBusyQuery { } } - // ---- EXTENSIONS --- impl QRead for Violation { async fn qread(xml: &mut Reader) -> Result { - if xml.maybe_open(DAV_URN, "resource-must-be-null").await?.is_some() { + if xml + .maybe_open(DAV_URN, "resource-must-be-null") + .await? + .is_some() + { xml.close().await?; Ok(Self::ResourceMustBeNull) } else if xml.maybe_open(DAV_URN, "need-privileges").await?.is_some() { xml.close().await?; Ok(Self::NeedPrivileges) - } else if xml.maybe_open(CAL_URN, "calendar-collection-location-ok").await?.is_some() { + } else if xml + .maybe_open(CAL_URN, "calendar-collection-location-ok") + .await? + .is_some() + { xml.close().await?; Ok(Self::CalendarCollectionLocationOk) - } else if xml.maybe_open(CAL_URN, "valid-calendar-data").await?.is_some() { + } else if xml + .maybe_open(CAL_URN, "valid-calendar-data") + .await? + .is_some() + { xml.close().await?; Ok(Self::ValidCalendarData) - } else if xml.maybe_open(CAL_URN, "initialize-calendar-collection").await?.is_some() { + } else if xml + .maybe_open(CAL_URN, "initialize-calendar-collection") + .await? + .is_some() + { xml.close().await?; Ok(Self::InitializeCalendarCollection) - } else if xml.maybe_open(CAL_URN, "supported-calendar-data").await?.is_some() { + } else if xml + .maybe_open(CAL_URN, "supported-calendar-data") + .await? + .is_some() + { xml.close().await?; Ok(Self::SupportedCalendarData) - } else if xml.maybe_open(CAL_URN, "valid-calendar-object-resource").await?.is_some() { + } else if xml + .maybe_open(CAL_URN, "valid-calendar-object-resource") + .await? + .is_some() + { xml.close().await?; Ok(Self::ValidCalendarObjectResource) - } else if xml.maybe_open(CAL_URN, "supported-calendar-component").await?.is_some() { + } else if xml + .maybe_open(CAL_URN, "supported-calendar-component") + .await? + .is_some() + { xml.close().await?; Ok(Self::SupportedCalendarComponent) } else if xml.maybe_open(CAL_URN, "no-uid-conflict").await?.is_some() { let href = xml.find().await?; xml.close().await?; Ok(Self::NoUidConflict(href)) - } else if xml.maybe_open(CAL_URN, "max-resource-size").await?.is_some() { + } else if xml + .maybe_open(CAL_URN, "max-resource-size") + .await? 
+ .is_some() + { xml.close().await?; Ok(Self::MaxResourceSize) } else if xml.maybe_open(CAL_URN, "min-date-time").await?.is_some() { @@ -144,7 +179,11 @@ impl QRead for Violation { } else if xml.maybe_open(CAL_URN, "max-instances").await?.is_some() { xml.close().await?; Ok(Self::MaxInstances) - } else if xml.maybe_open(CAL_URN, "max-attendees-per-instance").await?.is_some() { + } else if xml + .maybe_open(CAL_URN, "max-attendees-per-instance") + .await? + .is_some() + { xml.close().await?; Ok(Self::MaxAttendeesPerInstance) } else if xml.maybe_open(CAL_URN, "valid-filter").await?.is_some() { @@ -167,7 +206,11 @@ impl QRead for Violation { } xml.close().await?; Ok(Self::SupportedFilter { comp, prop, param }) - } else if xml.maybe_open(CAL_URN, "number-of-matches-within-limits").await?.is_some() { + } else if xml + .maybe_open(CAL_URN, "number-of-matches-within-limits") + .await? + .is_some() + { xml.close().await?; Ok(Self::NumberOfMatchesWithinLimits) } else { @@ -178,72 +221,112 @@ impl QRead for Violation { impl QRead for Property { async fn qread(xml: &mut Reader) -> Result { - if xml.maybe_open_start(CAL_URN, "calendar-home-set").await?.is_some() { + if xml + .maybe_open_start(CAL_URN, "calendar-home-set") + .await? + .is_some() + { let href = xml.find().await?; xml.close().await?; - return Ok(Property::CalendarHomeSet(href)) + return Ok(Property::CalendarHomeSet(href)); } - if xml.maybe_open_start(CAL_URN, "calendar-description").await?.is_some() { + if xml + .maybe_open_start(CAL_URN, "calendar-description") + .await? + .is_some() + { let lang = xml.prev_attr("xml:lang"); let text = xml.tag_string().await?; xml.close().await?; - return Ok(Property::CalendarDescription { lang, text }) + return Ok(Property::CalendarDescription { lang, text }); } - if xml.maybe_open_start(CAL_URN, "calendar-timezone").await?.is_some() { + if xml + .maybe_open_start(CAL_URN, "calendar-timezone") + .await? + .is_some() + { let tz = xml.tag_string().await?; xml.close().await?; - return Ok(Property::CalendarTimezone(tz)) + return Ok(Property::CalendarTimezone(tz)); } - if xml.maybe_open_start(CAL_URN, "supported-calendar-component-set").await?.is_some() { + if xml + .maybe_open_start(CAL_URN, "supported-calendar-component-set") + .await? + .is_some() + { let comp = xml.collect().await?; xml.close().await?; - return Ok(Property::SupportedCalendarComponentSet(comp)) + return Ok(Property::SupportedCalendarComponentSet(comp)); } - if xml.maybe_open_start(CAL_URN, "supported-calendar-data").await?.is_some() { + if xml + .maybe_open_start(CAL_URN, "supported-calendar-data") + .await? + .is_some() + { let mime = xml.collect().await?; xml.close().await?; - return Ok(Property::SupportedCalendarData(mime)) + return Ok(Property::SupportedCalendarData(mime)); } - if xml.maybe_open_start(CAL_URN, "max-resource-size").await?.is_some() { + if xml + .maybe_open_start(CAL_URN, "max-resource-size") + .await? + .is_some() + { let sz = xml.tag_string().await?.parse::()?; xml.close().await?; - return Ok(Property::MaxResourceSize(sz)) + return Ok(Property::MaxResourceSize(sz)); } - if xml.maybe_open_start(CAL_URN, "max-date-time").await?.is_some() { + if xml + .maybe_open_start(CAL_URN, "max-date-time") + .await? 
+ .is_some() + { let dtstr = xml.tag_string().await?; let dt = NaiveDateTime::parse_from_str(dtstr.as_str(), ICAL_DATETIME_FMT)?.and_utc(); xml.close().await?; - return Ok(Property::MaxDateTime(dt)) + return Ok(Property::MaxDateTime(dt)); } - if xml.maybe_open_start(CAL_URN, "max-instances").await?.is_some() { + if xml + .maybe_open_start(CAL_URN, "max-instances") + .await? + .is_some() + { let sz = xml.tag_string().await?.parse::()?; xml.close().await?; - return Ok(Property::MaxInstances(sz)) + return Ok(Property::MaxInstances(sz)); } - if xml.maybe_open_start(CAL_URN, "max-attendees-per-instance").await?.is_some() { + if xml + .maybe_open_start(CAL_URN, "max-attendees-per-instance") + .await? + .is_some() + { let sz = xml.tag_string().await?.parse::()?; xml.close().await?; - return Ok(Property::MaxAttendeesPerInstance(sz)) + return Ok(Property::MaxAttendeesPerInstance(sz)); } - if xml.maybe_open_start(CAL_URN, "supported-collation-set").await?.is_some() { + if xml + .maybe_open_start(CAL_URN, "supported-collation-set") + .await? + .is_some() + { let cols = xml.collect().await?; xml.close().await?; - return Ok(Property::SupportedCollationSet(cols)) + return Ok(Property::SupportedCollationSet(cols)); } let mut dirty = false; let mut caldata: Option = None; xml.maybe_read(&mut caldata, &mut dirty).await?; if let Some(cal) = caldata { - return Ok(Property::CalendarData(cal)) + return Ok(Property::CalendarData(cal)); } Err(ParsingError::Recoverable) @@ -252,54 +335,88 @@ impl QRead for Property { impl QRead for PropertyRequest { async fn qread(xml: &mut Reader) -> Result { - if xml.maybe_open(CAL_URN, "calendar-home-set").await?.is_some() { + if xml + .maybe_open(CAL_URN, "calendar-home-set") + .await? + .is_some() + { xml.close().await?; - return Ok(Self::CalendarHomeSet) - } - if xml.maybe_open(CAL_URN, "calendar-description").await?.is_some() { + return Ok(Self::CalendarHomeSet); + } + if xml + .maybe_open(CAL_URN, "calendar-description") + .await? + .is_some() + { xml.close().await?; - return Ok(Self::CalendarDescription) - } - if xml.maybe_open(CAL_URN, "calendar-timezone").await?.is_some() { + return Ok(Self::CalendarDescription); + } + if xml + .maybe_open(CAL_URN, "calendar-timezone") + .await? + .is_some() + { xml.close().await?; - return Ok(Self::CalendarTimezone) + return Ok(Self::CalendarTimezone); } - if xml.maybe_open(CAL_URN, "supported-calendar-component-set").await?.is_some() { + if xml + .maybe_open(CAL_URN, "supported-calendar-component-set") + .await? + .is_some() + { xml.close().await?; - return Ok(Self::SupportedCalendarComponentSet) + return Ok(Self::SupportedCalendarComponentSet); } - if xml.maybe_open(CAL_URN, "supported-calendar-data").await?.is_some() { + if xml + .maybe_open(CAL_URN, "supported-calendar-data") + .await? + .is_some() + { xml.close().await?; - return Ok(Self::SupportedCalendarData) + return Ok(Self::SupportedCalendarData); } - if xml.maybe_open(CAL_URN, "max-resource-size").await?.is_some() { + if xml + .maybe_open(CAL_URN, "max-resource-size") + .await? 
+ .is_some() + { xml.close().await?; - return Ok(Self::MaxResourceSize) + return Ok(Self::MaxResourceSize); } if xml.maybe_open(CAL_URN, "min-date-time").await?.is_some() { xml.close().await?; - return Ok(Self::MinDateTime) + return Ok(Self::MinDateTime); } if xml.maybe_open(CAL_URN, "max-date-time").await?.is_some() { xml.close().await?; - return Ok(Self::MaxDateTime) + return Ok(Self::MaxDateTime); } if xml.maybe_open(CAL_URN, "max-instances").await?.is_some() { xml.close().await?; - return Ok(Self::MaxInstances) + return Ok(Self::MaxInstances); } - if xml.maybe_open(CAL_URN, "max-attendees-per-instance").await?.is_some() { + if xml + .maybe_open(CAL_URN, "max-attendees-per-instance") + .await? + .is_some() + { xml.close().await?; - return Ok(Self::MaxAttendeesPerInstance) + return Ok(Self::MaxAttendeesPerInstance); } - if xml.maybe_open(CAL_URN, "supported-collation-set").await?.is_some() { + if xml + .maybe_open(CAL_URN, "supported-collation-set") + .await? + .is_some() + { xml.close().await?; - return Ok(Self::SupportedCollationSet) + return Ok(Self::SupportedCollationSet); } let mut dirty = false; let mut m_cdr = None; xml.maybe_read(&mut m_cdr, &mut dirty).await?; - m_cdr.ok_or(ParsingError::Recoverable).map(Self::CalendarData) + m_cdr + .ok_or(ParsingError::Recoverable) + .map(Self::CalendarData) } } @@ -307,7 +424,7 @@ impl QRead for ResourceType { async fn qread(xml: &mut Reader) -> Result { if xml.maybe_open(CAL_URN, "calendar").await?.is_some() { xml.close().await?; - return Ok(Self::Calendar) + return Ok(Self::Calendar); } Err(ParsingError::Recoverable) } @@ -338,7 +455,10 @@ impl QRead for CalendarDataSupport { let ct = xml.prev_attr("content-type"); let vs = xml.prev_attr("version"); match (ct, vs) { - (Some(content_type), Some(version)) => Ok(Self { content_type, version }), + (Some(content_type), Some(version)) => Ok(Self { + content_type, + version, + }), _ => Err(ParsingError::Recoverable), } } @@ -351,10 +471,14 @@ impl QRead for CalendarDataRequest { let (mut comp, mut recurrence, mut limit_freebusy_set) = (None, None, None); if !xml.parent_has_child() { - return Ok(Self { mime, comp, recurrence, limit_freebusy_set }) + return Ok(Self { + mime, + comp, + recurrence, + limit_freebusy_set, + }); } - loop { let mut dirty = false; xml.maybe_read(&mut comp, &mut dirty).await?; @@ -367,11 +491,15 @@ impl QRead for CalendarDataRequest { _ => xml.skip().await?, }; } - } xml.close().await?; - Ok(Self { mime, comp, recurrence, limit_freebusy_set }) + Ok(Self { + mime, + comp, + recurrence, + limit_freebusy_set, + }) } } @@ -389,17 +517,25 @@ impl QRead for Comp { let (mut prop_kind, mut comp_kind) = (None, None); let bs = xml.open(CAL_URN, "comp").await?; - let name = Component::new(xml.prev_attr("name").ok_or(ParsingError::MissingAttribute)?); + let name = Component::new( + xml.prev_attr("name") + .ok_or(ParsingError::MissingAttribute)?, + ); // Return early if it's an empty tag if matches!(bs, Event::Empty(_)) { xml.close().await?; - return Ok(Self { name, prop_kind, comp_kind }) + return Ok(Self { + name, + prop_kind, + comp_kind, + }); } loop { - let mut dirty = false; - let (mut tmp_prop_kind, mut tmp_comp_kind): (Option, Option) = (None, None); + let mut dirty = false; + let (mut tmp_prop_kind, mut tmp_comp_kind): (Option, Option) = + (None, None); xml.maybe_read(&mut tmp_prop_kind, &mut dirty).await?; Box::pin(xml.maybe_read(&mut tmp_comp_kind, &mut dirty)).await?; @@ -408,35 +544,41 @@ impl QRead for Comp { // Merge match (tmp_prop_kind, &mut prop_kind) { 
(Some(PropKind::Prop(mut a)), Some(PropKind::Prop(ref mut b))) => b.append(&mut a), - (Some(PropKind::AllProp), v) => *v = Some(PropKind::AllProp), + (Some(PropKind::AllProp), v) => *v = Some(PropKind::AllProp), (Some(x), b) => *b = Some(x), (None, _) => (), }; match (tmp_comp_kind, &mut comp_kind) { (Some(CompKind::Comp(mut a)), Some(CompKind::Comp(ref mut b))) => b.append(&mut a), - (Some(CompKind::AllComp), v) => *v = Some(CompKind::AllComp), + (Some(CompKind::AllComp), v) => *v = Some(CompKind::AllComp), (Some(a), b) => *b = Some(a), (None, _) => (), }; - if !dirty { match xml.peek() { Event::End(_) => break, _ => xml.skip().await?, }; } - }; + } xml.close().await?; - Ok(Self { name, prop_kind, comp_kind }) + Ok(Self { + name, + prop_kind, + comp_kind, + }) } } impl QRead for CompSupport { async fn qread(xml: &mut Reader) -> Result { xml.open(CAL_URN, "comp").await?; - let inner = Component::new(xml.prev_attr("name").ok_or(ParsingError::MissingAttribute)?); + let inner = Component::new( + xml.prev_attr("name") + .ok_or(ParsingError::MissingAttribute)?, + ); xml.close().await?; Ok(Self(inner)) } @@ -450,18 +592,18 @@ impl QRead for CompKind { if xml.maybe_open(CAL_URN, "allcomp").await?.is_some() { xml.close().await?; - return Ok(CompKind::AllComp) + return Ok(CompKind::AllComp); } xml.maybe_push(&mut comp, &mut dirty).await?; if !dirty { - break + break; } } match &comp[..] { - [] => Err(ParsingError::Recoverable), - _ => Ok(CompKind::Comp(comp)), + [] => Err(ParsingError::Recoverable), + _ => Ok(CompKind::Comp(comp)), } } } @@ -474,13 +616,13 @@ impl QRead for PropKind { if xml.maybe_open(CAL_URN, "allprop").await?.is_some() { xml.close().await?; - return Ok(PropKind::AllProp) + return Ok(PropKind::AllProp); } xml.maybe_push(&mut prop, &mut dirty).await?; if !dirty { - break + break; } } @@ -497,7 +639,9 @@ impl QRead for RecurrenceModifier { Err(ParsingError::Recoverable) => (), otherwise => return otherwise.map(RecurrenceModifier::Expand), } - LimitRecurrenceSet::qread(xml).await.map(RecurrenceModifier::LimitRecurrenceSet) + LimitRecurrenceSet::qread(xml) + .await + .map(RecurrenceModifier::LimitRecurrenceSet) } } @@ -508,11 +652,11 @@ impl QRead for Expand { (Some(start), Some(end)) => (start, end), _ => return Err(ParsingError::MissingAttribute), }; - + let start = NaiveDateTime::parse_from_str(rstart.as_str(), ICAL_DATETIME_FMT)?.and_utc(); let end = NaiveDateTime::parse_from_str(rend.as_str(), ICAL_DATETIME_FMT)?.and_utc(); if start > end { - return Err(ParsingError::InvalidValue) + return Err(ParsingError::InvalidValue); } xml.close().await?; @@ -527,11 +671,11 @@ impl QRead for LimitRecurrenceSet { (Some(start), Some(end)) => (start, end), _ => return Err(ParsingError::MissingAttribute), }; - + let start = NaiveDateTime::parse_from_str(rstart.as_str(), ICAL_DATETIME_FMT)?.and_utc(); let end = NaiveDateTime::parse_from_str(rend.as_str(), ICAL_DATETIME_FMT)?.and_utc(); if start > end { - return Err(ParsingError::InvalidValue) + return Err(ParsingError::InvalidValue); } xml.close().await?; @@ -546,11 +690,11 @@ impl QRead for LimitFreebusySet { (Some(start), Some(end)) => (start, end), _ => return Err(ParsingError::MissingAttribute), }; - + let start = NaiveDateTime::parse_from_str(rstart.as_str(), ICAL_DATETIME_FMT)?.and_utc(); let end = NaiveDateTime::parse_from_str(rend.as_str(), ICAL_DATETIME_FMT)?.and_utc(); if start > end { - return Err(ParsingError::InvalidValue) + return Err(ParsingError::InvalidValue); } xml.close().await?; @@ -563,20 +707,21 @@ impl QRead> for 
CalendarSelector { // allprop if let Some(_) = xml.maybe_open(DAV_URN, "allprop").await? { xml.close().await?; - return Ok(Self::AllProp) + return Ok(Self::AllProp); } // propname if let Some(_) = xml.maybe_open(DAV_URN, "propname").await? { xml.close().await?; - return Ok(Self::PropName) + return Ok(Self::PropName); } // prop let (mut maybe_prop, mut dirty) = (None, false); - xml.maybe_read::>(&mut maybe_prop, &mut dirty).await?; + xml.maybe_read::>(&mut maybe_prop, &mut dirty) + .await?; if let Some(prop) = maybe_prop { - return Ok(Self::Prop(prop)) + return Ok(Self::Prop(prop)); } Err(ParsingError::Recoverable) @@ -586,10 +731,16 @@ impl QRead> for CalendarSelector { impl QRead for CompFilter { async fn qread(xml: &mut Reader) -> Result { xml.open(CAL_URN, "comp-filter").await?; - let name = Component::new(xml.prev_attr("name").ok_or(ParsingError::MissingAttribute)?); + let name = Component::new( + xml.prev_attr("name") + .ok_or(ParsingError::MissingAttribute)?, + ); let additional_rules = Box::pin(xml.maybe_find()).await?; xml.close().await?; - Ok(Self { name, additional_rules }) + Ok(Self { + name, + additional_rules, + }) } } @@ -604,7 +755,7 @@ impl QRead for CompFilterRules { if xml.maybe_open(CAL_URN, "is-not-defined").await?.is_some() { xml.close().await?; - return Ok(Self::IsNotDefined) + return Ok(Self::IsNotDefined); } xml.maybe_read(&mut time_range, &mut dirty).await?; @@ -621,7 +772,11 @@ impl QRead for CompFilterRules { match (&time_range, &prop_filter[..], &comp_filter[..]) { (None, [], []) => Err(ParsingError::Recoverable), - _ => Ok(Self::Matches(CompFilterMatch { time_range, prop_filter, comp_filter })), + _ => Ok(Self::Matches(CompFilterMatch { + time_range, + prop_filter, + comp_filter, + })), } } } @@ -635,10 +790,16 @@ impl QRead for CompFilterMatch { impl QRead for PropFilter { async fn qread(xml: &mut Reader) -> Result { xml.open(CAL_URN, "prop-filter").await?; - let name = ComponentProperty(xml.prev_attr("name").ok_or(ParsingError::MissingAttribute)?); + let name = ComponentProperty( + xml.prev_attr("name") + .ok_or(ParsingError::MissingAttribute)?, + ); let additional_rules = xml.maybe_find().await?; xml.close().await?; - Ok(Self { name, additional_rules }) + Ok(Self { + name, + additional_rules, + }) } } @@ -653,7 +814,7 @@ impl QRead for PropFilterRules { if xml.maybe_open(CAL_URN, "is-not-defined").await?.is_some() { xml.close().await?; - return Ok(Self::IsNotDefined) + return Ok(Self::IsNotDefined); } xml.maybe_read(&mut time_range, &mut dirty).await?; @@ -670,7 +831,11 @@ impl QRead for PropFilterRules { match (&time_range, &time_or_text, ¶m_filter[..]) { (None, None, []) => Err(ParsingError::Recoverable), - _ => Ok(PropFilterRules::Match(PropFilterMatch { time_range, time_or_text, param_filter })), + _ => Ok(PropFilterRules::Match(PropFilterMatch { + time_range, + time_or_text, + param_filter, + })), } } } @@ -684,10 +849,16 @@ impl QRead for PropFilterMatch { impl QRead for ParamFilter { async fn qread(xml: &mut Reader) -> Result { xml.open(CAL_URN, "param-filter").await?; - let name = PropertyParameter(xml.prev_attr("name").ok_or(ParsingError::MissingAttribute)?); + let name = PropertyParameter( + xml.prev_attr("name") + .ok_or(ParsingError::MissingAttribute)?, + ); let additional_rules = xml.maybe_find().await?; xml.close().await?; - Ok(Self { name, additional_rules }) + Ok(Self { + name, + additional_rules, + }) } } @@ -708,7 +879,11 @@ impl QRead for TextMatch { let negate_condition = xml.prev_attr("negate-condition").map(|v| v == "yes"); let text = 
xml.tag_string().await?; xml.close().await?; - Ok(Self { collation, negate_condition, text }) + Ok(Self { + collation, + negate_condition, + text, + }) } } @@ -716,7 +891,7 @@ impl QRead for ParamFilterMatch { async fn qread(xml: &mut Reader) -> Result { if xml.maybe_open(CAL_URN, "is-not-defined").await?.is_some() { xml.close().await?; - return Ok(Self::IsNotDefined) + return Ok(Self::IsNotDefined); } TextMatch::qread(xml).await.map(Self::Match) } @@ -745,11 +920,15 @@ impl QRead for TimeRange { xml.open(CAL_URN, "time-range").await?; let start = match xml.prev_attr("start") { - Some(r) => Some(NaiveDateTime::parse_from_str(r.as_str(), ICAL_DATETIME_FMT)?.and_utc()), + Some(r) => { + Some(NaiveDateTime::parse_from_str(r.as_str(), ICAL_DATETIME_FMT)?.and_utc()) + } _ => None, }; let end = match xml.prev_attr("end") { - Some(r) => Some(NaiveDateTime::parse_from_str(r.as_str(), ICAL_DATETIME_FMT)?.and_utc()), + Some(r) => { + Some(NaiveDateTime::parse_from_str(r.as_str(), ICAL_DATETIME_FMT)?.and_utc()) + } _ => None, }; @@ -758,10 +937,10 @@ impl QRead for TimeRange { match (start, end) { (Some(start), Some(end)) => { if start > end { - return Err(ParsingError::InvalidValue) + return Err(ParsingError::InvalidValue); } Ok(TimeRange::FullRange(start, end)) - }, + } (Some(start), None) => Ok(TimeRange::OnlyStart(start)), (None, Some(end)) => Ok(TimeRange::OnlyEnd(end)), (None, None) => Err(ParsingError::MissingAttribute), @@ -771,8 +950,11 @@ impl QRead for TimeRange { impl QRead for CalProp { async fn qread(xml: &mut Reader) -> Result { - xml.open(CAL_URN, "prop").await?; - let name = ComponentProperty(xml.prev_attr("name").ok_or(ParsingError::MissingAttribute)?); + xml.open(CAL_URN, "prop").await?; + let name = ComponentProperty( + xml.prev_attr("name") + .ok_or(ParsingError::MissingAttribute)?, + ); let novalue = xml.prev_attr("novalue").map(|v| v == "yes"); xml.close().await?; Ok(Self { name, novalue }) @@ -782,21 +964,23 @@ impl QRead for CalProp { #[cfg(test)] mod tests { use super::*; - use chrono::{Utc, TimeZone}; use crate::realization::Calendar; use crate::xml::Node; + use chrono::{TimeZone, Utc}; //use quick_reader::NsReader; async fn deserialize>(src: &str) -> T { - let mut rdr = Reader::new(quick_xml::NsReader::from_reader(src.as_bytes())).await.unwrap(); + let mut rdr = Reader::new(quick_xml::NsReader::from_reader(src.as_bytes())) + .await + .unwrap(); rdr.find().await.unwrap() } #[tokio::test] async fn basic_mkcalendar() { - let expected = MkCalendar(dav::Set(dav::PropValue(vec![ - dav::Property::DisplayName("Lisa's Events".into()), - ]))); + let expected = MkCalendar(dav::Set(dav::PropValue(vec![dav::Property::DisplayName( + "Lisa's Events".into(), + )]))); let src = r#" @@ -856,61 +1040,89 @@ END:VCALENDAR]]> let expected = CalendarQuery { selector: Some(CalendarSelector::Prop(dav::PropName(vec![ dav::PropertyRequest::GetEtag, - dav::PropertyRequest::Extension(PropertyRequest::CalendarData(CalendarDataRequest { - mime: None, - comp: Some(Comp { - name: Component::VCalendar, - prop_kind: Some(PropKind::Prop(vec![ - CalProp { + dav::PropertyRequest::Extension(PropertyRequest::CalendarData( + CalendarDataRequest { + mime: None, + comp: Some(Comp { + name: Component::VCalendar, + prop_kind: Some(PropKind::Prop(vec![CalProp { name: ComponentProperty("VERSION".into()), novalue: None, - } - ])), - comp_kind: Some(CompKind::Comp(vec![ - Comp { - name: Component::VEvent, - prop_kind: Some(PropKind::Prop(vec![ - CalProp { name: ComponentProperty("SUMMARY".into()), novalue: None }, - 
CalProp { name: ComponentProperty("UID".into()), novalue: None }, - CalProp { name: ComponentProperty("DTSTART".into()), novalue: None }, - CalProp { name: ComponentProperty("DTEND".into()), novalue: None }, - CalProp { name: ComponentProperty("DURATION".into()), novalue: None }, - CalProp { name: ComponentProperty("RRULE".into()), novalue: None }, - CalProp { name: ComponentProperty("RDATE".into()), novalue: None }, - CalProp { name: ComponentProperty("EXRULE".into()), novalue: None }, - CalProp { name: ComponentProperty("EXDATE".into()), novalue: None }, - CalProp { name: ComponentProperty("RECURRENCE-ID".into()), novalue: None }, - ])), - comp_kind: None, - }, - Comp { - name: Component::VTimeZone, - prop_kind: None, - comp_kind: None, - } - ])), - }), - recurrence: None, - limit_freebusy_set: None, - })), + }])), + comp_kind: Some(CompKind::Comp(vec![ + Comp { + name: Component::VEvent, + prop_kind: Some(PropKind::Prop(vec![ + CalProp { + name: ComponentProperty("SUMMARY".into()), + novalue: None, + }, + CalProp { + name: ComponentProperty("UID".into()), + novalue: None, + }, + CalProp { + name: ComponentProperty("DTSTART".into()), + novalue: None, + }, + CalProp { + name: ComponentProperty("DTEND".into()), + novalue: None, + }, + CalProp { + name: ComponentProperty("DURATION".into()), + novalue: None, + }, + CalProp { + name: ComponentProperty("RRULE".into()), + novalue: None, + }, + CalProp { + name: ComponentProperty("RDATE".into()), + novalue: None, + }, + CalProp { + name: ComponentProperty("EXRULE".into()), + novalue: None, + }, + CalProp { + name: ComponentProperty("EXDATE".into()), + novalue: None, + }, + CalProp { + name: ComponentProperty("RECURRENCE-ID".into()), + novalue: None, + }, + ])), + comp_kind: None, + }, + Comp { + name: Component::VTimeZone, + prop_kind: None, + comp_kind: None, + }, + ])), + }), + recurrence: None, + limit_freebusy_set: None, + }, + )), ]))), filter: Filter(CompFilter { name: Component::VCalendar, additional_rules: Some(CompFilterRules::Matches(CompFilterMatch { prop_filter: vec![], - comp_filter: vec![ - CompFilter { - name: Component::VEvent, - additional_rules: Some(CompFilterRules::Matches(CompFilterMatch { - prop_filter: vec![], - comp_filter: vec![], - time_range: Some(TimeRange::FullRange( - Utc.with_ymd_and_hms(2006, 1, 4, 0, 0, 0).unwrap(), - Utc.with_ymd_and_hms(2006, 1, 5, 0, 0, 0).unwrap(), - )), - })), - }, - ], + comp_filter: vec![CompFilter { + name: Component::VEvent, + additional_rules: Some(CompFilterRules::Matches(CompFilterMatch { + prop_filter: vec![], + comp_filter: vec![], + time_range: Some(TimeRange::FullRange( + Utc.with_ymd_and_hms(2006, 1, 4, 0, 0, 0).unwrap(), + Utc.with_ymd_and_hms(2006, 1, 5, 0, 0, 0).unwrap(), + )), + })), + }], time_range: None, })), }), @@ -958,26 +1170,28 @@ END:VCALENDAR]]> } #[tokio::test] - async fn rfc_calendar_query_res() { + async fn rfc_calendar_query_res() { let expected = dav::Multistatus:: { responses: vec![ dav::Response { status_or_propstat: dav::StatusOrPropstat::PropStat( dav::Href("http://cal.example.com/bernard/work/abcd2.ics".into()), - vec![ - dav::PropStat { - prop: dav::AnyProp(vec![ - dav::AnyProperty::Value(dav::Property::GetEtag("\"fffff-abcd2\"".into())), - dav::AnyProperty::Value(dav::Property::Extension(Property::CalendarData(CalendarDataPayload { + vec![dav::PropStat { + prop: dav::AnyProp(vec![ + dav::AnyProperty::Value(dav::Property::GetEtag( + "\"fffff-abcd2\"".into(), + )), + dav::AnyProperty::Value(dav::Property::Extension( + 
Property::CalendarData(CalendarDataPayload { mime: None, payload: "BEGIN:VCALENDAR".into(), - }))), - ]), - status: dav::Status(http::status::StatusCode::OK), - error: None, - responsedescription: None, - }, - ], + }), + )), + ]), + status: dav::Status(http::status::StatusCode::OK), + error: None, + responsedescription: None, + }], ), error: None, location: None, @@ -986,20 +1200,22 @@ END:VCALENDAR]]> dav::Response { status_or_propstat: dav::StatusOrPropstat::PropStat( dav::Href("http://cal.example.com/bernard/work/abcd3.ics".into()), - vec![ - dav::PropStat { - prop: dav::AnyProp(vec![ - dav::AnyProperty::Value(dav::Property::GetEtag("\"fffff-abcd3\"".into())), - dav::AnyProperty::Value(dav::Property::Extension(Property::CalendarData(CalendarDataPayload { + vec![dav::PropStat { + prop: dav::AnyProp(vec![ + dav::AnyProperty::Value(dav::Property::GetEtag( + "\"fffff-abcd3\"".into(), + )), + dav::AnyProperty::Value(dav::Property::Extension( + Property::CalendarData(CalendarDataPayload { mime: None, payload: "BEGIN:VCALENDAR".into(), - }))), - ]), - status: dav::Status(http::status::StatusCode::OK), - error: None, - responsedescription: None, - }, - ], + }), + )), + ]), + status: dav::Status(http::status::StatusCode::OK), + error: None, + responsedescription: None, + }], ), error: None, location: None, @@ -1039,36 +1255,38 @@ END:VCALENDAR]]> } #[tokio::test] - async fn rfc_recurring_evt() { + async fn rfc_recurring_evt() { let expected = CalendarQuery:: { selector: Some(CalendarSelector::Prop(dav::PropName(vec![ - dav::PropertyRequest::Extension(PropertyRequest::CalendarData(CalendarDataRequest{ - mime: None, - comp: None, - recurrence: Some(RecurrenceModifier::LimitRecurrenceSet(LimitRecurrenceSet ( - Utc.with_ymd_and_hms(2006, 1, 3, 0, 0, 0).unwrap(), - Utc.with_ymd_and_hms(2006, 1, 5, 0, 0, 0).unwrap(), - ))), - limit_freebusy_set: None, - })), + dav::PropertyRequest::Extension(PropertyRequest::CalendarData( + CalendarDataRequest { + mime: None, + comp: None, + recurrence: Some(RecurrenceModifier::LimitRecurrenceSet( + LimitRecurrenceSet( + Utc.with_ymd_and_hms(2006, 1, 3, 0, 0, 0).unwrap(), + Utc.with_ymd_and_hms(2006, 1, 5, 0, 0, 0).unwrap(), + ), + )), + limit_freebusy_set: None, + }, + )), ]))), filter: Filter(CompFilter { name: Component::VCalendar, additional_rules: Some(CompFilterRules::Matches(CompFilterMatch { prop_filter: vec![], - comp_filter: vec![ - CompFilter { - name: Component::VEvent, - additional_rules: Some(CompFilterRules::Matches(CompFilterMatch { - prop_filter: vec![], - comp_filter: vec![], - time_range: Some(TimeRange::FullRange( - Utc.with_ymd_and_hms(2006, 1, 3, 0, 0, 0).unwrap(), - Utc.with_ymd_and_hms(2006, 1, 5, 0, 0, 0).unwrap(), - )), - })), - }, - ], + comp_filter: vec![CompFilter { + name: Component::VEvent, + additional_rules: Some(CompFilterRules::Matches(CompFilterMatch { + prop_filter: vec![], + comp_filter: vec![], + time_range: Some(TimeRange::FullRange( + Utc.with_ymd_and_hms(2006, 1, 3, 0, 0, 0).unwrap(), + Utc.with_ymd_and_hms(2006, 1, 5, 0, 0, 0).unwrap(), + )), + })), + }], time_range: None, })), }), @@ -1104,32 +1322,34 @@ END:VCALENDAR]]> let expected = CalendarQuery:: { selector: Some(CalendarSelector::Prop(dav::PropName(vec![ dav::PropertyRequest::GetEtag, - dav::PropertyRequest::Extension(PropertyRequest::CalendarData(CalendarDataRequest { - mime: None, - comp: None, - recurrence: None, - limit_freebusy_set: None, - })) + dav::PropertyRequest::Extension(PropertyRequest::CalendarData( + CalendarDataRequest { + mime: None, + comp: None, + 
recurrence: None, + limit_freebusy_set: None, + }, + )), ]))), filter: Filter(CompFilter { name: Component::VCalendar, additional_rules: Some(CompFilterRules::Matches(CompFilterMatch { time_range: None, prop_filter: vec![], - comp_filter: vec![ - CompFilter { - name: Component::VTodo, - additional_rules: Some(CompFilterRules::Matches(CompFilterMatch { - time_range: None, - comp_filter: vec![], - prop_filter: vec![ - PropFilter { - name: ComponentProperty("COMPLETED".into()), - additional_rules: Some(PropFilterRules::IsNotDefined), - }, - PropFilter { - name: ComponentProperty("STATUS".into()), - additional_rules: Some(PropFilterRules::Match(PropFilterMatch { + comp_filter: vec![CompFilter { + name: Component::VTodo, + additional_rules: Some(CompFilterRules::Matches(CompFilterMatch { + time_range: None, + comp_filter: vec![], + prop_filter: vec![ + PropFilter { + name: ComponentProperty("COMPLETED".into()), + additional_rules: Some(PropFilterRules::IsNotDefined), + }, + PropFilter { + name: ComponentProperty("STATUS".into()), + additional_rules: Some(PropFilterRules::Match( + PropFilterMatch { time_range: None, param_filter: vec![], time_or_text: Some(TimeOrText::Text(TextMatch { @@ -1137,12 +1357,12 @@ END:VCALENDAR]]> negate_condition: Some(true), text: "CANCELLED".into(), })), - })), - }, - ], - })), - } - ], + }, + )), + }, + ], + })), + }], })), }), timezone: None, @@ -1169,9 +1389,7 @@ END:VCALENDAR]]> "#; - let got = deserialize::>(src).await; assert_eq!(got, expected) - } } diff --git a/aero-dav/src/calencoder.rs b/aero-dav/src/calencoder.rs index d324c7f..06cafd4 100644 --- a/aero-dav/src/calencoder.rs +++ b/aero-dav/src/calencoder.rs @@ -1,10 +1,9 @@ +use quick_xml::events::{BytesText, Event}; use quick_xml::Error as QError; -use quick_xml::events::{Event, BytesText}; use super::caltypes::*; -use super::xml::{Node, QWrite, IWrite, Writer}; use super::types::Extension; - +use super::xml::{IWrite, Node, QWrite, Writer}; // ==================== Calendar Types Serialization ========================= @@ -54,7 +53,7 @@ impl QWrite for CalendarQuery { selector.qwrite(xml).await?; } self.filter.qwrite(xml).await?; - if let Some(tz) = &self.timezone { + if let Some(tz) = &self.timezone { tz.qwrite(xml).await?; } xml.q.write_event_async(Event::End(end)).await @@ -106,8 +105,8 @@ impl QWrite for PropertyRequest { Self::MinDateTime => atom("min-date-time").await, Self::MaxDateTime => atom("max-date-time").await, Self::MaxInstances => atom("max-instances").await, - Self::MaxAttendeesPerInstance => atom("max-attendees-per-instance").await, - Self::SupportedCollationSet => atom("supported-collation-set").await, + Self::MaxAttendeesPerInstance => atom("max-attendees-per-instance").await, + Self::SupportedCollationSet => atom("supported-collation-set").await, Self::CalendarData(req) => req.qwrite(xml).await, } } @@ -130,17 +129,21 @@ impl QWrite for Property { let end = start.to_end(); xml.q.write_event_async(Event::Start(start.clone())).await?; - xml.q.write_event_async(Event::Text(BytesText::new(text))).await?; + xml.q + .write_event_async(Event::Text(BytesText::new(text))) + .await?; xml.q.write_event_async(Event::End(end)).await - }, + } Self::CalendarTimezone(payload) => { let start = xml.create_cal_element("calendar-timezone"); let end = start.to_end(); xml.q.write_event_async(Event::Start(start.clone())).await?; - xml.q.write_event_async(Event::Text(BytesText::new(payload))).await?; + xml.q + .write_event_async(Event::Text(BytesText::new(payload))) + .await?; 
xml.q.write_event_async(Event::End(end)).await - }, + } Self::SupportedCalendarComponentSet(many_comp) => { let start = xml.create_cal_element("supported-calendar-component-set"); let end = start.to_end(); @@ -150,7 +153,7 @@ impl QWrite for Property { comp.qwrite(xml).await?; } xml.q.write_event_async(Event::End(end)).await - }, + } Self::SupportedCalendarData(many_mime) => { let start = xml.create_cal_element("supported-calendar-data"); let end = start.to_end(); @@ -160,49 +163,59 @@ impl QWrite for Property { mime.qwrite(xml).await?; } xml.q.write_event_async(Event::End(end)).await - }, + } Self::MaxResourceSize(bytes) => { let start = xml.create_cal_element("max-resource-size"); let end = start.to_end(); xml.q.write_event_async(Event::Start(start.clone())).await?; - xml.q.write_event_async(Event::Text(BytesText::new(bytes.to_string().as_str()))).await?; + xml.q + .write_event_async(Event::Text(BytesText::new(bytes.to_string().as_str()))) + .await?; xml.q.write_event_async(Event::End(end)).await - }, + } Self::MinDateTime(dt) => { let start = xml.create_cal_element("min-date-time"); let end = start.to_end(); let dtstr = format!("{}", dt.format(ICAL_DATETIME_FMT)); xml.q.write_event_async(Event::Start(start.clone())).await?; - xml.q.write_event_async(Event::Text(BytesText::new(dtstr.as_str()))).await?; + xml.q + .write_event_async(Event::Text(BytesText::new(dtstr.as_str()))) + .await?; xml.q.write_event_async(Event::End(end)).await - }, + } Self::MaxDateTime(dt) => { let start = xml.create_cal_element("max-date-time"); let end = start.to_end(); let dtstr = format!("{}", dt.format(ICAL_DATETIME_FMT)); xml.q.write_event_async(Event::Start(start.clone())).await?; - xml.q.write_event_async(Event::Text(BytesText::new(dtstr.as_str()))).await?; + xml.q + .write_event_async(Event::Text(BytesText::new(dtstr.as_str()))) + .await?; xml.q.write_event_async(Event::End(end)).await - }, + } Self::MaxInstances(count) => { let start = xml.create_cal_element("max-instances"); let end = start.to_end(); xml.q.write_event_async(Event::Start(start.clone())).await?; - xml.q.write_event_async(Event::Text(BytesText::new(count.to_string().as_str()))).await?; + xml.q + .write_event_async(Event::Text(BytesText::new(count.to_string().as_str()))) + .await?; xml.q.write_event_async(Event::End(end)).await - }, + } Self::MaxAttendeesPerInstance(count) => { let start = xml.create_cal_element("max-attendees-per-instance"); let end = start.to_end(); xml.q.write_event_async(Event::Start(start.clone())).await?; - xml.q.write_event_async(Event::Text(BytesText::new(count.to_string().as_str()))).await?; + xml.q + .write_event_async(Event::Text(BytesText::new(count.to_string().as_str()))) + .await?; xml.q.write_event_async(Event::End(end)).await - }, + } Self::SupportedCollationSet(many_collations) => { let start = xml.create_cal_element("supported-collation-set"); let end = start.to_end(); @@ -211,8 +224,8 @@ impl QWrite for Property { for collation in many_collations.iter() { collation.qwrite(xml).await?; } - xml.q.write_event_async(Event::End(end)).await - }, + xml.q.write_event_async(Event::End(end)).await + } Self::CalendarData(inner) => inner.qwrite(xml).await, } } @@ -225,7 +238,7 @@ impl QWrite for ResourceType { Self::Calendar => { let empty_tag = xml.create_cal_element("calendar"); xml.q.write_event_async(Event::Empty(empty_tag)).await - }, + } } } } @@ -245,7 +258,7 @@ impl QWrite for Violation { Self::NeedPrivileges => { let empty_tag = xml.create_dav_element("need-privileges"); 
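Every text-valued property in the encoder above follows the same quick_xml sequence: write a Start event, a Text payload, then the matching End event on the async writer. A minimal standalone sketch of that sequence, assuming quick_xml with its async-tokio feature enabled (element name and text are placeholders):

    use quick_xml::events::{BytesStart, BytesText, Event};
    use tokio::io::AsyncWriteExt;

    #[tokio::main]
    async fn main() -> Result<(), quick_xml::Error> {
        let mut buffer = Vec::new();
        let mut sink = tokio::io::BufWriter::new(&mut buffer);
        let mut writer = quick_xml::writer::Writer::new(&mut sink);

        // Same shape as the calendar-description arm above: <tag>text</tag>.
        let start = BytesStart::new("C:calendar-description");
        let end = start.to_end().into_owned();
        writer.write_event_async(Event::Start(start)).await?;
        writer
            .write_event_async(Event::Text(BytesText::new("Calendrier de Mathilde Desruisseaux")))
            .await?;
        writer.write_event_async(Event::End(end)).await?;
        drop(writer);

        sink.flush().await.expect("flush");
        drop(sink);
        println!("{}", String::from_utf8(buffer).unwrap());
        Ok(())
    }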
xml.q.write_event_async(Event::Empty(empty_tag)).await - }, + } // Regular CalDAV errors Self::ResourceMustBeNull => atom("resource-must-be-null").await, @@ -262,7 +275,7 @@ impl QWrite for Violation { xml.q.write_event_async(Event::Start(start.clone())).await?; href.qwrite(xml).await?; xml.q.write_event_async(Event::End(end)).await - }, + } Self::MaxResourceSize => atom("max-resource-size").await, Self::MinDateTime => atom("min-date-time").await, Self::MaxDateTime => atom("max-date-time").await, @@ -284,13 +297,12 @@ impl QWrite for Violation { param_item.qwrite(xml).await?; } xml.q.write_event_async(Event::End(end)).await - }, + } Self::NumberOfMatchesWithinLimits => atom("number-of-matches-within-limits").await, } } } - // ---------------------------- Inner XML ------------------------------------ impl QWrite for SupportedCollation { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { @@ -300,19 +312,20 @@ impl QWrite for SupportedCollation { xml.q.write_event_async(Event::Start(start.clone())).await?; self.0.qwrite(xml).await?; xml.q.write_event_async(Event::End(end)).await - } } impl QWrite for Collation { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { let col = match self { - Self::AsciiCaseMap => "i;ascii-casemap", - Self::Octet => "i;octet", - Self::Unknown(v) => v.as_str(), + Self::AsciiCaseMap => "i;ascii-casemap", + Self::Octet => "i;octet", + Self::Unknown(v) => v.as_str(), }; - xml.q.write_event_async(Event::Text(BytesText::new(col))).await + xml.q + .write_event_async(Event::Text(BytesText::new(col))) + .await } } @@ -332,7 +345,9 @@ impl QWrite for CalendarDataPayload { let end = start.to_end(); xml.q.write_event_async(Event::Start(start.clone())).await?; - xml.q.write_event_async(Event::Text(BytesText::new(self.payload.as_str()))).await?; + xml.q + .write_event_async(Event::Text(BytesText::new(self.payload.as_str()))) + .await?; xml.q.write_event_async(Event::End(end)).await } } @@ -347,7 +362,7 @@ impl QWrite for CalendarDataRequest { // Empty tag if self.comp.is_none() && self.recurrence.is_none() && self.limit_freebusy_set.is_none() { - return xml.q.write_event_async(Event::Empty(start.clone())).await + return xml.q.write_event_async(Event::Empty(start.clone())).await; } let end = start.to_end(); @@ -392,7 +407,7 @@ impl QWrite for Comp { comp_kind.qwrite(xml).await?; } xml.q.write_event_async(Event::End(end)).await - }, + } } } } @@ -411,7 +426,7 @@ impl QWrite for CompKind { Self::AllComp => { let empty_tag = xml.create_cal_element("allcomp"); xml.q.write_event_async(Event::Empty(empty_tag)).await - }, + } Self::Comp(many_comp) => { for comp in many_comp.iter() { // Required: recursion in an async fn requires boxing @@ -420,7 +435,10 @@ impl QWrite for CompKind { // For more information about this error, try `rustc --explain E0391`. 
// https://github.com/rust-lang/rust/issues/78649 #[inline(always)] - fn recurse<'a>(comp: &'a Comp, xml: &'a mut Writer) -> futures::future::BoxFuture<'a, Result<(), QError>> { + fn recurse<'a>( + comp: &'a Comp, + xml: &'a mut Writer, + ) -> futures::future::BoxFuture<'a, Result<(), QError>> { Box::pin(comp.qwrite(xml)) } recurse(comp, xml).await?; @@ -437,7 +455,7 @@ impl QWrite for PropKind { Self::AllProp => { let empty_tag = xml.create_cal_element("allprop"); xml.q.write_event_async(Event::Empty(empty_tag)).await - }, + } Self::Prop(many_prop) => { for prop in many_prop.iter() { prop.qwrite(xml).await?; @@ -473,8 +491,14 @@ impl QWrite for RecurrenceModifier { impl QWrite for Expand { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { let mut empty = xml.create_cal_element("expand"); - empty.push_attribute(("start", format!("{}", self.0.format(ICAL_DATETIME_FMT)).as_str())); - empty.push_attribute(("end", format!("{}", self.1.format(ICAL_DATETIME_FMT)).as_str())); + empty.push_attribute(( + "start", + format!("{}", self.0.format(ICAL_DATETIME_FMT)).as_str(), + )); + empty.push_attribute(( + "end", + format!("{}", self.1.format(ICAL_DATETIME_FMT)).as_str(), + )); xml.q.write_event_async(Event::Empty(empty)).await } } @@ -482,8 +506,14 @@ impl QWrite for Expand { impl QWrite for LimitRecurrenceSet { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { let mut empty = xml.create_cal_element("limit-recurrence-set"); - empty.push_attribute(("start", format!("{}", self.0.format(ICAL_DATETIME_FMT)).as_str())); - empty.push_attribute(("end", format!("{}", self.1.format(ICAL_DATETIME_FMT)).as_str())); + empty.push_attribute(( + "start", + format!("{}", self.0.format(ICAL_DATETIME_FMT)).as_str(), + )); + empty.push_attribute(( + "end", + format!("{}", self.1.format(ICAL_DATETIME_FMT)).as_str(), + )); xml.q.write_event_async(Event::Empty(empty)).await } } @@ -491,8 +521,14 @@ impl QWrite for LimitRecurrenceSet { impl QWrite for LimitFreebusySet { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { let mut empty = xml.create_cal_element("limit-freebusy-set"); - empty.push_attribute(("start", format!("{}", self.0.format(ICAL_DATETIME_FMT)).as_str())); - empty.push_attribute(("end", format!("{}", self.1.format(ICAL_DATETIME_FMT)).as_str())); + empty.push_attribute(( + "start", + format!("{}", self.0.format(ICAL_DATETIME_FMT)).as_str(), + )); + empty.push_attribute(( + "end", + format!("{}", self.1.format(ICAL_DATETIME_FMT)).as_str(), + )); xml.q.write_event_async(Event::Empty(empty)).await } } @@ -503,11 +539,11 @@ impl QWrite for CalendarSelector { Self::AllProp => { let empty_tag = xml.create_dav_element("allprop"); xml.q.write_event_async(Event::Empty(empty_tag)).await - }, + } Self::PropName => { let empty_tag = xml.create_dav_element("propname"); xml.q.write_event_async(Event::Empty(empty_tag)).await - }, + } Self::Prop(prop) => prop.qwrite(xml).await, } } @@ -534,10 +570,10 @@ impl QWrite for CompFilter { impl QWrite for CompFilterRules { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { match self { - Self::IsNotDefined => { + Self::IsNotDefined => { let empty_tag = xml.create_dav_element("is-not-defined"); xml.q.write_event_async(Event::Empty(empty_tag)).await - }, + } Self::Matches(cfm) => cfm.qwrite(xml).await, } } @@ -559,7 +595,10 @@ impl QWrite for CompFilterMatch { // For more information about this error, try `rustc --explain E0391`. 
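As the comments above note, an async fn cannot call itself directly: its generated future would contain itself and have no computable size (rustc E0391, rust-lang/rust#78649), so the recursive call is routed through a boxed future. The same pattern in standalone form, using a hypothetical Tree type rather than the crate's Comp:

    use futures::future::BoxFuture;

    struct Tree {
        children: Vec<Tree>,
    }

    // Counts nodes recursively; boxing the recursive call gives the outer
    // future a finite, known size.
    async fn count(node: &Tree) -> usize {
        #[inline(always)]
        fn recurse<'a>(node: &'a Tree) -> BoxFuture<'a, usize> {
            Box::pin(count(node))
        }

        let mut total = 1;
        for child in node.children.iter() {
            total += recurse(child).await;
        }
        total
    }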
// https://github.com/rust-lang/rust/issues/78649 #[inline(always)] - fn recurse<'a>(comp: &'a CompFilter, xml: &'a mut Writer) -> futures::future::BoxFuture<'a, Result<(), QError>> { + fn recurse<'a>( + comp: &'a CompFilter, + xml: &'a mut Writer, + ) -> futures::future::BoxFuture<'a, Result<(), QError>> { Box::pin(comp.qwrite(xml)) } recurse(comp_item, xml).await?; @@ -591,7 +630,7 @@ impl QWrite for PropFilterRules { Self::IsNotDefined => { let empty_tag = xml.create_dav_element("is-not-defined"); xml.q.write_event_async(Event::Empty(empty_tag)).await - }, + } Self::Match(prop_match) => prop_match.qwrite(xml).await, } } @@ -635,7 +674,9 @@ impl QWrite for TextMatch { let end = start.to_end(); xml.q.write_event_async(Event::Start(start.clone())).await?; - xml.q.write_event_async(Event::Text(BytesText::new(self.text.as_str()))).await?; + xml.q + .write_event_async(Event::Text(BytesText::new(self.text.as_str()))) + .await?; xml.q.write_event_async(Event::End(end)).await } } @@ -663,7 +704,7 @@ impl QWrite for ParamFilterMatch { Self::IsNotDefined => { let empty_tag = xml.create_dav_element("is-not-defined"); xml.q.write_event_async(Event::Empty(empty_tag)).await - }, + } Self::Match(tm) => tm.qwrite(xml).await, } } @@ -675,7 +716,9 @@ impl QWrite for TimeZone { let end = start.to_end(); xml.q.write_event_async(Event::Start(start.clone())).await?; - xml.q.write_event_async(Event::Text(BytesText::new(self.0.as_str()))).await?; + xml.q + .write_event_async(Event::Text(BytesText::new(self.0.as_str()))) + .await?; xml.q.write_event_async(Event::End(end)).await } } @@ -695,11 +738,20 @@ impl QWrite for TimeRange { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { let mut empty = xml.create_cal_element("time-range"); match self { - Self::OnlyStart(start) => empty.push_attribute(("start", format!("{}", start.format(ICAL_DATETIME_FMT)).as_str())), - Self::OnlyEnd(end) => empty.push_attribute(("end", format!("{}", end.format(ICAL_DATETIME_FMT)).as_str())), + Self::OnlyStart(start) => empty.push_attribute(( + "start", + format!("{}", start.format(ICAL_DATETIME_FMT)).as_str(), + )), + Self::OnlyEnd(end) => { + empty.push_attribute(("end", format!("{}", end.format(ICAL_DATETIME_FMT)).as_str())) + } Self::FullRange(start, end) => { - empty.push_attribute(("start", format!("{}", start.format(ICAL_DATETIME_FMT)).as_str())); - empty.push_attribute(("end", format!("{}", end.format(ICAL_DATETIME_FMT)).as_str())); + empty.push_attribute(( + "start", + format!("{}", start.format(ICAL_DATETIME_FMT)).as_str(), + )); + empty + .push_attribute(("end", format!("{}", end.format(ICAL_DATETIME_FMT)).as_str())); } } xml.q.write_event_async(Event::Empty(empty)).await @@ -709,16 +761,16 @@ impl QWrite for TimeRange { #[cfg(test)] mod tests { use super::*; - use crate::types as dav; use crate::realization::Calendar; + use crate::types as dav; + use chrono::{TimeZone, Utc}; use tokio::io::AsyncWriteExt; - use chrono::{Utc,TimeZone}; async fn serialize(elem: &impl QWrite) -> String { let mut buffer = Vec::new(); let mut tokio_buffer = tokio::io::BufWriter::new(&mut buffer); let q = quick_xml::writer::Writer::new_with_indent(&mut tokio_buffer, b' ', 4); - let ns_to_apply = vec![ + let ns_to_apply = vec![ ("xmlns:D".into(), "DAV:".into()), ("xmlns:C".into(), "urn:ietf:params:xml:ns:caldav".into()), ]; @@ -728,91 +780,120 @@ mod tests { tokio_buffer.flush().await.expect("tokio buffer flush"); let got = std::str::from_utf8(buffer.as_slice()).unwrap(); - return got.into() + return got.into(); } #[tokio::test] 
async fn basic_violation() { - let got = serialize( - &dav::Error::(vec![ - dav::Violation::Extension(Violation::ResourceMustBeNull), - ]) - ).await; + let got = serialize(&dav::Error::(vec![dav::Violation::Extension( + Violation::ResourceMustBeNull, + )])) + .await; let expected = r#" "#; - assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + assert_eq!( + &got, expected, + "\n---GOT---\n{got}\n---EXP---\n{expected}\n" + ); } #[tokio::test] async fn rfc_calendar_query1_req() { - let got = serialize( - &CalendarQuery:: { - selector: Some(CalendarSelector::Prop(dav::PropName(vec![ - dav::PropertyRequest::GetEtag, - dav::PropertyRequest::Extension(PropertyRequest::CalendarData(CalendarDataRequest { + let got = serialize(&CalendarQuery:: { + selector: Some(CalendarSelector::Prop(dav::PropName(vec![ + dav::PropertyRequest::GetEtag, + dav::PropertyRequest::Extension(PropertyRequest::CalendarData( + CalendarDataRequest { mime: None, comp: Some(Comp { name: Component::VCalendar, - prop_kind: Some(PropKind::Prop(vec![ - CalProp { - name: ComponentProperty("VERSION".into()), - novalue: None, - } - ])), + prop_kind: Some(PropKind::Prop(vec![CalProp { + name: ComponentProperty("VERSION".into()), + novalue: None, + }])), comp_kind: Some(CompKind::Comp(vec![ - Comp { - name: Component::VEvent, - prop_kind: Some(PropKind::Prop(vec![ - CalProp { name: ComponentProperty("SUMMARY".into()), novalue: None }, - CalProp { name: ComponentProperty("UID".into()), novalue: None }, - CalProp { name: ComponentProperty("DTSTART".into()), novalue: None }, - CalProp { name: ComponentProperty("DTEND".into()), novalue: None }, - CalProp { name: ComponentProperty("DURATION".into()), novalue: None }, - CalProp { name: ComponentProperty("RRULE".into()), novalue: None }, - CalProp { name: ComponentProperty("RDATE".into()), novalue: None }, - CalProp { name: ComponentProperty("EXRULE".into()), novalue: None }, - CalProp { name: ComponentProperty("EXDATE".into()), novalue: None }, - CalProp { name: ComponentProperty("RECURRENCE-ID".into()), novalue: None }, - ])), - comp_kind: None, - }, - Comp { - name: Component::VTimeZone, - prop_kind: None, - comp_kind: None, - } - ])), - }), + Comp { + name: Component::VEvent, + prop_kind: Some(PropKind::Prop(vec![ + CalProp { + name: ComponentProperty("SUMMARY".into()), + novalue: None, + }, + CalProp { + name: ComponentProperty("UID".into()), + novalue: None, + }, + CalProp { + name: ComponentProperty("DTSTART".into()), + novalue: None, + }, + CalProp { + name: ComponentProperty("DTEND".into()), + novalue: None, + }, + CalProp { + name: ComponentProperty("DURATION".into()), + novalue: None, + }, + CalProp { + name: ComponentProperty("RRULE".into()), + novalue: None, + }, + CalProp { + name: ComponentProperty("RDATE".into()), + novalue: None, + }, + CalProp { + name: ComponentProperty("EXRULE".into()), + novalue: None, + }, + CalProp { + name: ComponentProperty("EXDATE".into()), + novalue: None, + }, + CalProp { + name: ComponentProperty("RECURRENCE-ID".into()), + novalue: None, + }, + ])), + comp_kind: None, + }, + Comp { + name: Component::VTimeZone, + prop_kind: None, + comp_kind: None, + }, + ])), + }), recurrence: None, limit_freebusy_set: None, - })), - ]))), - filter: Filter(CompFilter { - name: Component::VCalendar, - additional_rules: Some(CompFilterRules::Matches(CompFilterMatch { - time_range: None, - prop_filter: vec![], - comp_filter: vec![ - CompFilter { - name: Component::VEvent, - additional_rules: Some(CompFilterRules::Matches(CompFilterMatch { - 
time_range: Some(TimeRange::FullRange( - Utc.with_ymd_and_hms(2006,1,4,0,0,0).unwrap(), - Utc.with_ymd_and_hms(2006,1,5,0,0,0).unwrap(), - )), - prop_filter: vec![], - comp_filter: vec![], - })), - }, - ], - })), - }), - timezone: None, - } - ).await; + }, + )), + ]))), + filter: Filter(CompFilter { + name: Component::VCalendar, + additional_rules: Some(CompFilterRules::Matches(CompFilterMatch { + time_range: None, + prop_filter: vec![], + comp_filter: vec![CompFilter { + name: Component::VEvent, + additional_rules: Some(CompFilterRules::Matches(CompFilterMatch { + time_range: Some(TimeRange::FullRange( + Utc.with_ymd_and_hms(2006, 1, 4, 0, 0, 0).unwrap(), + Utc.with_ymd_and_hms(2006, 1, 5, 0, 0, 0).unwrap(), + )), + prop_filter: vec![], + comp_filter: vec![], + })), + }], + })), + }), + timezone: None, + }) + .await; let expected = r#" @@ -844,59 +925,69 @@ mod tests { "#; - - assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + + assert_eq!( + &got, expected, + "\n---GOT---\n{got}\n---EXP---\n{expected}\n" + ); } #[tokio::test] async fn rfc_calendar_query1_res() { - let got = serialize( - &dav::Multistatus:: { - responses: vec![ - dav::Response { - status_or_propstat: dav::StatusOrPropstat::PropStat( - dav::Href("http://cal.example.com/bernard/work/abcd2.ics".into()), - vec![dav::PropStat { + let got = serialize(&dav::Multistatus:: { + responses: vec![ + dav::Response { + status_or_propstat: dav::StatusOrPropstat::PropStat( + dav::Href("http://cal.example.com/bernard/work/abcd2.ics".into()), + vec![dav::PropStat { prop: dav::AnyProp(vec![ - dav::AnyProperty::Value(dav::Property::GetEtag("\"fffff-abcd2\"".into())), - dav::AnyProperty::Value(dav::Property::Extension(Property::CalendarData(CalendarDataPayload { - mime: None, - payload: "PLACEHOLDER".into() - }))), + dav::AnyProperty::Value(dav::Property::GetEtag( + "\"fffff-abcd2\"".into(), + )), + dav::AnyProperty::Value(dav::Property::Extension( + Property::CalendarData(CalendarDataPayload { + mime: None, + payload: "PLACEHOLDER".into(), + }), + )), ]), status: dav::Status(http::status::StatusCode::OK), error: None, responsedescription: None, - }] - ), - location: None, - error: None, - responsedescription: None, - }, - dav::Response { - status_or_propstat: dav::StatusOrPropstat::PropStat( - dav::Href("http://cal.example.com/bernard/work/abcd3.ics".into()), - vec![dav::PropStat { + }], + ), + location: None, + error: None, + responsedescription: None, + }, + dav::Response { + status_or_propstat: dav::StatusOrPropstat::PropStat( + dav::Href("http://cal.example.com/bernard/work/abcd3.ics".into()), + vec![dav::PropStat { prop: dav::AnyProp(vec![ - dav::AnyProperty::Value(dav::Property::GetEtag("\"fffff-abcd3\"".into())), - dav::AnyProperty::Value(dav::Property::Extension(Property::CalendarData(CalendarDataPayload{ - mime: None, - payload: "PLACEHOLDER".into(), - }))), + dav::AnyProperty::Value(dav::Property::GetEtag( + "\"fffff-abcd3\"".into(), + )), + dav::AnyProperty::Value(dav::Property::Extension( + Property::CalendarData(CalendarDataPayload { + mime: None, + payload: "PLACEHOLDER".into(), + }), + )), ]), status: dav::Status(http::status::StatusCode::OK), error: None, responsedescription: None, - }] - ), - location: None, - error: None, - responsedescription: None, - }, - ], - responsedescription: None, - }, - ).await; + }], + ), + location: None, + error: None, + responsedescription: None, + }, + ], + responsedescription: None, + }) + .await; let expected = r#" @@ -921,7 +1012,9 @@ mod tests { "#; - - 
assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + assert_eq!( + &got, expected, + "\n---GOT---\n{got}\n---EXP---\n{expected}\n" + ); } } diff --git a/aero-dav/src/caltypes.rs b/aero-dav/src/caltypes.rs index 08991a0..7c85642 100644 --- a/aero-dav/src/caltypes.rs +++ b/aero-dav/src/caltypes.rs @@ -1,7 +1,7 @@ #![allow(dead_code)] -use chrono::{DateTime,Utc}; use super::types as dav; +use chrono::{DateTime, Utc}; pub const ICAL_DATETIME_FMT: &str = "%Y%m%dT%H%M%SZ"; @@ -13,7 +13,6 @@ pub const ICAL_DATETIME_FMT: &str = "%Y%m%dT%H%M%SZ"; // For reference, non-official extensions documented by SabreDAV: // https://github.com/apple/ccs-calendarserver/tree/master/doc/Extensions - // ----- Root elements ----- // --- (MKCALENDAR PART) --- @@ -33,17 +32,16 @@ pub const ICAL_DATETIME_FMT: &str = "%Y%m%dT%H%M%SZ"; #[derive(Debug, PartialEq, Clone)] pub struct MkCalendar(pub dav::Set); - /// If a response body for a successful request is included, it MUST /// be a CALDAV:mkcalendar-response XML element. /// /// /// /// ---- -/// +/// /// ANY is not satisfying, so looking at RFC5689 /// https://www.rfc-editor.org/rfc/rfc5689.html#section-5.2 -/// +/// /// Definition: /// /// @@ -63,9 +61,9 @@ pub enum Report { /// Namespace: urn:ietf:params:xml:ns:caldav /// /// Purpose: Defines a report for querying calendar object resources. -/// +/// /// Description: See Section 7.8. -/// +/// /// Definition: /// /// Calendrier de Mathilde Desruisseaux - CalendarDescription { - lang: Option, - text: String, - }, + CalendarDescription { lang: Option, text: String }, /// 5.2.2. CALDAV:calendar-timezone Property /// @@ -232,7 +227,7 @@ pub enum Property { /// sequence "]]>", which is the end delimiter for the CDATA section. /// /// Definition: - /// + /// /// ```xmlschema /// /// PCDATA value: an iCalendar object with exactly one VTIMEZONE component. @@ -630,7 +625,7 @@ pub enum Property { /// WebDAV property. However, the CALDAV:calendar-data XML element is /// not a WebDAV property and, as such, is not returned in PROPFIND /// responses, nor used in PROPPATCH requests. - /// + /// /// Note: The iCalendar data embedded within the CALDAV:calendar-data /// XML element MUST follow the standard XML character data encoding /// rules, including use of <, >, & etc. entity encoding or @@ -649,7 +644,7 @@ pub enum Violation { /// (CALDAV:calendar-collection-location-ok): The Request-URI MUST /// identify a location where a calendar collection can be created; CalendarCollectionLocationOk, - + /// (CALDAV:valid-calendar-data): The time zone specified in CALDAV: /// calendar-timezone property MUST be a valid iCalendar object /// containing a single valid VTIMEZONE component. @@ -712,7 +707,7 @@ pub enum Violation { /// date-time property value (Section 5.2.6) on the calendar /// collection where the resource will be stored; MinDateTime, - + /// (CALDAV:max-date-time): The resource submitted in the PUT request, /// or targeted by a COPY or MOVE request, MUST have all of its /// iCalendar DATE or DATE-TIME property values (for each recurring @@ -784,15 +779,15 @@ pub enum Violation { /// To deal with this, this specification makes use of the IANA Collation /// Registry defined in [RFC4790] to specify collations that may be used /// to carry out the text comparison operations with a well-defined rule. -/// +/// /// The comparisons used in CalDAV are all "substring" matches, as per /// [RFC4790], Section 4.2. Collations supported by the server MUST /// support "substring" match operations. 
-/// +/// /// CalDAV servers are REQUIRED to support the "i;ascii-casemap" and /// "i;octet" collations, as described in [RFC4790], and MAY support /// other collations. -/// +/// /// Servers MUST advertise the set of collations that they support via /// the CALDAV:supported-collation-set property defined on any resource /// that supports reports that use collations. @@ -807,7 +802,7 @@ pub enum Violation { /// /// Wildcards (as defined in [RFC4790], Section 3.2) MUST NOT be used in /// the collation identifier. -/// +/// /// If the client chooses a collation not supported by the server, the /// server MUST respond with a CALDAV:supported-collation precondition /// error response. @@ -915,7 +910,7 @@ pub struct CompSupport(pub Component); /// Description: The CALDAV:allcomp XML element can be used when the /// client wants all types of components returned by a calendaring /// REPORT request. -/// +/// /// Definition: /// /// @@ -997,7 +992,7 @@ pub enum RecurrenceModifier { /// recurrence set into calendar components that define exactly one /// recurrence instance, and MUST return only those whose scheduled /// time intersect a specified time range. -/// +/// /// The "start" attribute specifies the inclusive start of the time /// range, and the "end" attribute specifies the non-inclusive end of /// the time range. Both attributes are specified as date with UTC @@ -1189,7 +1184,7 @@ pub struct CompFilterMatch { /// Name: prop-filter /// /// Namespace: urn:ietf:params:xml:ns:caldav -/// +/// /// Purpose: Specifies search criteria on calendar properties. /// /// Description: The CALDAV:prop-filter XML element specifies a query @@ -1352,8 +1347,6 @@ pub enum ParamFilterMatch { /// /* CURRENTLY INLINED */ - - /// Name: timezone /// /// Namespace: urn:ietf:params:xml:ns:caldav @@ -1475,7 +1468,7 @@ impl PropertyParameter { } } -#[derive(Default,Debug,PartialEq,Clone)] +#[derive(Default, Debug, PartialEq, Clone)] pub enum Collation { #[default] AsciiCaseMap, @@ -1492,9 +1485,9 @@ impl Collation { } pub fn new(v: String) -> Self { match v.as_str() { - "i;ascii-casemap" => Self::AsciiCaseMap, - "i;octet" => Self::Octet, - _ => Self::Unknown(v), + "i;ascii-casemap" => Self::AsciiCaseMap, + "i;octet" => Self::Octet, + _ => Self::Unknown(v), } } } diff --git a/aero-dav/src/decoder.rs b/aero-dav/src/decoder.rs index bb8d9de..bd724e8 100644 --- a/aero-dav/src/decoder.rs +++ b/aero-dav/src/decoder.rs @@ -1,9 +1,9 @@ -use quick_xml::events::Event; use chrono::DateTime; +use quick_xml::events::Event; -use super::types::*; use super::error::ParsingError; -use super::xml::{Node, QRead, Reader, IRead, DAV_URN}; +use super::types::*; +use super::xml::{IRead, Node, QRead, Reader, DAV_URN}; //@TODO (1) Rewrite all objects as Href, // where we return Ok(None) instead of trying to find the object at any cost. @@ -25,20 +25,21 @@ impl QRead> for PropFind { if let Some(_) = xml.maybe_open(DAV_URN, "allprop").await? { xml.close().await?; let includ = xml.maybe_find::>().await?; - break PropFind::AllProp(includ) + break PropFind::AllProp(includ); } // propname if let Some(_) = xml.maybe_open(DAV_URN, "propname").await? 
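The Collation helper in the caltypes.rs hunk above encodes those rules: i;ascii-casemap is the default, i;octet is the other required collation, and any other identifier is kept verbatim so the server can answer with the CALDAV:supported-collation precondition. Roughly, assuming the type is in scope (for example in a test inside this crate):

    #[test]
    fn collation_identifiers() {
        assert_eq!(Collation::default(), Collation::AsciiCaseMap);
        assert_eq!(Collation::new("i;octet".into()), Collation::Octet);
        assert_eq!(
            Collation::new("i;unicode-casemap".into()),
            Collation::Unknown("i;unicode-casemap".into()),
        );
    }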
{ xml.close().await?; - break PropFind::PropName + break PropFind::PropName; } // prop let (mut maybe_prop, mut dirty) = (None, false); - xml.maybe_read::>(&mut maybe_prop, &mut dirty).await?; + xml.maybe_read::>(&mut maybe_prop, &mut dirty) + .await?; if let Some(prop) = maybe_prop { - break PropFind::Prop(prop) + break PropFind::Prop(prop); } // not found, skipping @@ -80,7 +81,10 @@ impl QRead> for Multistatus { } xml.close().await?; - Ok(Multistatus { responses, responsedescription }) + Ok(Multistatus { + responses, + responsedescription, + }) } } @@ -91,7 +95,8 @@ impl QRead for LockInfo { let (mut m_scope, mut m_type, mut owner) = (None, None, None); loop { let mut dirty = false; - xml.maybe_read::(&mut m_scope, &mut dirty).await?; + xml.maybe_read::(&mut m_scope, &mut dirty) + .await?; xml.maybe_read::(&mut m_type, &mut dirty).await?; xml.maybe_read::(&mut owner, &mut dirty).await?; @@ -104,7 +109,11 @@ impl QRead for LockInfo { } xml.close().await?; match (m_scope, m_type) { - (Some(lockscope), Some(locktype)) => Ok(LockInfo { lockscope, locktype, owner }), + (Some(lockscope), Some(locktype)) => Ok(LockInfo { + lockscope, + locktype, + owner, + }), _ => Err(ParsingError::MissingChild), } } @@ -121,7 +130,6 @@ impl QRead> for PropValue { } } - /// Error response impl QRead> for Error { async fn qread(xml: &mut Reader) -> Result { @@ -132,13 +140,12 @@ impl QRead> for Error { } } - - // ---- INNER XML impl QRead> for Response { async fn qread(xml: &mut Reader) -> Result { xml.open(DAV_URN, "response").await?; - let (mut status, mut error, mut responsedescription, mut location) = (None, None, None, None); + let (mut status, mut error, mut responsedescription, mut location) = + (None, None, None, None); let mut href = Vec::new(); let mut propstat = Vec::new(); @@ -146,28 +153,38 @@ impl QRead> for Response { let mut dirty = false; xml.maybe_read::(&mut status, &mut dirty).await?; xml.maybe_push::(&mut href, &mut dirty).await?; - xml.maybe_push::>(&mut propstat, &mut dirty).await?; + xml.maybe_push::>(&mut propstat, &mut dirty) + .await?; xml.maybe_read::>(&mut error, &mut dirty).await?; - xml.maybe_read::(&mut responsedescription, &mut dirty).await?; - xml.maybe_read::(&mut location, &mut dirty).await?; + xml.maybe_read::(&mut responsedescription, &mut dirty) + .await?; + xml.maybe_read::(&mut location, &mut dirty) + .await?; if !dirty { match xml.peek() { Event::End(_) => break, - _ => { xml.skip().await? 
}, + _ => xml.skip().await?, }; } } xml.close().await?; match (status, &propstat[..], &href[..]) { - (Some(status), &[], &[_, ..]) => Ok(Response { - status_or_propstat: StatusOrPropstat::Status(href, status), - error, responsedescription, location, + (Some(status), &[], &[_, ..]) => Ok(Response { + status_or_propstat: StatusOrPropstat::Status(href, status), + error, + responsedescription, + location, }), (None, &[_, ..], &[_, ..]) => Ok(Response { - status_or_propstat: StatusOrPropstat::PropStat(href.into_iter().next().unwrap(), propstat), - error, responsedescription, location, + status_or_propstat: StatusOrPropstat::PropStat( + href.into_iter().next().unwrap(), + propstat, + ), + error, + responsedescription, + location, }), (Some(_), &[_, ..], _) => Err(ParsingError::InvalidValue), _ => Err(ParsingError::MissingChild), @@ -179,14 +196,17 @@ impl QRead> for PropStat { async fn qread(xml: &mut Reader) -> Result { xml.open(DAV_URN, "propstat").await?; - let (mut m_any_prop, mut m_status, mut error, mut responsedescription) = (None, None, None, None); + let (mut m_any_prop, mut m_status, mut error, mut responsedescription) = + (None, None, None, None); loop { let mut dirty = false; - xml.maybe_read::>(&mut m_any_prop, &mut dirty).await?; + xml.maybe_read::>(&mut m_any_prop, &mut dirty) + .await?; xml.maybe_read::(&mut m_status, &mut dirty).await?; xml.maybe_read::>(&mut error, &mut dirty).await?; - xml.maybe_read::(&mut responsedescription, &mut dirty).await?; + xml.maybe_read::(&mut responsedescription, &mut dirty) + .await?; if !dirty { match xml.peek() { @@ -198,7 +218,12 @@ impl QRead> for PropStat { xml.close().await?; match (m_any_prop, m_status) { - (Some(prop), Some(status)) => Ok(PropStat { prop, status, error, responsedescription }), + (Some(prop), Some(status)) => Ok(PropStat { + prop, + status, + error, + responsedescription, + }), _ => Err(ParsingError::MissingChild), } } @@ -208,8 +233,12 @@ impl QRead for Status { async fn qread(xml: &mut Reader) -> Result { xml.open(DAV_URN, "status").await?; let fullcode = xml.tag_string().await?; - let txtcode = fullcode.splitn(3, ' ').nth(1).ok_or(ParsingError::InvalidValue)?; - let code = http::status::StatusCode::from_bytes(txtcode.as_bytes()).or(Err(ParsingError::InvalidValue))?; + let txtcode = fullcode + .splitn(3, ' ') + .nth(1) + .ok_or(ParsingError::InvalidValue)?; + let code = http::status::StatusCode::from_bytes(txtcode.as_bytes()) + .or(Err(ParsingError::InvalidValue))?; xml.close().await?; Ok(Status(code)) } @@ -263,27 +292,55 @@ impl QRead> for Set { impl QRead> for Violation { async fn qread(xml: &mut Reader) -> Result { - if xml.maybe_open(DAV_URN, "lock-token-matches-request-uri").await?.is_some() { + if xml + .maybe_open(DAV_URN, "lock-token-matches-request-uri") + .await? + .is_some() + { xml.close().await?; Ok(Violation::LockTokenMatchesRequestUri) - } else if xml.maybe_open(DAV_URN, "lock-token-submitted").await?.is_some() { + } else if xml + .maybe_open(DAV_URN, "lock-token-submitted") + .await? + .is_some() + { let links = xml.collect::().await?; xml.close().await?; Ok(Violation::LockTokenSubmitted(links)) - } else if xml.maybe_open(DAV_URN, "no-conflicting-lock").await?.is_some() { + } else if xml + .maybe_open(DAV_URN, "no-conflicting-lock") + .await? + .is_some() + { let links = xml.collect::().await?; xml.close().await?; Ok(Violation::NoConflictingLock(links)) - } else if xml.maybe_open(DAV_URN, "no-external-entities").await?.is_some() { + } else if xml + .maybe_open(DAV_URN, "no-external-entities") + .await? 
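The DAV:status decoder above receives a full HTTP status line such as "HTTP/1.1 200 OK" and keeps only the numeric code. A minimal sketch of that extraction using only the http crate (the parse_status_line helper is illustrative, not part of this patch):

    fn parse_status_line(fullcode: &str) -> Option<http::StatusCode> {
        // "HTTP/1.1 200 OK" -> "200" -> StatusCode::OK
        let txtcode = fullcode.splitn(3, ' ').nth(1)?;
        http::StatusCode::from_bytes(txtcode.as_bytes()).ok()
    }

    fn main() {
        assert_eq!(
            parse_status_line("HTTP/1.1 200 OK"),
            Some(http::StatusCode::OK)
        );
        assert_eq!(parse_status_line("garbage"), None);
    }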
+ .is_some() + { xml.close().await?; Ok(Violation::NoExternalEntities) - } else if xml.maybe_open(DAV_URN, "preserved-live-properties").await?.is_some() { + } else if xml + .maybe_open(DAV_URN, "preserved-live-properties") + .await? + .is_some() + { xml.close().await?; Ok(Violation::PreservedLiveProperties) - } else if xml.maybe_open(DAV_URN, "propfind-finite-depth").await?.is_some() { + } else if xml + .maybe_open(DAV_URN, "propfind-finite-depth") + .await? + .is_some() + { xml.close().await?; Ok(Violation::PropfindFiniteDepth) - } else if xml.maybe_open(DAV_URN, "cannot-modify-protected-property").await?.is_some() { + } else if xml + .maybe_open(DAV_URN, "cannot-modify-protected-property") + .await? + .is_some() + { xml.close().await?; Ok(Violation::CannotModifyProtectedProperty) } else { @@ -323,7 +380,7 @@ impl QRead> for AnyProperty { async fn qread(xml: &mut Reader) -> Result { match Property::qread(xml).await { Err(ParsingError::Recoverable) => (), - otherwise => return otherwise.map(Self::Value) + otherwise => return otherwise.map(Self::Value), } PropertyRequest::qread(xml).await.map(Self::Request) } @@ -335,7 +392,11 @@ impl QRead> for PropertyRequest { Some(PropertyRequest::CreationDate) } else if xml.maybe_open(DAV_URN, "displayname").await?.is_some() { Some(PropertyRequest::DisplayName) - } else if xml.maybe_open(DAV_URN, "getcontentlanguage").await?.is_some() { + } else if xml + .maybe_open(DAV_URN, "getcontentlanguage") + .await? + .is_some() + { Some(PropertyRequest::GetContentLanguage) } else if xml.maybe_open(DAV_URN, "getcontentlength").await?.is_some() { Some(PropertyRequest::GetContentLength) @@ -359,8 +420,10 @@ impl QRead> for PropertyRequest { Some(pr) => { xml.close().await?; Ok(pr) - }, - None => E::PropertyRequest::qread(xml).await.map(PropertyRequest::Extension), + } + None => E::PropertyRequest::qread(xml) + .await + .map(PropertyRequest::Extension), } } } @@ -368,46 +431,86 @@ impl QRead> for PropertyRequest { impl QRead> for Property { async fn qread(xml: &mut Reader) -> Result { // Core WebDAV properties - if xml.maybe_open_start(DAV_URN, "creationdate").await?.is_some() { + if xml + .maybe_open_start(DAV_URN, "creationdate") + .await? + .is_some() + { let datestr = xml.tag_string().await?; xml.close().await?; - return Ok(Property::CreationDate(DateTime::parse_from_rfc3339(datestr.as_str())?)) - } else if xml.maybe_open_start(DAV_URN, "displayname").await?.is_some() { + return Ok(Property::CreationDate(DateTime::parse_from_rfc3339( + datestr.as_str(), + )?)); + } else if xml + .maybe_open_start(DAV_URN, "displayname") + .await? + .is_some() + { let name = xml.tag_string().await?; xml.close().await?; - return Ok(Property::DisplayName(name)) - } else if xml.maybe_open_start(DAV_URN, "getcontentlanguage").await?.is_some() { + return Ok(Property::DisplayName(name)); + } else if xml + .maybe_open_start(DAV_URN, "getcontentlanguage") + .await? + .is_some() + { let lang = xml.tag_string().await?; xml.close().await?; - return Ok(Property::GetContentLanguage(lang)) - } else if xml.maybe_open_start(DAV_URN, "getcontentlength").await?.is_some() { + return Ok(Property::GetContentLanguage(lang)); + } else if xml + .maybe_open_start(DAV_URN, "getcontentlength") + .await? 
+ .is_some() + { let cl = xml.tag_string().await?.parse::()?; xml.close().await?; - return Ok(Property::GetContentLength(cl)) - } else if xml.maybe_open_start(DAV_URN, "getcontenttype").await?.is_some() { + return Ok(Property::GetContentLength(cl)); + } else if xml + .maybe_open_start(DAV_URN, "getcontenttype") + .await? + .is_some() + { let ct = xml.tag_string().await?; xml.close().await?; - return Ok(Property::GetContentType(ct)) + return Ok(Property::GetContentType(ct)); } else if xml.maybe_open_start(DAV_URN, "getetag").await?.is_some() { let etag = xml.tag_string().await?; xml.close().await?; - return Ok(Property::GetEtag(etag)) - } else if xml.maybe_open_start(DAV_URN, "getlastmodified").await?.is_some() { + return Ok(Property::GetEtag(etag)); + } else if xml + .maybe_open_start(DAV_URN, "getlastmodified") + .await? + .is_some() + { let datestr = xml.tag_string().await?; xml.close().await?; - return Ok(Property::GetLastModified(DateTime::parse_from_rfc2822(datestr.as_str())?)) - } else if xml.maybe_open_start(DAV_URN, "lockdiscovery").await?.is_some() { + return Ok(Property::GetLastModified(DateTime::parse_from_rfc2822( + datestr.as_str(), + )?)); + } else if xml + .maybe_open_start(DAV_URN, "lockdiscovery") + .await? + .is_some() + { let acc = xml.collect::().await?; xml.close().await?; - return Ok(Property::LockDiscovery(acc)) - } else if xml.maybe_open_start(DAV_URN, "resourcetype").await?.is_some() { + return Ok(Property::LockDiscovery(acc)); + } else if xml + .maybe_open_start(DAV_URN, "resourcetype") + .await? + .is_some() + { let acc = xml.collect::>().await?; xml.close().await?; - return Ok(Property::ResourceType(acc)) - } else if xml.maybe_open_start(DAV_URN, "supportedlock").await?.is_some() { + return Ok(Property::ResourceType(acc)); + } else if xml + .maybe_open_start(DAV_URN, "supportedlock") + .await? 
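The property decoder above relies on two different date syntaxes: DAV:creationdate is RFC 3339, while DAV:getlastmodified uses the RFC 2822 / HTTP-date form. A small sketch with values matching the RFC 4918 multistatus example decoded in the tests further down:

    use chrono::DateTime;

    fn main() {
        // RFC 3339, as found in <D:creationdate>.
        let created = DateTime::parse_from_rfc3339("1997-12-01T17:42:21-08:00").unwrap();
        // RFC 2822 / HTTP-date, as found in <D:getlastmodified>.
        let modified = DateTime::parse_from_rfc2822("Mon, 12 Jan 1998 09:25:56 GMT").unwrap();
        assert!(created < modified);
    }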
+ .is_some() + { let acc = xml.collect::().await?; xml.close().await?; - return Ok(Property::SupportedLock(acc)) + return Ok(Property::SupportedLock(acc)); } // Option 2: an extension property, delegating @@ -418,31 +521,49 @@ impl QRead> for Property { impl QRead for ActiveLock { async fn qread(xml: &mut Reader) -> Result { xml.open(DAV_URN, "activelock").await?; - let (mut m_scope, mut m_type, mut m_depth, mut owner, mut timeout, mut locktoken, mut m_root) = - (None, None, None, None, None, None, None); + let ( + mut m_scope, + mut m_type, + mut m_depth, + mut owner, + mut timeout, + mut locktoken, + mut m_root, + ) = (None, None, None, None, None, None, None); loop { let mut dirty = false; - xml.maybe_read::(&mut m_scope, &mut dirty).await?; + xml.maybe_read::(&mut m_scope, &mut dirty) + .await?; xml.maybe_read::(&mut m_type, &mut dirty).await?; xml.maybe_read::(&mut m_depth, &mut dirty).await?; xml.maybe_read::(&mut owner, &mut dirty).await?; xml.maybe_read::(&mut timeout, &mut dirty).await?; - xml.maybe_read::(&mut locktoken, &mut dirty).await?; + xml.maybe_read::(&mut locktoken, &mut dirty) + .await?; xml.maybe_read::(&mut m_root, &mut dirty).await?; if !dirty { match xml.peek() { Event::End(_) => break, - _ => { xml.skip().await?; }, + _ => { + xml.skip().await?; + } } } } xml.close().await?; match (m_scope, m_type, m_depth, m_root) { - (Some(lockscope), Some(locktype), Some(depth), Some(lockroot)) => - Ok(ActiveLock { lockscope, locktype, depth, owner, timeout, locktoken, lockroot }), + (Some(lockscope), Some(locktype), Some(depth), Some(lockroot)) => Ok(ActiveLock { + lockscope, + locktype, + depth, + owner, + timeout, + locktoken, + lockroot, + }), _ => Err(ParsingError::MissingChild), } } @@ -465,7 +586,7 @@ impl QRead for Depth { impl QRead for Owner { async fn qread(xml: &mut Reader) -> Result { xml.open(DAV_URN, "owner").await?; - + let mut owner = Owner::Unknown; loop { match xml.peek() { @@ -475,17 +596,21 @@ impl QRead for Owner { owner = Owner::Txt(txt); } } - Event::Start(_) | Event::Empty(_) => { - match Href::qread(xml).await { - Ok(href) => { owner = Owner::Href(href); }, - Err(ParsingError::Recoverable) => { xml.skip().await?; }, - Err(e) => return Err(e), + Event::Start(_) | Event::Empty(_) => match Href::qread(xml).await { + Ok(href) => { + owner = Owner::Href(href); } - } + Err(ParsingError::Recoverable) => { + xml.skip().await?; + } + Err(e) => return Err(e), + }, Event::End(_) => break, - _ => { xml.skip().await?; }, + _ => { + xml.skip().await?; + } } - }; + } xml.close().await?; Ok(owner) } @@ -495,7 +620,7 @@ impl QRead for Timeout { async fn qread(xml: &mut Reader) -> Result { const SEC_PFX: &str = "Second-"; xml.open(DAV_URN, "timeout").await?; - + let timeout = match xml.tag_string().await?.as_str() { "Infinite" => Timeout::Infinite, seconds => match seconds.strip_prefix(SEC_PFX) { @@ -531,10 +656,12 @@ impl QRead> for ResourceType { async fn qread(xml: &mut Reader) -> Result { if xml.maybe_open(DAV_URN, "collection").await?.is_some() { xml.close().await?; - return Ok(ResourceType::Collection) + return Ok(ResourceType::Collection); } - - E::ResourceType::qread(xml).await.map(ResourceType::Extension) + + E::ResourceType::qread(xml) + .await + .map(ResourceType::Extension) } } @@ -545,8 +672,10 @@ impl QRead for LockEntry { loop { let mut dirty = false; - xml.maybe_read::(&mut maybe_scope, &mut dirty).await?; - xml.maybe_read::(&mut maybe_type, &mut dirty).await?; + xml.maybe_read::(&mut maybe_scope, &mut dirty) + .await?; + xml.maybe_read::(&mut maybe_type, 
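The DAV:timeout decoder above accepts either the literal "Infinite" or a "Second-" prefix followed by a number of seconds, per the RFC 4918 TimeType grammar. A standalone sketch of that grammar, deliberately not using the crate's Timeout type since its variants are only partly visible in this hunk:

    // None on malformed input, Some(None) for "Infinite",
    // Some(Some(n)) for "Second-n".
    fn parse_timeout(s: &str) -> Option<Option<u32>> {
        match s {
            "Infinite" => Some(None),
            other => other.strip_prefix("Second-")?.parse::<u32>().ok().map(Some),
        }
    }

    fn main() {
        assert_eq!(parse_timeout("Infinite"), Some(None));
        assert_eq!(parse_timeout("Second-3600"), Some(Some(3600)));
        assert_eq!(parse_timeout("Minute-5"), None);
    }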
&mut dirty) + .await?; if !dirty { match xml.peek() { Event::End(_) => break, @@ -557,7 +686,10 @@ impl QRead for LockEntry { xml.close().await?; match (maybe_scope, maybe_type) { - (Some(lockscope), Some(locktype)) => Ok(LockEntry { lockscope, locktype }), + (Some(lockscope), Some(locktype)) => Ok(LockEntry { + lockscope, + locktype, + }), _ => Err(ParsingError::MissingChild), } } @@ -570,12 +702,12 @@ impl QRead for LockScope { let lockscope = loop { if xml.maybe_open(DAV_URN, "exclusive").await?.is_some() { xml.close().await?; - break LockScope::Exclusive - } + break LockScope::Exclusive; + } if xml.maybe_open(DAV_URN, "shared").await?.is_some() { xml.close().await?; - break LockScope::Shared + break LockScope::Shared; } xml.skip().await?; @@ -593,7 +725,7 @@ impl QRead for LockType { let locktype = loop { if xml.maybe_open(DAV_URN, "write").await?.is_some() { xml.close().await?; - break LockType::Write + break LockType::Write; } xml.skip().await?; @@ -616,8 +748,8 @@ impl QRead for Href { #[cfg(test)] mod tests { use super::*; - use chrono::{FixedOffset, TimeZone}; use crate::realization::Core; + use chrono::{FixedOffset, TimeZone}; use quick_xml::reader::NsReader; #[tokio::test] @@ -630,8 +762,10 @@ mod tests { "#; - let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); - let got = rdr.find::>().await.unwrap(); + let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())) + .await + .unwrap(); + let got = rdr.find::>().await.unwrap(); assert_eq!(got, PropFind::::PropName); } @@ -654,18 +788,23 @@ mod tests { "#; - let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); - let got = rdr.find::>().await.unwrap(); - - assert_eq!(got, PropFind::Prop(PropName(vec![ - PropertyRequest::DisplayName, - PropertyRequest::GetContentLength, - PropertyRequest::GetContentType, - PropertyRequest::GetEtag, - PropertyRequest::GetLastModified, - PropertyRequest::ResourceType, - PropertyRequest::SupportedLock, - ]))); + let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())) + .await + .unwrap(); + let got = rdr.find::>().await.unwrap(); + + assert_eq!( + got, + PropFind::Prop(PropName(vec![ + PropertyRequest::DisplayName, + PropertyRequest::GetContentLength, + PropertyRequest::GetContentType, + PropertyRequest::GetEtag, + PropertyRequest::GetLastModified, + PropertyRequest::ResourceType, + PropertyRequest::SupportedLock, + ])) + ); } #[tokio::test] @@ -677,17 +816,19 @@ mod tests { "#; - let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); - let got = rdr.find::>().await.unwrap(); - - assert_eq!(got, Error(vec![ - Violation::LockTokenSubmitted(vec![ - Href("/locked/".into()) - ]) - ])); + let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())) + .await + .unwrap(); + let got = rdr.find::>().await.unwrap(); + + assert_eq!( + got, + Error(vec![Violation::LockTokenSubmitted(vec![Href( + "/locked/".into() + )])]) + ); } - #[tokio::test] async fn rfc_propertyupdate() { let src = r#" @@ -706,13 +847,18 @@ mod tests { "#; - let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); - let got = rdr.find::>().await.unwrap(); + let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())) + .await + .unwrap(); + let got = rdr.find::>().await.unwrap(); - assert_eq!(got, PropertyUpdate(vec![ - PropertyUpdateItem::Set(Set(PropValue(vec![]))), - PropertyUpdateItem::Remove(Remove(PropName(vec![]))), - ])); + assert_eq!( + got, + PropertyUpdate(vec![ + 
PropertyUpdateItem::Set(Set(PropValue(vec![]))), + PropertyUpdateItem::Remove(Remove(PropName(vec![]))), + ]) + ); } #[tokio::test] @@ -728,14 +874,21 @@ mod tests { "#; - let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); + let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())) + .await + .unwrap(); let got = rdr.find::().await.unwrap(); - assert_eq!(got, LockInfo { - lockscope: LockScope::Exclusive, - locktype: LockType::Write, - owner: Some(Owner::Href(Href("http://example.org/~ejw/contact.html".into()))), - }); + assert_eq!( + got, + LockInfo { + lockscope: LockScope::Exclusive, + locktype: LockType::Write, + owner: Some(Owner::Href(Href( + "http://example.org/~ejw/contact.html".into() + ))), + } + ); } #[tokio::test] @@ -777,59 +930,63 @@ mod tests { "#; - let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); - let got = rdr.find::>().await.unwrap(); - - assert_eq!(got, Multistatus { - responses: vec![ - Response { - status_or_propstat: StatusOrPropstat::PropStat( - Href("http://www.example.com/container/".into()), - vec![PropStat { - prop: AnyProp(vec![ - AnyProperty::Request(PropertyRequest::CreationDate), - AnyProperty::Request(PropertyRequest::DisplayName), - AnyProperty::Request(PropertyRequest::ResourceType), - AnyProperty::Request(PropertyRequest::SupportedLock), - ]), - status: Status(http::status::StatusCode::OK), - error: None, - responsedescription: None, - }], - ), - error: None, - responsedescription: None, - location: None, - }, - Response { - status_or_propstat: StatusOrPropstat::PropStat( - Href("http://www.example.com/container/front.html".into()), - vec![PropStat { - prop: AnyProp(vec![ - AnyProperty::Request(PropertyRequest::CreationDate), - AnyProperty::Request(PropertyRequest::DisplayName), - AnyProperty::Request(PropertyRequest::GetContentLength), - AnyProperty::Request(PropertyRequest::GetContentType), - AnyProperty::Request(PropertyRequest::GetEtag), - AnyProperty::Request(PropertyRequest::GetLastModified), - AnyProperty::Request(PropertyRequest::ResourceType), - AnyProperty::Request(PropertyRequest::SupportedLock), - ]), - status: Status(http::status::StatusCode::OK), - error: None, - responsedescription: None, - }], - ), - error: None, - responsedescription: None, - location: None, - }, - ], - responsedescription: None, - }); + let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())) + .await + .unwrap(); + let got = rdr.find::>().await.unwrap(); + + assert_eq!( + got, + Multistatus { + responses: vec![ + Response { + status_or_propstat: StatusOrPropstat::PropStat( + Href("http://www.example.com/container/".into()), + vec![PropStat { + prop: AnyProp(vec![ + AnyProperty::Request(PropertyRequest::CreationDate), + AnyProperty::Request(PropertyRequest::DisplayName), + AnyProperty::Request(PropertyRequest::ResourceType), + AnyProperty::Request(PropertyRequest::SupportedLock), + ]), + status: Status(http::status::StatusCode::OK), + error: None, + responsedescription: None, + }], + ), + error: None, + responsedescription: None, + location: None, + }, + Response { + status_or_propstat: StatusOrPropstat::PropStat( + Href("http://www.example.com/container/front.html".into()), + vec![PropStat { + prop: AnyProp(vec![ + AnyProperty::Request(PropertyRequest::CreationDate), + AnyProperty::Request(PropertyRequest::DisplayName), + AnyProperty::Request(PropertyRequest::GetContentLength), + AnyProperty::Request(PropertyRequest::GetContentType), + AnyProperty::Request(PropertyRequest::GetEtag), + 
AnyProperty::Request(PropertyRequest::GetLastModified), + AnyProperty::Request(PropertyRequest::ResourceType), + AnyProperty::Request(PropertyRequest::SupportedLock), + ]), + status: Status(http::status::StatusCode::OK), + error: None, + responsedescription: None, + }], + ), + error: None, + responsedescription: None, + location: None, + }, + ], + responsedescription: None, + } + ); } - #[tokio::test] async fn rfc_multistatus_value() { let src = r#" @@ -888,78 +1045,103 @@ mod tests { "#; - let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); - let got = rdr.find::>().await.unwrap(); - - assert_eq!(got, Multistatus { - responses: vec![ - Response { - status_or_propstat: StatusOrPropstat::PropStat( - Href("/container/".into()), - vec![PropStat { - prop: AnyProp(vec![ - AnyProperty::Value(Property::CreationDate(FixedOffset::west_opt(8 * 3600).unwrap().with_ymd_and_hms(1997, 12, 01, 17, 42, 21).unwrap())), - AnyProperty::Value(Property::DisplayName("Example collection".into())), - AnyProperty::Value(Property::ResourceType(vec![ResourceType::Collection])), - AnyProperty::Value(Property::SupportedLock(vec![ - LockEntry { - lockscope: LockScope::Exclusive, - locktype: LockType::Write, - }, - LockEntry { - lockscope: LockScope::Shared, - locktype: LockType::Write, - }, - ])), - ]), - status: Status(http::status::StatusCode::OK), - error: None, - responsedescription: None, - }], - ), - error: None, - responsedescription: None, - location: None, - - }, - Response { - status_or_propstat: StatusOrPropstat::PropStat( - Href("/container/front.html".into()), - vec![PropStat { - prop: AnyProp(vec![ - AnyProperty::Value(Property::CreationDate(FixedOffset::west_opt(8 * 3600).unwrap().with_ymd_and_hms(1997, 12, 01, 18, 27, 21).unwrap())), - AnyProperty::Value(Property::DisplayName("Example HTML resource".into())), - AnyProperty::Value(Property::GetContentLength(4525)), - AnyProperty::Value(Property::GetContentType("text/html".into())), - AnyProperty::Value(Property::GetEtag(r#""zzyzx""#.into())), - AnyProperty::Value(Property::GetLastModified(FixedOffset::west_opt(0).unwrap().with_ymd_and_hms(1998, 01, 12, 09, 25, 56).unwrap())), - //@FIXME know bug, can't disambiguate between an empty resource - //type value and a request resource type - AnyProperty::Request(PropertyRequest::ResourceType), - AnyProperty::Value(Property::SupportedLock(vec![ - LockEntry { - lockscope: LockScope::Exclusive, - locktype: LockType::Write, - }, - LockEntry { - lockscope: LockScope::Shared, - locktype: LockType::Write, - }, - ])), - ]), - status: Status(http::status::StatusCode::OK), - error: None, - responsedescription: None, - }], - ), - error: None, - responsedescription: None, - location: None, - - }, - ], - responsedescription: None, - }); + let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())) + .await + .unwrap(); + let got = rdr.find::>().await.unwrap(); + + assert_eq!( + got, + Multistatus { + responses: vec![ + Response { + status_or_propstat: StatusOrPropstat::PropStat( + Href("/container/".into()), + vec![PropStat { + prop: AnyProp(vec![ + AnyProperty::Value(Property::CreationDate( + FixedOffset::west_opt(8 * 3600) + .unwrap() + .with_ymd_and_hms(1997, 12, 01, 17, 42, 21) + .unwrap() + )), + AnyProperty::Value(Property::DisplayName( + "Example collection".into() + )), + AnyProperty::Value(Property::ResourceType(vec![ + ResourceType::Collection + ])), + AnyProperty::Value(Property::SupportedLock(vec![ + LockEntry { + lockscope: LockScope::Exclusive, + locktype: LockType::Write, + 
}, + LockEntry { + lockscope: LockScope::Shared, + locktype: LockType::Write, + }, + ])), + ]), + status: Status(http::status::StatusCode::OK), + error: None, + responsedescription: None, + }], + ), + error: None, + responsedescription: None, + location: None, + }, + Response { + status_or_propstat: StatusOrPropstat::PropStat( + Href("/container/front.html".into()), + vec![PropStat { + prop: AnyProp(vec![ + AnyProperty::Value(Property::CreationDate( + FixedOffset::west_opt(8 * 3600) + .unwrap() + .with_ymd_and_hms(1997, 12, 01, 18, 27, 21) + .unwrap() + )), + AnyProperty::Value(Property::DisplayName( + "Example HTML resource".into() + )), + AnyProperty::Value(Property::GetContentLength(4525)), + AnyProperty::Value(Property::GetContentType( + "text/html".into() + )), + AnyProperty::Value(Property::GetEtag(r#""zzyzx""#.into())), + AnyProperty::Value(Property::GetLastModified( + FixedOffset::west_opt(0) + .unwrap() + .with_ymd_and_hms(1998, 01, 12, 09, 25, 56) + .unwrap() + )), + //@FIXME know bug, can't disambiguate between an empty resource + //type value and a request resource type + AnyProperty::Request(PropertyRequest::ResourceType), + AnyProperty::Value(Property::SupportedLock(vec![ + LockEntry { + lockscope: LockScope::Exclusive, + locktype: LockType::Write, + }, + LockEntry { + lockscope: LockScope::Shared, + locktype: LockType::Write, + }, + ])), + ]), + status: Status(http::status::StatusCode::OK), + error: None, + responsedescription: None, + }], + ), + error: None, + responsedescription: None, + location: None, + }, + ], + responsedescription: None, + } + ); } - } diff --git a/aero-dav/src/encoder.rs b/aero-dav/src/encoder.rs index 1320c8a..fd87e95 100644 --- a/aero-dav/src/encoder.rs +++ b/aero-dav/src/encoder.rs @@ -1,8 +1,7 @@ -use quick_xml::Error as QError; -use quick_xml::events::{Event, BytesText}; use super::types::*; -use super::xml::{Node, Writer,QWrite,IWrite}; - +use super::xml::{IWrite, Node, QWrite, Writer}; +use quick_xml::events::{BytesText, Event}; +use quick_xml::Error as QError; // --- XML ROOTS @@ -16,15 +15,17 @@ impl QWrite for PropFind { match self { Self::PropName => { let empty_propname = xml.create_dav_element("propname"); - xml.q.write_event_async(Event::Empty(empty_propname)).await? - }, + xml.q + .write_event_async(Event::Empty(empty_propname)) + .await? 
+ } Self::AllProp(maybe_include) => { let empty_allprop = xml.create_dav_element("allprop"); xml.q.write_event_async(Event::Empty(empty_allprop)).await?; if let Some(include) = maybe_include { include.qwrite(xml).await?; } - }, + } Self::Prop(propname) => propname.qwrite(xml).await?, } xml.q.write_event_async(Event::End(end)).await @@ -45,9 +46,8 @@ impl QWrite for PropertyUpdate { } } - /// PROPFIND RESPONSE, PROPPATCH RESPONSE, COPY RESPONSE, MOVE RESPONSE -/// DELETE RESPONSE, +/// DELETE RESPONSE, impl QWrite for Multistatus { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { let start = xml.create_dav_element("multistatus"); @@ -140,7 +140,6 @@ impl QWrite for Remove { } } - impl QWrite for PropName { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { let start = xml.create_dav_element("prop"); @@ -176,14 +175,15 @@ impl QWrite for AnyProperty { } } - impl QWrite for Href { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { let start = xml.create_dav_element("href"); let end = start.to_end(); xml.q.write_event_async(Event::Start(start.clone())).await?; - xml.q.write_event_async(Event::Text(BytesText::new(&self.0))).await?; + xml.q + .write_event_async(Event::Text(BytesText::new(&self.0))) + .await?; xml.q.write_event_async(Event::End(end)).await } } @@ -216,9 +216,9 @@ impl QWrite for StatusOrPropstat { href.qwrite(xml).await?; } status.qwrite(xml).await - }, + } Self::PropStat(href, propstat_list) => { - href.qwrite(xml).await?; + href.qwrite(xml).await?; for propstat in propstat_list.iter() { propstat.qwrite(xml).await?; } @@ -235,8 +235,14 @@ impl QWrite for Status { xml.q.write_event_async(Event::Start(start.clone())).await?; - let txt = format!("HTTP/1.1 {} {}", self.0.as_str(), self.0.canonical_reason().unwrap_or("No reason")); - xml.q.write_event_async(Event::Text(BytesText::new(&txt))).await?; + let txt = format!( + "HTTP/1.1 {} {}", + self.0.as_str(), + self.0.canonical_reason().unwrap_or("No reason") + ); + xml.q + .write_event_async(Event::Text(BytesText::new(&txt))) + .await?; xml.q.write_event_async(Event::End(end)).await?; @@ -250,7 +256,9 @@ impl QWrite for ResponseDescription { let end = start.to_end(); xml.q.write_event_async(Event::Start(start.clone())).await?; - xml.q.write_event_async(Event::Text(BytesText::new(&self.0))).await?; + xml.q + .write_event_async(Event::Text(BytesText::new(&self.0))) + .await?; xml.q.write_event_async(Event::End(end)).await } } @@ -296,62 +304,76 @@ impl QWrite for Property { let end = start.to_end(); xml.q.write_event_async(Event::Start(start.clone())).await?; - xml.q.write_event_async(Event::Text(BytesText::new(&date.to_rfc3339()))).await?; + xml.q + .write_event_async(Event::Text(BytesText::new(&date.to_rfc3339()))) + .await?; xml.q.write_event_async(Event::End(end)).await?; - }, + } DisplayName(name) => { // Example collection let start = xml.create_dav_element("displayname"); let end = start.to_end(); xml.q.write_event_async(Event::Start(start.clone())).await?; - xml.q.write_event_async(Event::Text(BytesText::new(name))).await?; + xml.q + .write_event_async(Event::Text(BytesText::new(name))) + .await?; xml.q.write_event_async(Event::End(end)).await?; - }, + } GetContentLanguage(lang) => { let start = xml.create_dav_element("getcontentlanguage"); let end = start.to_end(); xml.q.write_event_async(Event::Start(start.clone())).await?; - xml.q.write_event_async(Event::Text(BytesText::new(lang))).await?; + xml.q + .write_event_async(Event::Text(BytesText::new(lang))) + .await?; 
xml.q.write_event_async(Event::End(end)).await?; - }, + } GetContentLength(len) => { // 4525 let start = xml.create_dav_element("getcontentlength"); let end = start.to_end(); xml.q.write_event_async(Event::Start(start.clone())).await?; - xml.q.write_event_async(Event::Text(BytesText::new(&len.to_string()))).await?; + xml.q + .write_event_async(Event::Text(BytesText::new(&len.to_string()))) + .await?; xml.q.write_event_async(Event::End(end)).await?; - }, + } GetContentType(ct) => { // text/html let start = xml.create_dav_element("getcontenttype"); let end = start.to_end(); xml.q.write_event_async(Event::Start(start.clone())).await?; - xml.q.write_event_async(Event::Text(BytesText::new(&ct))).await?; + xml.q + .write_event_async(Event::Text(BytesText::new(&ct))) + .await?; xml.q.write_event_async(Event::End(end)).await?; - }, + } GetEtag(et) => { // "zzyzx" let start = xml.create_dav_element("getetag"); let end = start.to_end(); xml.q.write_event_async(Event::Start(start.clone())).await?; - xml.q.write_event_async(Event::Text(BytesText::new(et))).await?; + xml.q + .write_event_async(Event::Text(BytesText::new(et))) + .await?; xml.q.write_event_async(Event::End(end)).await?; - }, + } GetLastModified(date) => { // Mon, 12 Jan 1998 09:25:56 GMT let start = xml.create_dav_element("getlastmodified"); let end = start.to_end(); xml.q.write_event_async(Event::Start(start.clone())).await?; - xml.q.write_event_async(Event::Text(BytesText::new(&date.to_rfc2822()))).await?; + xml.q + .write_event_async(Event::Text(BytesText::new(&date.to_rfc2822()))) + .await?; xml.q.write_event_async(Event::End(end)).await?; - }, + } LockDiscovery(many_locks) => { // ... let start = xml.create_dav_element("lockdiscovery"); @@ -362,17 +384,17 @@ impl QWrite for Property { lock.qwrite(xml).await?; } xml.q.write_event_async(Event::End(end)).await?; - }, + } ResourceType(many_types) => { // - + // - + // // // // - + let start = xml.create_dav_element("resourcetype"); if many_types.is_empty() { xml.q.write_event_async(Event::Empty(start)).await?; @@ -384,7 +406,7 @@ impl QWrite for Property { } xml.q.write_event_async(Event::End(end)).await?; } - }, + } SupportedLock(many_entries) => { // @@ -401,7 +423,7 @@ impl QWrite for Property { } xml.q.write_event_async(Event::End(end)).await?; } - }, + } Extension(inner) => inner.qwrite(xml).await?, }; Ok(()) @@ -413,8 +435,10 @@ impl QWrite for ResourceType { match self { Self::Collection => { let empty_collection = xml.create_dav_element("collection"); - xml.q.write_event_async(Event::Empty(empty_collection)).await - }, + xml.q + .write_event_async(Event::Empty(empty_collection)) + .await + } Self::Extension(inner) => inner.qwrite(xml).await, } } @@ -425,7 +449,7 @@ impl QWrite for Include { let start = xml.create_dav_element("include"); let end = start.to_end(); - xml.q.write_event_async(Event::Start(start.clone())).await?; + xml.q.write_event_async(Event::Start(start.clone())).await?; for prop in self.0.iter() { prop.qwrite(xml).await?; } @@ -505,8 +529,8 @@ impl QWrite for LockType { Self::Write => { let empty_write = xml.create_dav_element("write"); xml.q.write_event_async(Event::Empty(empty_write)).await? - }, - }; + } + }; xml.q.write_event_async(Event::End(end)).await } } @@ -521,12 +545,12 @@ impl QWrite for LockScope { Self::Exclusive => { let empty_tag = xml.create_dav_element("exclusive"); xml.q.write_event_async(Event::Empty(empty_tag)).await? 
- }, + } Self::Shared => { let empty_tag = xml.create_dav_element("shared"); xml.q.write_event_async(Event::Empty(empty_tag)).await? - }, - }; + } + }; xml.q.write_event_async(Event::End(end)).await } } @@ -538,7 +562,11 @@ impl QWrite for Owner { xml.q.write_event_async(Event::Start(start.clone())).await?; match self { - Self::Txt(txt) => xml.q.write_event_async(Event::Text(BytesText::new(&txt))).await?, + Self::Txt(txt) => { + xml.q + .write_event_async(Event::Text(BytesText::new(&txt))) + .await? + } Self::Href(href) => href.qwrite(xml).await?, Self::Unknown => (), } @@ -553,9 +581,21 @@ impl QWrite for Depth { xml.q.write_event_async(Event::Start(start.clone())).await?; match self { - Self::Zero => xml.q.write_event_async(Event::Text(BytesText::new("0"))).await?, - Self::One => xml.q.write_event_async(Event::Text(BytesText::new("1"))).await?, - Self::Infinity => xml.q.write_event_async(Event::Text(BytesText::new("infinity"))).await?, + Self::Zero => { + xml.q + .write_event_async(Event::Text(BytesText::new("0"))) + .await? + } + Self::One => { + xml.q + .write_event_async(Event::Text(BytesText::new("1"))) + .await? + } + Self::Infinity => { + xml.q + .write_event_async(Event::Text(BytesText::new("infinity"))) + .await? + } }; xml.q.write_event_async(Event::End(end)).await } @@ -570,9 +610,15 @@ impl QWrite for Timeout { match self { Self::Seconds(count) => { let txt = format!("Second-{}", count); - xml.q.write_event_async(Event::Text(BytesText::new(&txt))).await? - }, - Self::Infinite => xml.q.write_event_async(Event::Text(BytesText::new("Infinite"))).await? + xml.q + .write_event_async(Event::Text(BytesText::new(&txt))) + .await? + } + Self::Infinite => { + xml.q + .write_event_async(Event::Text(BytesText::new("Infinite"))) + .await? + } }; xml.q.write_event_async(Event::End(end)).await } @@ -620,8 +666,10 @@ impl QWrite for Violation { }; match self { - Violation::LockTokenMatchesRequestUri => atom("lock-token-matches-request-uri").await, - Violation::LockTokenSubmitted(hrefs) if hrefs.is_empty() => atom("lock-token-submitted").await, + Violation::LockTokenMatchesRequestUri => atom("lock-token-matches-request-uri").await, + Violation::LockTokenSubmitted(hrefs) if hrefs.is_empty() => { + atom("lock-token-submitted").await + } Violation::LockTokenSubmitted(hrefs) => { let start = xml.create_dav_element("lock-token-submitted"); let end = start.to_end(); @@ -631,8 +679,10 @@ impl QWrite for Violation { href.qwrite(xml).await?; } xml.q.write_event_async(Event::End(end)).await - }, - Violation::NoConflictingLock(hrefs) if hrefs.is_empty() => atom("no-conflicting-lock").await, + } + Violation::NoConflictingLock(hrefs) if hrefs.is_empty() => { + atom("no-conflicting-lock").await + } Violation::NoConflictingLock(hrefs) => { let start = xml.create_dav_element("no-conflicting-lock"); let end = start.to_end(); @@ -642,11 +692,13 @@ impl QWrite for Violation { href.qwrite(xml).await?; } xml.q.write_event_async(Event::End(end)).await - }, + } Violation::NoExternalEntities => atom("no-external-entities").await, Violation::PreservedLiveProperties => atom("preserved-live-properties").await, Violation::PropfindFiniteDepth => atom("propfind-finite-depth").await, - Violation::CannotModifyProtectedProperty => atom("cannot-modify-protected-property").await, + Violation::CannotModifyProtectedProperty => { + atom("cannot-modify-protected-property").await + } Violation::Extension(inner) => inner.qwrite(xml).await, } } @@ -654,30 +706,32 @@ impl QWrite for Violation { #[cfg(test)] mod tests { - use super::*; 
use super::super::xml; + use super::*; use crate::realization::Core; use tokio::io::AsyncWriteExt; /// To run only the unit tests and avoid the behavior ones: /// cargo test --bin aerogramme - + async fn serialize(elem: &impl QWrite) -> String { let mut buffer = Vec::new(); let mut tokio_buffer = tokio::io::BufWriter::new(&mut buffer); let q = quick_xml::writer::Writer::new_with_indent(&mut tokio_buffer, b' ', 4); - let ns_to_apply = vec![ ("xmlns:D".into(), "DAV:".into()) ]; + let ns_to_apply = vec![("xmlns:D".into(), "DAV:".into())]; let mut writer = Writer { q, ns_to_apply }; elem.qwrite(&mut writer).await.expect("xml serialization"); tokio_buffer.flush().await.expect("tokio buffer flush"); let got = std::str::from_utf8(buffer.as_slice()).unwrap(); - return got.into() + return got.into(); } async fn deserialize>(src: &str) -> T { - let mut rdr = xml::Reader::new(quick_xml::reader::NsReader::from_reader(src.as_bytes())).await.unwrap(); + let mut rdr = xml::Reader::new(quick_xml::reader::NsReader::from_reader(src.as_bytes())) + .await + .unwrap(); rdr.find().await.unwrap() } @@ -688,15 +742,18 @@ mod tests { let got = serialize(&orig).await; let expected = r#"/SOGo/dav/so/"#; - assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + assert_eq!( + &got, expected, + "\n---GOT---\n{got}\n---EXP---\n{expected}\n" + ); assert_eq!(deserialize::(got.as_str()).await, orig) } #[tokio::test] async fn basic_multistatus() { - let orig = Multistatus:: { - responses: vec![], - responsedescription: Some(ResponseDescription("Hello world".into())) + let orig = Multistatus:: { + responses: vec![], + responsedescription: Some(ResponseDescription("Hello world".into())), }; let got = serialize(&orig).await; @@ -704,18 +761,18 @@ mod tests { Hello world "#; - assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + assert_eq!( + &got, expected, + "\n---GOT---\n{got}\n---EXP---\n{expected}\n" + ); assert_eq!(deserialize::>(got.as_str()).await, orig) } - #[tokio::test] async fn rfc_error_delete_locked() { - let orig = Error::(vec![ - Violation::LockTokenSubmitted(vec![ - Href("/locked/".into()) - ]) - ]); + let orig = Error::(vec![Violation::LockTokenSubmitted(vec![Href( + "/locked/".into(), + )])]); let got = serialize(&orig).await; let expected = r#" @@ -724,7 +781,10 @@ mod tests { "#; - assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + assert_eq!( + &got, expected, + "\n---GOT---\n{got}\n---EXP---\n{expected}\n" + ); assert_eq!(deserialize::>(got.as_str()).await, orig) } @@ -738,7 +798,10 @@ mod tests { "#; - assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + assert_eq!( + &got, expected, + "\n---GOT---\n{got}\n---EXP---\n{expected}\n" + ); assert_eq!(deserialize::>(got.as_str()).await, orig) } @@ -759,7 +822,7 @@ mod tests { status: Status(http::status::StatusCode::OK), error: None, responsedescription: None, - }] + }], ), error: None, responsedescription: None, @@ -782,8 +845,8 @@ mod tests { status: Status(http::status::StatusCode::OK), error: None, responsedescription: None, - } - ]), + }], + ), error: None, responsedescription: None, location: None, @@ -825,8 +888,10 @@ mod tests { "#; - - assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + assert_eq!( + &got, expected, + "\n---GOT---\n{got}\n---EXP---\n{expected}\n" + ); assert_eq!(deserialize::>(got.as_str()).await, orig) } @@ -835,17 +900,20 @@ mod tests { let orig = PropFind::::AllProp(None); let got = serialize(&orig).await; - let 
expected = r#" + let expected = r#" "#; - assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + assert_eq!( + &got, expected, + "\n---GOT---\n{got}\n---EXP---\n{expected}\n" + ); assert_eq!(deserialize::>(got.as_str()).await, orig) } #[tokio::test] async fn rfc_allprop_res() { - use chrono::{FixedOffset,TimeZone}; + use chrono::{FixedOffset, TimeZone}; let orig = Multistatus:: { responses: vec![ @@ -853,28 +921,34 @@ mod tests { status_or_propstat: StatusOrPropstat::PropStat( Href("/container/".into()), vec![PropStat { - prop: AnyProp(vec![ - AnyProperty::Value(Property::CreationDate(FixedOffset::west_opt(8 * 3600) - .unwrap() - .with_ymd_and_hms(1997, 12, 1, 17, 42, 21) - .unwrap())), - AnyProperty::Value(Property::DisplayName("Example collection".into())), - AnyProperty::Value(Property::ResourceType(vec![ResourceType::Collection])), - AnyProperty::Value(Property::SupportedLock(vec![ - LockEntry { - lockscope: LockScope::Exclusive, - locktype: LockType::Write, - }, - LockEntry { - lockscope: LockScope::Shared, - locktype: LockType::Write, - }, - ])), - ]), - status: Status(http::status::StatusCode::OK), - error: None, - responsedescription: None, - }] + prop: AnyProp(vec![ + AnyProperty::Value(Property::CreationDate( + FixedOffset::west_opt(8 * 3600) + .unwrap() + .with_ymd_and_hms(1997, 12, 1, 17, 42, 21) + .unwrap(), + )), + AnyProperty::Value(Property::DisplayName( + "Example collection".into(), + )), + AnyProperty::Value(Property::ResourceType(vec![ + ResourceType::Collection, + ])), + AnyProperty::Value(Property::SupportedLock(vec![ + LockEntry { + lockscope: LockScope::Exclusive, + locktype: LockType::Write, + }, + LockEntry { + lockscope: LockScope::Shared, + locktype: LockType::Write, + }, + ])), + ]), + status: Status(http::status::StatusCode::OK), + error: None, + responsedescription: None, + }], ), error: None, responsedescription: None, @@ -884,37 +958,43 @@ mod tests { status_or_propstat: StatusOrPropstat::PropStat( Href("/container/front.html".into()), vec![PropStat { - prop: AnyProp(vec![ - AnyProperty::Value(Property::CreationDate(FixedOffset::west_opt(8 * 3600) - .unwrap() - .with_ymd_and_hms(1997, 12, 1, 18, 27, 21) - .unwrap())), - AnyProperty::Value(Property::DisplayName("Example HTML resource".into())), - AnyProperty::Value(Property::GetContentLength(4525)), - AnyProperty::Value(Property::GetContentType("text/html".into())), - AnyProperty::Value(Property::GetEtag(r#""zzyzx""#.into())), - AnyProperty::Value(Property::GetLastModified(FixedOffset::east_opt(0) - .unwrap() - .with_ymd_and_hms(1998, 1, 12, 9, 25, 56) - .unwrap())), - //@FIXME know bug, can't disambiguate between an empty resource - //type value and a request resource type - AnyProperty::Request(PropertyRequest::ResourceType), - AnyProperty::Value(Property::SupportedLock(vec![ - LockEntry { - lockscope: LockScope::Exclusive, - locktype: LockType::Write, - }, - LockEntry { - lockscope: LockScope::Shared, - locktype: LockType::Write, - }, - ])), - ]), - status: Status(http::status::StatusCode::OK), - error: None, - responsedescription: None, - }] + prop: AnyProp(vec![ + AnyProperty::Value(Property::CreationDate( + FixedOffset::west_opt(8 * 3600) + .unwrap() + .with_ymd_and_hms(1997, 12, 1, 18, 27, 21) + .unwrap(), + )), + AnyProperty::Value(Property::DisplayName( + "Example HTML resource".into(), + )), + AnyProperty::Value(Property::GetContentLength(4525)), + AnyProperty::Value(Property::GetContentType("text/html".into())), + AnyProperty::Value(Property::GetEtag(r#""zzyzx""#.into())), + 
AnyProperty::Value(Property::GetLastModified( + FixedOffset::east_opt(0) + .unwrap() + .with_ymd_and_hms(1998, 1, 12, 9, 25, 56) + .unwrap(), + )), + //@FIXME know bug, can't disambiguate between an empty resource + //type value and a request resource type + AnyProperty::Request(PropertyRequest::ResourceType), + AnyProperty::Value(Property::SupportedLock(vec![ + LockEntry { + lockscope: LockScope::Exclusive, + locktype: LockType::Write, + }, + LockEntry { + lockscope: LockScope::Shared, + locktype: LockType::Write, + }, + ])), + ]), + status: Status(http::status::StatusCode::OK), + error: None, + responsedescription: None, + }], ), error: None, responsedescription: None, @@ -993,15 +1073,18 @@ mod tests { "#; - assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + assert_eq!( + &got, expected, + "\n---GOT---\n{got}\n---EXP---\n{expected}\n" + ); assert_eq!(deserialize::>(got.as_str()).await, orig) } #[tokio::test] async fn rfc_allprop_include() { let orig = PropFind::::AllProp(Some(Include(vec![ - PropertyRequest::DisplayName, - PropertyRequest::ResourceType, + PropertyRequest::DisplayName, + PropertyRequest::ResourceType, ]))); let got = serialize(&orig).await; @@ -1014,19 +1097,20 @@ mod tests { "#; - assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + assert_eq!( + &got, expected, + "\n---GOT---\n{got}\n---EXP---\n{expected}\n" + ); assert_eq!(deserialize::>(got.as_str()).await, orig) } #[tokio::test] async fn rfc_propertyupdate() { let orig = PropertyUpdate::(vec![ - PropertyUpdateItem::Set(Set(PropValue(vec![ - Property::GetContentLanguage("fr-FR".into()), - ]))), - PropertyUpdateItem::Remove(Remove(PropName(vec![ - PropertyRequest::DisplayName, - ]))), + PropertyUpdateItem::Set(Set(PropValue(vec![Property::GetContentLanguage( + "fr-FR".into(), + )]))), + PropertyUpdateItem::Remove(Remove(PropName(vec![PropertyRequest::DisplayName]))), ]); let got = serialize(&orig).await; @@ -1043,8 +1127,14 @@ mod tests { "#; - assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); - assert_eq!(deserialize::>(got.as_str()).await, orig) + assert_eq!( + &got, expected, + "\n---GOT---\n{got}\n---EXP---\n{expected}\n" + ); + assert_eq!( + deserialize::>(got.as_str()).await, + orig + ) } #[tokio::test] @@ -1053,7 +1143,7 @@ mod tests { responses: vec![Response { status_or_propstat: StatusOrPropstat::Status( vec![Href("http://www.example.com/container/resource3".into())], - Status(http::status::StatusCode::from_u16(423).unwrap()) + Status(http::status::StatusCode::from_u16(423).unwrap()), ), error: Some(Error(vec![Violation::LockTokenSubmitted(vec![])])), responsedescription: None, @@ -1074,7 +1164,10 @@ mod tests { "#; - assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + assert_eq!( + &got, expected, + "\n---GOT---\n{got}\n---EXP---\n{expected}\n" + ); assert_eq!(deserialize::>(got.as_str()).await, orig) } @@ -1083,7 +1176,9 @@ mod tests { let orig = LockInfo { lockscope: LockScope::Exclusive, locktype: LockType::Write, - owner: Some(Owner::Href(Href("http://example.org/~ejw/contact.html".into()))), + owner: Some(Owner::Href(Href( + "http://example.org/~ejw/contact.html".into(), + ))), }; let got = serialize(&orig).await; @@ -1100,23 +1195,30 @@ mod tests { "#; - assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + assert_eq!( + &got, expected, + "\n---GOT---\n{got}\n---EXP---\n{expected}\n" + ); assert_eq!(deserialize::(got.as_str()).await, orig) } #[tokio::test] async fn 
rfc_simple_lock_response() { - let orig = PropValue::(vec![ - Property::LockDiscovery(vec![ActiveLock { - lockscope: LockScope::Exclusive, - locktype: LockType::Write, - depth: Depth::Infinity, - owner: Some(Owner::Href(Href("http://example.org/~ejw/contact.html".into()))), - timeout: Some(Timeout::Seconds(604800)), - locktoken: Some(LockToken(Href("urn:uuid:e71d4fae-5dec-22d6-fea5-00a0c91e6be4".into()))), - lockroot: LockRoot(Href("http://example.com/workspace/webdav/proposal.doc".into())), - }]), - ]); + let orig = PropValue::(vec![Property::LockDiscovery(vec![ActiveLock { + lockscope: LockScope::Exclusive, + locktype: LockType::Write, + depth: Depth::Infinity, + owner: Some(Owner::Href(Href( + "http://example.org/~ejw/contact.html".into(), + ))), + timeout: Some(Timeout::Seconds(604800)), + locktoken: Some(LockToken(Href( + "urn:uuid:e71d4fae-5dec-22d6-fea5-00a0c91e6be4".into(), + ))), + lockroot: LockRoot(Href( + "http://example.com/workspace/webdav/proposal.doc".into(), + )), + }])]); let got = serialize(&orig).await; @@ -1144,7 +1246,10 @@ mod tests { "#; - assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); + assert_eq!( + &got, expected, + "\n---GOT---\n{got}\n---EXP---\n{expected}\n" + ); assert_eq!(deserialize::>(got.as_str()).await, orig) } } diff --git a/aero-dav/src/error.rs b/aero-dav/src/error.rs index 570f779..c8f1de1 100644 --- a/aero-dav/src/error.rs +++ b/aero-dav/src/error.rs @@ -10,10 +10,10 @@ pub enum ParsingError { TagNotFound, InvalidValue, Utf8Error(std::str::Utf8Error), - QuickXml(quick_xml::Error), + QuickXml(quick_xml::Error), Chrono(chrono::format::ParseError), Int(std::num::ParseIntError), - Eof + Eof, } impl std::fmt::Display for ParsingError { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { diff --git a/aero-dav/src/lib.rs b/aero-dav/src/lib.rs index 009951a..7507ddc 100644 --- a/aero-dav/src/lib.rs +++ b/aero-dav/src/lib.rs @@ -7,19 +7,19 @@ pub mod error; pub mod xml; // webdav -pub mod types; -pub mod encoder; pub mod decoder; +pub mod encoder; +pub mod types; // calendar -pub mod caltypes; -pub mod calencoder; pub mod caldecoder; +pub mod calencoder; +pub mod caltypes; // acl (wip) -pub mod acltypes; -pub mod aclencoder; pub mod acldecoder; +pub mod aclencoder; +pub mod acltypes; // versioning (wip) mod versioningtypes; diff --git a/aero-dav/src/realization.rs b/aero-dav/src/realization.rs index bfed4d7..7283e68 100644 --- a/aero-dav/src/realization.rs +++ b/aero-dav/src/realization.rs @@ -1,8 +1,8 @@ -use super::types as dav; -use super::caltypes as cal; use super::acltypes as acl; -use super::xml; +use super::caltypes as cal; use super::error; +use super::types as dav; +use super::xml; #[derive(Debug, PartialEq, Clone)] pub struct Disabled(()); @@ -12,12 +12,15 @@ impl xml::QRead for Disabled { } } impl xml::QWrite for Disabled { - async fn qwrite(&self, _xml: &mut xml::Writer) -> Result<(), quick_xml::Error> { + async fn qwrite( + &self, + _xml: &mut xml::Writer, + ) -> Result<(), quick_xml::Error> { unreachable!() } } -/// The base WebDAV +/// The base WebDAV /// /// Any extension is disabled through an object we can't build /// due to a private inner element. 
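A usage note on the realization mechanism above, since it is easy to miss how it is selected: the concrete realization (Core, Calendar, Acl, All) is chosen purely through the type parameter of the root DAV element at the parse or serialize call site. A minimal sketch, reusing only names that appear in this patch and assuming a request body that uses core DAV elements only:

    use aero_dav::realization::{All, Core};
    use aero_dav::types as dav;
    use aero_dav::xml::Reader;
    use quick_xml::reader::NsReader;

    // Illustrative only: `Core` leaves every extension hook as `Disabled`,
    // while `All` additionally understands the CalDAV/ACL vocabularies
    // wrapped by the enums defined in this file.
    async fn parse_with_two_realizations(src: &str) {
        let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes()))
            .await
            .unwrap();
        let _core_only = rdr.find::<dav::PropFind<Core>>().await.unwrap();

        let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes()))
            .await
            .unwrap();
        let _with_extensions = rdr.find::<dav::PropFind<All>>().await.unwrap();
    }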
@@ -33,8 +36,7 @@ impl dav::Extension for Core { // WebDAV with the base Calendar implementation (RFC4791) #[derive(Debug, PartialEq, Clone)] pub struct Calendar {} -impl dav::Extension for Calendar -{ +impl dav::Extension for Calendar { type Error = cal::Violation; type Property = cal::Property; type PropertyRequest = cal::PropertyRequest; @@ -44,8 +46,7 @@ impl dav::Extension for Calendar // ACL #[derive(Debug, PartialEq, Clone)] pub struct Acl {} -impl dav::Extension for Acl -{ +impl dav::Extension for Acl { type Error = Disabled; type Property = acl::Property; type PropertyRequest = acl::PropertyRequest; @@ -77,7 +78,10 @@ impl xml::QRead for Property { } } impl xml::QWrite for Property { - async fn qwrite(&self, xml: &mut xml::Writer) -> Result<(), quick_xml::Error> { + async fn qwrite( + &self, + xml: &mut xml::Writer, + ) -> Result<(), quick_xml::Error> { match self { Self::Cal(c) => c.qwrite(xml).await, Self::Acl(a) => a.qwrite(xml).await, @@ -96,11 +100,16 @@ impl xml::QRead for PropertyRequest { Err(error::ParsingError::Recoverable) => (), otherwise => return otherwise.map(PropertyRequest::Cal), } - acl::PropertyRequest::qread(xml).await.map(PropertyRequest::Acl) + acl::PropertyRequest::qread(xml) + .await + .map(PropertyRequest::Acl) } } impl xml::QWrite for PropertyRequest { - async fn qwrite(&self, xml: &mut xml::Writer) -> Result<(), quick_xml::Error> { + async fn qwrite( + &self, + xml: &mut xml::Writer, + ) -> Result<(), quick_xml::Error> { match self { Self::Cal(c) => c.qwrite(xml).await, Self::Acl(a) => a.qwrite(xml).await, @@ -123,7 +132,10 @@ impl xml::QRead for ResourceType { } } impl xml::QWrite for ResourceType { - async fn qwrite(&self, xml: &mut xml::Writer) -> Result<(), quick_xml::Error> { + async fn qwrite( + &self, + xml: &mut xml::Writer, + ) -> Result<(), quick_xml::Error> { match self { Self::Cal(c) => c.qwrite(xml).await, Self::Acl(a) => a.qwrite(xml).await, diff --git a/aero-dav/src/types.rs b/aero-dav/src/types.rs index 9457a8f..d5466da 100644 --- a/aero-dav/src/types.rs +++ b/aero-dav/src/types.rs @@ -1,8 +1,8 @@ #![allow(dead_code)] use std::fmt::Debug; -use chrono::{DateTime,FixedOffset}; use super::xml; +use chrono::{DateTime, FixedOffset}; /// It's how we implement a DAV extension /// (That's the dark magic part...) @@ -42,7 +42,7 @@ pub struct ActiveLock { /// /// #[derive(Debug, PartialEq)] -pub struct Collection{} +pub struct Collection {} /// 14.4 depth XML Element /// @@ -58,7 +58,7 @@ pub struct Collection{} pub enum Depth { Zero, One, - Infinity + Infinity, } /// 14.5 error XML Element @@ -153,7 +153,6 @@ pub enum Violation { /// PROPFIND requests on collections. PropfindFiniteDepth, - /// Name: cannot-modify-protected-property /// /// Use with: 403 Forbidden @@ -172,7 +171,7 @@ pub enum Violation { /// Name: exclusive /// /// Purpose: Specifies an exclusive lock. -/// +/// /// #[derive(Debug, PartialEq)] pub struct Exclusive {} @@ -193,7 +192,6 @@ pub struct Exclusive {} #[derive(Debug, PartialEq, Clone)] pub struct Href(pub String); - /// 14.8. include XML Element /// /// Name: include @@ -280,7 +278,7 @@ pub struct LockRoot(pub Href); #[derive(Debug, PartialEq, Clone)] pub enum LockScope { Exclusive, - Shared + Shared, } /// 14.14. locktoken XML Element @@ -288,7 +286,7 @@ pub enum LockScope { /// Name: locktoken /// /// Purpose: The lock token associated with a lock. -/// +/// /// Description: The href contains a single lock token URI, which /// refers to the lock. 
/// @@ -314,7 +312,7 @@ pub enum LockType { /// /// /// - Write + Write, } /// 14.16. multistatus XML Element @@ -477,7 +475,6 @@ pub struct PropStat { pub responsedescription: Option, } - /// 14.23. remove XML Element /// /// Name: remove @@ -579,15 +576,14 @@ pub struct Set(pub PropValue); #[derive(Debug, PartialEq, Clone)] pub struct Shared {} - /// 14.28. status XML Element -/// +/// /// Name: status /// /// Purpose: Holds a single HTTP status-line. /// /// Value: status-line (defined in Section 6.1 of [RFC2616]) -/// +/// /// //@FIXME: Better typing is possible with an enum for example #[derive(Debug, PartialEq, Clone)] @@ -624,7 +620,6 @@ pub enum Timeout { Infinite, } - /// 15. DAV Properties /// /// For DAV properties, the name of the property is also the same as the @@ -704,7 +699,7 @@ pub enum Property { CreationDate(DateTime), /// 15.2. displayname Property - /// + /// /// Name: displayname /// /// Purpose: Provides a name for the resource that is suitable for @@ -734,7 +729,6 @@ pub enum Property { /// DisplayName(String), - /// 15.3. getcontentlanguage Property /// /// Name: getcontentlanguage @@ -893,7 +887,6 @@ pub enum Property { /// LockDiscovery(Vec), - /// 15.9. resourcetype Property /// /// Name: resourcetype @@ -920,7 +913,7 @@ pub enum Property { /// type. /// /// Example: (fictional example to show extensibility) - /// + /// /// /// /// diff --git a/aero-dav/src/xml.rs b/aero-dav/src/xml.rs index d57093e..c89f531 100644 --- a/aero-dav/src/xml.rs +++ b/aero-dav/src/xml.rs @@ -1,8 +1,8 @@ use futures::Future; -use quick_xml::events::{Event, BytesStart}; +use quick_xml::events::{BytesStart, Event}; use quick_xml::name::ResolveResult; use quick_xml::reader::NsReader; -use tokio::io::{AsyncWrite, AsyncBufRead}; +use tokio::io::{AsyncBufRead, AsyncWrite}; use super::error::ParsingError; @@ -17,7 +17,10 @@ pub trait IRead = AsyncBufRead + Unpin; // Serialization/Deserialization traits pub trait QWrite { - fn qwrite(&self, xml: &mut Writer) -> impl Future> + Send; + fn qwrite( + &self, + xml: &mut Writer, + ) -> impl Future> + Send; } pub trait QRead { fn qread(xml: &mut Reader) -> impl Future>; @@ -44,7 +47,11 @@ impl Writer { fn create_ns_element(&mut self, ns: &str, name: &str) -> BytesStart<'static> { let mut start = BytesStart::new(format!("{}:{}", ns, name)); if !self.ns_to_apply.is_empty() { - start.extend_attributes(self.ns_to_apply.iter().map(|(k, n)| (k.as_str(), n.as_str()))); + start.extend_attributes( + self.ns_to_apply + .iter() + .map(|(k, n)| (k.as_str(), n.as_str())), + ); self.ns_to_apply.clear() } start @@ -66,16 +73,26 @@ impl Reader { let parents = vec![]; let prev = Event::Eof; buf.clear(); - Ok(Self { cur, prev, parents, rdr, buf }) + Ok(Self { + cur, + prev, + parents, + rdr, + buf, + }) } /// read one more tag /// do not expose it publicly async fn next(&mut self) -> Result, ParsingError> { - let evt = self.rdr.read_event_into_async(&mut self.buf).await?.into_owned(); - self.buf.clear(); - self.prev = std::mem::replace(&mut self.cur, evt); - Ok(self.prev.clone()) + let evt = self + .rdr + .read_event_into_async(&mut self.buf) + .await? 
+ .into_owned(); + self.buf.clear(); + self.prev = std::mem::replace(&mut self.cur, evt); + Ok(self.prev.clone()) } /// skip a node at current level @@ -84,9 +101,12 @@ impl Reader { //println!("skipping inside node {:?} value {:?}", self.parents.last(), self.cur); match &self.cur { Event::Start(b) => { - let _span = self.rdr.read_to_end_into_async(b.to_end().name(), &mut self.buf).await?; + let _span = self + .rdr + .read_to_end_into_async(b.to_end().name(), &mut self.buf) + .await?; self.next().await - }, + } Event::End(_) => Err(ParsingError::WrongToken), Event::Eof => Err(ParsingError::Eof), _ => self.next().await, @@ -100,13 +120,13 @@ impl Reader { Event::End(be) => be.name(), _ => return false, }; - + let (extr_ns, local) = self.rdr.resolve_element(qname); if local.into_inner() != key.as_bytes() { - return false + return false; } - + match extr_ns { ResolveResult::Bound(v) => v.into_inner() == ns, _ => false, @@ -142,7 +162,7 @@ impl Reader { Event::CData(unescaped) => { acc.push_str(std::str::from_utf8(unescaped.as_ref())?); self.next().await? - }, + } Event::Text(escaped) => { acc.push_str(escaped.unescape()?.as_ref()); self.next().await? @@ -153,33 +173,41 @@ impl Reader { } } - pub async fn maybe_read>(&mut self, t: &mut Option, dirty: &mut bool) -> Result<(), ParsingError> { + pub async fn maybe_read>( + &mut self, + t: &mut Option, + dirty: &mut bool, + ) -> Result<(), ParsingError> { if !self.parent_has_child() { - return Ok(()) + return Ok(()); } match N::qread(self).await { - Ok(v) => { - *t = Some(v); + Ok(v) => { + *t = Some(v); *dirty = true; - Ok(()) - }, + Ok(()) + } Err(ParsingError::Recoverable) => Ok(()), Err(e) => Err(e), } } - pub async fn maybe_push>(&mut self, t: &mut Vec, dirty: &mut bool) -> Result<(), ParsingError> { + pub async fn maybe_push>( + &mut self, + t: &mut Vec, + dirty: &mut bool, + ) -> Result<(), ParsingError> { if !self.parent_has_child() { - return Ok(()) + return Ok(()); } match N::qread(self).await { - Ok(v) => { - t.push(v); + Ok(v) => { + t.push(v); *dirty = true; - Ok(()) - }, + Ok(()) + } Err(ParsingError::Recoverable) => Ok(()), Err(e) => Err(e), } @@ -220,7 +248,7 @@ impl Reader { pub async fn collect>(&mut self) -> Result, ParsingError> { let mut acc = Vec::new(); if !self.parent_has_child() { - return Ok(acc) + return Ok(acc); } loop { @@ -229,7 +257,7 @@ impl Reader { Event::End(_) => return Ok(acc), _ => { self.skip().await?; - }, + } }, Ok(v) => acc.push(v), Err(e) => return Err(e), @@ -242,13 +270,13 @@ impl Reader { let evt = match self.peek() { Event::Empty(_) if self.is_tag(ns, key) => { // hack to make `prev_attr` works - // here we duplicate the current tag - // as in other words, we virtually moved one token + // here we duplicate the current tag + // as in other words, we virtually moved one token // which is useful for prev_attr and any logic based on // self.prev + self.open() on empty nodes self.prev = self.cur.clone(); self.cur.clone() - }, + } Event::Start(_) if self.is_tag(ns, key) => self.next().await?, _ => return Err(ParsingError::Recoverable), }; @@ -258,7 +286,11 @@ impl Reader { Ok(evt) } - pub async fn open_start(&mut self, ns: &[u8], key: &str) -> Result, ParsingError> { + pub async fn open_start( + &mut self, + ns: &[u8], + key: &str, + ) -> Result, ParsingError> { //println!("try open start tag {:?}, on {:?}", key, self.peek()); let evt = match self.peek() { Event::Start(_) if self.is_tag(ns, key) => self.next().await?, @@ -270,7 +302,11 @@ impl Reader { Ok(evt) } - pub async fn maybe_open(&mut self, ns: 
&[u8], key: &str) -> Result>, ParsingError> { + pub async fn maybe_open( + &mut self, + ns: &[u8], + key: &str, + ) -> Result>, ParsingError> { match self.open(ns, key).await { Ok(v) => Ok(Some(v)), Err(ParsingError::Recoverable) => Ok(None), @@ -278,7 +314,11 @@ impl Reader { } } - pub async fn maybe_open_start(&mut self, ns: &[u8], key: &str) -> Result>, ParsingError> { + pub async fn maybe_open_start( + &mut self, + ns: &[u8], + key: &str, + ) -> Result>, ParsingError> { match self.open_start(ns, key).await { Ok(v) => Ok(Some(v)), Err(ParsingError::Recoverable) => Ok(None), @@ -289,9 +329,12 @@ impl Reader { pub fn prev_attr(&self, attr: &str) -> Option { match &self.prev { Event::Start(bs) | Event::Empty(bs) => match bs.try_get_attribute(attr) { - Ok(Some(attr)) => attr.decode_and_unescape_value(&self.rdr).ok().map(|v| v.into_owned()), + Ok(Some(attr)) => attr + .decode_and_unescape_value(&self.rdr) + .ok() + .map(|v| v.into_owned()), _ => None, - } + }, _ => None, } } @@ -303,7 +346,7 @@ impl Reader { // Handle the empty case if !self.parent_has_child() { self.parents.pop(); - return self.next().await + return self.next().await; } // Handle the start/end case @@ -311,11 +354,10 @@ impl Reader { match self.peek() { Event::End(_) => { self.parents.pop(); - return self.next().await - }, + return self.next().await; + } _ => self.skip().await?, }; } } } - diff --git a/aero-proto/Cargo.toml b/aero-proto/Cargo.toml index e9f28d1..b6f6336 100644 --- a/aero-proto/Cargo.toml +++ b/aero-proto/Cargo.toml @@ -35,3 +35,4 @@ smtp-message.workspace = true smtp-server.workspace = true tracing.workspace = true quick-xml.workspace = true +icalendar.workspace = true diff --git a/aero-proto/src/dav/codec.rs b/aero-proto/src/dav/codec.rs index 57c3808..a441e7e 100644 --- a/aero-proto/src/dav/codec.rs +++ b/aero-proto/src/dav/codec.rs @@ -1,26 +1,30 @@ use anyhow::{bail, Result}; -use hyper::{Request, Response, body::Bytes}; -use hyper::body::Incoming; -use http_body_util::Full; +use futures::sink::SinkExt; use futures::stream::StreamExt; use futures::stream::TryStreamExt; +use http_body_util::combinators::UnsyncBoxBody; +use http_body_util::BodyExt; use http_body_util::BodyStream; +use http_body_util::Full; use http_body_util::StreamBody; -use http_body_util::combinators::UnsyncBoxBody; use hyper::body::Frame; -use tokio_util::sync::PollSender; +use hyper::body::Incoming; +use hyper::{body::Bytes, Request, Response}; use std::io::{Error, ErrorKind}; -use futures::sink::SinkExt; -use tokio_util::io::{SinkWriter, CopyToBytes}; -use http_body_util::BodyExt; +use tokio_util::io::{CopyToBytes, SinkWriter}; +use tokio_util::sync::PollSender; -use aero_dav::types as dav; -use aero_dav::xml as dxml; use super::controller::HttpResponse; use super::node::PutPolicy; +use aero_dav::types as dav; +use aero_dav::xml as dxml; pub(crate) fn depth(req: &Request) -> dav::Depth { - match req.headers().get("Depth").map(hyper::header::HeaderValue::to_str) { + match req + .headers() + .get("Depth") + .map(hyper::header::HeaderValue::to_str) + { Some(Ok("0")) => dav::Depth::Zero, Some(Ok("1")) => dav::Depth::One, Some(Ok("Infinity")) => dav::Depth::Infinity, @@ -29,20 +33,28 @@ pub(crate) fn depth(req: &Request) -> dav::Depth { } pub(crate) fn put_policy(req: &Request) -> Result { - if let Some(maybe_txt_etag) = req.headers().get("If-Match").map(hyper::header::HeaderValue::to_str) { + if let Some(maybe_txt_etag) = req + .headers() + .get("If-Match") + .map(hyper::header::HeaderValue::to_str) + { let etag = maybe_txt_etag?; let 
dquote_count = etag.chars().filter(|c| *c == '"').count(); if dquote_count != 2 { bail!("Either If-Match value is invalid or it's not supported (only single etag is supported)"); } - return Ok(PutPolicy::ReplaceEtag(etag.into())) + return Ok(PutPolicy::ReplaceEtag(etag.into())); } - if let Some(maybe_txt_etag) = req.headers().get("If-None-Match").map(hyper::header::HeaderValue::to_str) { + if let Some(maybe_txt_etag) = req + .headers() + .get("If-None-Match") + .map(hyper::header::HeaderValue::to_str) + { let etag = maybe_txt_etag?; if etag == "*" { - return Ok(PutPolicy::CreateOnly) + return Ok(PutPolicy::CreateOnly); } bail!("Either If-None-Match value is invalid or it's not supported (only asterisk is supported)") } @@ -54,7 +66,10 @@ pub(crate) fn text_body(txt: &'static str) -> UnsyncBoxBody(status_ok: hyper::StatusCode, elem: T) -> Result { +pub(crate) fn serialize( + status_ok: hyper::StatusCode, + elem: T, +) -> Result { let (tx, rx) = tokio::sync::mpsc::channel::(1); // Build the writer @@ -62,10 +77,21 @@ pub(crate) fn serialize(status_ok: hyper::Stat let sink = PollSender::new(tx).sink_map_err(|_| Error::from(ErrorKind::BrokenPipe)); let mut writer = SinkWriter::new(CopyToBytes::new(sink)); let q = quick_xml::writer::Writer::new_with_indent(&mut writer, b' ', 4); - let ns_to_apply = vec![ ("xmlns:D".into(), "DAV:".into()), ("xmlns:C".into(), "urn:ietf:params:xml:ns:caldav".into()) ]; + let ns_to_apply = vec![ + ("xmlns:D".into(), "DAV:".into()), + ("xmlns:C".into(), "urn:ietf:params:xml:ns:caldav".into()), + ]; let mut qwriter = dxml::Writer { q, ns_to_apply }; - let decl = quick_xml::events::BytesDecl::from_start(quick_xml::events::BytesStart::from_content("xml version=\"1.0\" encoding=\"utf-8\"", 0)); - match qwriter.q.write_event_async(quick_xml::events::Event::Decl(decl)).await { + let decl = + quick_xml::events::BytesDecl::from_start(quick_xml::events::BytesStart::from_content( + "xml version=\"1.0\" encoding=\"utf-8\"", + 0, + )); + match qwriter + .q + .write_event_async(quick_xml::events::Event::Decl(decl)) + .await + { Ok(_) => (), Err(e) => tracing::error!(err=?e, "unable to write XML declaration "), } @@ -75,7 +101,6 @@ pub(crate) fn serialize(status_ok: hyper::Stat } }); - // Build the reader let recv = tokio_stream::wrappers::ReceiverStream::new(rx); let stream = StreamBody::new(recv.map(|v| Ok(Frame::data(v)))); @@ -89,7 +114,6 @@ pub(crate) fn serialize(status_ok: hyper::Stat Ok(response) } - /// Deserialize a request body to an XML request pub(crate) async fn deserialize>(req: Request) -> Result { let stream_of_frames = BodyStream::new(req.into_body()); @@ -97,7 +121,10 @@ pub(crate) async fn deserialize>(req: Request) -> Res .map_ok(|frame| frame.into_data()) .map(|obj| match obj { Ok(Ok(v)) => Ok(v), - Ok(Err(_)) => Err(std::io::Error::new(std::io::ErrorKind::Other, "conversion error")), + Ok(Err(_)) => Err(std::io::Error::new( + std::io::ErrorKind::Other, + "conversion error", + )), Err(err) => Err(std::io::Error::new(std::io::ErrorKind::Other, err)), }); let async_read = tokio_util::io::StreamReader::new(stream_of_bytes); diff --git a/aero-proto/src/dav/controller.rs b/aero-proto/src/dav/controller.rs index 885828f..0bf7a7d 100644 --- a/aero-proto/src/dav/controller.rs +++ b/aero-proto/src/dav/controller.rs @@ -1,21 +1,21 @@ use anyhow::Result; -use http_body_util::combinators::{UnsyncBoxBody, BoxBody}; -use hyper::body::Incoming; -use hyper::{Request, Response, body::Bytes}; +use futures::stream::{StreamExt, TryStreamExt}; +use 
http_body_util::combinators::{BoxBody, UnsyncBoxBody}; use http_body_util::BodyStream; use http_body_util::StreamBody; use hyper::body::Frame; -use futures::stream::{StreamExt, TryStreamExt}; +use hyper::body::Incoming; +use hyper::{body::Bytes, Request, Response}; use aero_collections::user::User; -use aero_dav::types as dav; -use aero_dav::realization::All; use aero_dav::caltypes as cal; +use aero_dav::realization::All; +use aero_dav::types as dav; -use crate::dav::codec::{serialize, deserialize, depth, text_body}; +use crate::dav::codec; +use crate::dav::codec::{depth, deserialize, serialize, text_body}; use crate::dav::node::{DavNode, PutPolicy}; use crate::dav::resource::RootNode; -use crate::dav::codec; pub(super) type ArcUser = std::sync::Arc; pub(super) type HttpResponse = Response>; @@ -39,19 +39,22 @@ pub(crate) struct Controller { req: Request, } impl Controller { - pub(crate) async fn route(user: std::sync::Arc, req: Request) -> Result { + pub(crate) async fn route( + user: std::sync::Arc, + req: Request, + ) -> Result { let path = req.uri().path().to_string(); let path_segments: Vec<_> = path.split("/").filter(|s| *s != "").collect(); let method = req.method().as_str().to_uppercase(); let can_create = matches!(method.as_str(), "PUT" | "MKCOL" | "MKCALENDAR"); - let node = match (RootNode {}).fetch(&user, &path_segments, can_create).await{ + let node = match (RootNode {}).fetch(&user, &path_segments, can_create).await { Ok(v) => v, Err(e) => { tracing::warn!(err=?e, "dav node fetch failed"); return Ok(Response::builder() .status(404) - .body(codec::text_body("Resource not found"))?) + .body(codec::text_body("Resource not found"))?); } }; @@ -80,7 +83,6 @@ impl Controller { } } - // --- Per-method functions --- /// REPORT has been first described in the "Versioning Extension" of WebDAV @@ -89,7 +91,7 @@ impl Controller { /// Note: current implementation is not generic at all, it is heavily tied to CalDAV. /// A rewrite would be required to make it more generic (with the extension system that has /// been introduced in aero-dav) - async fn report(self) -> Result { + async fn report(self) -> Result { let status = hyper::StatusCode::from_u16(207)?; let report = match deserialize::>(self.req).await { @@ -97,54 +99,75 @@ impl Controller { Err(e) => { tracing::error!(err=?e, "unable to decode REPORT body"); return Ok(Response::builder() - .status(400) - .body(text_body("Bad request"))?) 
+ .status(400) + .body(text_body("Bad request"))?); } }; - // Multiget is really like a propfind where Depth: 0|1|Infinity is replaced by an arbitrary - // list of URLs - // @FIXME - let multiget = match report { - cal::Report::Multiget(m) => m, - cal::Report::Query(q) => todo!(), - cal::Report::FreeBusy(_) => return Ok(Response::builder() - .status(501) - .body(text_body("Not implemented"))?), - }; - - // Getting the list of nodes + // Internal representation that will handle processed request let (mut ok_node, mut not_found) = (Vec::new(), Vec::new()); - for h in multiget.href.into_iter() { - let maybe_collected_node = match Path::new(h.0.as_str()) { - Ok(Path::Abs(p)) => RootNode{}.fetch(&self.user, p.as_slice(), false).await.or(Err(h)), - Ok(Path::Rel(p)) => self.node.fetch(&self.user, p.as_slice(), false).await.or(Err(h)), - Err(_) => Err(h), - }; - - match maybe_collected_node { - Ok(v) => ok_node.push(v), - Err(h) => not_found.push(h), - }; - } + let calprop: Option>; + + // Extracting request information + match report { + cal::Report::Multiget(m) => { + // Multiget is really like a propfind where Depth: 0|1|Infinity is replaced by an arbitrary + // list of URLs + // Getting the list of nodes + for h in m.href.into_iter() { + let maybe_collected_node = match Path::new(h.0.as_str()) { + Ok(Path::Abs(p)) => RootNode {} + .fetch(&self.user, p.as_slice(), false) + .await + .or(Err(h)), + Ok(Path::Rel(p)) => self + .node + .fetch(&self.user, p.as_slice(), false) + .await + .or(Err(h)), + Err(_) => Err(h), + }; + + match maybe_collected_node { + Ok(v) => ok_node.push(v), + Err(h) => not_found.push(h), + }; + } + calprop = m.selector; + } + cal::Report::Query(q) => { + calprop = q.selector; + ok_node = apply_filter(&self.user, self.node.children(&self.user).await, q.filter) + .try_collect() + .await?; + } + cal::Report::FreeBusy(_) => { + return Ok(Response::builder() + .status(501) + .body(text_body("Not implemented"))?) + } + }; // Getting props - let props = match multiget.selector { + let props = match calprop { None | Some(cal::CalendarSelector::AllProp) => Some(dav::PropName(ALLPROP.to_vec())), Some(cal::CalendarSelector::PropName) => None, Some(cal::CalendarSelector::Prop(inner)) => Some(inner), }; - serialize(status, Self::multistatus(&self.user, ok_node, not_found, props).await) + serialize( + status, + Self::multistatus(&self.user, ok_node, not_found, props).await, + ) } /// PROPFIND is the standard way to fetch WebDAV properties - async fn propfind(self) -> Result { + async fn propfind(self) -> Result { let depth = depth(&self.req); if matches!(depth, dav::Depth::Infinity) { return Ok(Response::builder() .status(501) - .body(text_body("Depth: Infinity not implemented"))?) + .body(text_body("Depth: Infinity not implemented"))?); } let status = hyper::StatusCode::from_u16(207)?; @@ -153,7 +176,9 @@ impl Controller { // request body MUST be treated as if it were an 'allprop' request. // @FIXME here we handle any invalid data as an allprop, an empty request is thus correctly // handled, but corrupted requests are also silently handled as allprop. 
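+        // For illustration, the three request shapes this deserialization can
+        // yield, assuming the usual RFC 4918 bodies (namespaced under DAV:):
+        //   <D:propfind><D:propname/></D:propfind> -> dav::PropFind::PropName
+        //   <D:propfind><D:allprop/></D:propfind>  -> dav::PropFind::AllProp(None)
+        //   <D:propfind><D:prop><D:getetag/></D:prop></D:propfind>
+        //       -> dav::PropFind::Prop(dav::PropName(vec![dav::PropertyRequest::GetEtag]))
+        // An empty or unreadable body falls back to AllProp(None) just below.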
- let propfind = deserialize::>(self.req).await.unwrap_or_else(|_| dav::PropFind::::AllProp(None)); + let propfind = deserialize::>(self.req) + .await + .unwrap_or_else(|_| dav::PropFind::::AllProp(None)); tracing::debug!(recv=?propfind, "inferred propfind request"); // Collect nodes as PROPFIND is not limited to the targeted node @@ -170,29 +195,36 @@ impl Controller { dav::PropFind::AllProp(Some(dav::Include(mut include))) => { include.extend_from_slice(&ALLPROP); Some(dav::PropName(include)) - }, + } dav::PropFind::Prop(inner) => Some(inner), }; // Not Found is currently impossible considering the way we designed this function let not_found = vec![]; - serialize(status, Self::multistatus(&self.user, nodes, not_found, propname).await) + serialize( + status, + Self::multistatus(&self.user, nodes, not_found, propname).await, + ) } - async fn put(self) -> Result { + async fn put(self) -> Result { let put_policy = codec::put_policy(&self.req)?; let stream_of_frames = BodyStream::new(self.req.into_body()); let stream_of_bytes = stream_of_frames - .map_ok(|frame| frame.into_data()) - .map(|obj| match obj { - Ok(Ok(v)) => Ok(v), - Ok(Err(_)) => Err(std::io::Error::new(std::io::ErrorKind::Other, "conversion error")), - Err(err) => Err(std::io::Error::new(std::io::ErrorKind::Other, err)), - }).boxed(); + .map_ok(|frame| frame.into_data()) + .map(|obj| match obj { + Ok(Ok(v)) => Ok(v), + Ok(Err(_)) => Err(std::io::Error::new( + std::io::ErrorKind::Other, + "conversion error", + )), + Err(err) => Err(std::io::Error::new(std::io::ErrorKind::Other, err)), + }) + .boxed(); let etag = self.node.put(put_policy, stream_of_bytes).await?; - + let response = Response::builder() .status(201) .header("ETag", etag) @@ -202,7 +234,7 @@ impl Controller { Ok(response) } - async fn get(self) -> Result { + async fn get(self) -> Result { let stream_body = StreamBody::new(self.node.content().map_ok(|v| Frame::data(v))); let boxed_body = UnsyncBoxBody::new(stream_body); @@ -227,17 +259,33 @@ impl Controller { // --- Common utility functions --- /// Build a multistatus response from a list of DavNodes - async fn multistatus(user: &ArcUser, nodes: Vec>, not_found: Vec, props: Option>) -> dav::Multistatus { + async fn multistatus( + user: &ArcUser, + nodes: Vec>, + not_found: Vec, + props: Option>, + ) -> dav::Multistatus { // Collect properties on existing objects let mut responses: Vec> = match props { - Some(props) => futures::stream::iter(nodes).then(|n| n.response_props(user, props.clone())).collect().await, - None => nodes.into_iter().map(|n| n.response_propname(user)).collect(), + Some(props) => { + futures::stream::iter(nodes) + .then(|n| n.response_props(user, props.clone())) + .collect() + .await + } + None => nodes + .into_iter() + .map(|n| n.response_propname(user)) + .collect(), }; // Register not found objects only if relevant if !not_found.is_empty() { responses.push(dav::Response { - status_or_propstat: dav::StatusOrPropstat::Status(not_found, dav::Status(hyper::StatusCode::NOT_FOUND)), + status_or_propstat: dav::StatusOrPropstat::Status( + not_found, + dav::Status(hyper::StatusCode::NOT_FOUND), + ), error: None, location: None, responsedescription: None, @@ -252,7 +300,6 @@ impl Controller { } } - /// Path is a voluntarily feature limited /// compared to the expressiveness of a UNIX path /// For example getting parent with ../ is not supported, scheme is not supported, etc. 
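To make the restriction described above concrete, here is a small sketch of how this helper behaves. The assertions are hypothetical and would have to live in the same module as the private Path type; the segment handling follows the Path::new logic in the hunk below:

    // Illustrative only: a leading "/" selects the absolute variant, empty and
    // "." segments are dropped, and parent traversal with ".." stays
    // unsupported, as the comment above notes.
    fn path_examples() {
        assert!(matches!(
            Path::new("/alice/calendar/Personal/event.ics"),
            Ok(Path::Abs(_))
        ));
        assert!(matches!(Path::new("./event.ics"), Ok(Path::Rel(_))));
    }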
@@ -271,8 +318,39 @@ impl<'a> Path<'a> { let path_segments: Vec<_> = path.split("/").filter(|s| *s != "" && *s != ".").collect(); if path.starts_with("/") { - return Ok(Path::Abs(path_segments)) + return Ok(Path::Abs(path_segments)); } Ok(Path::Rel(path_segments)) } } + +//@FIXME move somewhere else +//@FIXME naive implementation, must be refactored later +use futures::stream::Stream; +use icalendar; +fn apply_filter( + user: &ArcUser, + nodes: Vec>, + filter: cal::Filter, +) -> impl Stream, std::io::Error>> { + futures::stream::iter(nodes).filter_map(|single_node| async move { + // Get ICS + let chunks: Vec<_> = match single_node.content().try_collect().await { + Ok(v) => v, + Err(e) => return Some(Err(e)), + }; + let raw_ics = chunks.iter().fold(String::new(), |mut acc, single_chunk| { + let str_fragment = std::str::from_utf8(single_chunk.as_ref()); + acc.extend(str_fragment); + acc + }); + + // Parse ICS + let ics = icalendar::parser::read_calendar(&raw_ics).unwrap(); + + // Do checks + + // Object has been kept + Some(Ok(single_node)) + }) +} diff --git a/aero-proto/src/dav/middleware.rs b/aero-proto/src/dav/middleware.rs index e19ce14..8964699 100644 --- a/aero-proto/src/dav/middleware.rs +++ b/aero-proto/src/dav/middleware.rs @@ -1,10 +1,10 @@ use anyhow::{anyhow, Result}; use base64::Engine; -use hyper::{Request, Response}; use hyper::body::Incoming; +use hyper::{Request, Response}; -use aero_user::login::ArcLoginProvider; use aero_collections::user::User; +use aero_user::login::ArcLoginProvider; use super::codec::text_body; use super::controller::HttpResponse; @@ -13,7 +13,7 @@ type ArcUser = std::sync::Arc; pub(super) async fn auth<'a>( login: ArcLoginProvider, - req: Request, + req: Request, next: impl Fn(ArcUser, Request) -> futures::future::BoxFuture<'a, Result>, ) -> Result { let auth_val = match req.headers().get(hyper::header::AUTHORIZATION) { @@ -23,8 +23,8 @@ pub(super) async fn auth<'a>( return Ok(Response::builder() .status(401) .header("WWW-Authenticate", "Basic realm=\"Aerogramme\"") - .body(text_body("Missing Authorization field"))?) - }, + .body(text_body("Missing Authorization field"))?); + } }; let b64_creds_maybe_padded = match auth_val.split_once(" ") { @@ -33,8 +33,8 @@ pub(super) async fn auth<'a>( tracing::info!("Unsupported authorization field"); return Ok(Response::builder() .status(400) - .body(text_body("Unsupported Authorization field"))?) - }, + .body(text_body("Unsupported Authorization field"))?); + } }; // base64urlencoded may have trailing equals, base64urlsafe has not @@ -44,22 +44,22 @@ pub(super) async fn auth<'a>( // Decode base64 let creds = base64::engine::general_purpose::STANDARD_NO_PAD.decode(b64_creds_clean)?; let str_creds = std::str::from_utf8(&creds)?; - + // Split username and password - let (username, password) = str_creds - .split_once(':') - .ok_or(anyhow!("Missing colon in Authorization, can't split decoded value into a username/password pair"))?; + let (username, password) = str_creds.split_once(':').ok_or(anyhow!( + "Missing colon in Authorization, can't split decoded value into a username/password pair" + ))?; // Call login provider let creds = match login.login(username, password).await { Ok(c) => c, Err(_) => { - tracing::info!(user=username, "Wrong credentials"); + tracing::info!(user = username, "Wrong credentials"); return Ok(Response::builder() .status(401) .header("WWW-Authenticate", "Basic realm=\"Aerogramme\"") - .body(text_body("Wrong credentials"))?) 
- }, + .body(text_body("Wrong credentials"))?); + } }; // Build a user diff --git a/aero-proto/src/dav/mod.rs b/aero-proto/src/dav/mod.rs index de2e690..43de3a5 100644 --- a/aero-proto/src/dav/mod.rs +++ b/aero-proto/src/dav/mod.rs @@ -1,6 +1,6 @@ -mod middleware; -mod controller; mod codec; +mod controller; +mod middleware; mod node; mod resource; @@ -8,19 +8,19 @@ use std::net::SocketAddr; use std::sync::Arc; use anyhow::Result; +use futures::future::FutureExt; +use futures::stream::{FuturesUnordered, StreamExt}; +use hyper::rt::{Read, Write}; +use hyper::server::conn::http1 as http; use hyper::service::service_fn; use hyper::{Request, Response}; -use hyper::server::conn::http1 as http; -use hyper::rt::{Read, Write}; use hyper_util::rt::TokioIo; -use futures::stream::{FuturesUnordered, StreamExt}; +use rustls_pemfile::{certs, private_key}; +use tokio::io::{AsyncRead, AsyncWrite}; use tokio::net::TcpListener; +use tokio::net::TcpStream; use tokio::sync::watch; use tokio_rustls::TlsAcceptor; -use tokio::net::TcpStream; -use futures::future::FutureExt; -use tokio::io::{AsyncRead, AsyncWrite}; -use rustls_pemfile::{certs, private_key}; use aero_user::config::{DavConfig, DavUnsecureConfig}; use aero_user::login::ArcLoginProvider; @@ -90,7 +90,7 @@ impl Server { Ok(v) => v, Err(e) => { tracing::error!(err=?e, "TLS acceptor failed"); - continue + continue; } }; @@ -100,21 +100,31 @@ impl Server { //abitrarily bound //@FIXME replace with a handler supporting http2 and TLS - match http::Builder::new().serve_connection(stream, service_fn(|req: Request| { - let login = login.clone(); - tracing::info!("{:?} {:?}", req.method(), req.uri()); - async { - match middleware::auth(login, req, |user, request| async { Controller::route(user, request).await }.boxed()).await { - Ok(v) => Ok(v), - Err(e) => { - tracing::error!(err=?e, "internal error"); - Response::builder() - .status(500) - .body(codec::text_body("Internal error")) - }, - } - } - })).await { + match http::Builder::new() + .serve_connection( + stream, + service_fn(|req: Request| { + let login = login.clone(); + tracing::info!("{:?} {:?}", req.method(), req.uri()); + async { + match middleware::auth(login, req, |user, request| { + async { Controller::route(user, request).await }.boxed() + }) + .await + { + Ok(v) => Ok(v), + Err(e) => { + tracing::error!(err=?e, "internal error"); + Response::builder() + .status(500) + .body(codec::text_body("Internal error")) + } + } + } + }), + ) + .await + { Err(e) => tracing::warn!(err=?e, "connection failed"), Ok(()) => tracing::trace!("connection terminated with success"), } @@ -149,7 +159,6 @@ impl Server { // // - // // // diff --git a/aero-proto/src/dav/node.rs b/aero-proto/src/dav/node.rs index d246280..877342a 100644 --- a/aero-proto/src/dav/node.rs +++ b/aero-proto/src/dav/node.rs @@ -1,16 +1,17 @@ use anyhow::Result; -use futures::stream::{BoxStream, StreamExt}; use futures::future::{BoxFuture, FutureExt}; +use futures::stream::{BoxStream, StreamExt}; use hyper::body::Bytes; -use aero_dav::types as dav; -use aero_dav::realization::All; use aero_collections::davdag::Etag; +use aero_dav::realization::All; +use aero_dav::types as dav; use super::controller::ArcUser; pub(crate) type Content<'a> = BoxStream<'a, std::result::Result>; -pub(crate) type PropertyStream<'a> = BoxStream<'a, std::result::Result, dav::PropertyRequest>>; +pub(crate) type PropertyStream<'a> = + BoxStream<'a, std::result::Result, dav::PropertyRequest>>; pub(crate) enum PutPolicy { OverwriteAll, @@ -25,7 +26,12 @@ pub(crate) trait 
DavNode: Send { /// This node direct children fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec>>; /// Recursively fetch a child (progress inside the filesystem hierarchy) - fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str], create: bool) -> BoxFuture<'a, Result>>; + fn fetch<'a>( + &self, + user: &'a ArcUser, + path: &'a [&str], + create: bool, + ) -> BoxFuture<'a, Result>>; // node properties /// Get the path @@ -36,13 +42,17 @@ pub(crate) trait DavNode: Send { fn properties(&self, user: &ArcUser, prop: dav::PropName) -> PropertyStream<'static>; //fn properties(&self, user: &ArcUser, prop: dav::PropName) -> Vec>; /// Put an element (create or update) - fn put<'a>(&'a self, policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, std::result::Result>; + fn put<'a>( + &'a self, + policy: PutPolicy, + stream: Content<'a>, + ) -> BoxFuture<'a, std::result::Result>; /// Content type of the element fn content_type(&self) -> &str; /// Get ETag fn etag(&self) -> BoxFuture>; /// Get content - fn content(&self) -> Content<'static>; + fn content<'a>(&self) -> Content<'a>; /// Delete fn delete(&self) -> BoxFuture>; @@ -52,24 +62,32 @@ pub(crate) trait DavNode: Send { fn response_propname(&self, user: &ArcUser) -> dav::Response { dav::Response { status_or_propstat: dav::StatusOrPropstat::PropStat( - dav::Href(self.path(user)), - vec![ - dav::PropStat { - status: dav::Status(hyper::StatusCode::OK), - prop: dav::AnyProp(self.supported_properties(user).0.into_iter().map(dav::AnyProperty::Request).collect()), - error: None, - responsedescription: None, - } - ], + dav::Href(self.path(user)), + vec![dav::PropStat { + status: dav::Status(hyper::StatusCode::OK), + prop: dav::AnyProp( + self.supported_properties(user) + .0 + .into_iter() + .map(dav::AnyProperty::Request) + .collect(), + ), + error: None, + responsedescription: None, + }], ), error: None, location: None, - responsedescription: None + responsedescription: None, } } /// Utility function to get a prop response from a node & a list of propname - fn response_props(&self, user: &ArcUser, props: dav::PropName) -> BoxFuture<'static, dav::Response> { + fn response_props( + &self, + user: &ArcUser, + props: dav::PropName, + ) -> BoxFuture<'static, dav::Response> { //@FIXME we should make the DAV parsed object a stream... 
let mut result_stream = self.properties(user, props); let path = self.path(user); @@ -87,8 +105,8 @@ pub(crate) trait DavNode: Send { // If at least one property has been found on this object, adding a HTTP 200 propstat to // the response if !found.is_empty() { - prop_desc.push(dav::PropStat { - status: dav::Status(hyper::StatusCode::OK), + prop_desc.push(dav::PropStat { + status: dav::Status(hyper::StatusCode::OK), prop: dav::AnyProp(found), error: None, responsedescription: None, @@ -98,8 +116,8 @@ pub(crate) trait DavNode: Send { // If at least one property can't be found on this object, adding a HTTP 404 propstat to // the response if !not_found.is_empty() { - prop_desc.push(dav::PropStat { - status: dav::Status(hyper::StatusCode::NOT_FOUND), + prop_desc.push(dav::PropStat { + status: dav::Status(hyper::StatusCode::NOT_FOUND), prop: dav::AnyProp(not_found), error: None, responsedescription: None, @@ -111,9 +129,9 @@ pub(crate) trait DavNode: Send { status_or_propstat: dav::StatusOrPropstat::PropStat(dav::Href(path), prop_desc), error: None, location: None, - responsedescription: None + responsedescription: None, } - }.boxed() + } + .boxed() } } - diff --git a/aero-proto/src/dav/resource.rs b/aero-proto/src/dav/resource.rs index 944c6c8..d65ce38 100644 --- a/aero-proto/src/dav/resource.rs +++ b/aero-proto/src/dav/resource.rs @@ -2,23 +2,32 @@ use std::sync::Arc; type ArcUser = std::sync::Arc; use anyhow::{anyhow, Result}; -use futures::stream::{TryStreamExt, StreamExt}; use futures::io::AsyncReadExt; +use futures::stream::{StreamExt, TryStreamExt}; use futures::{future::BoxFuture, future::FutureExt}; -use aero_collections::{user::User, calendar::Calendar, davdag::{BlobId, Etag}}; -use aero_dav::types as dav; -use aero_dav::caltypes as cal; +use aero_collections::{ + calendar::Calendar, + davdag::{BlobId, Etag}, + user::User, +}; use aero_dav::acltypes as acl; -use aero_dav::realization::{All, self as all}; +use aero_dav::caltypes as cal; +use aero_dav::realization::{self as all, All}; +use aero_dav::types as dav; -use crate::dav::node::{DavNode, PutPolicy, Content}; use super::node::PropertyStream; +use crate::dav::node::{Content, DavNode, PutPolicy}; #[derive(Clone)] pub(crate) struct RootNode {} impl DavNode for RootNode { - fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str], create: bool) -> BoxFuture<'a, Result>> { + fn fetch<'a>( + &self, + user: &'a ArcUser, + path: &'a [&str], + create: bool, + ) -> BoxFuture<'a, Result>> { if path.len() == 0 { let this = self.clone(); return async { Ok(Box::new(this) as Box) }.boxed(); @@ -34,7 +43,7 @@ impl DavNode for RootNode { } fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec>> { - async { vec![Box::new(HomeNode { }) as Box] }.boxed() + async { vec![Box::new(HomeNode {}) as Box] }.boxed() } fn path(&self, user: &ArcUser) -> String { @@ -46,33 +55,53 @@ impl DavNode for RootNode { dav::PropertyRequest::DisplayName, dav::PropertyRequest::ResourceType, dav::PropertyRequest::GetContentType, - dav::PropertyRequest::Extension(all::PropertyRequest::Acl(acl::PropertyRequest::CurrentUserPrincipal)), + dav::PropertyRequest::Extension(all::PropertyRequest::Acl( + acl::PropertyRequest::CurrentUserPrincipal, + )), ]) } fn properties(&self, user: &ArcUser, prop: dav::PropName) -> PropertyStream<'static> { let user = user.clone(); - futures::stream::iter(prop.0).map(move |n| { - let prop = match n { - dav::PropertyRequest::DisplayName => dav::Property::DisplayName("DAV Root".to_string()), - dav::PropertyRequest::ResourceType => 
dav::Property::ResourceType(vec![ - dav::ResourceType::Collection, - ]), - dav::PropertyRequest::GetContentType => dav::Property::GetContentType("httpd/unix-directory".into()), - dav::PropertyRequest::Extension(all::PropertyRequest::Acl(acl::PropertyRequest::CurrentUserPrincipal)) => - dav::Property::Extension(all::Property::Acl(acl::Property::CurrentUserPrincipal(acl::User::Authenticated(dav::Href(HomeNode{}.path(&user)))))), - v => return Err(v), - }; - Ok(prop) - }).boxed() + futures::stream::iter(prop.0) + .map(move |n| { + let prop = match n { + dav::PropertyRequest::DisplayName => { + dav::Property::DisplayName("DAV Root".to_string()) + } + dav::PropertyRequest::ResourceType => { + dav::Property::ResourceType(vec![dav::ResourceType::Collection]) + } + dav::PropertyRequest::GetContentType => { + dav::Property::GetContentType("httpd/unix-directory".into()) + } + dav::PropertyRequest::Extension(all::PropertyRequest::Acl( + acl::PropertyRequest::CurrentUserPrincipal, + )) => dav::Property::Extension(all::Property::Acl( + acl::Property::CurrentUserPrincipal(acl::User::Authenticated(dav::Href( + HomeNode {}.path(&user), + ))), + )), + v => return Err(v), + }; + Ok(prop) + }) + .boxed() } - fn put<'a>(&'a self, _policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, std::result::Result> { + fn put<'a>( + &'a self, + _policy: PutPolicy, + stream: Content<'a>, + ) -> BoxFuture<'a, std::result::Result> { futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported)).boxed() } - fn content(&self) -> Content<'static> { - futures::stream::once(futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported))).boxed() + fn content<'a>(&self) -> Content<'a> { + futures::stream::once(futures::future::err(std::io::Error::from( + std::io::ErrorKind::Unsupported, + ))) + .boxed() } fn content_type(&self) -> &str { @@ -91,29 +120,37 @@ impl DavNode for RootNode { #[derive(Clone)] pub(crate) struct HomeNode {} impl DavNode for HomeNode { - fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str], create: bool) -> BoxFuture<'a, Result>> { + fn fetch<'a>( + &self, + user: &'a ArcUser, + path: &'a [&str], + create: bool, + ) -> BoxFuture<'a, Result>> { if path.len() == 0 { let node = Box::new(self.clone()) as Box; - return async { Ok(node) }.boxed() + return async { Ok(node) }.boxed(); } if path[0] == "calendar" { return async move { let child = Box::new(CalendarListNode::new(user).await?); child.fetch(user, &path[1..], create).await - }.boxed(); + } + .boxed(); } - + //@NOTE: we can't create a node at this level async { Err(anyhow!("Not found")) }.boxed() } fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec>> { - async { - CalendarListNode::new(user).await + async { + CalendarListNode::new(user) + .await .map(|c| vec![Box::new(c) as Box]) - .unwrap_or(vec![]) - }.boxed() + .unwrap_or(vec![]) + } + .boxed() } fn path(&self, user: &ArcUser) -> String { @@ -125,38 +162,58 @@ impl DavNode for HomeNode { dav::PropertyRequest::DisplayName, dav::PropertyRequest::ResourceType, dav::PropertyRequest::GetContentType, - dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::CalendarHomeSet)), + dav::PropertyRequest::Extension(all::PropertyRequest::Cal( + cal::PropertyRequest::CalendarHomeSet, + )), ]) } fn properties(&self, user: &ArcUser, prop: dav::PropName) -> PropertyStream<'static> { let user = user.clone(); - futures::stream::iter(prop.0).map(move |n| { - let prop = match n { - dav::PropertyRequest::DisplayName => dav::Property::DisplayName(format!("{} 
home", user.username)), - dav::PropertyRequest::ResourceType => dav::Property::ResourceType(vec![ - dav::ResourceType::Collection, - dav::ResourceType::Extension(all::ResourceType::Acl(acl::ResourceType::Principal)), - ]), - dav::PropertyRequest::GetContentType => dav::Property::GetContentType("httpd/unix-directory".into()), - dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::CalendarHomeSet)) => - dav::Property::Extension(all::Property::Cal(cal::Property::CalendarHomeSet(dav::Href( - //@FIXME we are hardcoding the calendar path, instead we would want to use - //objects - format!("/{}/calendar/", user.username) - )))), - v => return Err(v), - }; - Ok(prop) - }).boxed() + futures::stream::iter(prop.0) + .map(move |n| { + let prop = match n { + dav::PropertyRequest::DisplayName => { + dav::Property::DisplayName(format!("{} home", user.username)) + } + dav::PropertyRequest::ResourceType => dav::Property::ResourceType(vec![ + dav::ResourceType::Collection, + dav::ResourceType::Extension(all::ResourceType::Acl( + acl::ResourceType::Principal, + )), + ]), + dav::PropertyRequest::GetContentType => { + dav::Property::GetContentType("httpd/unix-directory".into()) + } + dav::PropertyRequest::Extension(all::PropertyRequest::Cal( + cal::PropertyRequest::CalendarHomeSet, + )) => dav::Property::Extension(all::Property::Cal( + cal::Property::CalendarHomeSet(dav::Href( + //@FIXME we are hardcoding the calendar path, instead we would want to use + //objects + format!("/{}/calendar/", user.username), + )), + )), + v => return Err(v), + }; + Ok(prop) + }) + .boxed() } - fn put<'a>(&'a self, _policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, std::result::Result> { + fn put<'a>( + &'a self, + _policy: PutPolicy, + stream: Content<'a>, + ) -> BoxFuture<'a, std::result::Result> { futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported)).boxed() } - - fn content(&self) -> Content<'static> { - futures::stream::once(futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported))).boxed() + + fn content<'a>(&self) -> Content<'a> { + futures::stream::once(futures::future::err(std::io::Error::from( + std::io::ErrorKind::Unsupported, + ))) + .boxed() } fn content_type(&self) -> &str { @@ -183,7 +240,12 @@ impl CalendarListNode { } } impl DavNode for CalendarListNode { - fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str], create: bool) -> BoxFuture<'a, Result>> { + fn fetch<'a>( + &self, + user: &'a ArcUser, + path: &'a [&str], + create: bool, + ) -> BoxFuture<'a, Result>> { if path.len() == 0 { let node = Box::new(self.clone()) as Box; return async { Ok(node) }.boxed(); @@ -191,13 +253,18 @@ impl DavNode for CalendarListNode { async move { //@FIXME: we should create a node if the open returns a "not found". - let cal = user.calendars.open(user, path[0]).await?.ok_or(anyhow!("Not found"))?; - let child = Box::new(CalendarNode { + let cal = user + .calendars + .open(user, path[0]) + .await? + .ok_or(anyhow!("Not found"))?; + let child = Box::new(CalendarNode { col: cal, - calname: path[0].to_string() + calname: path[0].to_string(), }); child.fetch(user, &path[1..], create).await - }.boxed() + } + .boxed() } fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec>> { @@ -206,18 +273,23 @@ impl DavNode for CalendarListNode { //@FIXME maybe we want to be lazy here?! 
futures::stream::iter(list.iter()) .filter_map(|name| async move { - user.calendars.open(user, name).await + user.calendars + .open(user, name) + .await .ok() .flatten() .map(|v| (name, v)) }) - .map(|(name, cal)| Box::new(CalendarNode { - col: cal, - calname: name.to_string(), - }) as Box) + .map(|(name, cal)| { + Box::new(CalendarNode { + col: cal, + calname: name.to_string(), + }) as Box + }) .collect::>>() .await - }.boxed() + } + .boxed() } fn path(&self, user: &ArcUser) -> String { @@ -234,23 +306,38 @@ impl DavNode for CalendarListNode { fn properties(&self, user: &ArcUser, prop: dav::PropName) -> PropertyStream<'static> { let user = user.clone(); - futures::stream::iter(prop.0).map(move |n| { - let prop = match n { - dav::PropertyRequest::DisplayName => dav::Property::DisplayName(format!("{} calendars", user.username)), - dav::PropertyRequest::ResourceType => dav::Property::ResourceType(vec![dav::ResourceType::Collection]), - dav::PropertyRequest::GetContentType => dav::Property::GetContentType("httpd/unix-directory".into()), - v => return Err(v), - }; - Ok(prop) - }).boxed() + futures::stream::iter(prop.0) + .map(move |n| { + let prop = match n { + dav::PropertyRequest::DisplayName => { + dav::Property::DisplayName(format!("{} calendars", user.username)) + } + dav::PropertyRequest::ResourceType => { + dav::Property::ResourceType(vec![dav::ResourceType::Collection]) + } + dav::PropertyRequest::GetContentType => { + dav::Property::GetContentType("httpd/unix-directory".into()) + } + v => return Err(v), + }; + Ok(prop) + }) + .boxed() } - fn put<'a>(&'a self, _policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, std::result::Result> { + fn put<'a>( + &'a self, + _policy: PutPolicy, + stream: Content<'a>, + ) -> BoxFuture<'a, std::result::Result> { futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported)).boxed() } - fn content(&self) -> Content<'static> { - futures::stream::once(futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported))).boxed() + fn content<'a>(&self) -> Content<'a> { + futures::stream::once(futures::future::err(std::io::Error::from( + std::io::ErrorKind::Unsupported, + ))) + .boxed() } fn content_type(&self) -> &str { @@ -272,17 +359,22 @@ pub(crate) struct CalendarNode { calname: String, } impl DavNode for CalendarNode { - fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str], create: bool) -> BoxFuture<'a, Result>> { + fn fetch<'a>( + &self, + user: &'a ArcUser, + path: &'a [&str], + create: bool, + ) -> BoxFuture<'a, Result>> { if path.len() == 0 { let node = Box::new(self.clone()) as Box; - return async { Ok(node) }.boxed() + return async { Ok(node) }.boxed(); } let col = self.col.clone(); let calname = self.calname.clone(); async move { match (col.dag().await.idx_by_filename.get(path[0]), create) { - (Some(blob_id), _) => { + (Some(blob_id), _) => { let child = Box::new(EventNode { col: col.clone(), calname, @@ -290,7 +382,7 @@ impl DavNode for CalendarNode { blob_id: *blob_id, }); child.fetch(user, &path[1..], create).await - }, + } (None, true) => { let child = Box::new(CreateEventNode { col: col.clone(), @@ -298,11 +390,11 @@ impl DavNode for CalendarNode { filename: path[0].to_string(), }); child.fetch(user, &path[1..], create).await - }, + } _ => Err(anyhow!("Not found")), } - - }.boxed() + } + .boxed() } fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec>> { @@ -310,15 +402,21 @@ impl DavNode for CalendarNode { let calname = self.calname.clone(); async move { - 
col.dag().await.idx_by_filename.iter().map(|(filename, blob_id)| { - Box::new(EventNode { - col: col.clone(), - calname: calname.clone(), - filename: filename.to_string(), - blob_id: *blob_id, - }) as Box - }).collect() - }.boxed() + col.dag() + .await + .idx_by_filename + .iter() + .map(|(filename, blob_id)| { + Box::new(EventNode { + col: col.clone(), + calname: calname.clone(), + filename: filename.to_string(), + blob_id: *blob_id, + }) as Box + }) + .collect() + } + .boxed() } fn path(&self, user: &ArcUser) -> String { @@ -330,38 +428,58 @@ impl DavNode for CalendarNode { dav::PropertyRequest::DisplayName, dav::PropertyRequest::ResourceType, dav::PropertyRequest::GetContentType, - dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::SupportedCalendarComponentSet)), + dav::PropertyRequest::Extension(all::PropertyRequest::Cal( + cal::PropertyRequest::SupportedCalendarComponentSet, + )), ]) } fn properties(&self, _user: &ArcUser, prop: dav::PropName) -> PropertyStream<'static> { let calname = self.calname.to_string(); - futures::stream::iter(prop.0).map(move |n| { - let prop = match n { - dav::PropertyRequest::DisplayName => dav::Property::DisplayName(format!("{} calendar", calname)), - dav::PropertyRequest::ResourceType => dav::Property::ResourceType(vec![ - dav::ResourceType::Collection, - dav::ResourceType::Extension(all::ResourceType::Cal(cal::ResourceType::Calendar)), - ]), - //dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("httpd/unix-directory".into())), - //@FIXME seems wrong but seems to be what Thunderbird expects... - dav::PropertyRequest::GetContentType => dav::Property::GetContentType("text/calendar".into()), - dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::SupportedCalendarComponentSet)) - => dav::Property::Extension(all::Property::Cal(cal::Property::SupportedCalendarComponentSet(vec![ - cal::CompSupport(cal::Component::VEvent), - ]))), - v => return Err(v), - }; - Ok(prop) - }).boxed() + futures::stream::iter(prop.0) + .map(move |n| { + let prop = match n { + dav::PropertyRequest::DisplayName => { + dav::Property::DisplayName(format!("{} calendar", calname)) + } + dav::PropertyRequest::ResourceType => dav::Property::ResourceType(vec![ + dav::ResourceType::Collection, + dav::ResourceType::Extension(all::ResourceType::Cal( + cal::ResourceType::Calendar, + )), + ]), + //dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("httpd/unix-directory".into())), + //@FIXME seems wrong but seems to be what Thunderbird expects... 
+ dav::PropertyRequest::GetContentType => { + dav::Property::GetContentType("text/calendar".into()) + } + dav::PropertyRequest::Extension(all::PropertyRequest::Cal( + cal::PropertyRequest::SupportedCalendarComponentSet, + )) => dav::Property::Extension(all::Property::Cal( + cal::Property::SupportedCalendarComponentSet(vec![cal::CompSupport( + cal::Component::VEvent, + )]), + )), + v => return Err(v), + }; + Ok(prop) + }) + .boxed() } - fn put<'a>(&'a self, _policy: PutPolicy, _stream: Content<'a>) -> BoxFuture<'a, std::result::Result> { + fn put<'a>( + &'a self, + _policy: PutPolicy, + _stream: Content<'a>, + ) -> BoxFuture<'a, std::result::Result> { futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported)).boxed() } - fn content<'a>(&'a self) -> Content<'static> { - futures::stream::once(futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported))).boxed() + fn content<'a>(&self) -> Content<'a> { + futures::stream::once(futures::future::err(std::io::Error::from( + std::io::ErrorKind::Unsupported, + ))) + .boxed() } fn content_type(&self) -> &str { @@ -386,13 +504,23 @@ pub(crate) struct EventNode { } impl DavNode for EventNode { - fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str], create: bool) -> BoxFuture<'a, Result>> { + fn fetch<'a>( + &self, + user: &'a ArcUser, + path: &'a [&str], + create: bool, + ) -> BoxFuture<'a, Result>> { if path.len() == 0 { let node = Box::new(self.clone()) as Box; - return async { Ok(node) }.boxed() + return async { Ok(node) }.boxed(); } - async { Err(anyhow!("Not supported: can't create a child on an event node")) }.boxed() + async { + Err(anyhow!( + "Not supported: can't create a child on an event node" + )) + } + .boxed() } fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec>> { @@ -400,7 +528,10 @@ impl DavNode for EventNode { } fn path(&self, user: &ArcUser) -> String { - format!("/{}/calendar/{}/{}", user.username, self.calname, self.filename) + format!( + "/{}/calendar/{}/{}", + user.username, self.calname, self.filename + ) } fn supported_properties(&self, user: &ArcUser) -> dav::PropName { @@ -408,66 +539,106 @@ impl DavNode for EventNode { dav::PropertyRequest::DisplayName, dav::PropertyRequest::ResourceType, dav::PropertyRequest::GetEtag, - dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::CalendarData(cal::CalendarDataRequest::default()))), + dav::PropertyRequest::Extension(all::PropertyRequest::Cal( + cal::PropertyRequest::CalendarData(cal::CalendarDataRequest::default()), + )), ]) } fn properties(&self, _user: &ArcUser, prop: dav::PropName) -> PropertyStream<'static> { let this = self.clone(); - futures::stream::iter(prop.0).then(move |n| { - let this = this.clone(); - - async move { - let prop = match &n { - dav::PropertyRequest::DisplayName => dav::Property::DisplayName(format!("{} event", this.filename)), - dav::PropertyRequest::ResourceType => dav::Property::ResourceType(vec![]), - dav::PropertyRequest::GetContentType => dav::Property::GetContentType("text/calendar".into()), - dav::PropertyRequest::GetEtag => { - let etag = this.etag().await.ok_or(n.clone())?; - dav::Property::GetEtag(etag) - }, - dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::CalendarData(_req))) => { - let ics = String::from_utf8(this.col.get(this.blob_id).await.or(Err(n.clone()))?).or(Err(n.clone()))?; - - dav::Property::Extension(all::Property::Cal(cal::Property::CalendarData(cal::CalendarDataPayload { - mime: None, - payload: ics, - }))) - }, - _ => return 
Err(n), - }; - Ok(prop) - } - }).boxed() - } - - fn put<'a>(&'a self, policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, std::result::Result> { + futures::stream::iter(prop.0) + .then(move |n| { + let this = this.clone(); + + async move { + let prop = match &n { + dav::PropertyRequest::DisplayName => { + dav::Property::DisplayName(format!("{} event", this.filename)) + } + dav::PropertyRequest::ResourceType => dav::Property::ResourceType(vec![]), + dav::PropertyRequest::GetContentType => { + dav::Property::GetContentType("text/calendar".into()) + } + dav::PropertyRequest::GetEtag => { + let etag = this.etag().await.ok_or(n.clone())?; + dav::Property::GetEtag(etag) + } + dav::PropertyRequest::Extension(all::PropertyRequest::Cal( + cal::PropertyRequest::CalendarData(_req), + )) => { + let ics = String::from_utf8( + this.col.get(this.blob_id).await.or(Err(n.clone()))?, + ) + .or(Err(n.clone()))?; + + dav::Property::Extension(all::Property::Cal( + cal::Property::CalendarData(cal::CalendarDataPayload { + mime: None, + payload: ics, + }), + )) + } + _ => return Err(n), + }; + Ok(prop) + } + }) + .boxed() + } + + fn put<'a>( + &'a self, + policy: PutPolicy, + stream: Content<'a>, + ) -> BoxFuture<'a, std::result::Result> { async { - let existing_etag = self.etag().await.ok_or(std::io::Error::new(std::io::ErrorKind::Other, "Etag error"))?; + let existing_etag = self + .etag() + .await + .ok_or(std::io::Error::new(std::io::ErrorKind::Other, "Etag error"))?; match policy { - PutPolicy::CreateOnly => return Err(std::io::Error::from(std::io::ErrorKind::AlreadyExists)), - PutPolicy::ReplaceEtag(etag) if etag != existing_etag.as_str() => return Err(std::io::Error::from(std::io::ErrorKind::AlreadyExists)), - _ => () + PutPolicy::CreateOnly => { + return Err(std::io::Error::from(std::io::ErrorKind::AlreadyExists)) + } + PutPolicy::ReplaceEtag(etag) if etag != existing_etag.as_str() => { + return Err(std::io::Error::from(std::io::ErrorKind::AlreadyExists)) + } + _ => (), }; //@FIXME for now, our storage interface does not allow streaming, // so we load everything in memory let mut evt = Vec::new(); let mut reader = stream.into_async_read(); - reader.read_to_end(&mut evt).await.or(Err(std::io::Error::from(std::io::ErrorKind::BrokenPipe)))?; - let (_token, entry) = self.col.put(self.filename.as_str(), evt.as_ref()).await.or(Err(std::io::ErrorKind::Interrupted))?; - self.col.opportunistic_sync().await.or(Err(std::io::ErrorKind::ConnectionReset))?; + reader + .read_to_end(&mut evt) + .await + .or(Err(std::io::Error::from(std::io::ErrorKind::BrokenPipe)))?; + let (_token, entry) = self + .col + .put(self.filename.as_str(), evt.as_ref()) + .await + .or(Err(std::io::ErrorKind::Interrupted))?; + self.col + .opportunistic_sync() + .await + .or(Err(std::io::ErrorKind::ConnectionReset))?; Ok(entry.2) - }.boxed() + } + .boxed() } - fn content<'a>(&'a self) -> Content<'static> { + fn content<'a>(&self) -> Content<'a> { //@FIXME for now, our storage interface does not allow streaming, // so we load everything in memory let calendar = self.col.clone(); let blob_id = self.blob_id.clone(); let r = async move { - let content = calendar.get(blob_id).await.or(Err(std::io::Error::from(std::io::ErrorKind::Interrupted))); + let content = calendar + .get(blob_id) + .await + .or(Err(std::io::Error::from(std::io::ErrorKind::Interrupted))); Ok(hyper::body::Bytes::from(content?)) }; futures::stream::once(Box::pin(r)).boxed() @@ -481,8 +652,14 @@ impl DavNode for EventNode { let calendar = self.col.clone(); async move { - 
calendar.dag().await.table.get(&self.blob_id).map(|(_, _, etag)| etag.to_string()) - }.boxed() + calendar + .dag() + .await + .table + .get(&self.blob_id) + .map(|(_, _, etag)| etag.to_string()) + } + .boxed() } fn delete(&self) -> BoxFuture> { @@ -494,12 +671,16 @@ impl DavNode for EventNode { Ok(v) => v, Err(e) => { tracing::error!(err=?e, "delete event node"); - return Err(std::io::Error::from(std::io::ErrorKind::Interrupted)) - }, + return Err(std::io::Error::from(std::io::ErrorKind::Interrupted)); + } }; - calendar.opportunistic_sync().await.or(Err(std::io::ErrorKind::ConnectionReset))?; + calendar + .opportunistic_sync() + .await + .or(Err(std::io::ErrorKind::ConnectionReset))?; Ok(()) - }.boxed() + } + .boxed() } } @@ -510,13 +691,23 @@ pub(crate) struct CreateEventNode { filename: String, } impl DavNode for CreateEventNode { - fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str], create: bool) -> BoxFuture<'a, Result>> { + fn fetch<'a>( + &self, + user: &'a ArcUser, + path: &'a [&str], + create: bool, + ) -> BoxFuture<'a, Result>> { if path.len() == 0 { let node = Box::new(self.clone()) as Box; - return async { Ok(node) }.boxed() + return async { Ok(node) }.boxed(); } - async { Err(anyhow!("Not supported: can't create a child on an event node")) }.boxed() + async { + Err(anyhow!( + "Not supported: can't create a child on an event node" + )) + } + .boxed() } fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec>> { @@ -524,33 +715,51 @@ impl DavNode for CreateEventNode { } fn path(&self, user: &ArcUser) -> String { - format!("/{}/calendar/{}/{}", user.username, self.calname, self.filename) + format!( + "/{}/calendar/{}/{}", + user.username, self.calname, self.filename + ) } fn supported_properties(&self, user: &ArcUser) -> dav::PropName { dav::PropName(vec![]) } - fn properties(&self, _user: &ArcUser, prop: dav::PropName) -> PropertyStream<'static> { + fn properties(&self, _user: &ArcUser, prop: dav::PropName) -> PropertyStream<'static> { futures::stream::iter(vec![]).boxed() } - fn put<'a>(&'a self, _policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, std::result::Result> { + fn put<'a>( + &'a self, + _policy: PutPolicy, + stream: Content<'a>, + ) -> BoxFuture<'a, std::result::Result> { //@NOTE: policy might not be needed here: whatever we put, there is no known entries here - + async { //@FIXME for now, our storage interface does not allow for streaming let mut evt = Vec::new(); let mut reader = stream.into_async_read(); reader.read_to_end(&mut evt).await.unwrap(); - let (_token, entry) = self.col.put(self.filename.as_str(), evt.as_ref()).await.or(Err(std::io::ErrorKind::Interrupted))?; - self.col.opportunistic_sync().await.or(Err(std::io::ErrorKind::ConnectionReset))?; + let (_token, entry) = self + .col + .put(self.filename.as_str(), evt.as_ref()) + .await + .or(Err(std::io::ErrorKind::Interrupted))?; + self.col + .opportunistic_sync() + .await + .or(Err(std::io::ErrorKind::ConnectionReset))?; Ok(entry.2) - }.boxed() + } + .boxed() } - fn content(&self) -> Content<'static> { - futures::stream::once(futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported))).boxed() + fn content<'a>(&self) -> Content<'a> { + futures::stream::once(futures::future::err(std::io::Error::from( + std::io::ErrorKind::Unsupported, + ))) + .boxed() } fn content_type(&self) -> &str { diff --git a/aero-proto/src/imap/command/anonymous.rs b/aero-proto/src/imap/command/anonymous.rs index 2848c30..f23ec17 100644 --- a/aero-proto/src/imap/command/anonymous.rs +++ 
b/aero-proto/src/imap/command/anonymous.rs @@ -4,8 +4,8 @@ use imap_codec::imap_types::core::AString; use imap_codec::imap_types::response::Code; use imap_codec::imap_types::secret::Secret; -use aero_user::login::ArcLoginProvider; use aero_collections::user::User; +use aero_user::login::ArcLoginProvider; use crate::imap::capability::ServerCapability; use crate::imap::command::anystate; diff --git a/aero-proto/src/imap/command/authenticated.rs b/aero-proto/src/imap/command/authenticated.rs index 4c8d8c1..5bd34cb 100644 --- a/aero-proto/src/imap/command/authenticated.rs +++ b/aero-proto/src/imap/command/authenticated.rs @@ -14,10 +14,10 @@ use imap_codec::imap_types::mailbox::{ListMailbox, Mailbox as MailboxCodec}; use imap_codec::imap_types::response::{Code, CodeOther, Data}; use imap_codec::imap_types::status::{StatusDataItem, StatusDataItemName}; +use aero_collections::mail::namespace::MAILBOX_HIERARCHY_DELIMITER as MBX_HIER_DELIM_RAW; use aero_collections::mail::uidindex::*; -use aero_collections::user::User; use aero_collections::mail::IMF; -use aero_collections::mail::namespace::MAILBOX_HIERARCHY_DELIMITER as MBX_HIER_DELIM_RAW; +use aero_collections::user::User; use crate::imap::capability::{ClientCapability, ServerCapability}; use crate::imap::command::{anystate, MailboxName}; diff --git a/aero-proto/src/imap/mod.rs b/aero-proto/src/imap/mod.rs index 7183a78..6a768b0 100644 --- a/aero-proto/src/imap/mod.rs +++ b/aero-proto/src/imap/mod.rs @@ -17,14 +17,14 @@ use std::net::SocketAddr; use anyhow::{anyhow, bail, Result}; use futures::stream::{FuturesUnordered, StreamExt}; -use tokio::net::TcpListener; -use tokio::sync::mpsc; -use tokio::sync::watch; use imap_codec::imap_types::response::{Code, CommandContinuationRequest, Response, Status}; use imap_codec::imap_types::{core::Text, response::Greeting}; use imap_flow::server::{ServerFlow, ServerFlowEvent, ServerFlowOptions}; use imap_flow::stream::AnyStream; use rustls_pemfile::{certs, private_key}; +use tokio::net::TcpListener; +use tokio::sync::mpsc; +use tokio::sync::watch; use tokio_rustls::TlsAcceptor; use aero_user::config::{ImapConfig, ImapUnsecureConfig}; diff --git a/aero-proto/src/lmtp.rs b/aero-proto/src/lmtp.rs index 9d40296..a82a783 100644 --- a/aero-proto/src/lmtp.rs +++ b/aero-proto/src/lmtp.rs @@ -10,16 +10,16 @@ use futures::{ stream::{FuturesOrdered, FuturesUnordered}, StreamExt, }; +use smtp_message::{DataUnescaper, Email, EscapedDataReader, Reply, ReplyCode}; +use smtp_server::{reply, Config, ConnectionMetadata, Decision, MailMetadata}; use tokio::net::TcpListener; use tokio::select; use tokio::sync::watch; use tokio_util::compat::*; -use smtp_message::{DataUnescaper, Email, EscapedDataReader, Reply, ReplyCode}; -use smtp_server::{reply, Config, ConnectionMetadata, Decision, MailMetadata}; +use aero_collections::mail::incoming::EncryptedMessage; use aero_user::config::*; use aero_user::login::*; -use aero_collections::mail::incoming::EncryptedMessage; pub struct LmtpServer { bind_addr: SocketAddr, diff --git a/aero-proto/src/sasl.rs b/aero-proto/src/sasl.rs index dae89eb..48c0815 100644 --- a/aero-proto/src/sasl.rs +++ b/aero-proto/src/sasl.rs @@ -8,9 +8,9 @@ use tokio::net::{TcpListener, TcpStream}; use tokio::sync::watch; use tokio_util::bytes::BytesMut; +use aero_sasl::{decode::client_command, encode::Encode, flow::State}; use aero_user::config::AuthConfig; use aero_user::login::ArcLoginProvider; -use aero_sasl::{flow::State, decode::client_command, encode::Encode}; pub struct AuthServer { login_provider: 
ArcLoginProvider, diff --git a/aero-sasl/src/flow.rs b/aero-sasl/src/flow.rs index 31c8bc5..5aa4869 100644 --- a/aero-sasl/src/flow.rs +++ b/aero-sasl/src/flow.rs @@ -1,8 +1,8 @@ use futures::Future; use rand::prelude::*; -use super::types::*; use super::decode::auth_plain; +use super::types::*; #[derive(Debug)] pub enum AuthRes { @@ -29,10 +29,10 @@ impl State { } async fn try_auth_plain(&self, data: &[u8], login: X) -> AuthRes - where - X: FnOnce(String, String) -> F, - F: Future, - { + where + X: FnOnce(String, String) -> F, + F: Future, + { // Check that we can extract user's login+pass let (ubin, pbin) = match auth_plain(&data) { Ok(([], (authz, user, pass))) if authz == user || authz == EMPTY_AUTHZ => (user, pass), @@ -65,10 +65,10 @@ impl State { } } - pub async fn progress(&mut self, cmd: ClientCommand, login: X) - where - X: FnOnce(String, String) -> F, - F: Future, + pub async fn progress(&mut self, cmd: ClientCommand, login: X) + where + X: FnOnce(String, String) -> F, + F: Future, { let new_state = 'state: { match (std::mem::replace(self, State::Error), cmd) { diff --git a/aero-sasl/src/lib.rs b/aero-sasl/src/lib.rs index 230862a..fdaa8a7 100644 --- a/aero-sasl/src/lib.rs +++ b/aero-sasl/src/lib.rs @@ -1,3 +1,6 @@ +pub mod decode; +pub mod encode; +pub mod flow; /// Seek compatibility with the Dovecot Authentication Protocol /// /// ## Trace @@ -38,6 +41,3 @@ /// https://doc.dovecot.org/configuration_manual/howto/simple_virtual_install/#simple-virtual-install-smtp-auth /// https://doc.dovecot.org/configuration_manual/howto/postfix_and_dovecot_sasl/#howto-postfix-and-dovecot-sasl pub mod types; -pub mod encode; -pub mod decode; -pub mod flow; diff --git a/aero-sasl/src/types.rs b/aero-sasl/src/types.rs index d71405e..2686677 100644 --- a/aero-sasl/src/types.rs +++ b/aero-sasl/src/types.rs @@ -159,5 +159,3 @@ pub enum ServerCommand { extra_parameters: Vec>, }, } - - diff --git a/aero-user/src/config.rs b/aero-user/src/config.rs index 44b1239..cea4520 100644 --- a/aero-user/src/config.rs +++ b/aero-user/src/config.rs @@ -11,7 +11,6 @@ pub struct CompanionConfig { pub pid: Option, pub imap: ImapUnsecureConfig, // @FIXME Add DAV - #[serde(flatten)] pub users: LoginStaticConfig, } diff --git a/aero-user/src/login/ldap_provider.rs b/aero-user/src/login/ldap_provider.rs index ca5a356..22b301e 100644 --- a/aero-user/src/login/ldap_provider.rs +++ b/aero-user/src/login/ldap_provider.rs @@ -2,9 +2,9 @@ use async_trait::async_trait; use ldap3::{LdapConnAsync, Scope, SearchEntry}; use log::debug; +use super::*; use crate::config::*; use crate::storage; -use super::*; pub struct LdapLoginProvider { ldap_server: String, diff --git a/aero-user/src/storage/in_memory.rs b/aero-user/src/storage/in_memory.rs index 9ef2721..5c8eb26 100644 --- a/aero-user/src/storage/in_memory.rs +++ b/aero-user/src/storage/in_memory.rs @@ -2,7 +2,7 @@ use std::collections::BTreeMap; use std::ops::Bound::{self, Excluded, Included, Unbounded}; use std::sync::RwLock; -use sodiumoxide::{hex, crypto::hash}; +use sodiumoxide::{crypto::hash, hex}; use tokio::sync::Notify; use crate::storage::*; diff --git a/aerogramme/src/main.rs b/aerogramme/src/main.rs index 624e8e2..39b5075 100644 --- a/aerogramme/src/main.rs +++ b/aerogramme/src/main.rs @@ -7,9 +7,9 @@ use anyhow::{bail, Context, Result}; use clap::{Parser, Subcommand}; use nix::{sys::signal, unistd::Pid}; +use crate::server::Server; use aero_user::config::*; use aero_user::login::{static_provider::*, *}; -use crate::server::Server; #[derive(Parser, Debug)] 
#[clap(author, version, about, long_about = None)] diff --git a/aerogramme/src/server.rs b/aerogramme/src/server.rs index e57cd72..3b3f6eb 100644 --- a/aerogramme/src/server.rs +++ b/aerogramme/src/server.rs @@ -7,13 +7,13 @@ use futures::try_join; use log::*; use tokio::sync::watch; -use aero_user::config::*; -use aero_user::login::ArcLoginProvider; -use aero_user::login::{demo_provider::*, ldap_provider::*, static_provider::*}; -use aero_proto::sasl as auth; use aero_proto::dav; use aero_proto::imap; use aero_proto::lmtp::*; +use aero_proto::sasl as auth; +use aero_user::config::*; +use aero_user::login::ArcLoginProvider; +use aero_user::login::{demo_provider::*, ldap_provider::*, static_provider::*}; pub struct Server { lmtp_server: Option>, -- cgit v1.2.3 From b2c75242eb5788242787428883471da3ec54ea7e Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Thu, 16 May 2024 21:47:21 +0200 Subject: WIP filter --- aero-proto/src/dav/controller.rs | 56 ++++++++++++++++++++++++++++++++++------ 1 file changed, 48 insertions(+), 8 deletions(-) diff --git a/aero-proto/src/dav/controller.rs b/aero-proto/src/dav/controller.rs index 0bf7a7d..541beb6 100644 --- a/aero-proto/src/dav/controller.rs +++ b/aero-proto/src/dav/controller.rs @@ -137,7 +137,7 @@ impl Controller { } cal::Report::Query(q) => { calprop = q.selector; - ok_node = apply_filter(&self.user, self.node.children(&self.user).await, q.filter) + ok_node = apply_filter(self.node.children(&self.user).await, &q.filter) .try_collect() .await?; } @@ -327,13 +327,11 @@ impl<'a> Path<'a> { //@FIXME move somewhere else //@FIXME naive implementation, must be refactored later use futures::stream::Stream; -use icalendar; -fn apply_filter( - user: &ArcUser, +fn apply_filter<'a>( nodes: Vec>, - filter: cal::Filter, -) -> impl Stream, std::io::Error>> { - futures::stream::iter(nodes).filter_map(|single_node| async move { + filter: &'a cal::Filter, +) -> impl Stream, std::io::Error>> + 'a { + futures::stream::iter(nodes).filter_map(move |single_node| async move { // Get ICS let chunks: Vec<_> = match single_node.content().try_collect().await { Ok(v) => v, @@ -346,11 +344,53 @@ fn apply_filter( }); // Parse ICS - let ics = icalendar::parser::read_calendar(&raw_ics).unwrap(); + let ics = match icalendar::parser::read_calendar(&raw_ics) { + Ok(v) => v, + Err(e) => { + tracing::warn!(err=?e, "Unable to parse ICS in calendar-query"); + return Some(Err(std::io::Error::from(std::io::ErrorKind::InvalidData))); + } + }; // Do checks + //@FIXME VCalendar root component is hardcoded + let root_filter = &filter.0; + if root_filter.name != cal::Component::VCalendar { + return None; + } + + let matcher = match &root_filter.additional_rules { + None => return Some(Ok(single_node)), + Some(cal::CompFilterRules::IsNotDefined) => return None, + Some(cal::CompFilterRules::Matches(m)) => m, + }; + + let evts = ics + .components + .iter() + .all(|single_comp| is_component_match(single_comp, matcher)); // Object has been kept Some(Ok(single_node)) }) } + +fn is_component_match( + component: &icalendar::parser::Component, + matcher: &cal::CompFilterMatch, +) -> bool { + if let Some(time_range) = &matcher.time_range { + todo!(); // check DTSTART and DTEND + } + + if !matcher.prop_filter.iter().all(|single_prop_filter| { + true // check prop filter against component + }) { + return false; + } + + //component.components.iter().any + matcher.comp_filter.iter().all(|single_comp_filter| { + true //@TODO find component, find + }) +} -- cgit v1.2.3 From 
51ec1d7ff9cc678a0e08b1e221af09fc7d8f4296 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Tue, 21 May 2024 18:09:21 +0200 Subject: calendar-query filter propertiers --- aero-dav/src/caldecoder.rs | 7 +-- aero-dav/src/calencoder.rs | 3 - aero-dav/src/caltypes.rs | 1 - aero-proto/src/dav/controller.rs | 120 +++++++++++++++++++++++++++++++++++++-- 4 files changed, 117 insertions(+), 14 deletions(-) diff --git a/aero-dav/src/caldecoder.rs b/aero-dav/src/caldecoder.rs index 16c9c6c..b4391a4 100644 --- a/aero-dav/src/caldecoder.rs +++ b/aero-dav/src/caldecoder.rs @@ -805,7 +805,6 @@ impl QRead for PropFilter { impl QRead for PropFilterRules { async fn qread(xml: &mut Reader) -> Result { - let mut time_range = None; let mut time_or_text = None; let mut param_filter = Vec::new(); @@ -817,7 +816,6 @@ impl QRead for PropFilterRules { return Ok(Self::IsNotDefined); } - xml.maybe_read(&mut time_range, &mut dirty).await?; xml.maybe_read(&mut time_or_text, &mut dirty).await?; xml.maybe_push(&mut param_filter, &mut dirty).await?; @@ -829,10 +827,9 @@ impl QRead for PropFilterRules { } } - match (&time_range, &time_or_text, ¶m_filter[..]) { - (None, None, []) => Err(ParsingError::Recoverable), + match (&time_or_text, ¶m_filter[..]) { + (None, []) => Err(ParsingError::Recoverable), _ => Ok(PropFilterRules::Match(PropFilterMatch { - time_range, time_or_text, param_filter, })), diff --git a/aero-dav/src/calencoder.rs b/aero-dav/src/calencoder.rs index 06cafd4..4467f7c 100644 --- a/aero-dav/src/calencoder.rs +++ b/aero-dav/src/calencoder.rs @@ -638,9 +638,6 @@ impl QWrite for PropFilterRules { impl QWrite for PropFilterMatch { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { - if let Some(time_range) = &self.time_range { - time_range.qwrite(xml).await?; - } if let Some(time_or_text) = &self.time_or_text { time_or_text.qwrite(xml).await?; } diff --git a/aero-dav/src/caltypes.rs b/aero-dav/src/caltypes.rs index 7c85642..717086b 100644 --- a/aero-dav/src/caltypes.rs +++ b/aero-dav/src/caltypes.rs @@ -1242,7 +1242,6 @@ pub enum PropFilterRules { } #[derive(Debug, PartialEq, Clone)] pub struct PropFilterMatch { - pub time_range: Option, pub time_or_text: Option, pub param_filter: Vec, } diff --git a/aero-proto/src/dav/controller.rs b/aero-proto/src/dav/controller.rs index 541beb6..a1c2660 100644 --- a/aero-proto/src/dav/controller.rs +++ b/aero-proto/src/dav/controller.rs @@ -365,32 +365,142 @@ fn apply_filter<'a>( Some(cal::CompFilterRules::Matches(m)) => m, }; - let evts = ics + let is_keep = ics .components .iter() - .all(|single_comp| is_component_match(single_comp, matcher)); + .any(|single_comp| is_component_match(single_comp, matcher)); // Object has been kept Some(Ok(single_node)) }) } +fn component_date( + component: &icalendar::parser::Component, + prop: &str, +) -> Option> { + component + .find_prop(prop) + .map(|p| p.val.as_str()) + .map(|raw_dtstart| { + NaiveDateTime::parse_from_str(raw_dtstart, cal::ICAL_DATETIME_FMT) + .ok() + .map(|v| v.and_utc()) + }) + .flatten() +} + +use chrono::NaiveDateTime; fn is_component_match( component: &icalendar::parser::Component, matcher: &cal::CompFilterMatch, ) -> bool { if let Some(time_range) = &matcher.time_range { - todo!(); // check DTSTART and DTEND + let (dtstart, dtend) = match ( + component_date(component, "DTSTART"), + component_date(component, "DTEND"), + ) { + (Some(start), None) => (start, start), + (None, Some(end)) => (end, end), + (Some(start), Some(end)) => (start, end), + _ => return false, + }; + + let is_in_range = match 
time_range { + cal::TimeRange::OnlyStart(after) => &dtend >= after, + cal::TimeRange::OnlyEnd(before) => &dtstart <= before, + cal::TimeRange::FullRange(after, before) => &dtend >= after && &dtstart <= before, + }; + + if !is_in_range { + return false; + } } if !matcher.prop_filter.iter().all(|single_prop_filter| { - true // check prop filter against component + match ( + &single_prop_filter.additional_rules, + component.find_prop(single_prop_filter.name.0.as_str()), + ) { + (None, Some(_)) | (Some(cal::PropFilterRules::IsNotDefined), None) => true, + (None, None) + | (Some(cal::PropFilterRules::IsNotDefined), Some(_)) + | (Some(cal::PropFilterRules::Match(_)), None) => false, + (Some(cal::PropFilterRules::Match(pattern)), Some(prop)) => { + // check value + match &pattern.time_or_text { + Some(cal::TimeOrText::Time(time_range)) => { + // try parse entry as date + let parsed_date = + match component_date(component, single_prop_filter.name.0.as_str()) { + Some(v) => v, + None => return false, + }; + + // see if entry is in range + let is_in_range = match time_range { + cal::TimeRange::OnlyStart(after) => &parsed_date >= after, + cal::TimeRange::OnlyEnd(before) => &parsed_date <= before, + cal::TimeRange::FullRange(after, before) => { + &parsed_date >= after && &parsed_date <= before + } + }; + if !is_in_range { + return false; + } + + // if you are here, this subcondition is valid + } + Some(cal::TimeOrText::Text(txt_match)) => { + //@FIXME ignoring collation + let is_match = match txt_match.negate_condition { + None | Some(false) => { + prop.val.as_str().contains(txt_match.text.as_str()) + } + Some(true) => !prop.val.as_str().contains(txt_match.text.as_str()), + }; + if !is_match { + return false; + } + } + None => (), // if not filter on value is set, continue + }; + + // check parameters + pattern.param_filter.iter().all(|single_param_filter| { + let maybe_param = prop.params.iter().find(|candidate| { + candidate.key.as_str() == single_param_filter.name.as_str() + }); + + match (maybe_param, &single_param_filter.additional_rules) { + (Some(_), None) => true, + (None, None) => false, + (Some(_), Some(cal::ParamFilterMatch::IsNotDefined)) => false, + (None, Some(cal::ParamFilterMatch::IsNotDefined)) => true, + (None, Some(cal::ParamFilterMatch::Match(_))) => false, + (Some(param), Some(cal::ParamFilterMatch::Match(txt_match))) => { + let param_val = match ¶m.val { + Some(v) => v, + None => return false, + }; + + match txt_match.negate_condition { + None | Some(false) => { + param_val.as_str().contains(txt_match.text.as_str()) + } + Some(true) => !param_val.as_str().contains(txt_match.text.as_str()), + } + } + } + }) + } + } }) { return false; } //component.components.iter().any - matcher.comp_filter.iter().all(|single_comp_filter| { + matcher.comp_filter.iter().any(|single_comp_filter| { true //@TODO find component, find }) } -- cgit v1.2.3 From 194e34d4e1b28957d8310ea1205989fadb1b44c7 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Wed, 22 May 2024 10:05:52 +0200 Subject: first full filter implementation --- aero-proto/src/dav/controller.rs | 55 +++++++++++++++++++++++++++------------- 1 file changed, 38 insertions(+), 17 deletions(-) diff --git a/aero-proto/src/dav/controller.rs b/aero-proto/src/dav/controller.rs index a1c2660..e5a1cff 100644 --- a/aero-proto/src/dav/controller.rs +++ b/aero-proto/src/dav/controller.rs @@ -353,25 +353,31 @@ fn apply_filter<'a>( }; // Do checks - //@FIXME VCalendar root component is hardcoded let root_filter = &filter.0; - if root_filter.name != 
cal::Component::VCalendar { - return None; - } - - let matcher = match &root_filter.additional_rules { - None => return Some(Ok(single_node)), - Some(cal::CompFilterRules::IsNotDefined) => return None, - Some(cal::CompFilterRules::Matches(m)) => m, - }; - let is_keep = ics + // Find the component in the filter + let maybe_comp = ics .components .iter() - .any(|single_comp| is_component_match(single_comp, matcher)); + .find(|candidate| candidate.name.as_str() == root_filter.name.as_str()); + + // Apply additional rules + let is_keep = match (maybe_comp, &root_filter.additional_rules) { + (Some(_), None) => true, + (None, Some(cal::CompFilterRules::IsNotDefined)) => true, + (None, None) => false, + (None, Some(cal::CompFilterRules::Matches(_))) => false, + (Some(_), Some(cal::CompFilterRules::IsNotDefined)) => false, + (Some(inner_comp), Some(cal::CompFilterRules::Matches(filter))) => { + is_component_match(inner_comp, filter) + } + }; - // Object has been kept - Some(Ok(single_node)) + // Adjust return value according to filter + match is_keep { + true => Some(Ok(single_node)), + _ => None, + } }) } @@ -499,8 +505,23 @@ fn is_component_match( return false; } - //component.components.iter().any - matcher.comp_filter.iter().any(|single_comp_filter| { - true //@TODO find component, find + matcher.comp_filter.iter().all(|single_comp_filter| { + // Find the component + let maybe_comp = component + .components + .iter() + .find(|candidate| candidate.name.as_str() == single_comp_filter.name.as_str()); + + // Filter according to rules + match (maybe_comp, &single_comp_filter.additional_rules) { + (Some(_), None) => true, + (None, Some(cal::CompFilterRules::IsNotDefined)) => true, + (None, None) => false, + (Some(_), Some(cal::CompFilterRules::IsNotDefined)) => false, + (None, Some(cal::CompFilterRules::Matches(_))) => false, + (Some(inner_comp), Some(cal::CompFilterRules::Matches(comp_match))) => { + is_component_match(inner_comp, comp_match) + } + } }) } -- cgit v1.2.3 From 6ca7082197aa60288c3295387bfdf47d8adbed2d Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Wed, 22 May 2024 15:02:53 +0200 Subject: fix: parsing components & times --- aero-dav/src/caldecoder.rs | 18 ++-- aero-dav/src/calencoder.rs | 33 +++---- aero-dav/src/caltypes.rs | 3 +- aero-proto/src/dav/controller.rs | 180 +++++++++++++++++++++------------------ 4 files changed, 127 insertions(+), 107 deletions(-) diff --git a/aero-dav/src/caldecoder.rs b/aero-dav/src/caldecoder.rs index b4391a4..1de4552 100644 --- a/aero-dav/src/caldecoder.rs +++ b/aero-dav/src/caldecoder.rs @@ -287,7 +287,7 @@ impl QRead for Property { .is_some() { let dtstr = xml.tag_string().await?; - let dt = NaiveDateTime::parse_from_str(dtstr.as_str(), ICAL_DATETIME_FMT)?.and_utc(); + let dt = NaiveDateTime::parse_from_str(dtstr.as_str(), CALDAV_DATETIME_FMT)?.and_utc(); xml.close().await?; return Ok(Property::MaxDateTime(dt)); } @@ -653,8 +653,8 @@ impl QRead for Expand { _ => return Err(ParsingError::MissingAttribute), }; - let start = NaiveDateTime::parse_from_str(rstart.as_str(), ICAL_DATETIME_FMT)?.and_utc(); - let end = NaiveDateTime::parse_from_str(rend.as_str(), ICAL_DATETIME_FMT)?.and_utc(); + let start = NaiveDateTime::parse_from_str(rstart.as_str(), CALDAV_DATETIME_FMT)?.and_utc(); + let end = NaiveDateTime::parse_from_str(rend.as_str(), CALDAV_DATETIME_FMT)?.and_utc(); if start > end { return Err(ParsingError::InvalidValue); } @@ -672,8 +672,8 @@ impl QRead for LimitRecurrenceSet { _ => return Err(ParsingError::MissingAttribute), }; - let start = 
NaiveDateTime::parse_from_str(rstart.as_str(), ICAL_DATETIME_FMT)?.and_utc(); - let end = NaiveDateTime::parse_from_str(rend.as_str(), ICAL_DATETIME_FMT)?.and_utc(); + let start = NaiveDateTime::parse_from_str(rstart.as_str(), CALDAV_DATETIME_FMT)?.and_utc(); + let end = NaiveDateTime::parse_from_str(rend.as_str(), CALDAV_DATETIME_FMT)?.and_utc(); if start > end { return Err(ParsingError::InvalidValue); } @@ -691,8 +691,8 @@ impl QRead for LimitFreebusySet { _ => return Err(ParsingError::MissingAttribute), }; - let start = NaiveDateTime::parse_from_str(rstart.as_str(), ICAL_DATETIME_FMT)?.and_utc(); - let end = NaiveDateTime::parse_from_str(rend.as_str(), ICAL_DATETIME_FMT)?.and_utc(); + let start = NaiveDateTime::parse_from_str(rstart.as_str(), CALDAV_DATETIME_FMT)?.and_utc(); + let end = NaiveDateTime::parse_from_str(rend.as_str(), CALDAV_DATETIME_FMT)?.and_utc(); if start > end { return Err(ParsingError::InvalidValue); } @@ -918,13 +918,13 @@ impl QRead for TimeRange { let start = match xml.prev_attr("start") { Some(r) => { - Some(NaiveDateTime::parse_from_str(r.as_str(), ICAL_DATETIME_FMT)?.and_utc()) + Some(NaiveDateTime::parse_from_str(r.as_str(), CALDAV_DATETIME_FMT)?.and_utc()) } _ => None, }; let end = match xml.prev_attr("end") { Some(r) => { - Some(NaiveDateTime::parse_from_str(r.as_str(), ICAL_DATETIME_FMT)?.and_utc()) + Some(NaiveDateTime::parse_from_str(r.as_str(), CALDAV_DATETIME_FMT)?.and_utc()) } _ => None, }; diff --git a/aero-dav/src/calencoder.rs b/aero-dav/src/calencoder.rs index 4467f7c..f145628 100644 --- a/aero-dav/src/calencoder.rs +++ b/aero-dav/src/calencoder.rs @@ -178,7 +178,7 @@ impl QWrite for Property { let start = xml.create_cal_element("min-date-time"); let end = start.to_end(); - let dtstr = format!("{}", dt.format(ICAL_DATETIME_FMT)); + let dtstr = format!("{}", dt.format(CALDAV_DATETIME_FMT)); xml.q.write_event_async(Event::Start(start.clone())).await?; xml.q .write_event_async(Event::Text(BytesText::new(dtstr.as_str()))) @@ -189,7 +189,7 @@ impl QWrite for Property { let start = xml.create_cal_element("max-date-time"); let end = start.to_end(); - let dtstr = format!("{}", dt.format(ICAL_DATETIME_FMT)); + let dtstr = format!("{}", dt.format(CALDAV_DATETIME_FMT)); xml.q.write_event_async(Event::Start(start.clone())).await?; xml.q .write_event_async(Event::Text(BytesText::new(dtstr.as_str()))) @@ -493,11 +493,11 @@ impl QWrite for Expand { let mut empty = xml.create_cal_element("expand"); empty.push_attribute(( "start", - format!("{}", self.0.format(ICAL_DATETIME_FMT)).as_str(), + format!("{}", self.0.format(CALDAV_DATETIME_FMT)).as_str(), )); empty.push_attribute(( "end", - format!("{}", self.1.format(ICAL_DATETIME_FMT)).as_str(), + format!("{}", self.1.format(CALDAV_DATETIME_FMT)).as_str(), )); xml.q.write_event_async(Event::Empty(empty)).await } @@ -508,11 +508,11 @@ impl QWrite for LimitRecurrenceSet { let mut empty = xml.create_cal_element("limit-recurrence-set"); empty.push_attribute(( "start", - format!("{}", self.0.format(ICAL_DATETIME_FMT)).as_str(), + format!("{}", self.0.format(CALDAV_DATETIME_FMT)).as_str(), )); empty.push_attribute(( "end", - format!("{}", self.1.format(ICAL_DATETIME_FMT)).as_str(), + format!("{}", self.1.format(CALDAV_DATETIME_FMT)).as_str(), )); xml.q.write_event_async(Event::Empty(empty)).await } @@ -523,11 +523,11 @@ impl QWrite for LimitFreebusySet { let mut empty = xml.create_cal_element("limit-freebusy-set"); empty.push_attribute(( "start", - format!("{}", self.0.format(ICAL_DATETIME_FMT)).as_str(), + format!("{}", 
self.0.format(CALDAV_DATETIME_FMT)).as_str(), )); empty.push_attribute(( "end", - format!("{}", self.1.format(ICAL_DATETIME_FMT)).as_str(), + format!("{}", self.1.format(CALDAV_DATETIME_FMT)).as_str(), )); xml.q.write_event_async(Event::Empty(empty)).await } @@ -737,18 +737,21 @@ impl QWrite for TimeRange { match self { Self::OnlyStart(start) => empty.push_attribute(( "start", - format!("{}", start.format(ICAL_DATETIME_FMT)).as_str(), + format!("{}", start.format(CALDAV_DATETIME_FMT)).as_str(), + )), + Self::OnlyEnd(end) => empty.push_attribute(( + "end", + format!("{}", end.format(CALDAV_DATETIME_FMT)).as_str(), )), - Self::OnlyEnd(end) => { - empty.push_attribute(("end", format!("{}", end.format(ICAL_DATETIME_FMT)).as_str())) - } Self::FullRange(start, end) => { empty.push_attribute(( "start", - format!("{}", start.format(ICAL_DATETIME_FMT)).as_str(), + format!("{}", start.format(CALDAV_DATETIME_FMT)).as_str(), + )); + empty.push_attribute(( + "end", + format!("{}", end.format(CALDAV_DATETIME_FMT)).as_str(), )); - empty - .push_attribute(("end", format!("{}", end.format(ICAL_DATETIME_FMT)).as_str())); } } xml.q.write_event_async(Event::Empty(empty)).await diff --git a/aero-dav/src/caltypes.rs b/aero-dav/src/caltypes.rs index 717086b..924b651 100644 --- a/aero-dav/src/caltypes.rs +++ b/aero-dav/src/caltypes.rs @@ -3,7 +3,8 @@ use super::types as dav; use chrono::{DateTime, Utc}; -pub const ICAL_DATETIME_FMT: &str = "%Y%m%dT%H%M%SZ"; +pub const ICAL_DATETIME_FMT: &str = "%Y%m%dT%H%M%S"; +pub const CALDAV_DATETIME_FMT: &str = "%Y%m%dT%H%M%SZ"; //@FIXME ACL (rfc3744) is missing, required //@FIXME Versioning (rfc3253) is missing, required diff --git a/aero-proto/src/dav/controller.rs b/aero-proto/src/dav/controller.rs index e5a1cff..306b035 100644 --- a/aero-proto/src/dav/controller.rs +++ b/aero-proto/src/dav/controller.rs @@ -353,81 +353,49 @@ fn apply_filter<'a>( }; // Do checks + // @FIXME: icalendar does not consider VCALENDAR as a component + // but WebDAV does... 
+ // Build a fake VCALENDAR component for icalendar compatibility, it's a hack let root_filter = &filter.0; - - // Find the component in the filter - let maybe_comp = ics - .components - .iter() - .find(|candidate| candidate.name.as_str() == root_filter.name.as_str()); - - // Apply additional rules - let is_keep = match (maybe_comp, &root_filter.additional_rules) { - (Some(_), None) => true, - (None, Some(cal::CompFilterRules::IsNotDefined)) => true, - (None, None) => false, - (None, Some(cal::CompFilterRules::Matches(_))) => false, - (Some(_), Some(cal::CompFilterRules::IsNotDefined)) => false, - (Some(inner_comp), Some(cal::CompFilterRules::Matches(filter))) => { - is_component_match(inner_comp, filter) - } + let fake_vcal_component = icalendar::parser::Component { + name: cal::Component::VCalendar.as_str().into(), + properties: ics.properties, + components: ics.components, }; + tracing::debug!(filter=?root_filter, "calendar-query filter"); // Adjust return value according to filter - match is_keep { + match is_component_match(&[fake_vcal_component], root_filter) { true => Some(Ok(single_node)), _ => None, } }) } -fn component_date( - component: &icalendar::parser::Component, - prop: &str, +fn prop_date( + properties: &[icalendar::parser::Property], + name: &str, ) -> Option> { - component - .find_prop(prop) + properties + .iter() + .find(|candidate| candidate.name.as_str() == name) .map(|p| p.val.as_str()) - .map(|raw_dtstart| { - NaiveDateTime::parse_from_str(raw_dtstart, cal::ICAL_DATETIME_FMT) + .map(|raw_time| { + tracing::trace!(raw_time = raw_time, "VEVENT raw time"); + NaiveDateTime::parse_from_str(raw_time, cal::ICAL_DATETIME_FMT) .ok() .map(|v| v.and_utc()) }) .flatten() } -use chrono::NaiveDateTime; -fn is_component_match( - component: &icalendar::parser::Component, - matcher: &cal::CompFilterMatch, -) -> bool { - if let Some(time_range) = &matcher.time_range { - let (dtstart, dtend) = match ( - component_date(component, "DTSTART"), - component_date(component, "DTEND"), - ) { - (Some(start), None) => (start, start), - (None, Some(end)) => (end, end), - (Some(start), Some(end)) => (start, end), - _ => return false, - }; - - let is_in_range = match time_range { - cal::TimeRange::OnlyStart(after) => &dtend >= after, - cal::TimeRange::OnlyEnd(before) => &dtstart <= before, - cal::TimeRange::FullRange(after, before) => &dtend >= after && &dtstart <= before, - }; - - if !is_in_range { - return false; - } - } - - if !matcher.prop_filter.iter().all(|single_prop_filter| { - match ( - &single_prop_filter.additional_rules, - component.find_prop(single_prop_filter.name.0.as_str()), - ) { +fn is_properties_match(props: &[icalendar::parser::Property], filters: &[cal::PropFilter]) -> bool { + filters.iter().all(|single_filter| { + // Find the property + let single_prop = props + .iter() + .find(|candidate| candidate.name.as_str() == single_filter.name.0.as_str()); + match (&single_filter.additional_rules, single_prop) { (None, Some(_)) | (Some(cal::PropFilterRules::IsNotDefined), None) => true, (None, None) | (Some(cal::PropFilterRules::IsNotDefined), Some(_)) @@ -436,12 +404,17 @@ fn is_component_match( // check value match &pattern.time_or_text { Some(cal::TimeOrText::Time(time_range)) => { - // try parse entry as date - let parsed_date = - match component_date(component, single_prop_filter.name.0.as_str()) { - Some(v) => v, - None => return false, - }; + let maybe_parsed_date = NaiveDateTime::parse_from_str( + prop.val.as_str(), + cal::ICAL_DATETIME_FMT, + ) + .ok() + .map(|v| 
v.and_utc()); + + let parsed_date = match maybe_parsed_date { + None => return false, + Some(v) => v, + }; // see if entry is in range let is_in_range = match time_range { @@ -501,27 +474,70 @@ fn is_component_match( }) } } - }) { - return false; + }) +} + +fn is_in_time_range( + properties: &[icalendar::parser::Property], + time_range: &cal::TimeRange, +) -> bool { + //@FIXME too naive: https://datatracker.ietf.org/doc/html/rfc4791#section-9.9 + + let (dtstart, dtend) = match ( + prop_date(properties, "DTSTART"), + prop_date(properties, "DTEND"), + ) { + (Some(start), None) => (start, start), + (None, Some(end)) => (end, end), + (Some(start), Some(end)) => (start, end), + _ => { + tracing::warn!("unable to extract DTSTART and DTEND from VEVENT"); + return false; + } + }; + + tracing::trace!(event_start=?dtstart, event_end=?dtend, filter=?time_range, "apply filter on VEVENT"); + match time_range { + cal::TimeRange::OnlyStart(after) => &dtend >= after, + cal::TimeRange::OnlyEnd(before) => &dtstart <= before, + cal::TimeRange::FullRange(after, before) => &dtend >= after && &dtstart <= before, } +} - matcher.comp_filter.iter().all(|single_comp_filter| { - // Find the component - let maybe_comp = component - .components - .iter() - .find(|candidate| candidate.name.as_str() == single_comp_filter.name.as_str()); - - // Filter according to rules - match (maybe_comp, &single_comp_filter.additional_rules) { - (Some(_), None) => true, - (None, Some(cal::CompFilterRules::IsNotDefined)) => true, - (None, None) => false, - (Some(_), Some(cal::CompFilterRules::IsNotDefined)) => false, - (None, Some(cal::CompFilterRules::Matches(_))) => false, - (Some(inner_comp), Some(cal::CompFilterRules::Matches(comp_match))) => { - is_component_match(inner_comp, comp_match) +use chrono::NaiveDateTime; +fn is_component_match( + components: &[icalendar::parser::Component], + filter: &cal::CompFilter, +) -> bool { + // Find the component among the list + let maybe_comp = components + .iter() + .find(|candidate| candidate.name.as_str() == filter.name.as_str()); + + // Filter according to rules + match (maybe_comp, &filter.additional_rules) { + (Some(_), None) => true, + (None, Some(cal::CompFilterRules::IsNotDefined)) => true, + (None, None) => false, + (Some(_), Some(cal::CompFilterRules::IsNotDefined)) => false, + (None, Some(cal::CompFilterRules::Matches(_))) => false, + (Some(component), Some(cal::CompFilterRules::Matches(matcher))) => { + // check time range + if let Some(time_range) = &matcher.time_range { + if !is_in_time_range(component.properties.as_ref(), time_range) { + return false; + } + } + + // check properties + if !is_properties_match(component.properties.as_ref(), matcher.prop_filter.as_ref()) { + return false; } + + // check inner components + matcher.comp_filter.iter().all(|inner_filter| { + is_component_match(component.components.as_ref(), &inner_filter) + }) } - }) + } } -- cgit v1.2.3 From 742beeeafbe46c7677d4af36006f0febd0edb6cd Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Wed, 22 May 2024 15:28:14 +0200 Subject: fix unit tests --- aero-dav/src/caldecoder.rs | 1 - aero-proto/src/imap/mailbox_view.rs | 14 +++++++------- aero-proto/src/imap/mime_view.rs | 2 +- 3 files changed, 8 insertions(+), 9 deletions(-) diff --git a/aero-dav/src/caldecoder.rs b/aero-dav/src/caldecoder.rs index 1de4552..02991c2 100644 --- a/aero-dav/src/caldecoder.rs +++ b/aero-dav/src/caldecoder.rs @@ -1347,7 +1347,6 @@ END:VCALENDAR]]> name: ComponentProperty("STATUS".into()), additional_rules: 
Some(PropFilterRules::Match( PropFilterMatch { - time_range: None, param_filter: vec![], time_or_text: Some(TimeOrText::Text(TextMatch { collation: None, diff --git a/aero-proto/src/imap/mailbox_view.rs b/aero-proto/src/imap/mailbox_view.rs index 0ef33d6..de81556 100644 --- a/aero-proto/src/imap/mailbox_view.rs +++ b/aero-proto/src/imap/mailbox_view.rs @@ -638,13 +638,13 @@ mod tests { use imap_codec::ResponseCodec; use std::fs; - use crate::cryptoblob; + use aero_user::cryptoblob; + use aero_collections::mail::mailbox::MailMeta; + use aero_collections::mail::query::QueryResult; + use aero_collections::unique_ident; + use crate::imap::index::MailIndex; - use crate::imap::mail_view::MailView; use crate::imap::mime_view; - use crate::mail::mailbox::MailMeta; - use crate::mail::query::QueryResult; - use crate::mail::unique_ident; #[test] fn mailview_body_ext() -> Result<()> { @@ -745,8 +745,8 @@ mod tests { for pref in prefixes.iter() { println!("{}", pref); - let txt = fs::read(format!("{}.eml", pref))?; - let oracle = fs::read(format!("{}.dovecot.body", pref))?; + let txt = fs::read(format!("../{}.eml", pref))?; + let oracle = fs::read(format!("../{}.dovecot.body", pref))?; let message = eml_codec::parse_message(&txt).unwrap().1; let test_repr = Response::Data(Data::Fetch { diff --git a/aero-proto/src/imap/mime_view.rs b/aero-proto/src/imap/mime_view.rs index 720f20a..fd0f4b0 100644 --- a/aero-proto/src/imap/mime_view.rs +++ b/aero-proto/src/imap/mime_view.rs @@ -33,7 +33,7 @@ pub enum BodySection<'a> { /// /// Example of message sections: /// -/// ``` +/// ```text /// HEADER ([RFC-2822] header of the message) /// TEXT ([RFC-2822] text body of the message) MULTIPART/MIXED /// 1 TEXT/PLAIN -- cgit v1.2.3 From 649a7b8b1be97a5d43f48ceff0d3f396fadabbbc Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Wed, 22 May 2024 19:36:27 +0200 Subject: webdav propfind integration tests --- Cargo.lock | 228 ++++++++++++++++++++++++++++++++++++ Cargo.toml | 1 + aero-proto/src/imap/mailbox_view.rs | 2 +- aerogramme/Cargo.toml | 5 + aerogramme/tests/behavior.rs | 125 +++++++++++++++++--- aerogramme/tests/common/mod.rs | 26 +++- flake.nix | 2 + 7 files changed, 370 insertions(+), 19 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c6602ab..9f8ccb6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -152,6 +152,7 @@ dependencies = [ name = "aerogramme" version = "0.3.0" dependencies = [ + "aero-dav", "aero-proto", "aero-user", "anyhow", @@ -160,6 +161,8 @@ dependencies = [ "futures", "log", "nix", + "quick-xml", + "reqwest", "rpassword", "tokio", "tracing", @@ -952,6 +955,12 @@ version = "0.21.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + [[package]] name = "base64-simd" version = "0.8.0" @@ -1515,6 +1524,21 @@ version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + [[package]] name = "form_urlencoded" version = "1.2.1" @@ -1933,6 +1957,22 @@ dependencies = [ "tower-service", ] +[[package]] +name = "hyper-tls" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" +dependencies = [ + "bytes", + "http-body-util", + "hyper 1.2.0", + "hyper-util", + "native-tls", + "tokio", + "tokio-native-tls", + "tower-service", +] + [[package]] name = "hyper-util" version = "0.1.3" @@ -2103,6 +2143,12 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "ipnet" +version = "2.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3" + [[package]] name = "iso8601" version = "0.6.1" @@ -2297,6 +2343,12 @@ version = "2.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ee1c47aaa256ecabcaea351eae4a9b01ef39ed810004e298d2511ed284b1525" +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + [[package]] name = "minimal-lexical" version = "0.2.1" @@ -2324,6 +2376,24 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "native-tls" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07226173c32f2926027b63cce4bcd8076c3552846cbe7925f3aaffeac0a3b92e" +dependencies = [ + "lazy_static", + "libc", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework", + "security-framework-sys", + "tempfile", +] + [[package]] name = "nix" version = "0.27.1" @@ -2435,12 +2505,50 @@ version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" +[[package]] +name = "openssl" +version = "0.10.64" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95a0481286a310808298130d22dd1fef0fa571e05a8f44ec801801e84b216b1f" +dependencies = [ + "bitflags 2.4.2", + "cfg-if", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.48", +] + [[package]] name = "openssl-probe" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" +[[package]] +name = "openssl-sys" +version = "0.9.102" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c597637d56fbc83893a35eb0dd04b2b8e7a50c91e64e9493e398b5df4fb45fa2" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + [[package]] name = "os_str_bytes" version = "6.6.1" @@ -2724,6 +2832,49 @@ version = "0.6.29" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" +[[package]] +name = "reqwest" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"566cafdd92868e0939d3fb961bd0dc25fcfaaed179291093b3d43e6b3150ea10" +dependencies = [ + "base64 0.22.1", + "bytes", + "encoding_rs", + "futures-channel", + "futures-core", + "futures-util", + "h2 0.4.2", + "http 1.1.0", + "http-body 1.0.0", + "http-body-util", + "hyper 1.2.0", + "hyper-tls", + "hyper-util", + "ipnet", + "js-sys", + "log", + "mime", + "native-tls", + "once_cell", + "percent-encoding", + "pin-project-lite 0.2.13", + "rustls-pemfile 2.1.1", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper", + "system-configuration", + "tokio", + "tokio-native-tls", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "winreg", +] + [[package]] name = "rfc6979" version = "0.3.1" @@ -3075,6 +3226,18 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + [[package]] name = "sha1" version = "0.10.6" @@ -3299,6 +3462,12 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "sync_wrapper" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" + [[package]] name = "synstructure" version = "0.12.6" @@ -3311,12 +3480,45 @@ dependencies = [ "unicode-xid", ] +[[package]] +name = "system-configuration" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "system-configuration-sys", +] + +[[package]] +name = "system-configuration-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "tap" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" +[[package]] +name = "tempfile" +version = "3.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1" +dependencies = [ + "cfg-if", + "fastrand 2.0.1", + "rustix 0.38.31", + "windows-sys 0.52.0", +] + [[package]] name = "termcolor" version = "1.4.1" @@ -3435,6 +3637,16 @@ dependencies = [ "syn 2.0.48", ] +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + "native-tls", + "tokio", +] + [[package]] name = "tokio-rustls" version = "0.23.4" @@ -3678,6 +3890,12 @@ version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "126e423afe2dd9ac52142e7e9d5ce4135d7e13776c529d27fd6bc49f19e3280b" +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + [[package]] name = "version_check" version = "0.9.4" @@ -3979,6 +4197,16 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04" +[[package]] +name = "winreg" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a277a57398d4bfa075df44f501a17cfdf8542d224f0d36095a2adc7aee4ef0a5" +dependencies = [ + "cfg-if", + "windows-sys 0.48.0", +] + [[package]] name = "wyz" version = "0.2.0" diff --git a/Cargo.toml b/Cargo.toml index 68b1eae..0ee7889 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -66,6 +66,7 @@ http-body-util = "0.1.1" hyper = "1.2" hyper-rustls = { version = "0.26", features = ["http2"] } hyper-util = { version = "0.1", features = ["full"] } +reqwest = { version = "0.12", features = [ "blocking" ]} # for testing purposes only # serialization, compression & parsing serde = "1.0.137" diff --git a/aero-proto/src/imap/mailbox_view.rs b/aero-proto/src/imap/mailbox_view.rs index de81556..0b808aa 100644 --- a/aero-proto/src/imap/mailbox_view.rs +++ b/aero-proto/src/imap/mailbox_view.rs @@ -638,10 +638,10 @@ mod tests { use imap_codec::ResponseCodec; use std::fs; - use aero_user::cryptoblob; use aero_collections::mail::mailbox::MailMeta; use aero_collections::mail::query::QueryResult; use aero_collections::unique_ident; + use aero_user::cryptoblob; use crate::imap::index::MailIndex; use crate::imap::mime_view; diff --git a/aerogramme/Cargo.toml b/aerogramme/Cargo.toml index ab62e44..77f3584 100644 --- a/aerogramme/Cargo.toml +++ b/aerogramme/Cargo.toml @@ -21,6 +21,11 @@ tracing.workspace = true tracing-subscriber.workspace = true rpassword.workspace = true +[dev-dependencies] +reqwest.workspace = true +aero-dav.workspace = true +quick-xml.workspace = true + [[test]] name = "behavior" path = "tests/behavior.rs" diff --git a/aerogramme/tests/behavior.rs b/aerogramme/tests/behavior.rs index 13baf0e..1786500 100644 --- a/aerogramme/tests/behavior.rs +++ b/aerogramme/tests/behavior.rs @@ -5,21 +5,25 @@ use crate::common::constants::*; use crate::common::fragments::*; fn main() { - rfc3501_imap4rev1_base(); + // IMAP + /*rfc3501_imap4rev1_base(); rfc6851_imapext_move(); rfc4551_imapext_condstore(); rfc2177_imapext_idle(); - rfc5161_imapext_enable(); // 1 - rfc3691_imapext_unselect(); // 2 - rfc7888_imapext_literal(); // 3 - rfc4315_imapext_uidplus(); // 4 - rfc5819_imapext_liststatus(); // 5 + rfc5161_imapext_enable(); + rfc3691_imapext_unselect(); + rfc7888_imapext_literal(); + rfc4315_imapext_uidplus(); + rfc5819_imapext_liststatus();*/ + + // WebDAV + rfc4918_webdav_core(); println!("✅ SUCCESS 🌟🚀🥳🙏🥹"); } fn rfc3501_imap4rev1_base() { println!("🧪 rfc3501_imap4rev1_base"); - common::aerogramme_provider_daemon_dev(|imap_socket, lmtp_socket| { + common::aerogramme_provider_daemon_dev(|imap_socket, lmtp_socket, _dav_socket| { connect(imap_socket).context("server says hello")?; capability(imap_socket, Extension::None).context("check server capabilities")?; login(imap_socket, Account::Alice).context("login test")?; @@ -69,7 +73,7 @@ fn rfc3501_imap4rev1_base() { fn rfc3691_imapext_unselect() { println!("🧪 rfc3691_imapext_unselect"); - common::aerogramme_provider_daemon_dev(|imap_socket, lmtp_socket| { + common::aerogramme_provider_daemon_dev(|imap_socket, lmtp_socket, _dav_socket| { connect(imap_socket).context("server says hello")?; lmtp_handshake(lmtp_socket).context("handshake lmtp done")?; @@ -118,7 +122,7 @@ fn rfc3691_imapext_unselect() { fn rfc5161_imapext_enable() { println!("🧪 rfc5161_imapext_enable"); - common::aerogramme_provider_daemon_dev(|imap_socket, _lmtp_socket| { + 
common::aerogramme_provider_daemon_dev(|imap_socket, _lmtp_socket, _dav_socket| { connect(imap_socket).context("server says hello")?; login(imap_socket, Account::Alice).context("login test")?; enable(imap_socket, Enable::Utf8Accept, Some(Enable::Utf8Accept))?; @@ -132,7 +136,7 @@ fn rfc5161_imapext_enable() { fn rfc6851_imapext_move() { println!("🧪 rfc6851_imapext_move"); - common::aerogramme_provider_daemon_dev(|imap_socket, lmtp_socket| { + common::aerogramme_provider_daemon_dev(|imap_socket, lmtp_socket, _dav_socket| { connect(imap_socket).context("server says hello")?; capability(imap_socket, Extension::Move).context("check server capabilities")?; @@ -174,7 +178,7 @@ fn rfc6851_imapext_move() { fn rfc7888_imapext_literal() { println!("🧪 rfc7888_imapext_literal"); - common::aerogramme_provider_daemon_dev(|imap_socket, _lmtp_socket| { + common::aerogramme_provider_daemon_dev(|imap_socket, _lmtp_socket, _dav_socket| { connect(imap_socket).context("server says hello")?; capability(imap_socket, Extension::LiteralPlus).context("check server capabilities")?; @@ -187,7 +191,7 @@ fn rfc7888_imapext_literal() { fn rfc4551_imapext_condstore() { println!("🧪 rfc4551_imapext_condstore"); - common::aerogramme_provider_daemon_dev(|imap_socket, lmtp_socket| { + common::aerogramme_provider_daemon_dev(|imap_socket, lmtp_socket, _dav_socket| { // Setup the test connect(imap_socket).context("server says hello")?; @@ -245,7 +249,7 @@ fn rfc4551_imapext_condstore() { fn rfc2177_imapext_idle() { println!("🧪 rfc2177_imapext_idle"); - common::aerogramme_provider_daemon_dev(|imap_socket, lmtp_socket| { + common::aerogramme_provider_daemon_dev(|imap_socket, lmtp_socket, _dav_socket| { // Test setup, check capability connect(imap_socket).context("server says hello")?; capability(imap_socket, Extension::Idle).context("check server capabilities")?; @@ -266,7 +270,7 @@ fn rfc2177_imapext_idle() { fn rfc4315_imapext_uidplus() { println!("🧪 rfc4315_imapext_uidplus"); - common::aerogramme_provider_daemon_dev(|imap_socket, lmtp_socket| { + common::aerogramme_provider_daemon_dev(|imap_socket, lmtp_socket, _dav_socket| { // Test setup, check capability, insert 2 emails connect(imap_socket).context("server says hello")?; capability(imap_socket, Extension::UidPlus).context("check server capabilities")?; @@ -320,7 +324,7 @@ fn rfc4315_imapext_uidplus() { /// ``` fn rfc5819_imapext_liststatus() { println!("🧪 rfc5819_imapext_liststatus"); - common::aerogramme_provider_daemon_dev(|imap_socket, lmtp_socket| { + common::aerogramme_provider_daemon_dev(|imap_socket, lmtp_socket, _dav_socket| { // Test setup, check capability, add 2 emails, read 1 connect(imap_socket).context("server says hello")?; capability(imap_socket, Extension::ListStatus).context("check server capabilities")?; @@ -355,3 +359,94 @@ fn rfc5819_imapext_liststatus() { }) .expect("test fully run"); } + +use aero_dav::caltypes as cal; +use aero_dav::realization::All; +use aero_dav::types as dav; + +use crate::common::dav_deserialize; + +fn rfc4918_webdav_core() { + println!("🧪 rfc4918_webdav_core"); + common::aerogramme_provider_daemon_dev(|_imap, _lmtp, http| { + // --- PROPFIND --- + // empty request body (assume "allprop") + let body = http.request(reqwest::Method::from_bytes(b"PROPFIND")?, "http://localhost:8087").send()?.text()?; + let multistatus = dav_deserialize::>(&body); + let root_propstats = multistatus.responses.iter() + .find_map(|v| match &v.status_or_propstat { + dav::StatusOrPropstat::PropStat(dav::Href(p), x) if p.as_str() == "/" => Some(x), + _ => 
None, + }) + .expect("propstats for root must exist"); + + let root_success = root_propstats.iter().find(|p| p.status.0.as_u16() == 200).expect("some propstats for root must be 200"); + let display_name = root_success.prop.0.iter() + .find_map(|v| match v { dav::AnyProperty::Value(dav::Property::DisplayName(x)) => Some(x), _ => None } ) + .expect("root has a display name"); + let content_type = root_success.prop.0.iter() + .find_map(|v| match v { dav::AnyProperty::Value(dav::Property::GetContentType(x)) => Some(x), _ => None } ) + .expect("root has a content type"); + let resource_type = root_success.prop.0.iter() + .find_map(|v| match v { dav::AnyProperty::Value(dav::Property::ResourceType(x)) => Some(x), _ => None } ) + .expect("root has a resource type"); + + assert_eq!(display_name, "DAV Root"); + assert_eq!(content_type, "httpd/unix-directory"); + assert_eq!(resource_type, &[ dav::ResourceType::Collection ]); + + // propname + let propfind_req = r#""#; + let body = http.request(reqwest::Method::from_bytes(b"PROPFIND")?, "http://localhost:8087").body(propfind_req).send()?.text()?; + let multistatus = dav_deserialize::>(&body); + let root_propstats = multistatus.responses.iter() + .find_map(|v| match &v.status_or_propstat { + dav::StatusOrPropstat::PropStat(dav::Href(p), x) if p.as_str() == "/" => Some(x), + _ => None, + }) + .expect("propstats for root must exist"); + let root_success = root_propstats.iter().find(|p| p.status.0.as_u16() == 200).expect("some propstats for root must be 200"); + assert!(root_success.prop.0.iter().find(|p| matches!(p, dav::AnyProperty::Request(dav::PropertyRequest::DisplayName))).is_some()); + assert!(root_success.prop.0.iter().find(|p| matches!(p, dav::AnyProperty::Request(dav::PropertyRequest::ResourceType))).is_some()); + assert!(root_success.prop.0.iter().find(|p| matches!(p, dav::AnyProperty::Request(dav::PropertyRequest::GetContentType))).is_some()); + + // list of properties + let propfind_req = r#""#; + let body = http.request(reqwest::Method::from_bytes(b"PROPFIND")?, "http://localhost:8087").body(propfind_req).send()?.text()?; + let multistatus = dav_deserialize::>(&body); + let root_propstats = multistatus.responses.iter() + .find_map(|v| match &v.status_or_propstat { + dav::StatusOrPropstat::PropStat(dav::Href(p), x) if p.as_str() == "/" => Some(x), + _ => None, + }) + .expect("propstats for root must exist"); + + let root_success = root_propstats.iter().find(|p| p.status.0.as_u16() == 200).expect("some propstats for root must be 200"); + let root_not_found = root_propstats.iter().find(|p| p.status.0.as_u16() == 404).expect("some propstats for root must be not found"); + + assert!(root_success.prop.0.iter().find(|p| matches!(p, dav::AnyProperty::Value(dav::Property::DisplayName(x)) if x == "DAV Root")).is_some()); + assert!(root_success.prop.0.iter().find(|p| matches!(p, dav::AnyProperty::Value(dav::Property::ResourceType(_)))).is_none()); + assert!(root_success.prop.0.iter().find(|p| matches!(p, dav::AnyProperty::Value(dav::Property::GetContentType(_)))).is_none()); + assert!(root_not_found.prop.0.iter().find(|p| matches!(p, dav::AnyProperty::Request(dav::PropertyRequest::GetContentLength))).is_some()); + + // depth 1 + + // check tree (calendar, Personal) + + // --- PUT --- + + // --- GET --- + + // --- DELETE --- + + + Ok(()) + }) + .expect("test fully run"); +} + +// @TODO ACL + +// @TODO CALDAV + +// @TODO SYNC diff --git a/aerogramme/tests/common/mod.rs b/aerogramme/tests/common/mod.rs index cbe0271..12f2764 100644 --- 
a/aerogramme/tests/common/mod.rs +++ b/aerogramme/tests/common/mod.rs @@ -8,10 +8,13 @@ use std::net::{Shutdown, TcpStream}; use std::process::Command; use std::thread; +use reqwest::blocking::Client; +use reqwest::header; + use constants::SMALL_DELAY; pub fn aerogramme_provider_daemon_dev( - mut fx: impl FnMut(&mut TcpStream, &mut TcpStream) -> Result<()>, + mut fx: impl FnMut(&mut TcpStream, &mut TcpStream, &mut Client) -> Result<()>, ) -> Result<()> { // Check port is not used (= free) before starting the test let mut max_retry = 20; @@ -53,8 +56,15 @@ pub fn aerogramme_provider_daemon_dev( let mut lmtp_socket = TcpStream::connect("[::1]:1025").context("lmtp socket must be connected")?; - println!("-- ready to test imap features --"); - let result = fx(&mut imap_socket, &mut lmtp_socket); + let mut headers = header::HeaderMap::new(); + headers.insert( + header::AUTHORIZATION, + header::HeaderValue::from_static("Basic YWxpY2U6aHVudGVyMg=="), + ); + let mut http_client = Client::builder().default_headers(headers).build()?; + + println!("-- ready to test features --"); + let result = fx(&mut imap_socket, &mut lmtp_socket, &mut http_client); println!("-- test teardown --"); imap_socket @@ -97,3 +107,13 @@ pub fn read_first_u32(inp: &str) -> Result { .collect::() .parse::()?) } + +use aero_dav::xml::{Node, Reader}; +pub fn dav_deserialize>(src: &str) -> T { + futures::executor::block_on(async { + let mut rdr = Reader::new(quick_xml::NsReader::from_reader(src.as_bytes())) + .await + .expect("build reader"); + rdr.find().await.expect("parse XML") + }) +} diff --git a/flake.nix b/flake.nix index c6ae4ce..8dcd326 100644 --- a/flake.nix +++ b/flake.nix @@ -185,6 +185,8 @@ # Shell shell = gpkgs.mkShell { buildInputs = [ + gpkgs.openssl + gpkgs.pkg-config cargo2nix.packages.x86_64-linux.default fenix.packages.x86_64-linux.complete.toolchain #fenix.packages.x86_64-linux.rust-analyzer -- cgit v1.2.3 From e522251bec3519b4ca867e6ef5131c6fdf6cd2b1 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Wed, 22 May 2024 19:58:20 +0200 Subject: test webdav put --- aerogramme/tests/behavior.rs | 48 +++++++++++++++++++++++- aerogramme/tests/common/constants.rs | 72 ++++++++++++++++++++++++++++++++++++ 2 files changed, 118 insertions(+), 2 deletions(-) diff --git a/aerogramme/tests/behavior.rs b/aerogramme/tests/behavior.rs index 1786500..c514f06 100644 --- a/aerogramme/tests/behavior.rs +++ b/aerogramme/tests/behavior.rs @@ -429,11 +429,55 @@ fn rfc4918_webdav_core() { assert!(root_success.prop.0.iter().find(|p| matches!(p, dav::AnyProperty::Value(dav::Property::GetContentType(_)))).is_none()); assert!(root_not_found.prop.0.iter().find(|p| matches!(p, dav::AnyProperty::Request(dav::PropertyRequest::GetContentLength))).is_some()); - // depth 1 + // depth 1 / -> /alice/ + let body = http.request(reqwest::Method::from_bytes(b"PROPFIND")?, "http://localhost:8087").header("Depth", "1").send()?.text()?; + let multistatus = dav_deserialize::>(&body); + let _user_propstats = multistatus.responses.iter() + .find_map(|v| match &v.status_or_propstat { + dav::StatusOrPropstat::PropStat(dav::Href(p), x) if p.as_str() == "/alice/" => Some(x), + _ => None, + }) + .expect("user collection must exist"); + + // depth 1 /alice/ -> /alice/calendar/ + let body = http.request(reqwest::Method::from_bytes(b"PROPFIND")?, "http://localhost:8087/alice/").header("Depth", "1").send()?.text()?; + let multistatus = dav_deserialize::>(&body); + let _user_calendars_propstats = multistatus.responses.iter() + .find_map(|v| match 
&v.status_or_propstat { + dav::StatusOrPropstat::PropStat(dav::Href(p), x) if p.as_str() == "/alice/calendar/" => Some(x), + _ => None, + }) + .expect("user collection must exist"); + + // depth 1 /alice/calendar/ -> /alice/calendar/Personal/ + let body = http.request(reqwest::Method::from_bytes(b"PROPFIND")?, "http://localhost:8087/alice/calendar/").header("Depth", "1").send()?.text()?; + let multistatus = dav_deserialize::>(&body); + let _user_calendars_propstats = multistatus.responses.iter() + .find_map(|v| match &v.status_or_propstat { + dav::StatusOrPropstat::PropStat(dav::Href(p), x) if p.as_str() == "/alice/calendar/Personal/" => Some(x), + _ => None, + }) + .expect("Personal calendar must exist"); - // check tree (calendar, Personal) + // depth 1 /alice/calendar/Personal/ -> empty for now... + let body = http.request(reqwest::Method::from_bytes(b"PROPFIND")?, "http://localhost:8087/alice/calendar/Personal/").header("Depth", "1").send()?.text()?; + let multistatus = dav_deserialize::>(&body); + assert_eq!(multistatus.responses.len(), 1); // --- PUT --- + let resp = http.request(reqwest::Method::from_bytes(b"PUT")?, "http://localhost:8087/alice/calendar/Personal/rfc2.ics").header("If-None-Match", "*").body(ICAL_RFC2).send()?; + assert_eq!(resp.status(), 201); + + let body = http.request(reqwest::Method::from_bytes(b"PROPFIND")?, "http://localhost:8087/alice/calendar/Personal/").header("Depth", "1").send()?.text()?; + let multistatus = dav_deserialize::>(&body); + assert_eq!(multistatus.responses.len(), 2); + + let resp = http.request(reqwest::Method::from_bytes(b"PUT")?, "http://localhost:8087/alice/calendar/Personal/rfc3.ics").header("If-None-Match", "*").body(ICAL_RFC3).send()?; + assert_eq!(resp.status(), 201); + + let body = http.request(reqwest::Method::from_bytes(b"PROPFIND")?, "http://localhost:8087/alice/calendar/Personal/").header("Depth", "1").send()?.text()?; + let multistatus = dav_deserialize::>(&body); + assert_eq!(multistatus.responses.len(), 3); // --- GET --- diff --git a/aerogramme/tests/common/constants.rs b/aerogramme/tests/common/constants.rs index c11a04d..6b17c4f 100644 --- a/aerogramme/tests/common/constants.rs +++ b/aerogramme/tests/common/constants.rs @@ -52,3 +52,75 @@ Subject: Test\r \r Hello world!\r "; + +pub static ICAL_RFC1: &[u8] = b"BEGIN:VCALENDAR +PRODID:-//Example Corp.//CalDAV Client//EN +VERSION:2.0 +BEGIN:VEVENT +UID:1@example.com +SUMMARY:One-off Meeting +DTSTAMP:20041210T183904Z +DTSTART:20041207T120000Z +DTEND:20041207T130000Z +END:VEVENT +BEGIN:VEVENT +UID:2@example.com +SUMMARY:Weekly Meeting +DTSTAMP:20041210T183838Z +DTSTART:20041206T120000Z +DTEND:20041206T130000Z +RRULE:FREQ=WEEKLY +END:VEVENT +BEGIN:VEVENT +UID:2@example.com +SUMMARY:Weekly Meeting +RECURRENCE-ID:20041213T120000Z +DTSTAMP:20041210T183838Z +DTSTART:20041213T130000Z +DTEND:20041213T140000Z +END:VEVENT +END:VCALENDAR +"; + +pub static ICAL_RFC2: &[u8] = b"BEGIN:VCALENDAR +VERSION:2.0 +PRODID:-//Example Corp.//CalDAV Client//EN +BEGIN:VEVENT +UID:20010712T182145Z-123401@example.com +DTSTAMP:20060712T182145Z +DTSTART:20060714T170000Z +DTEND:20060715T040000Z +SUMMARY:Bastille Day Party +END:VEVENT +END:VCALENDAR +"; + +pub static ICAL_RFC3: &[u8] = b"BEGIN:VCALENDAR +VERSION:2.0 +PRODID:-//Example Corp.//CalDAV Client//EN +BEGIN:VTIMEZONE +LAST-MODIFIED:20040110T032845Z +TZID:US/Eastern +BEGIN:DAYLIGHT +DTSTART:20000404T020000 +RRULE:FREQ=YEARLY;BYDAY=1SU;BYMONTH=4 +TZNAME:EDT +TZOFFSETFROM:-0500 +TZOFFSETTO:-0400 +END:DAYLIGHT +BEGIN:STANDARD +DTSTART:20001026T020000 
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10 +TZNAME:EST +TZOFFSETFROM:-0400 +TZOFFSETTO:-0500 +END:STANDARD +END:VTIMEZONE +BEGIN:VEVENT +DTSTART;TZID=US/Eastern:20060104T100000 +DURATION:PT1H +SUMMARY:Event #3 +UID:DC6C50A017428C5216A2F1CD@example.com +END:VEVENT +END:VCALENDAR +"; -- cgit v1.2.3 From 2ca485fb87125b543307748e73b04bcd68f2d9ad Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Wed, 22 May 2024 23:22:03 +0200 Subject: test webdav core get, delete, update --- aero-proto/src/dav/controller.rs | 10 +++++++++- aerogramme/tests/behavior.rs | 34 ++++++++++++++++++++++++++++++++-- 2 files changed, 41 insertions(+), 3 deletions(-) diff --git a/aero-proto/src/dav/controller.rs b/aero-proto/src/dav/controller.rs index 306b035..0a47cf4 100644 --- a/aero-proto/src/dav/controller.rs +++ b/aero-proto/src/dav/controller.rs @@ -223,7 +223,15 @@ impl Controller { }) .boxed(); - let etag = self.node.put(put_policy, stream_of_bytes).await?; + let etag = match self.node.put(put_policy, stream_of_bytes).await { + Ok(etag) => etag, + Err(e) if e.kind() == std::io::ErrorKind::AlreadyExists => { + tracing::warn!("put pre-condition failed"); + let response = Response::builder().status(412).body(text_body(""))?; + return Ok(response); + } + Err(e) => Err(e)?, + }; let response = Response::builder() .status(201) diff --git a/aerogramme/tests/behavior.rs b/aerogramme/tests/behavior.rs index c514f06..18095ef 100644 --- a/aerogramme/tests/behavior.rs +++ b/aerogramme/tests/behavior.rs @@ -465,24 +465,54 @@ fn rfc4918_webdav_core() { assert_eq!(multistatus.responses.len(), 1); // --- PUT --- - let resp = http.request(reqwest::Method::from_bytes(b"PUT")?, "http://localhost:8087/alice/calendar/Personal/rfc2.ics").header("If-None-Match", "*").body(ICAL_RFC2).send()?; + // first object + let resp = http.put("http://localhost:8087/alice/calendar/Personal/rfc2.ics").header("If-None-Match", "*").body(ICAL_RFC2).send()?; + let obj1_etag = resp.headers().get("etag").expect("etag must be set"); assert_eq!(resp.status(), 201); let body = http.request(reqwest::Method::from_bytes(b"PROPFIND")?, "http://localhost:8087/alice/calendar/Personal/").header("Depth", "1").send()?.text()?; let multistatus = dav_deserialize::>(&body); assert_eq!(multistatus.responses.len(), 2); - let resp = http.request(reqwest::Method::from_bytes(b"PUT")?, "http://localhost:8087/alice/calendar/Personal/rfc3.ics").header("If-None-Match", "*").body(ICAL_RFC3).send()?; + // second object + let resp = http.put("http://localhost:8087/alice/calendar/Personal/rfc3.ics").header("If-None-Match", "*").body(ICAL_RFC3).send()?; assert_eq!(resp.status(), 201); let body = http.request(reqwest::Method::from_bytes(b"PROPFIND")?, "http://localhost:8087/alice/calendar/Personal/").header("Depth", "1").send()?.text()?; let multistatus = dav_deserialize::>(&body); assert_eq!(multistatus.responses.len(), 3); + // can't create an event on an existing path + let resp = http.put("http://localhost:8087/alice/calendar/Personal/rfc2.ics").header("If-None-Match", "*").body(ICAL_RFC1).send()?; + assert_eq!(resp.status(), 412); + + // update first object by knowing its ETag + let resp = http.put("http://localhost:8087/alice/calendar/Personal/rfc2.ics").header("If-Match", obj1_etag).body(ICAL_RFC1).send()?; + assert_eq!(resp.status(), 201); + // --- GET --- + let body = http.get("http://localhost:8087/alice/calendar/Personal/rfc2.ics").send()?.text()?; + assert_eq!(body.as_bytes(), ICAL_RFC1); + + let body = 
http.get("http://localhost:8087/alice/calendar/Personal/rfc3.ics").send()?.text()?; + assert_eq!(body.as_bytes(), ICAL_RFC3); // --- DELETE --- + // delete 1st object + let resp = http.delete("http://localhost:8087/alice/calendar/Personal/rfc2.ics").send()?; + assert_eq!(resp.status(), 204); + + let body = http.request(reqwest::Method::from_bytes(b"PROPFIND")?, "http://localhost:8087/alice/calendar/Personal/").header("Depth", "1").send()?.text()?; + let multistatus = dav_deserialize::>(&body); + assert_eq!(multistatus.responses.len(), 2); + + // delete 2nd object + let resp = http.delete("http://localhost:8087/alice/calendar/Personal/rfc3.ics").send()?; + assert_eq!(resp.status(), 204); + let body = http.request(reqwest::Method::from_bytes(b"PROPFIND")?, "http://localhost:8087/alice/calendar/Personal/").header("Depth", "1").send()?.text()?; + let multistatus = dav_deserialize::>(&body); + assert_eq!(multistatus.responses.len(), 1); Ok(()) }) -- cgit v1.2.3 From a4df1a6ef16b1a41d20e6e39ad0d808973ce0926 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Wed, 22 May 2024 23:38:41 +0200 Subject: test rfc5397 current-user-principal --- aerogramme/tests/behavior.rs | 37 +++++++++++++++++++++++++++++++++---- 1 file changed, 33 insertions(+), 4 deletions(-) diff --git a/aerogramme/tests/behavior.rs b/aerogramme/tests/behavior.rs index 18095ef..7d2fc67 100644 --- a/aerogramme/tests/behavior.rs +++ b/aerogramme/tests/behavior.rs @@ -18,6 +18,7 @@ fn main() { // WebDAV rfc4918_webdav_core(); + rfc5397_webdav_principal(); println!("✅ SUCCESS 🌟🚀🥳🙏🥹"); } @@ -360,8 +361,9 @@ fn rfc5819_imapext_liststatus() { .expect("test fully run"); } +use aero_dav::acltypes as acl; use aero_dav::caltypes as cal; -use aero_dav::realization::All; +use aero_dav::realization::{self, All}; use aero_dav::types as dav; use crate::common::dav_deserialize; @@ -379,7 +381,7 @@ fn rfc4918_webdav_core() { _ => None, }) .expect("propstats for root must exist"); - + let root_success = root_propstats.iter().find(|p| p.status.0.as_u16() == 200).expect("some propstats for root must be 200"); let display_name = root_success.prop.0.iter() .find_map(|v| match v { dav::AnyProperty::Value(dav::Property::DisplayName(x)) => Some(x), _ => None } ) @@ -496,7 +498,7 @@ fn rfc4918_webdav_core() { let body = http.get("http://localhost:8087/alice/calendar/Personal/rfc3.ics").send()?.text()?; assert_eq!(body.as_bytes(), ICAL_RFC3); - + // --- DELETE --- // delete 1st object let resp = http.delete("http://localhost:8087/alice/calendar/Personal/rfc2.ics").send()?; @@ -519,8 +521,35 @@ fn rfc4918_webdav_core() { .expect("test fully run"); } -// @TODO ACL +fn rfc5397_webdav_principal() { + println!("🧪 rfc5397_webdav_principal"); + common::aerogramme_provider_daemon_dev(|_imap, _lmtp, http| { + // Find principal + let propfind_req = r#""#; + let body = http.request(reqwest::Method::from_bytes(b"PROPFIND")?, "http://localhost:8087").body(propfind_req).send()?.text()?; + let multistatus = dav_deserialize::>(&body); + let root_propstats = multistatus.responses.iter() + .find_map(|v| match &v.status_or_propstat { + dav::StatusOrPropstat::PropStat(dav::Href(p), x) if p.as_str() == "/" => Some(x), + _ => None, + }) + .expect("propstats for root must exist"); + + let root_success = root_propstats.iter().find(|p| p.status.0.as_u16() == 200).expect("current-user-principal must exist"); + let principal = root_success.prop.0.iter() + .find_map(|v| match v { + 
dav::AnyProperty::Value(dav::Property::Extension(realization::Property::Acl(acl::Property::CurrentUserPrincipal(acl::User::Authenticated(dav::Href(x)))))) => Some(x), + _ => None, + }) + .expect("request returned an authenticated principal"); + assert_eq!(principal, "/alice/"); + + Ok(()) + }) + .expect("test fully run") +} // @TODO CALDAV +// @TODO find calendar-home-set // @TODO SYNC -- cgit v1.2.3 From 54d10ed48274607c7bc4e0fd5fb1919f57317b70 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Wed, 22 May 2024 23:48:34 +0200 Subject: check calendar autodiscovery --- aerogramme/tests/behavior.rs | 54 ++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 52 insertions(+), 2 deletions(-) diff --git a/aerogramme/tests/behavior.rs b/aerogramme/tests/behavior.rs index 7d2fc67..d13e556 100644 --- a/aerogramme/tests/behavior.rs +++ b/aerogramme/tests/behavior.rs @@ -19,6 +19,7 @@ fn main() { // WebDAV rfc4918_webdav_core(); rfc5397_webdav_principal(); + rfc4791_webdav_caldav(); println!("✅ SUCCESS 🌟🚀🥳🙏🥹"); } @@ -549,7 +550,56 @@ fn rfc5397_webdav_principal() { .expect("test fully run") } -// @TODO CALDAV -// @TODO find calendar-home-set +fn rfc4791_webdav_caldav() { + println!("🧪 rfc4791_webdav_caldav"); + common::aerogramme_provider_daemon_dev(|_imap, _lmtp, http| { + // Check calendar discovery from principal + let propfind_req = r#" + + + "#; + + let body = http + .request( + reqwest::Method::from_bytes(b"PROPFIND")?, + "http://localhost:8087/alice/", + ) + .body(propfind_req) + .send()? + .text()?; + let multistatus = dav_deserialize::>(&body); + let principal_propstats = multistatus + .responses + .iter() + .find_map(|v| match &v.status_or_propstat { + dav::StatusOrPropstat::PropStat(dav::Href(p), x) if p.as_str() == "/alice/" => { + Some(x) + } + _ => None, + }) + .expect("propstats for root must exist"); + let principal_success = principal_propstats + .iter() + .find(|p| p.status.0.as_u16() == 200) + .expect("current-user-principal must exist"); + let calendar_home_set = principal_success + .prop + .0 + .iter() + .find_map(|v| match v { + dav::AnyProperty::Value(dav::Property::Extension(realization::Property::Cal( + cal::Property::CalendarHomeSet(dav::Href(x)), + ))) => Some(x), + _ => None, + }) + .expect("request returns a calendar home set"); + assert_eq!(calendar_home_set, "/alice/calendar/"); + + + + Ok(()) + }) + .expect("test fully run") +} // @TODO SYNC -- cgit v1.2.3 From a859fe38b1044c576f042254a0f9677054b417a0 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Thu, 23 May 2024 08:55:53 +0200 Subject: test calendar-query vevent filtering --- aero-dav/src/caldecoder.rs | 11 ++++ aero-dav/src/xml.rs | 6 +- aerogramme/tests/behavior.rs | 124 +++++++++++++++++++++++++++++++++++ aerogramme/tests/common/constants.rs | 34 ++++++++++ 4 files changed, 174 insertions(+), 1 deletion(-) diff --git a/aero-dav/src/caldecoder.rs b/aero-dav/src/caldecoder.rs index 02991c2..6bc911f 100644 --- a/aero-dav/src/caldecoder.rs +++ b/aero-dav/src/caldecoder.rs @@ -973,6 +973,17 @@ mod tests { rdr.find().await.unwrap() } + #[tokio::test] + async fn simple_comp_filter() { + let expected = CompFilter { + name: Component::VEvent, + additional_rules: None, + }; + let src = r#""#; + let got = deserialize::(src).await; + assert_eq!(got, expected); + } + #[tokio::test] async fn basic_mkcalendar() { let expected = MkCalendar(dav::Set(dav::PropValue(vec![dav::Property::DisplayName( diff --git a/aero-dav/src/xml.rs b/aero-dav/src/xml.rs index c89f531..e59f136 100644 --- a/aero-dav/src/xml.rs +++ 
b/aero-dav/src/xml.rs @@ -229,7 +229,10 @@ impl Reader { } pub async fn maybe_find>(&mut self) -> Result, ParsingError> { - self.ensure_parent_has_child()?; + // We can't find anything inside a self-closed tag + if !self.parent_has_child() { + return Ok(None); + } loop { // Try parse @@ -238,6 +241,7 @@ impl Reader { otherwise => return otherwise.map(Some), } + // Skip or stop match self.peek() { Event::End(_) => return Ok(None), _ => self.skip().await?, diff --git a/aerogramme/tests/behavior.rs b/aerogramme/tests/behavior.rs index d13e556..975dae9 100644 --- a/aerogramme/tests/behavior.rs +++ b/aerogramme/tests/behavior.rs @@ -553,6 +553,45 @@ fn rfc5397_webdav_principal() { fn rfc4791_webdav_caldav() { println!("🧪 rfc4791_webdav_caldav"); common::aerogramme_provider_daemon_dev(|_imap, _lmtp, http| { + // --- INITIAL TEST SETUP --- + // Add entries (3 VEVENT, 1 FREEBUSY, 1 VTODO) + let resp = http + .put("http://localhost:8087/alice/calendar/Personal/rfc1.ics") + .header("If-None-Match", "*") + .body(ICAL_RFC1) + .send()?; + let obj1_etag = resp.headers().get("etag").expect("etag must be set"); + assert_eq!(resp.status(), 201); + let resp = http + .put("http://localhost:8087/alice/calendar/Personal/rfc2.ics") + .header("If-None-Match", "*") + .body(ICAL_RFC2) + .send()?; + let obj2_etag = resp.headers().get("etag").expect("etag must be set"); + assert_eq!(resp.status(), 201); + let resp = http + .put("http://localhost:8087/alice/calendar/Personal/rfc3.ics") + .header("If-None-Match", "*") + .body(ICAL_RFC3) + .send()?; + let obj3_etag = resp.headers().get("etag").expect("etag must be set"); + assert_eq!(resp.status(), 201); + let resp = http + .put("http://localhost:8087/alice/calendar/Personal/rfc4.ics") + .header("If-None-Match", "*") + .body(ICAL_RFC4) + .send()?; + let obj4_etag = resp.headers().get("etag").expect("etag must be set"); + assert_eq!(resp.status(), 201); + let resp = http + .put("http://localhost:8087/alice/calendar/Personal/rfc5.ics") + .header("If-None-Match", "*") + .body(ICAL_RFC5) + .send()?; + let obj5_etag = resp.headers().get("etag").expect("etag must be set"); + assert_eq!(resp.status(), 201); + + // --- AUTODISCOVERY --- // Check calendar discovery from principal let propfind_req = r#" @@ -595,7 +634,92 @@ fn rfc4791_webdav_caldav() { .expect("request returns a calendar home set"); assert_eq!(calendar_home_set, "/alice/calendar/"); + // Check calendar access support + let resp = http + .request( + reqwest::Method::from_bytes(b"OPTIONS")?, + "http://localhost:8087/alice/calendar/", + ) + .send()?; + //@FIXME not yet supported. returns DAV: 1 ; expects DAV: 1 calendar-access + + //@FIXME missing support for calendar-data... + //println!("{:?}", resp); + + // --- REPORT calendar-query --- + // 7.8.8. 
Example: Retrieval of Events Only + let cal_query = r#" + + + + + + + + + + + "#; + let resp = http + .request( + reqwest::Method::from_bytes(b"REPORT")?, + "http://localhost:8087/alice/calendar/Personal/", + ) + .body(cal_query) + .send()?; + assert_eq!(resp.status(), 207); + let multistatus = dav_deserialize::>(&resp.text()?); + assert_eq!(multistatus.responses.len(), 3); + [ + ("/alice/calendar/Personal/rfc1.ics", obj1_etag, ICAL_RFC1), + ("/alice/calendar/Personal/rfc2.ics", obj2_etag, ICAL_RFC2), + ("/alice/calendar/Personal/rfc3.ics", obj3_etag, ICAL_RFC3), + ] + .iter() + .for_each(|(ref_path, ref_etag, ref_ical)| { + let obj_stats = multistatus + .responses + .iter() + .find_map(|v| match &v.status_or_propstat { + dav::StatusOrPropstat::PropStat(dav::Href(p), x) if p.as_str() == *ref_path => { + Some(x) + } + _ => None, + }) + .expect("propstats must exist"); + let obj_success = obj_stats + .iter() + .find(|p| p.status.0.as_u16() == 200) + .expect("some propstats must be 200"); + let etag = obj_success + .prop + .0 + .iter() + .find_map(|p| match p { + dav::AnyProperty::Value(dav::Property::GetEtag(x)) => Some(x), + _ => None, + }) + .expect("etag is return in propstats"); + assert_eq!( + etag.as_str(), + ref_etag + .to_str() + .expect("header value is convertible to string") + ); + let calendar_data = obj_success + .prop + .0 + .iter() + .find_map(|p| match p { + dav::AnyProperty::Value(dav::Property::Extension( + realization::Property::Cal(cal::Property::CalendarData(x)), + )) => Some(x), + _ => None, + }) + .expect("calendar data is returned in propstats"); + assert_eq!(calendar_data.payload.as_bytes(), *ref_ical); + }); Ok(()) }) diff --git a/aerogramme/tests/common/constants.rs b/aerogramme/tests/common/constants.rs index 6b17c4f..8874876 100644 --- a/aerogramme/tests/common/constants.rs +++ b/aerogramme/tests/common/constants.rs @@ -124,3 +124,37 @@ UID:DC6C50A017428C5216A2F1CD@example.com END:VEVENT END:VCALENDAR "; + +pub static ICAL_RFC4: &[u8] = br#"BEGIN:VCALENDAR +VERSION:2.0 +PRODID:-//Example Corp.//CalDAV Client//EN +BEGIN:VFREEBUSY +ORGANIZER;CN="Bernard Desruisseaux":mailto:bernard@example.com +UID:76ef34-54a3d2@example.com +DTSTAMP:20050530T123421Z +DTSTART:20060101T000000Z +DTEND:20060108T000000Z +FREEBUSY:20050531T230000Z/20050601T010000Z +FREEBUSY;FBTYPE=BUSY-TENTATIVE:20060102T100000Z/20060102T120000Z +FREEBUSY:20060103T100000Z/20060103T120000Z +FREEBUSY:20060104T100000Z/20060104T120000Z +FREEBUSY;FBTYPE=BUSY-UNAVAILABLE:20060105T100000Z/20060105T120000Z +FREEBUSY:20060106T100000Z/20060106T120000Z +END:VFREEBUSY +END:VCALENDAR +"#; + +pub static ICAL_RFC5: &[u8] = br#"BEGIN:VCALENDAR +VERSION:2.0 +PRODID:-//Example Corp.//CalDAV Client//EN +BEGIN:VTODO +DTSTAMP:20060205T235600Z +DUE;VALUE=DATE:20060101 +LAST-MODIFIED:20060205T235308Z +SEQUENCE:1 +STATUS:CANCELLED +SUMMARY:Task #4 +UID:E10BA47467C5C69BB74E8725@example.com +END:VTODO +END:VCALENDAR +"#; -- cgit v1.2.3 From 7687065bfc824127fda657363894a30268e95385 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Thu, 23 May 2024 09:24:06 +0200 Subject: test calendar-multiget --- aerogramme/tests/behavior.rs | 139 ++++++++++++++++++++++++++++--------------- 1 file changed, 92 insertions(+), 47 deletions(-) diff --git a/aerogramme/tests/behavior.rs b/aerogramme/tests/behavior.rs index 975dae9..a83f1a7 100644 --- a/aerogramme/tests/behavior.rs +++ b/aerogramme/tests/behavior.rs @@ -581,16 +581,60 @@ fn rfc4791_webdav_caldav() { .header("If-None-Match", "*") .body(ICAL_RFC4) .send()?; - let obj4_etag = 
resp.headers().get("etag").expect("etag must be set"); + let _obj4_etag = resp.headers().get("etag").expect("etag must be set"); assert_eq!(resp.status(), 201); let resp = http .put("http://localhost:8087/alice/calendar/Personal/rfc5.ics") .header("If-None-Match", "*") .body(ICAL_RFC5) .send()?; - let obj5_etag = resp.headers().get("etag").expect("etag must be set"); + let _obj5_etag = resp.headers().get("etag").expect("etag must be set"); assert_eq!(resp.status(), 201); + // A generic function to check a query result + let check_full_cal = + |multistatus: &dav::Multistatus, + (ref_path, ref_etag, ref_ical): (&str, &str, &[u8])| { + let obj_stats = multistatus + .responses + .iter() + .find_map(|v| match &v.status_or_propstat { + dav::StatusOrPropstat::PropStat(dav::Href(p), x) + if p.as_str() == ref_path => + { + Some(x) + } + _ => None, + }) + .expect("propstats must exist"); + let obj_success = obj_stats + .iter() + .find(|p| p.status.0.as_u16() == 200) + .expect("some propstats must be 200"); + let etag = obj_success + .prop + .0 + .iter() + .find_map(|p| match p { + dav::AnyProperty::Value(dav::Property::GetEtag(x)) => Some(x), + _ => None, + }) + .expect("etag is return in propstats"); + assert_eq!(etag.as_str(), ref_etag); + let calendar_data = obj_success + .prop + .0 + .iter() + .find_map(|p| match p { + dav::AnyProperty::Value(dav::Property::Extension( + realization::Property::Cal(cal::Property::CalendarData(x)), + )) => Some(x), + _ => None, + }) + .expect("calendar data is returned in propstats"); + assert_eq!(calendar_data.payload.as_bytes(), ref_ical); + }; + // --- AUTODISCOVERY --- // Check calendar discovery from principal let propfind_req = r#" @@ -635,7 +679,7 @@ fn rfc4791_webdav_caldav() { assert_eq!(calendar_home_set, "/alice/calendar/"); // Check calendar access support - let resp = http + let _resp = http .request( reqwest::Method::from_bytes(b"OPTIONS")?, "http://localhost:8087/alice/calendar/", @@ -643,10 +687,8 @@ fn rfc4791_webdav_caldav() { .send()?; //@FIXME not yet supported. returns DAV: 1 ; expects DAV: 1 calendar-access - //@FIXME missing support for calendar-data... - //println!("{:?}", resp); - // --- REPORT calendar-query --- + //@FIXME missing support for calendar-data... // 7.8.8. 
Example: Retrieval of Events Only let cal_query = r#" @@ -678,47 +720,50 @@ fn rfc4791_webdav_caldav() { ] .iter() .for_each(|(ref_path, ref_etag, ref_ical)| { - let obj_stats = multistatus - .responses - .iter() - .find_map(|v| match &v.status_or_propstat { - dav::StatusOrPropstat::PropStat(dav::Href(p), x) if p.as_str() == *ref_path => { - Some(x) - } - _ => None, - }) - .expect("propstats must exist"); - let obj_success = obj_stats - .iter() - .find(|p| p.status.0.as_u16() == 200) - .expect("some propstats must be 200"); - let etag = obj_success - .prop - .0 - .iter() - .find_map(|p| match p { - dav::AnyProperty::Value(dav::Property::GetEtag(x)) => Some(x), - _ => None, - }) - .expect("etag is return in propstats"); - assert_eq!( - etag.as_str(), - ref_etag - .to_str() - .expect("header value is convertible to string") - ); - let calendar_data = obj_success - .prop - .0 - .iter() - .find_map(|p| match p { - dav::AnyProperty::Value(dav::Property::Extension( - realization::Property::Cal(cal::Property::CalendarData(x)), - )) => Some(x), - _ => None, - }) - .expect("calendar data is returned in propstats"); - assert_eq!(calendar_data.payload.as_bytes(), *ref_ical); + check_full_cal( + &multistatus, + ( + ref_path, + ref_etag.to_str().expect("etag header convertible to str"), + ref_ical, + ), + ) + }); + + // --- REPORT calendar-multiget --- + let cal_query = r#" + + + + + + /alice/calendar/Personal/rfc1.ics + /alice/calendar/Personal/rfc3.ics + "#; + let resp = http + .request( + reqwest::Method::from_bytes(b"REPORT")?, + "http://localhost:8087/alice/calendar/Personal/", + ) + .body(cal_query) + .send()?; + assert_eq!(resp.status(), 207); + let multistatus = dav_deserialize::>(&resp.text()?); + assert_eq!(multistatus.responses.len(), 2); + [ + ("/alice/calendar/Personal/rfc1.ics", obj1_etag, ICAL_RFC1), + ("/alice/calendar/Personal/rfc3.ics", obj3_etag, ICAL_RFC3), + ] + .iter() + .for_each(|(ref_path, ref_etag, ref_ical)| { + check_full_cal( + &multistatus, + ( + ref_path, + ref_etag.to_str().expect("etag header convertible to str"), + ref_ical, + ), + ) }); Ok(()) -- cgit v1.2.3 From ff823a10f049e06c711537560ba10f3dc826afcd Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Thu, 23 May 2024 10:01:43 +0200 Subject: improve ical date parsing --- aero-dav/src/caldecoder.rs | 18 ++++++------- aero-dav/src/calencoder.rs | 24 ++++++++--------- aero-dav/src/caltypes.rs | 4 +-- aero-proto/src/dav/controller.rs | 30 ++++++++++++--------- aerogramme/tests/behavior.rs | 57 +++++++++++++++++++++++++++++----------- 5 files changed, 82 insertions(+), 51 deletions(-) diff --git a/aero-dav/src/caldecoder.rs b/aero-dav/src/caldecoder.rs index 6bc911f..7de5e2a 100644 --- a/aero-dav/src/caldecoder.rs +++ b/aero-dav/src/caldecoder.rs @@ -287,7 +287,7 @@ impl QRead for Property { .is_some() { let dtstr = xml.tag_string().await?; - let dt = NaiveDateTime::parse_from_str(dtstr.as_str(), CALDAV_DATETIME_FMT)?.and_utc(); + let dt = NaiveDateTime::parse_from_str(dtstr.as_str(), UTC_DATETIME_FMT)?.and_utc(); xml.close().await?; return Ok(Property::MaxDateTime(dt)); } @@ -653,8 +653,8 @@ impl QRead for Expand { _ => return Err(ParsingError::MissingAttribute), }; - let start = NaiveDateTime::parse_from_str(rstart.as_str(), CALDAV_DATETIME_FMT)?.and_utc(); - let end = NaiveDateTime::parse_from_str(rend.as_str(), CALDAV_DATETIME_FMT)?.and_utc(); + let start = NaiveDateTime::parse_from_str(rstart.as_str(), UTC_DATETIME_FMT)?.and_utc(); + let end = NaiveDateTime::parse_from_str(rend.as_str(), 
UTC_DATETIME_FMT)?.and_utc(); if start > end { return Err(ParsingError::InvalidValue); } @@ -672,8 +672,8 @@ impl QRead for LimitRecurrenceSet { _ => return Err(ParsingError::MissingAttribute), }; - let start = NaiveDateTime::parse_from_str(rstart.as_str(), CALDAV_DATETIME_FMT)?.and_utc(); - let end = NaiveDateTime::parse_from_str(rend.as_str(), CALDAV_DATETIME_FMT)?.and_utc(); + let start = NaiveDateTime::parse_from_str(rstart.as_str(), UTC_DATETIME_FMT)?.and_utc(); + let end = NaiveDateTime::parse_from_str(rend.as_str(), UTC_DATETIME_FMT)?.and_utc(); if start > end { return Err(ParsingError::InvalidValue); } @@ -691,8 +691,8 @@ impl QRead for LimitFreebusySet { _ => return Err(ParsingError::MissingAttribute), }; - let start = NaiveDateTime::parse_from_str(rstart.as_str(), CALDAV_DATETIME_FMT)?.and_utc(); - let end = NaiveDateTime::parse_from_str(rend.as_str(), CALDAV_DATETIME_FMT)?.and_utc(); + let start = NaiveDateTime::parse_from_str(rstart.as_str(), UTC_DATETIME_FMT)?.and_utc(); + let end = NaiveDateTime::parse_from_str(rend.as_str(), UTC_DATETIME_FMT)?.and_utc(); if start > end { return Err(ParsingError::InvalidValue); } @@ -918,13 +918,13 @@ impl QRead for TimeRange { let start = match xml.prev_attr("start") { Some(r) => { - Some(NaiveDateTime::parse_from_str(r.as_str(), CALDAV_DATETIME_FMT)?.and_utc()) + Some(NaiveDateTime::parse_from_str(r.as_str(), UTC_DATETIME_FMT)?.and_utc()) } _ => None, }; let end = match xml.prev_attr("end") { Some(r) => { - Some(NaiveDateTime::parse_from_str(r.as_str(), CALDAV_DATETIME_FMT)?.and_utc()) + Some(NaiveDateTime::parse_from_str(r.as_str(), UTC_DATETIME_FMT)?.and_utc()) } _ => None, }; diff --git a/aero-dav/src/calencoder.rs b/aero-dav/src/calencoder.rs index f145628..d5d4305 100644 --- a/aero-dav/src/calencoder.rs +++ b/aero-dav/src/calencoder.rs @@ -178,7 +178,7 @@ impl QWrite for Property { let start = xml.create_cal_element("min-date-time"); let end = start.to_end(); - let dtstr = format!("{}", dt.format(CALDAV_DATETIME_FMT)); + let dtstr = format!("{}", dt.format(UTC_DATETIME_FMT)); xml.q.write_event_async(Event::Start(start.clone())).await?; xml.q .write_event_async(Event::Text(BytesText::new(dtstr.as_str()))) @@ -189,7 +189,7 @@ impl QWrite for Property { let start = xml.create_cal_element("max-date-time"); let end = start.to_end(); - let dtstr = format!("{}", dt.format(CALDAV_DATETIME_FMT)); + let dtstr = format!("{}", dt.format(UTC_DATETIME_FMT)); xml.q.write_event_async(Event::Start(start.clone())).await?; xml.q .write_event_async(Event::Text(BytesText::new(dtstr.as_str()))) @@ -493,11 +493,11 @@ impl QWrite for Expand { let mut empty = xml.create_cal_element("expand"); empty.push_attribute(( "start", - format!("{}", self.0.format(CALDAV_DATETIME_FMT)).as_str(), + format!("{}", self.0.format(UTC_DATETIME_FMT)).as_str(), )); empty.push_attribute(( "end", - format!("{}", self.1.format(CALDAV_DATETIME_FMT)).as_str(), + format!("{}", self.1.format(UTC_DATETIME_FMT)).as_str(), )); xml.q.write_event_async(Event::Empty(empty)).await } @@ -508,11 +508,11 @@ impl QWrite for LimitRecurrenceSet { let mut empty = xml.create_cal_element("limit-recurrence-set"); empty.push_attribute(( "start", - format!("{}", self.0.format(CALDAV_DATETIME_FMT)).as_str(), + format!("{}", self.0.format(UTC_DATETIME_FMT)).as_str(), )); empty.push_attribute(( "end", - format!("{}", self.1.format(CALDAV_DATETIME_FMT)).as_str(), + format!("{}", self.1.format(UTC_DATETIME_FMT)).as_str(), )); xml.q.write_event_async(Event::Empty(empty)).await } @@ -523,11 +523,11 @@ impl 
QWrite for LimitFreebusySet { let mut empty = xml.create_cal_element("limit-freebusy-set"); empty.push_attribute(( "start", - format!("{}", self.0.format(CALDAV_DATETIME_FMT)).as_str(), + format!("{}", self.0.format(UTC_DATETIME_FMT)).as_str(), )); empty.push_attribute(( "end", - format!("{}", self.1.format(CALDAV_DATETIME_FMT)).as_str(), + format!("{}", self.1.format(UTC_DATETIME_FMT)).as_str(), )); xml.q.write_event_async(Event::Empty(empty)).await } @@ -737,20 +737,20 @@ impl QWrite for TimeRange { match self { Self::OnlyStart(start) => empty.push_attribute(( "start", - format!("{}", start.format(CALDAV_DATETIME_FMT)).as_str(), + format!("{}", start.format(UTC_DATETIME_FMT)).as_str(), )), Self::OnlyEnd(end) => empty.push_attribute(( "end", - format!("{}", end.format(CALDAV_DATETIME_FMT)).as_str(), + format!("{}", end.format(UTC_DATETIME_FMT)).as_str(), )), Self::FullRange(start, end) => { empty.push_attribute(( "start", - format!("{}", start.format(CALDAV_DATETIME_FMT)).as_str(), + format!("{}", start.format(UTC_DATETIME_FMT)).as_str(), )); empty.push_attribute(( "end", - format!("{}", end.format(CALDAV_DATETIME_FMT)).as_str(), + format!("{}", end.format(UTC_DATETIME_FMT)).as_str(), )); } } diff --git a/aero-dav/src/caltypes.rs b/aero-dav/src/caltypes.rs index 924b651..50cdb92 100644 --- a/aero-dav/src/caltypes.rs +++ b/aero-dav/src/caltypes.rs @@ -3,8 +3,8 @@ use super::types as dav; use chrono::{DateTime, Utc}; -pub const ICAL_DATETIME_FMT: &str = "%Y%m%dT%H%M%S"; -pub const CALDAV_DATETIME_FMT: &str = "%Y%m%dT%H%M%SZ"; +pub const FLOATING_DATETIME_FMT: &str = "%Y%m%dT%H%M%S"; +pub const UTC_DATETIME_FMT: &str = "%Y%m%dT%H%M%SZ"; //@FIXME ACL (rfc3744) is missing, required //@FIXME Versioning (rfc3253) is missing, required diff --git a/aero-proto/src/dav/controller.rs b/aero-proto/src/dav/controller.rs index 0a47cf4..4cf520e 100644 --- a/aero-proto/src/dav/controller.rs +++ b/aero-proto/src/dav/controller.rs @@ -380,6 +380,22 @@ fn apply_filter<'a>( }) } +fn ical_parse_date(dt: &str) -> Option> { + tracing::trace!(raw_time = dt, "VEVENT raw time"); + let tmpl = match dt.chars().last() { + Some('Z') => cal::UTC_DATETIME_FMT, + Some(_) => { + tracing::warn!(raw_time=dt, "floating datetime is not properly supported yet"); + cal::FLOATING_DATETIME_FMT + }, + None => return None + }; + + NaiveDateTime::parse_from_str(dt, tmpl) + .ok() + .map(|v| v.and_utc()) +} + fn prop_date( properties: &[icalendar::parser::Property], name: &str, @@ -388,12 +404,7 @@ fn prop_date( .iter() .find(|candidate| candidate.name.as_str() == name) .map(|p| p.val.as_str()) - .map(|raw_time| { - tracing::trace!(raw_time = raw_time, "VEVENT raw time"); - NaiveDateTime::parse_from_str(raw_time, cal::ICAL_DATETIME_FMT) - .ok() - .map(|v| v.and_utc()) - }) + .map(ical_parse_date) .flatten() } @@ -412,12 +423,7 @@ fn is_properties_match(props: &[icalendar::parser::Property], filters: &[cal::Pr // check value match &pattern.time_or_text { Some(cal::TimeOrText::Time(time_range)) => { - let maybe_parsed_date = NaiveDateTime::parse_from_str( - prop.val.as_str(), - cal::ICAL_DATETIME_FMT, - ) - .ok() - .map(|v| v.and_utc()); + let maybe_parsed_date = ical_parse_date(prop.val.as_str()); let parsed_date = match maybe_parsed_date { None => return false, diff --git a/aerogramme/tests/behavior.rs b/aerogramme/tests/behavior.rs index a83f1a7..b6c1c6e 100644 --- a/aerogramme/tests/behavior.rs +++ b/aerogramme/tests/behavior.rs @@ -592,9 +592,9 @@ fn rfc4791_webdav_caldav() { assert_eq!(resp.status(), 201); // A generic function to 
check a query result - let check_full_cal = + let check_cal = |multistatus: &dav::Multistatus, - (ref_path, ref_etag, ref_ical): (&str, &str, &[u8])| { + (ref_path, ref_etag, ref_ical): (&str, Option<&str>, Option<&[u8]>)| { let obj_stats = multistatus .responses .iter() @@ -616,11 +616,10 @@ fn rfc4791_webdav_caldav() { .0 .iter() .find_map(|p| match p { - dav::AnyProperty::Value(dav::Property::GetEtag(x)) => Some(x), + dav::AnyProperty::Value(dav::Property::GetEtag(x)) => Some(x.as_str()), _ => None, - }) - .expect("etag is return in propstats"); - assert_eq!(etag.as_str(), ref_etag); + }); + assert_eq!(etag, ref_etag); let calendar_data = obj_success .prop .0 @@ -628,11 +627,10 @@ fn rfc4791_webdav_caldav() { .find_map(|p| match p { dav::AnyProperty::Value(dav::Property::Extension( realization::Property::Cal(cal::Property::CalendarData(x)), - )) => Some(x), + )) => Some(x.payload.as_bytes()), _ => None, - }) - .expect("calendar data is returned in propstats"); - assert_eq!(calendar_data.payload.as_bytes(), ref_ical); + }); + assert_eq!(calendar_data, ref_ical); }; // --- AUTODISCOVERY --- @@ -720,16 +718,43 @@ fn rfc4791_webdav_caldav() { ] .iter() .for_each(|(ref_path, ref_etag, ref_ical)| { - check_full_cal( + check_cal( &multistatus, ( ref_path, - ref_etag.to_str().expect("etag header convertible to str"), - ref_ical, + Some(ref_etag.to_str().expect("etag header convertible to str")), + Some(ref_ical), ), ) }); + // 8.2.1.2. Synchronize by Time Range (here: July 2006) + let cal_query = r#" + + + + + + + + + + + + "#; + let resp = http + .request( + reqwest::Method::from_bytes(b"REPORT")?, + "http://localhost:8087/alice/calendar/Personal/", + ) + .body(cal_query) + .send()?; + assert_eq!(resp.status(), 207); + let multistatus = dav_deserialize::>(&resp.text()?); + assert_eq!(multistatus.responses.len(), 1); + check_cal(&multistatus, ("/alice/calendar/Personal/rfc2.ics", Some(obj2_etag.to_str().expect("etag header convertible to str")), None)); + + // --- REPORT calendar-multiget --- let cal_query = r#" @@ -756,12 +781,12 @@ fn rfc4791_webdav_caldav() { ] .iter() .for_each(|(ref_path, ref_etag, ref_ical)| { - check_full_cal( + check_cal( &multistatus, ( ref_path, - ref_etag.to_str().expect("etag header convertible to str"), - ref_ical, + Some(ref_etag.to_str().expect("etag header convertible to str")), + Some(ref_ical), ), ) }); -- cgit v1.2.3 From 52f870633c2cab8a4aeeec74792774931139b8b5 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Sat, 25 May 2024 19:30:59 +0200 Subject: add a new aero-ical module --- Cargo.lock | 16 ++- Cargo.toml | 1 + aero-ical/Cargo.toml | 15 +++ aero-ical/src/lib.rs | 8 ++ aero-ical/src/parser.rs | 138 +++++++++++++++++++ aero-ical/src/query.rs | 280 +++++++++++++++++++++++++++++++++++++++ aero-proto/Cargo.toml | 1 + aero-proto/src/dav/controller.rs | 183 +------------------------ aerogramme/tests/behavior.rs | 40 +++++- 9 files changed, 500 insertions(+), 182 deletions(-) create mode 100644 aero-ical/Cargo.toml create mode 100644 aero-ical/src/lib.rs create mode 100644 aero-ical/src/parser.rs create mode 100644 aero-ical/src/query.rs diff --git a/Cargo.lock b/Cargo.lock index 9f8ccb6..d22a5fc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -72,12 +72,24 @@ dependencies = [ "tokio", ] +[[package]] +name = "aero-ical" +version = "0.3.0" +dependencies = [ + "aero-dav", + "chrono", + "icalendar", + "nom 7.1.3", + "tracing", +] + [[package]] name = "aero-proto" version = "0.3.0" dependencies = [ "aero-collections", "aero-dav", + "aero-ical", "aero-sasl", 
"aero-user", "anyhow", @@ -1110,9 +1122,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chrono" -version = "0.4.35" +version = "0.4.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8eaf5903dcbc0a39312feb77df2ff4c76387d591b9fc7b04a238dcf8bb62639a" +checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" dependencies = [ "android-tzdata", "iana-time-zone", diff --git a/Cargo.toml b/Cargo.toml index 0ee7889..91c6413 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -19,6 +19,7 @@ aero-user = { version = "0.3.0", path = "aero-user" } aero-bayou = { version = "0.3.0", path = "aero-bayou" } aero-sasl = { version = "0.3.0", path = "aero-sasl" } aero-dav = { version = "0.3.0", path = "aero-dav" } +aero-ical = { version = "0.3.0", path = "aero-ical" } aero-collections = { version = "0.3.0", path = "aero-collections" } aero-proto = { version = "0.3.0", path = "aero-proto" } aerogramme = { version = "0.3.0", path = "aerogramme" } diff --git a/aero-ical/Cargo.toml b/aero-ical/Cargo.toml new file mode 100644 index 0000000..6cfe882 --- /dev/null +++ b/aero-ical/Cargo.toml @@ -0,0 +1,15 @@ +[package] +name = "aero-ical" +version = "0.3.0" +authors = ["Alex Auvolat ", "Quentin Dufour "] +edition = "2021" +license = "EUPL-1.2" +description = "An iCalendar parser" + +[dependencies] +aero-dav.workspace = true + +icalendar.workspace = true +nom.workspace = true +chrono.workspace = true +tracing.workspace = true diff --git a/aero-ical/src/lib.rs b/aero-ical/src/lib.rs new file mode 100644 index 0000000..808c885 --- /dev/null +++ b/aero-ical/src/lib.rs @@ -0,0 +1,8 @@ +/// The iCalendar module is not yet properly rewritten +/// Instead we heavily rely on the icalendar library +/// However, for many reason, it's not satisfying: +/// the goal will be to rewrite it in the end so it better +/// integrates into Aerogramme + +pub mod parser; +pub mod query; diff --git a/aero-ical/src/parser.rs b/aero-ical/src/parser.rs new file mode 100644 index 0000000..4354737 --- /dev/null +++ b/aero-ical/src/parser.rs @@ -0,0 +1,138 @@ +use chrono::TimeDelta; + +use nom::IResult; +use nom::branch::alt; +use nom::bytes::complete::{tag, tag_no_case}; +use nom::combinator::{value, opt, map, map_opt}; +use nom::sequence::{pair, tuple}; +use nom::character::complete as nomchar; + +use aero_dav::caltypes as cal; + +//@FIXME too simple, we have 4 cases in practices: +// - floating datetime +// - floating datetime with a tzid as param so convertible to tz datetime +// - utc datetime +// - floating(?) 
date (without time) +pub fn date_time(dt: &str) -> Option> { + tracing::trace!(raw_time = dt, "VEVENT raw time"); + let tmpl = match dt.chars().last() { + Some('Z') => cal::UTC_DATETIME_FMT, + Some(_) => { + tracing::warn!(raw_time=dt, "floating datetime is not properly supported yet"); + cal::FLOATING_DATETIME_FMT + }, + None => return None + }; + + chrono::NaiveDateTime::parse_from_str(dt, tmpl) + .ok() + .map(|v| v.and_utc()) +} + +/// RFC3389 Duration Value +/// +/// ```abnf +/// dur-value = (["+"] / "-") "P" (dur-date / dur-time / dur-week) +/// dur-date = dur-day [dur-time] +/// dur-time = "T" (dur-hour / dur-minute / dur-second) +/// dur-week = 1*DIGIT "W" +/// dur-hour = 1*DIGIT "H" [dur-minute] +/// dur-minute = 1*DIGIT "M" [dur-second] +/// dur-second = 1*DIGIT "S" +/// dur-day = 1*DIGIT "D" +/// ``` +pub fn dur_value(text: &str) -> IResult<&str, TimeDelta> { + map_opt(tuple(( + dur_sign, + tag_no_case("P"), + alt(( + dur_date, + dur_time, + dur_week, + )) + )), |(sign, _, delta)| { + delta.checked_mul(sign) + })(text) +} + +fn dur_sign(text: &str) -> IResult<&str, i32> { + map(opt(alt((value(1, tag("+")), value(-1, tag("-"))))), |x| x.unwrap_or(1))(text) +} +fn dur_date(text: &str) -> IResult<&str, TimeDelta> { + map(pair(dur_day, opt(dur_time)), |(day, time)| day + time.unwrap_or(TimeDelta::zero()))(text) +} +fn dur_time(text: &str) -> IResult<&str, TimeDelta> { + map(pair(tag_no_case("T"), alt((dur_hour, dur_minute, dur_second))), |(_, x)| x)(text) +} +fn dur_week(text: &str) -> IResult<&str, TimeDelta> { + map_opt(pair(nomchar::i64, tag_no_case("W")), |(i, _)| TimeDelta::try_weeks(i))(text) +} +fn dur_day(text: &str) -> IResult<&str, TimeDelta> { + map_opt(pair(nomchar::i64, tag_no_case("D")), |(i, _)| TimeDelta::try_days(i))(text) +} +fn dur_hour(text: &str) -> IResult<&str, TimeDelta> { + map_opt(tuple((nomchar::i64, tag_no_case("H"), opt(dur_minute))), |(i, _, mm)| { + TimeDelta::try_hours(i).map(|hours| hours + mm.unwrap_or(TimeDelta::zero())) + })(text) +} +fn dur_minute(text: &str) -> IResult<&str, TimeDelta> { + map_opt(tuple((nomchar::i64, tag_no_case("M"), opt(dur_second))), |(i, _, ms)| { + TimeDelta::try_minutes(i).map(|min| min + ms.unwrap_or(TimeDelta::zero())) + })(text) +} +fn dur_second(text: &str) -> IResult<&str, TimeDelta> { + map_opt(pair(nomchar::i64, tag_no_case("S")), |(i, _)| TimeDelta::try_seconds(i))(text) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn rfc5545_example1() { + // A duration of 15 days, 5 hours, and 20 seconds would be: + let to_parse = "P15DT5H0M20S"; + let (_, time_delta) = dur_value(to_parse).unwrap(); + assert_eq!( + time_delta, + TimeDelta::try_days(15).unwrap() + TimeDelta::try_hours(5).unwrap() + TimeDelta::try_seconds(20).unwrap()); + } + + #[test] + fn rfc5545_example2() { + // A duration of 7 weeks would be: + let to_parse = "P7W"; + let (_, time_delta) = dur_value(to_parse).unwrap(); + assert_eq!( + time_delta, + TimeDelta::try_weeks(7).unwrap() + ); + } + + #[test] + fn rfc4791_example1() { + // 10 minutes before + let to_parse = "-PT10M"; + + let (_, time_delta) = dur_value(to_parse).unwrap(); + assert_eq!( + time_delta, + TimeDelta::try_minutes(-10).unwrap() + ); + } + + + #[test] + fn ical_org_example1() { + // The following example is for a "VALARM" calendar component that specifies an email alarm + // that will trigger 2 days before the scheduled due DATE-TIME of a to-do with which it is associated. 
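Duration values such as this one are only part of the datetime story: the @FIXME at the top of this new parser module lists four shapes (floating, floating with a TZID parameter, UTC, and date-only), and date_time() currently treats everything that is not UTC as floating. A minimal sketch of how the TZID-qualified case could be layered on top, assuming the chrono-tz crate (which is not a dependency of this series) and a hypothetical date_time_with_tzid helper:

use aero_dav::caltypes as cal;
use chrono::TimeZone;

// Hypothetical helper, not part of the patch: resolve a DATE-TIME value,
// optionally qualified by a TZID property parameter, to UTC.
fn date_time_with_tzid(dt: &str, tzid: Option<&str>) -> Option<chrono::DateTime<chrono::Utc>> {
    if dt.ends_with('Z') {
        // UTC form, same path as the existing date_time()
        return chrono::NaiveDateTime::parse_from_str(dt, cal::UTC_DATETIME_FMT)
            .ok()
            .map(|v| v.and_utc());
    }
    let naive = chrono::NaiveDateTime::parse_from_str(dt, cal::FLOATING_DATETIME_FMT).ok()?;
    match tzid {
        // floating datetime qualified by TZID: convert through the named zone
        Some(name) => {
            let tz: chrono_tz::Tz = name.parse().ok()?;
            tz.from_local_datetime(&naive)
                .single()
                .map(|local| local.with_timezone(&chrono::Utc))
        }
        // truly floating datetime: approximated as UTC, as the current code does
        None => Some(naive.and_utc()),
    }
}

The date-only form (the fourth case in the @FIXME) would still need separate handling.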
+ let to_parse = "-P2D"; + + let (_, time_delta) = dur_value(to_parse).unwrap(); + assert_eq!( + time_delta, + TimeDelta::try_days(-2).unwrap() + ); + } +} diff --git a/aero-ical/src/query.rs b/aero-ical/src/query.rs new file mode 100644 index 0000000..5d857bb --- /dev/null +++ b/aero-ical/src/query.rs @@ -0,0 +1,280 @@ +use aero_dav::caltypes as cal; +use crate::parser as parser; + +pub fn is_component_match( + parent: &icalendar::parser::Component, + components: &[icalendar::parser::Component], + filter: &cal::CompFilter, +) -> bool { + // Find the component among the list + let maybe_comp = components + .iter() + .find(|candidate| candidate.name.as_str() == filter.name.as_str()); + + // Filter according to rules + match (maybe_comp, &filter.additional_rules) { + (Some(_), None) => true, + (None, Some(cal::CompFilterRules::IsNotDefined)) => true, + (None, None) => false, + (Some(_), Some(cal::CompFilterRules::IsNotDefined)) => false, + (None, Some(cal::CompFilterRules::Matches(_))) => false, + (Some(component), Some(cal::CompFilterRules::Matches(matcher))) => { + // check time range + if let Some(time_range) = &matcher.time_range { + if !is_in_time_range(&filter.name, parent, component.properties.as_ref(), time_range) { + return false; + } + } + + // check properties + if !is_properties_match(component.properties.as_ref(), matcher.prop_filter.as_ref()) { + return false; + } + + // check inner components + matcher.comp_filter.iter().all(|inner_filter| { + is_component_match(component, component.components.as_ref(), &inner_filter) + }) + } + } +} + +fn prop_date( + properties: &[icalendar::parser::Property], + name: &str, +) -> Option> { + properties + .iter() + .find(|candidate| candidate.name.as_str() == name) + .map(|p| p.val.as_str()) + .map(parser::date_time) + .flatten() +} + +fn prop_parse( + properties: &[icalendar::parser::Property], + name: &str, +) -> Option { + properties + .iter() + .find(|candidate| candidate.name.as_str() == name) + .map(|p| p.val.as_str().parse::().ok()) + .flatten() +} + +fn is_properties_match(props: &[icalendar::parser::Property], filters: &[cal::PropFilter]) -> bool { + filters.iter().all(|single_filter| { + // Find the property + let single_prop = props + .iter() + .find(|candidate| candidate.name.as_str() == single_filter.name.0.as_str()); + match (&single_filter.additional_rules, single_prop) { + (None, Some(_)) | (Some(cal::PropFilterRules::IsNotDefined), None) => true, + (None, None) + | (Some(cal::PropFilterRules::IsNotDefined), Some(_)) + | (Some(cal::PropFilterRules::Match(_)), None) => false, + (Some(cal::PropFilterRules::Match(pattern)), Some(prop)) => { + // check value + match &pattern.time_or_text { + Some(cal::TimeOrText::Time(time_range)) => { + let maybe_parsed_date = parser::date_time(prop.val.as_str()); + + let parsed_date = match maybe_parsed_date { + None => return false, + Some(v) => v, + }; + + // see if entry is in range + let is_in_range = match time_range { + cal::TimeRange::OnlyStart(after) => &parsed_date >= after, + cal::TimeRange::OnlyEnd(before) => &parsed_date <= before, + cal::TimeRange::FullRange(after, before) => { + &parsed_date >= after && &parsed_date <= before + } + }; + if !is_in_range { + return false; + } + + // if you are here, this subcondition is valid + } + Some(cal::TimeOrText::Text(txt_match)) => { + //@FIXME ignoring collation + let is_match = match txt_match.negate_condition { + None | Some(false) => { + prop.val.as_str().contains(txt_match.text.as_str()) + } + Some(true) => 
!prop.val.as_str().contains(txt_match.text.as_str()), + }; + if !is_match { + return false; + } + } + None => (), // if not filter on value is set, continue + }; + + // check parameters + pattern.param_filter.iter().all(|single_param_filter| { + let maybe_param = prop.params.iter().find(|candidate| { + candidate.key.as_str() == single_param_filter.name.as_str() + }); + + match (maybe_param, &single_param_filter.additional_rules) { + (Some(_), None) => true, + (None, None) => false, + (Some(_), Some(cal::ParamFilterMatch::IsNotDefined)) => false, + (None, Some(cal::ParamFilterMatch::IsNotDefined)) => true, + (None, Some(cal::ParamFilterMatch::Match(_))) => false, + (Some(param), Some(cal::ParamFilterMatch::Match(txt_match))) => { + let param_val = match ¶m.val { + Some(v) => v, + None => return false, + }; + + match txt_match.negate_condition { + None | Some(false) => { + param_val.as_str().contains(txt_match.text.as_str()) + } + Some(true) => !param_val.as_str().contains(txt_match.text.as_str()), + } + } + } + }) + } + } + }) +} + +fn resolve_trigger( + parent: &icalendar::parser::Component, + properties: &[icalendar::parser::Property] +) -> Option> { + // A. Do we have a TRIGGER property? If not, returns early + let maybe_trigger_prop = properties + .iter() + .find(|candidate| candidate.name.as_str() == "TRIGGER"); + + let trigger_prop = match maybe_trigger_prop { + None => return None, + Some(v) => v, + }; + + // B.1 Is it an absolute datetime? If so, returns early + let maybe_absolute = trigger_prop.params.iter() + .find(|param| param.key.as_str() == "VALUE") + .map(|param| param.val.as_ref()).flatten() + .map(|v| v.as_str() == "DATE-TIME"); + + if maybe_absolute.is_some() { + return prop_date(properties, "TRIGGER"); + } + + // B.2 Otherwise it's a timedelta relative to a parent field. 
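At this point steps C.1 and C.3 are only stubbed out (the function below still ends in todo!()). The follow-up commit in this series, "better support for time-range", completes them using the dur_value parser added above: the relative case reduces to parsing the duration and offsetting the parent component's reference datetime. A rough sketch of that resolution, with parent_ref standing for the datetime taken from the parent's DTSTART (or DTEND, depending on the RELATED parameter):

// Sketch only; the actual code is inlined in resolve_trigger() later in the series.
fn resolve_relative_trigger(
    parent_ref: chrono::DateTime<chrono::Utc>,
    trigger_value: &str, // e.g. "-PT10M"
) -> Option<chrono::DateTime<chrono::Utc>> {
    let (_, time_delta) = crate::parser::dur_value(trigger_value).ok()?;
    Some(parent_ref + time_delta)
}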
+ // C.1 Parse the timedelta value, returns early if invalid + + // C.2 Get the parent reference absolute datetime, returns early if invalid + let maybe_related_field = trigger_prop + .params + .iter() + .find(|param| param.key.as_str() == "RELATED") + .map(|param| param.val.as_ref()) + .flatten(); + let related_field = maybe_related_field.map(|v| v.as_str()).unwrap_or("DTSTART"); + let parent_date = match prop_date(parent.properties.as_ref(), related_field) { + Some(v) => v, + _ => return None, + }; + + // C.3 Compute the final date from the base date + timedelta + + todo!() +} + +fn is_in_time_range( + component: &cal::Component, + parent: &icalendar::parser::Component, + properties: &[icalendar::parser::Property], + time_range: &cal::TimeRange, +) -> bool { + //@FIXME timezones are not properly handled currently (everything is UTC) + //@FIXME does not support repeat + //ref: https://datatracker.ietf.org/doc/html/rfc4791#section-9.9 + let (start, end) = match time_range { + cal::TimeRange::OnlyStart(start) => (start, &chrono::DateTime::::MAX_UTC), + cal::TimeRange::OnlyEnd(end) => (&chrono::DateTime::::MIN_UTC, end), + cal::TimeRange::FullRange(start, end) => (start, end), + }; + + match component { + cal::Component::VEvent => { + let dtstart = match prop_date(properties, "DTSTART") { + Some(v) => v, + _ => return false, + }; + let maybe_dtend = prop_date(properties, "DTEND"); + let maybe_duration = prop_parse::(properties, "DURATION").map(|d| chrono::TimeDelta::new(std::cmp::max(d, 0), 0)).flatten(); + + //@FIXME missing "date" management (only support "datetime") + match (&maybe_dtend, &maybe_duration) { + // | Y | N | N | * | (start < DTEND AND end > DTSTART) | + (Some(dtend), _) => start < dtend && end > &dtstart, + // | N | Y | Y | * | (start < DTSTART+DURATION AND end > DTSTART) | + (_, Some(duration)) => *start <= dtstart + *duration && end > &dtstart, + // | N | N | N | Y | (start <= DTSTART AND end > DTSTART) | + _ => start <= &dtstart && end > &dtstart, + } + }, + cal::Component::VTodo => { + let maybe_dtstart = prop_date(properties, "DTSTART"); + let maybe_due = prop_date(properties, "DUE"); + let maybe_completed = prop_date(properties, "COMPLETED"); + let maybe_created = prop_date(properties, "CREATED"); + let maybe_duration = prop_parse::(properties, "DURATION").map(|d| chrono::TimeDelta::new(d, 0)).flatten(); + + match (maybe_dtstart, maybe_duration, maybe_due, maybe_completed, maybe_created) { + // | Y | Y | N | * | * | (start <= DTSTART+DURATION) AND | + // | | | | | | ((end > DTSTART) OR | + // | | | | | | (end >= DTSTART+DURATION)) | + (Some(dtstart), Some(duration), None, _, _) => *start <= dtstart + duration && (*end > dtstart || *end >= dtstart + duration), + // | Y | N | Y | * | * | ((start < DUE) OR (start <= DTSTART)) | + // | | | | | | AND | + // | | | | | | ((end > DTSTART) OR (end >= DUE)) | + (Some(dtstart), None, Some(due), _, _) => (*start < due || *start <= dtstart) && (*end > dtstart || *end >= due), + // | Y | N | N | * | * | (start <= DTSTART) AND (end > DTSTART) | + (Some(dtstart), None, None, _, _) => *start <= dtstart && *end > dtstart, + // | N | N | Y | * | * | (start < DUE) AND (end >= DUE) | + (None, None, Some(due), _, _) => *start < due && *end >= due, + // | N | N | N | Y | Y | ((start <= CREATED) OR (start <= COMPLETED))| + // | | | | | | AND | + // | | | | | | ((end >= CREATED) OR (end >= COMPLETED))| + (None, None, None, Some(completed), Some(created)) => (*start <= created || *start <= completed) && (*end >= created || *end >= completed), + 
// | N | N | N | Y | N | (start <= COMPLETED) AND (end >= COMPLETED) | + (None, None, None, Some(completed), None) => *start <= completed && *end >= completed, + // | N | N | N | N | Y | (end > CREATED) | + (None, None, None, None, Some(created)) => *end > created, + // | N | N | N | N | N | TRUE | + _ => true, + } + }, + cal::Component::VJournal => { + let maybe_dtstart = prop_date(properties, "DTSTART"); + match maybe_dtstart { + // | Y | Y | (start <= DTSTART) AND (end > DTSTART) | + Some(dtstart) => *start <= dtstart && *end > dtstart, + // | N | * | FALSE | + None => false, + } + }, + cal::Component::VFreeBusy => { + //@FIXME freebusy is not supported yet + false + }, + cal::Component::VAlarm => { + //@FIXME does not support REPEAT + let maybe_trigger = resolve_trigger(parent, properties); + // (start <= trigger-time) AND (end > trigger-time) + false + }, + _ => false, + } +} diff --git a/aero-proto/Cargo.toml b/aero-proto/Cargo.toml index b6f6336..e8d6b8f 100644 --- a/aero-proto/Cargo.toml +++ b/aero-proto/Cargo.toml @@ -7,6 +7,7 @@ license = "EUPL-1.2" description = "Binding between Aerogramme's internal components and well-known protocols" [dependencies] +aero-ical.workspace = true aero-sasl.workspace = true aero-dav.workspace = true aero-user.workspace = true diff --git a/aero-proto/src/dav/controller.rs b/aero-proto/src/dav/controller.rs index 4cf520e..873f768 100644 --- a/aero-proto/src/dav/controller.rs +++ b/aero-proto/src/dav/controller.rs @@ -1,6 +1,6 @@ use anyhow::Result; use futures::stream::{StreamExt, TryStreamExt}; -use http_body_util::combinators::{BoxBody, UnsyncBoxBody}; +use http_body_util::combinators::UnsyncBoxBody; use http_body_util::BodyStream; use http_body_util::StreamBody; use hyper::body::Frame; @@ -11,10 +11,11 @@ use aero_collections::user::User; use aero_dav::caltypes as cal; use aero_dav::realization::All; use aero_dav::types as dav; +use aero_ical::query::is_component_match; use crate::dav::codec; use crate::dav::codec::{depth, deserialize, serialize, text_body}; -use crate::dav::node::{DavNode, PutPolicy}; +use crate::dav::node::DavNode; use crate::dav::resource::RootNode; pub(super) type ArcUser = std::sync::Arc; @@ -373,185 +374,9 @@ fn apply_filter<'a>( tracing::debug!(filter=?root_filter, "calendar-query filter"); // Adjust return value according to filter - match is_component_match(&[fake_vcal_component], root_filter) { + match is_component_match(&fake_vcal_component, &[fake_vcal_component.clone()], root_filter) { true => Some(Ok(single_node)), _ => None, } }) } - -fn ical_parse_date(dt: &str) -> Option> { - tracing::trace!(raw_time = dt, "VEVENT raw time"); - let tmpl = match dt.chars().last() { - Some('Z') => cal::UTC_DATETIME_FMT, - Some(_) => { - tracing::warn!(raw_time=dt, "floating datetime is not properly supported yet"); - cal::FLOATING_DATETIME_FMT - }, - None => return None - }; - - NaiveDateTime::parse_from_str(dt, tmpl) - .ok() - .map(|v| v.and_utc()) -} - -fn prop_date( - properties: &[icalendar::parser::Property], - name: &str, -) -> Option> { - properties - .iter() - .find(|candidate| candidate.name.as_str() == name) - .map(|p| p.val.as_str()) - .map(ical_parse_date) - .flatten() -} - -fn is_properties_match(props: &[icalendar::parser::Property], filters: &[cal::PropFilter]) -> bool { - filters.iter().all(|single_filter| { - // Find the property - let single_prop = props - .iter() - .find(|candidate| candidate.name.as_str() == single_filter.name.0.as_str()); - match (&single_filter.additional_rules, single_prop) { - (None, 
Some(_)) | (Some(cal::PropFilterRules::IsNotDefined), None) => true, - (None, None) - | (Some(cal::PropFilterRules::IsNotDefined), Some(_)) - | (Some(cal::PropFilterRules::Match(_)), None) => false, - (Some(cal::PropFilterRules::Match(pattern)), Some(prop)) => { - // check value - match &pattern.time_or_text { - Some(cal::TimeOrText::Time(time_range)) => { - let maybe_parsed_date = ical_parse_date(prop.val.as_str()); - - let parsed_date = match maybe_parsed_date { - None => return false, - Some(v) => v, - }; - - // see if entry is in range - let is_in_range = match time_range { - cal::TimeRange::OnlyStart(after) => &parsed_date >= after, - cal::TimeRange::OnlyEnd(before) => &parsed_date <= before, - cal::TimeRange::FullRange(after, before) => { - &parsed_date >= after && &parsed_date <= before - } - }; - if !is_in_range { - return false; - } - - // if you are here, this subcondition is valid - } - Some(cal::TimeOrText::Text(txt_match)) => { - //@FIXME ignoring collation - let is_match = match txt_match.negate_condition { - None | Some(false) => { - prop.val.as_str().contains(txt_match.text.as_str()) - } - Some(true) => !prop.val.as_str().contains(txt_match.text.as_str()), - }; - if !is_match { - return false; - } - } - None => (), // if not filter on value is set, continue - }; - - // check parameters - pattern.param_filter.iter().all(|single_param_filter| { - let maybe_param = prop.params.iter().find(|candidate| { - candidate.key.as_str() == single_param_filter.name.as_str() - }); - - match (maybe_param, &single_param_filter.additional_rules) { - (Some(_), None) => true, - (None, None) => false, - (Some(_), Some(cal::ParamFilterMatch::IsNotDefined)) => false, - (None, Some(cal::ParamFilterMatch::IsNotDefined)) => true, - (None, Some(cal::ParamFilterMatch::Match(_))) => false, - (Some(param), Some(cal::ParamFilterMatch::Match(txt_match))) => { - let param_val = match ¶m.val { - Some(v) => v, - None => return false, - }; - - match txt_match.negate_condition { - None | Some(false) => { - param_val.as_str().contains(txt_match.text.as_str()) - } - Some(true) => !param_val.as_str().contains(txt_match.text.as_str()), - } - } - } - }) - } - } - }) -} - -fn is_in_time_range( - properties: &[icalendar::parser::Property], - time_range: &cal::TimeRange, -) -> bool { - //@FIXME too naive: https://datatracker.ietf.org/doc/html/rfc4791#section-9.9 - - let (dtstart, dtend) = match ( - prop_date(properties, "DTSTART"), - prop_date(properties, "DTEND"), - ) { - (Some(start), None) => (start, start), - (None, Some(end)) => (end, end), - (Some(start), Some(end)) => (start, end), - _ => { - tracing::warn!("unable to extract DTSTART and DTEND from VEVENT"); - return false; - } - }; - - tracing::trace!(event_start=?dtstart, event_end=?dtend, filter=?time_range, "apply filter on VEVENT"); - match time_range { - cal::TimeRange::OnlyStart(after) => &dtend >= after, - cal::TimeRange::OnlyEnd(before) => &dtstart <= before, - cal::TimeRange::FullRange(after, before) => &dtend >= after && &dtstart <= before, - } -} - -use chrono::NaiveDateTime; -fn is_component_match( - components: &[icalendar::parser::Component], - filter: &cal::CompFilter, -) -> bool { - // Find the component among the list - let maybe_comp = components - .iter() - .find(|candidate| candidate.name.as_str() == filter.name.as_str()); - - // Filter according to rules - match (maybe_comp, &filter.additional_rules) { - (Some(_), None) => true, - (None, Some(cal::CompFilterRules::IsNotDefined)) => true, - (None, None) => false, - (Some(_), 
Some(cal::CompFilterRules::IsNotDefined)) => false, - (None, Some(cal::CompFilterRules::Matches(_))) => false, - (Some(component), Some(cal::CompFilterRules::Matches(matcher))) => { - // check time range - if let Some(time_range) = &matcher.time_range { - if !is_in_time_range(component.properties.as_ref(), time_range) { - return false; - } - } - - // check properties - if !is_properties_match(component.properties.as_ref(), matcher.prop_filter.as_ref()) { - return false; - } - - // check inner components - matcher.comp_filter.iter().all(|inner_filter| { - is_component_match(component.components.as_ref(), &inner_filter) - }) - } - } -} diff --git a/aerogramme/tests/behavior.rs b/aerogramme/tests/behavior.rs index b6c1c6e..0e6dab6 100644 --- a/aerogramme/tests/behavior.rs +++ b/aerogramme/tests/behavior.rs @@ -684,6 +684,7 @@ fn rfc4791_webdav_caldav() { ) .send()?; //@FIXME not yet supported. returns DAV: 1 ; expects DAV: 1 calendar-access + // Not used by any client I know, so not implementing it now. // --- REPORT calendar-query --- //@FIXME missing support for calendar-data... @@ -729,7 +730,7 @@ fn rfc4791_webdav_caldav() { }); // 8.2.1.2. Synchronize by Time Range (here: July 2006) - let cal_query = r#" + let cal_query = r#" @@ -754,6 +755,43 @@ fn rfc4791_webdav_caldav() { assert_eq!(multistatus.responses.len(), 1); check_cal(&multistatus, ("/alice/calendar/Personal/rfc2.ics", Some(obj2_etag.to_str().expect("etag header convertible to str")), None)); + // 7.8.5. Example: Retrieval of To-Dos by Alarm Time Range + let cal_query = r#" + + + + + + + + + + + + + + + "#; + let resp = http + .request( + reqwest::Method::from_bytes(b"REPORT")?, + "http://localhost:8087/alice/calendar/Personal/", + ) + .body(cal_query) + .send()?; + assert_eq!(resp.status(), 207); + let multistatus = dav_deserialize::>(&resp.text()?); + //assert_eq!(multistatus.responses.len(), 1); + + // 7.8.6. Example: Retrieval of Event by UID + // @TODO + + // 7.8.7. Example: Retrieval of Events by PARTSTAT + // @TODO + + // 7.8.9. 
Example: Retrieval of All Pending To-Dos + // @TODO + // --- REPORT calendar-multiget --- let cal_query = r#" -- cgit v1.2.3 From 6b9720844aaa86ad25a77c0821dcdbc772937065 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Sun, 26 May 2024 10:33:04 +0200 Subject: better support for time-range --- aero-dav/src/caldecoder.rs | 8 +-- aero-dav/src/calencoder.rs | 12 ++-- aero-ical/src/lib.rs | 1 - aero-ical/src/parser.rs | 100 +++++++++++++++++---------------- aero-ical/src/query.rs | 104 ++++++++++++++++++++++++++--------- aero-proto/src/dav/controller.rs | 6 +- aerogramme/tests/behavior.rs | 51 +++++++++-------- aerogramme/tests/common/constants.rs | 17 ++++++ 8 files changed, 187 insertions(+), 112 deletions(-) diff --git a/aero-dav/src/caldecoder.rs b/aero-dav/src/caldecoder.rs index 7de5e2a..b6a843f 100644 --- a/aero-dav/src/caldecoder.rs +++ b/aero-dav/src/caldecoder.rs @@ -917,15 +917,11 @@ impl QRead for TimeRange { xml.open(CAL_URN, "time-range").await?; let start = match xml.prev_attr("start") { - Some(r) => { - Some(NaiveDateTime::parse_from_str(r.as_str(), UTC_DATETIME_FMT)?.and_utc()) - } + Some(r) => Some(NaiveDateTime::parse_from_str(r.as_str(), UTC_DATETIME_FMT)?.and_utc()), _ => None, }; let end = match xml.prev_attr("end") { - Some(r) => { - Some(NaiveDateTime::parse_from_str(r.as_str(), UTC_DATETIME_FMT)?.and_utc()) - } + Some(r) => Some(NaiveDateTime::parse_from_str(r.as_str(), UTC_DATETIME_FMT)?.and_utc()), _ => None, }; diff --git a/aero-dav/src/calencoder.rs b/aero-dav/src/calencoder.rs index d5d4305..723d95d 100644 --- a/aero-dav/src/calencoder.rs +++ b/aero-dav/src/calencoder.rs @@ -739,19 +739,15 @@ impl QWrite for TimeRange { "start", format!("{}", start.format(UTC_DATETIME_FMT)).as_str(), )), - Self::OnlyEnd(end) => empty.push_attribute(( - "end", - format!("{}", end.format(UTC_DATETIME_FMT)).as_str(), - )), + Self::OnlyEnd(end) => { + empty.push_attribute(("end", format!("{}", end.format(UTC_DATETIME_FMT)).as_str())) + } Self::FullRange(start, end) => { empty.push_attribute(( "start", format!("{}", start.format(UTC_DATETIME_FMT)).as_str(), )); - empty.push_attribute(( - "end", - format!("{}", end.format(UTC_DATETIME_FMT)).as_str(), - )); + empty.push_attribute(("end", format!("{}", end.format(UTC_DATETIME_FMT)).as_str())); } } xml.q.write_event_async(Event::Empty(empty)).await diff --git a/aero-ical/src/lib.rs b/aero-ical/src/lib.rs index 808c885..f6b4ad4 100644 --- a/aero-ical/src/lib.rs +++ b/aero-ical/src/lib.rs @@ -3,6 +3,5 @@ /// However, for many reason, it's not satisfying: /// the goal will be to rewrite it in the end so it better /// integrates into Aerogramme - pub mod parser; pub mod query; diff --git a/aero-ical/src/parser.rs b/aero-ical/src/parser.rs index 4354737..ca271a5 100644 --- a/aero-ical/src/parser.rs +++ b/aero-ical/src/parser.rs @@ -1,11 +1,11 @@ use chrono::TimeDelta; -use nom::IResult; use nom::branch::alt; use nom::bytes::complete::{tag, tag_no_case}; -use nom::combinator::{value, opt, map, map_opt}; -use nom::sequence::{pair, tuple}; use nom::character::complete as nomchar; +use nom::combinator::{map, map_opt, opt, value}; +use nom::sequence::{pair, tuple}; +use nom::IResult; use aero_dav::caltypes as cal; @@ -19,10 +19,13 @@ pub fn date_time(dt: &str) -> Option> { let tmpl = match dt.chars().last() { Some('Z') => cal::UTC_DATETIME_FMT, Some(_) => { - tracing::warn!(raw_time=dt, "floating datetime is not properly supported yet"); + tracing::warn!( + raw_time = dt, + "floating datetime is not properly supported yet" + ); 
cal::FLOATING_DATETIME_FMT - }, - None => return None + } + None => return None, }; chrono::NaiveDateTime::parse_from_str(dt, tmpl) @@ -43,46 +46,58 @@ pub fn date_time(dt: &str) -> Option> { /// dur-day = 1*DIGIT "D" /// ``` pub fn dur_value(text: &str) -> IResult<&str, TimeDelta> { - map_opt(tuple(( - dur_sign, - tag_no_case("P"), - alt(( - dur_date, - dur_time, - dur_week, - )) - )), |(sign, _, delta)| { - delta.checked_mul(sign) - })(text) + map_opt( + tuple(( + dur_sign, + tag_no_case("P"), + alt((dur_date, dur_time, dur_week)), + )), + |(sign, _, delta)| delta.checked_mul(sign), + )(text) } fn dur_sign(text: &str) -> IResult<&str, i32> { - map(opt(alt((value(1, tag("+")), value(-1, tag("-"))))), |x| x.unwrap_or(1))(text) + map(opt(alt((value(1, tag("+")), value(-1, tag("-"))))), |x| { + x.unwrap_or(1) + })(text) } fn dur_date(text: &str) -> IResult<&str, TimeDelta> { - map(pair(dur_day, opt(dur_time)), |(day, time)| day + time.unwrap_or(TimeDelta::zero()))(text) + map(pair(dur_day, opt(dur_time)), |(day, time)| { + day + time.unwrap_or(TimeDelta::zero()) + })(text) } fn dur_time(text: &str) -> IResult<&str, TimeDelta> { - map(pair(tag_no_case("T"), alt((dur_hour, dur_minute, dur_second))), |(_, x)| x)(text) + map( + pair(tag_no_case("T"), alt((dur_hour, dur_minute, dur_second))), + |(_, x)| x, + )(text) } fn dur_week(text: &str) -> IResult<&str, TimeDelta> { - map_opt(pair(nomchar::i64, tag_no_case("W")), |(i, _)| TimeDelta::try_weeks(i))(text) + map_opt(pair(nomchar::i64, tag_no_case("W")), |(i, _)| { + TimeDelta::try_weeks(i) + })(text) } fn dur_day(text: &str) -> IResult<&str, TimeDelta> { - map_opt(pair(nomchar::i64, tag_no_case("D")), |(i, _)| TimeDelta::try_days(i))(text) + map_opt(pair(nomchar::i64, tag_no_case("D")), |(i, _)| { + TimeDelta::try_days(i) + })(text) } fn dur_hour(text: &str) -> IResult<&str, TimeDelta> { - map_opt(tuple((nomchar::i64, tag_no_case("H"), opt(dur_minute))), |(i, _, mm)| { - TimeDelta::try_hours(i).map(|hours| hours + mm.unwrap_or(TimeDelta::zero())) - })(text) + map_opt( + tuple((nomchar::i64, tag_no_case("H"), opt(dur_minute))), + |(i, _, mm)| TimeDelta::try_hours(i).map(|hours| hours + mm.unwrap_or(TimeDelta::zero())), + )(text) } fn dur_minute(text: &str) -> IResult<&str, TimeDelta> { - map_opt(tuple((nomchar::i64, tag_no_case("M"), opt(dur_second))), |(i, _, ms)| { - TimeDelta::try_minutes(i).map(|min| min + ms.unwrap_or(TimeDelta::zero())) - })(text) + map_opt( + tuple((nomchar::i64, tag_no_case("M"), opt(dur_second))), + |(i, _, ms)| TimeDelta::try_minutes(i).map(|min| min + ms.unwrap_or(TimeDelta::zero())), + )(text) } fn dur_second(text: &str) -> IResult<&str, TimeDelta> { - map_opt(pair(nomchar::i64, tag_no_case("S")), |(i, _)| TimeDelta::try_seconds(i))(text) + map_opt(pair(nomchar::i64, tag_no_case("S")), |(i, _)| { + TimeDelta::try_seconds(i) + })(text) } #[cfg(test)] @@ -95,8 +110,11 @@ mod tests { let to_parse = "P15DT5H0M20S"; let (_, time_delta) = dur_value(to_parse).unwrap(); assert_eq!( - time_delta, - TimeDelta::try_days(15).unwrap() + TimeDelta::try_hours(5).unwrap() + TimeDelta::try_seconds(20).unwrap()); + time_delta, + TimeDelta::try_days(15).unwrap() + + TimeDelta::try_hours(5).unwrap() + + TimeDelta::try_seconds(20).unwrap() + ); } #[test] @@ -104,35 +122,25 @@ mod tests { // A duration of 7 weeks would be: let to_parse = "P7W"; let (_, time_delta) = dur_value(to_parse).unwrap(); - assert_eq!( - time_delta, - TimeDelta::try_weeks(7).unwrap() - ); + assert_eq!(time_delta, TimeDelta::try_weeks(7).unwrap()); } #[test] - fn 
rfc4791_example1() { + fn rfc4791_example1() { // 10 minutes before let to_parse = "-PT10M"; let (_, time_delta) = dur_value(to_parse).unwrap(); - assert_eq!( - time_delta, - TimeDelta::try_minutes(-10).unwrap() - ); + assert_eq!(time_delta, TimeDelta::try_minutes(-10).unwrap()); } - #[test] fn ical_org_example1() { - // The following example is for a "VALARM" calendar component that specifies an email alarm + // The following example is for a "VALARM" calendar component that specifies an email alarm // that will trigger 2 days before the scheduled due DATE-TIME of a to-do with which it is associated. let to_parse = "-P2D"; let (_, time_delta) = dur_value(to_parse).unwrap(); - assert_eq!( - time_delta, - TimeDelta::try_days(-2).unwrap() - ); + assert_eq!(time_delta, TimeDelta::try_days(-2).unwrap()); } } diff --git a/aero-ical/src/query.rs b/aero-ical/src/query.rs index 5d857bb..440441f 100644 --- a/aero-ical/src/query.rs +++ b/aero-ical/src/query.rs @@ -1,5 +1,5 @@ +use crate::parser; use aero_dav::caltypes as cal; -use crate::parser as parser; pub fn is_component_match( parent: &icalendar::parser::Component, @@ -7,6 +7,7 @@ pub fn is_component_match( filter: &cal::CompFilter, ) -> bool { // Find the component among the list + //@FIXME do not handle correctly multiple entities (eg. 3 VEVENT) let maybe_comp = components .iter() .find(|candidate| candidate.name.as_str() == filter.name.as_str()); @@ -21,7 +22,12 @@ pub fn is_component_match( (Some(component), Some(cal::CompFilterRules::Matches(matcher))) => { // check time range if let Some(time_range) = &matcher.time_range { - if !is_in_time_range(&filter.name, parent, component.properties.as_ref(), time_range) { + if !is_in_time_range( + &filter.name, + parent, + component.properties.as_ref(), + time_range, + ) { return false; } } @@ -77,7 +83,7 @@ fn is_properties_match(props: &[icalendar::parser::Property], filters: &[cal::Pr // check value match &pattern.time_or_text { Some(cal::TimeOrText::Time(time_range)) => { - let maybe_parsed_date = parser::date_time(prop.val.as_str()); + let maybe_parsed_date = parser::date_time(prop.val.as_str()); let parsed_date = match maybe_parsed_date { None => return false, @@ -146,8 +152,8 @@ fn is_properties_match(props: &[icalendar::parser::Property], filters: &[cal::Pr } fn resolve_trigger( - parent: &icalendar::parser::Component, - properties: &[icalendar::parser::Property] + parent: &icalendar::parser::Component, + properties: &[icalendar::parser::Property], ) -> Option> { // A. Do we have a TRIGGER property? If not, returns early let maybe_trigger_prop = properties @@ -160,34 +166,56 @@ fn resolve_trigger( }; // B.1 Is it an absolute datetime? If so, returns early - let maybe_absolute = trigger_prop.params.iter() + let maybe_absolute = trigger_prop + .params + .iter() .find(|param| param.key.as_str() == "VALUE") - .map(|param| param.val.as_ref()).flatten() + .map(|param| param.val.as_ref()) + .flatten() .map(|v| v.as_str() == "DATE-TIME"); if maybe_absolute.is_some() { - return prop_date(properties, "TRIGGER"); + let final_date = prop_date(properties, "TRIGGER"); + tracing::trace!(trigger=?final_date, "resolved absolute trigger"); + return final_date; } // B.2 Otherwise it's a timedelta relative to a parent field. 
// C.1 Parse the timedelta value, returns early if invalid + let (_, time_delta) = parser::dur_value(trigger_prop.val.as_str()).ok()?; // C.2 Get the parent reference absolute datetime, returns early if invalid - let maybe_related_field = trigger_prop + let maybe_bound = trigger_prop .params .iter() .find(|param| param.key.as_str() == "RELATED") .map(|param| param.val.as_ref()) .flatten(); - let related_field = maybe_related_field.map(|v| v.as_str()).unwrap_or("DTSTART"); + + // If the trigger is set relative to START, then the "DTSTART" property MUST be present in the associated + // "VEVENT" or "VTODO" calendar component. + // + // If an alarm is specified for an event with the trigger set relative to the END, + // then the "DTEND" property or the "DTSTART" and "DURATION " properties MUST be present + // in the associated "VEVENT" calendar component. + // + // If the alarm is specified for a to-do with a trigger set relative to the END, + // then either the "DUE" property or the "DTSTART" and "DURATION " properties + // MUST be present in the associated "VTODO" calendar component. + let related_field = match maybe_bound.as_ref().map(|v| v.as_str()) { + Some("START") => "DTSTART", + Some("END") => "DTEND", //@FIXME must add support for DUE, DTSTART, and DURATION + _ => "DTSTART", // by default use DTSTART + }; let parent_date = match prop_date(parent.properties.as_ref(), related_field) { Some(v) => v, _ => return None, }; // C.3 Compute the final date from the base date + timedelta - - todo!() + let final_date = parent_date + time_delta; + tracing::trace!(trigger=?final_date, "resolved relative trigger"); + Some(final_date) } fn is_in_time_range( @@ -209,10 +237,12 @@ fn is_in_time_range( cal::Component::VEvent => { let dtstart = match prop_date(properties, "DTSTART") { Some(v) => v, - _ => return false, + _ => return false, }; let maybe_dtend = prop_date(properties, "DTEND"); - let maybe_duration = prop_parse::(properties, "DURATION").map(|d| chrono::TimeDelta::new(std::cmp::max(d, 0), 0)).flatten(); + let maybe_duration = prop_parse::(properties, "DURATION") + .map(|d| chrono::TimeDelta::new(std::cmp::max(d, 0), 0)) + .flatten(); //@FIXME missing "date" management (only support "datetime") match (&maybe_dtend, &maybe_duration) { @@ -223,23 +253,35 @@ fn is_in_time_range( // | N | N | N | Y | (start <= DTSTART AND end > DTSTART) | _ => start <= &dtstart && end > &dtstart, } - }, + } cal::Component::VTodo => { let maybe_dtstart = prop_date(properties, "DTSTART"); let maybe_due = prop_date(properties, "DUE"); let maybe_completed = prop_date(properties, "COMPLETED"); let maybe_created = prop_date(properties, "CREATED"); - let maybe_duration = prop_parse::(properties, "DURATION").map(|d| chrono::TimeDelta::new(d, 0)).flatten(); + let maybe_duration = prop_parse::(properties, "DURATION") + .map(|d| chrono::TimeDelta::new(d, 0)) + .flatten(); - match (maybe_dtstart, maybe_duration, maybe_due, maybe_completed, maybe_created) { + match ( + maybe_dtstart, + maybe_duration, + maybe_due, + maybe_completed, + maybe_created, + ) { // | Y | Y | N | * | * | (start <= DTSTART+DURATION) AND | // | | | | | | ((end > DTSTART) OR | // | | | | | | (end >= DTSTART+DURATION)) | - (Some(dtstart), Some(duration), None, _, _) => *start <= dtstart + duration && (*end > dtstart || *end >= dtstart + duration), + (Some(dtstart), Some(duration), None, _, _) => { + *start <= dtstart + duration && (*end > dtstart || *end >= dtstart + duration) + } // | Y | N | Y | * | * | ((start < DUE) OR (start <= DTSTART)) | // | | | 
| | | AND | // | | | | | | ((end > DTSTART) OR (end >= DUE)) | - (Some(dtstart), None, Some(due), _, _) => (*start < due || *start <= dtstart) && (*end > dtstart || *end >= due), + (Some(dtstart), None, Some(due), _, _) => { + (*start < due || *start <= dtstart) && (*end > dtstart || *end >= due) + } // | Y | N | N | * | * | (start <= DTSTART) AND (end > DTSTART) | (Some(dtstart), None, None, _, _) => *start <= dtstart && *end > dtstart, // | N | N | Y | * | * | (start < DUE) AND (end >= DUE) | @@ -247,15 +289,20 @@ fn is_in_time_range( // | N | N | N | Y | Y | ((start <= CREATED) OR (start <= COMPLETED))| // | | | | | | AND | // | | | | | | ((end >= CREATED) OR (end >= COMPLETED))| - (None, None, None, Some(completed), Some(created)) => (*start <= created || *start <= completed) && (*end >= created || *end >= completed), + (None, None, None, Some(completed), Some(created)) => { + (*start <= created || *start <= completed) + && (*end >= created || *end >= completed) + } // | N | N | N | Y | N | (start <= COMPLETED) AND (end >= COMPLETED) | - (None, None, None, Some(completed), None) => *start <= completed && *end >= completed, + (None, None, None, Some(completed), None) => { + *start <= completed && *end >= completed + } // | N | N | N | N | Y | (end > CREATED) | (None, None, None, None, Some(created)) => *end > created, // | N | N | N | N | N | TRUE | _ => true, } - }, + } cal::Component::VJournal => { let maybe_dtstart = prop_date(properties, "DTSTART"); match maybe_dtstart { @@ -264,17 +311,20 @@ fn is_in_time_range( // | N | * | FALSE | None => false, } - }, + } cal::Component::VFreeBusy => { //@FIXME freebusy is not supported yet false - }, + } cal::Component::VAlarm => { //@FIXME does not support REPEAT let maybe_trigger = resolve_trigger(parent, properties); - // (start <= trigger-time) AND (end > trigger-time) - false - }, + match maybe_trigger { + // (start <= trigger-time) AND (end > trigger-time) + Some(trigger_time) => *start <= trigger_time && *end > trigger_time, + _ => false, + } + } _ => false, } } diff --git a/aero-proto/src/dav/controller.rs b/aero-proto/src/dav/controller.rs index 873f768..abf6a97 100644 --- a/aero-proto/src/dav/controller.rs +++ b/aero-proto/src/dav/controller.rs @@ -374,7 +374,11 @@ fn apply_filter<'a>( tracing::debug!(filter=?root_filter, "calendar-query filter"); // Adjust return value according to filter - match is_component_match(&fake_vcal_component, &[fake_vcal_component.clone()], root_filter) { + match is_component_match( + &fake_vcal_component, + &[fake_vcal_component.clone()], + root_filter, + ) { true => Some(Ok(single_node)), _ => None, } diff --git a/aerogramme/tests/behavior.rs b/aerogramme/tests/behavior.rs index 0e6dab6..d6c73e3 100644 --- a/aerogramme/tests/behavior.rs +++ b/aerogramme/tests/behavior.rs @@ -590,6 +590,13 @@ fn rfc4791_webdav_caldav() { .send()?; let _obj5_etag = resp.headers().get("etag").expect("etag must be set"); assert_eq!(resp.status(), 201); + let resp = http + .put("http://localhost:8087/alice/calendar/Personal/rfc6.ics") + .header("If-None-Match", "*") + .body(ICAL_RFC6) + .send()?; + let _obj6_etag = resp.headers().get("etag").expect("etag must be set"); + assert_eq!(resp.status(), 201); // A generic function to check a query result let check_cal = @@ -611,25 +618,17 @@ fn rfc4791_webdav_caldav() { .iter() .find(|p| p.status.0.as_u16() == 200) .expect("some propstats must be 200"); - let etag = obj_success - .prop - .0 - .iter() - .find_map(|p| match p { - dav::AnyProperty::Value(dav::Property::GetEtag(x)) => 
Some(x.as_str()), - _ => None, - }); + let etag = obj_success.prop.0.iter().find_map(|p| match p { + dav::AnyProperty::Value(dav::Property::GetEtag(x)) => Some(x.as_str()), + _ => None, + }); assert_eq!(etag, ref_etag); - let calendar_data = obj_success - .prop - .0 - .iter() - .find_map(|p| match p { - dav::AnyProperty::Value(dav::Property::Extension( - realization::Property::Cal(cal::Property::CalendarData(x)), - )) => Some(x.payload.as_bytes()), - _ => None, - }); + let calendar_data = obj_success.prop.0.iter().find_map(|p| match p { + dav::AnyProperty::Value(dav::Property::Extension( + realization::Property::Cal(cal::Property::CalendarData(x)), + )) => Some(x.payload.as_bytes()), + _ => None, + }); assert_eq!(calendar_data, ref_ical); }; @@ -753,7 +752,14 @@ fn rfc4791_webdav_caldav() { assert_eq!(resp.status(), 207); let multistatus = dav_deserialize::>(&resp.text()?); assert_eq!(multistatus.responses.len(), 1); - check_cal(&multistatus, ("/alice/calendar/Personal/rfc2.ics", Some(obj2_etag.to_str().expect("etag header convertible to str")), None)); + check_cal( + &multistatus, + ( + "/alice/calendar/Personal/rfc2.ics", + Some(obj2_etag.to_str().expect("etag header convertible to str")), + None, + ), + ); // 7.8.5. Example: Retrieval of To-Dos by Alarm Time Range let cal_query = r#" @@ -766,7 +772,7 @@ fn rfc4791_webdav_caldav() { - + @@ -781,18 +787,17 @@ fn rfc4791_webdav_caldav() { .send()?; assert_eq!(resp.status(), 207); let multistatus = dav_deserialize::>(&resp.text()?); - //assert_eq!(multistatus.responses.len(), 1); + assert_eq!(multistatus.responses.len(), 1); // 7.8.6. Example: Retrieval of Event by UID // @TODO // 7.8.7. Example: Retrieval of Events by PARTSTAT // @TODO - + // 7.8.9. Example: Retrieval of All Pending To-Dos // @TODO - // --- REPORT calendar-multiget --- let cal_query = r#" diff --git a/aerogramme/tests/common/constants.rs b/aerogramme/tests/common/constants.rs index 8874876..91ee159 100644 --- a/aerogramme/tests/common/constants.rs +++ b/aerogramme/tests/common/constants.rs @@ -158,3 +158,20 @@ UID:E10BA47467C5C69BB74E8725@example.com END:VTODO END:VCALENDAR "#; + +pub static ICAL_RFC6: &[u8] = br#"BEGIN:VCALENDAR +VERSION:2.0 +PRODID:-//Example Corp.//CalDAV Client//EN +BEGIN:VTODO +DTSTART:20060205T235335Z +DUE;VALUE=DATE:20060104 +STATUS:NEEDS-ACTION +SUMMARY:Task #1 +UID:DDDEEB7915FA61233B861457@example.com +BEGIN:VALARM +ACTION:AUDIO +TRIGGER;RELATED=START:-PT10M +END:VALARM +END:VTODO +END:VCALENDAR +"#; -- cgit v1.2.3 From d5a222967dbc774ad04cff572a0d901c832b36bf Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Sun, 26 May 2024 11:03:39 +0200 Subject: support multiple same name components, properties & parameters --- aero-ical/src/query.rs | 154 ++++++++++++++++++----------------- aerogramme/tests/behavior.rs | 154 +++++++++++++++++++++++++++-------- aerogramme/tests/common/constants.rs | 38 +++++++++ 3 files changed, 240 insertions(+), 106 deletions(-) diff --git a/aero-ical/src/query.rs b/aero-ical/src/query.rs index 440441f..d69a919 100644 --- a/aero-ical/src/query.rs +++ b/aero-ical/src/query.rs @@ -7,19 +7,18 @@ pub fn is_component_match( filter: &cal::CompFilter, ) -> bool { // Find the component among the list - //@FIXME do not handle correctly multiple entities (eg. 
3 VEVENT) - let maybe_comp = components + let maybe_comps = components .iter() - .find(|candidate| candidate.name.as_str() == filter.name.as_str()); + .filter(|candidate| candidate.name.as_str() == filter.name.as_str()) + .collect::>(); // Filter according to rules - match (maybe_comp, &filter.additional_rules) { - (Some(_), None) => true, - (None, Some(cal::CompFilterRules::IsNotDefined)) => true, - (None, None) => false, - (Some(_), Some(cal::CompFilterRules::IsNotDefined)) => false, - (None, Some(cal::CompFilterRules::Matches(_))) => false, - (Some(component), Some(cal::CompFilterRules::Matches(matcher))) => { + match (&maybe_comps[..], &filter.additional_rules) { + ([_, ..], None) => true, + ([], Some(cal::CompFilterRules::IsNotDefined)) => true, + ([], None) => false, + ([_, ..], Some(cal::CompFilterRules::IsNotDefined)) => false, + (comps, Some(cal::CompFilterRules::Matches(matcher))) => comps.iter().any(|component| { // check time range if let Some(time_range) = &matcher.time_range { if !is_in_time_range( @@ -41,7 +40,7 @@ pub fn is_component_match( matcher.comp_filter.iter().all(|inner_filter| { is_component_match(component, component.components.as_ref(), &inner_filter) }) - } + }), } } @@ -71,80 +70,89 @@ fn prop_parse( fn is_properties_match(props: &[icalendar::parser::Property], filters: &[cal::PropFilter]) -> bool { filters.iter().all(|single_filter| { // Find the property - let single_prop = props + let candidate_props = props .iter() - .find(|candidate| candidate.name.as_str() == single_filter.name.0.as_str()); - match (&single_filter.additional_rules, single_prop) { - (None, Some(_)) | (Some(cal::PropFilterRules::IsNotDefined), None) => true, - (None, None) - | (Some(cal::PropFilterRules::IsNotDefined), Some(_)) - | (Some(cal::PropFilterRules::Match(_)), None) => false, - (Some(cal::PropFilterRules::Match(pattern)), Some(prop)) => { - // check value - match &pattern.time_or_text { - Some(cal::TimeOrText::Time(time_range)) => { - let maybe_parsed_date = parser::date_time(prop.val.as_str()); + .filter(|candidate| candidate.name.as_str() == single_filter.name.0.as_str()) + .collect::>(); + + match (&single_filter.additional_rules, &candidate_props[..]) { + (None, [_, ..]) | (Some(cal::PropFilterRules::IsNotDefined), []) => true, + (None, []) | (Some(cal::PropFilterRules::IsNotDefined), [_, ..]) => false, + (Some(cal::PropFilterRules::Match(pattern)), multi_props) => { + multi_props.iter().any(|prop| { + // check value + match &pattern.time_or_text { + Some(cal::TimeOrText::Time(time_range)) => { + let maybe_parsed_date = parser::date_time(prop.val.as_str()); - let parsed_date = match maybe_parsed_date { - None => return false, - Some(v) => v, - }; + let parsed_date = match maybe_parsed_date { + None => return false, + Some(v) => v, + }; - // see if entry is in range - let is_in_range = match time_range { - cal::TimeRange::OnlyStart(after) => &parsed_date >= after, - cal::TimeRange::OnlyEnd(before) => &parsed_date <= before, - cal::TimeRange::FullRange(after, before) => { - &parsed_date >= after && &parsed_date <= before + // see if entry is in range + let is_in_range = match time_range { + cal::TimeRange::OnlyStart(after) => &parsed_date >= after, + cal::TimeRange::OnlyEnd(before) => &parsed_date <= before, + cal::TimeRange::FullRange(after, before) => { + &parsed_date >= after && &parsed_date <= before + } + }; + if !is_in_range { + return false; } - }; - if !is_in_range { - return false; - } - // if you are here, this subcondition is valid - } - 
Some(cal::TimeOrText::Text(txt_match)) => { - //@FIXME ignoring collation - let is_match = match txt_match.negate_condition { - None | Some(false) => { - prop.val.as_str().contains(txt_match.text.as_str()) + // if you are here, this subcondition is valid + } + Some(cal::TimeOrText::Text(txt_match)) => { + //@FIXME ignoring collation + let is_match = match txt_match.negate_condition { + None | Some(false) => { + prop.val.as_str().contains(txt_match.text.as_str()) + } + Some(true) => !prop.val.as_str().contains(txt_match.text.as_str()), + }; + if !is_match { + return false; } - Some(true) => !prop.val.as_str().contains(txt_match.text.as_str()), - }; - if !is_match { - return false; } - } - None => (), // if not filter on value is set, continue - }; + None => (), // if not filter on value is set, continue + }; - // check parameters - pattern.param_filter.iter().all(|single_param_filter| { - let maybe_param = prop.params.iter().find(|candidate| { - candidate.key.as_str() == single_param_filter.name.as_str() - }); + // check parameters + pattern.param_filter.iter().all(|single_param_filter| { + let multi_param = prop + .params + .iter() + .filter(|candidate| { + candidate.key.as_str() == single_param_filter.name.as_str() + }) + .collect::>(); - match (maybe_param, &single_param_filter.additional_rules) { - (Some(_), None) => true, - (None, None) => false, - (Some(_), Some(cal::ParamFilterMatch::IsNotDefined)) => false, - (None, Some(cal::ParamFilterMatch::IsNotDefined)) => true, - (None, Some(cal::ParamFilterMatch::Match(_))) => false, - (Some(param), Some(cal::ParamFilterMatch::Match(txt_match))) => { - let param_val = match ¶m.val { - Some(v) => v, - None => return false, - }; + match (&multi_param[..], &single_param_filter.additional_rules) { + ([.., _], None) => true, + ([], None) => false, + ([.., _], Some(cal::ParamFilterMatch::IsNotDefined)) => false, + ([], Some(cal::ParamFilterMatch::IsNotDefined)) => true, + (many_params, Some(cal::ParamFilterMatch::Match(txt_match))) => { + many_params.iter().any(|param| { + let param_val = match ¶m.val { + Some(v) => v, + None => return false, + }; - match txt_match.negate_condition { - None | Some(false) => { - param_val.as_str().contains(txt_match.text.as_str()) - } - Some(true) => !param_val.as_str().contains(txt_match.text.as_str()), + match txt_match.negate_condition { + None | Some(false) => { + param_val.as_str().contains(txt_match.text.as_str()) + } + Some(true) => { + !param_val.as_str().contains(txt_match.text.as_str()) + } + } + }) } } - } + }) }) } } diff --git a/aerogramme/tests/behavior.rs b/aerogramme/tests/behavior.rs index d6c73e3..ef58182 100644 --- a/aerogramme/tests/behavior.rs +++ b/aerogramme/tests/behavior.rs @@ -554,7 +554,7 @@ fn rfc4791_webdav_caldav() { println!("🧪 rfc4791_webdav_caldav"); common::aerogramme_provider_daemon_dev(|_imap, _lmtp, http| { // --- INITIAL TEST SETUP --- - // Add entries (3 VEVENT, 1 FREEBUSY, 1 VTODO) + // Add entries let resp = http .put("http://localhost:8087/alice/calendar/Personal/rfc1.ics") .header("If-None-Match", "*") @@ -595,7 +595,14 @@ fn rfc4791_webdav_caldav() { .header("If-None-Match", "*") .body(ICAL_RFC6) .send()?; - let _obj6_etag = resp.headers().get("etag").expect("etag must be set"); + let obj6_etag = resp.headers().get("etag").expect("etag must be set"); + assert_eq!(resp.status(), 201); + let resp = http + .put("http://localhost:8087/alice/calendar/Personal/rfc7.ics") + .header("If-None-Match", "*") + .body(ICAL_RFC7) + .send()?; + let obj7_etag = 
resp.headers().get("etag").expect("etag must be set"); assert_eq!(resp.status(), 201); // A generic function to check a query result @@ -684,9 +691,44 @@ fn rfc4791_webdav_caldav() { .send()?; //@FIXME not yet supported. returns DAV: 1 ; expects DAV: 1 calendar-access // Not used by any client I know, so not implementing it now. + + // --- REPORT calendar-multiget --- + let cal_query = r#" + + + + + + /alice/calendar/Personal/rfc1.ics + /alice/calendar/Personal/rfc3.ics + "#; + let resp = http + .request( + reqwest::Method::from_bytes(b"REPORT")?, + "http://localhost:8087/alice/calendar/Personal/", + ) + .body(cal_query) + .send()?; + assert_eq!(resp.status(), 207); + let multistatus = dav_deserialize::>(&resp.text()?); + assert_eq!(multistatus.responses.len(), 2); + [ + ("/alice/calendar/Personal/rfc1.ics", obj1_etag, ICAL_RFC1), + ("/alice/calendar/Personal/rfc3.ics", obj3_etag, ICAL_RFC3), + ] + .iter() + .for_each(|(ref_path, ref_etag, ref_ical)| { + check_cal( + &multistatus, + ( + ref_path, + Some(ref_etag.to_str().expect("etag header convertible to str")), + Some(ref_ical), + ), + ) + }); - // --- REPORT calendar-query --- - //@FIXME missing support for calendar-data... + // --- REPORT calendar-query, only filtering --- // 7.8.8. Example: Retrieval of Events Only let cal_query = r#" @@ -709,12 +751,13 @@ fn rfc4791_webdav_caldav() { .send()?; assert_eq!(resp.status(), 207); let multistatus = dav_deserialize::>(&resp.text()?); - assert_eq!(multistatus.responses.len(), 3); + assert_eq!(multistatus.responses.len(), 4); [ ("/alice/calendar/Personal/rfc1.ics", obj1_etag, ICAL_RFC1), ("/alice/calendar/Personal/rfc2.ics", obj2_etag, ICAL_RFC2), ("/alice/calendar/Personal/rfc3.ics", obj3_etag, ICAL_RFC3), + ("/alice/calendar/Personal/rfc7.ics", obj7_etag, ICAL_RFC7), ] .iter() .for_each(|(ref_path, ref_etag, ref_ical)| { @@ -788,26 +831,72 @@ fn rfc4791_webdav_caldav() { assert_eq!(resp.status(), 207); let multistatus = dav_deserialize::>(&resp.text()?); assert_eq!(multistatus.responses.len(), 1); + check_cal( + &multistatus, + ( + "/alice/calendar/Personal/rfc6.ics", + Some(obj6_etag.to_str().expect("etag header convertible to str")), + Some(ICAL_RFC6), + ), + ); // 7.8.6. Example: Retrieval of Event by UID - // @TODO - - // 7.8.7. Example: Retrieval of Events by PARTSTAT - // @TODO + let cal_query = r#" + + + + + + + + + + DC6C50A017428C5216A2F1CD@example.com + + + + + "#; + let resp = http + .request( + reqwest::Method::from_bytes(b"REPORT")?, + "http://localhost:8087/alice/calendar/Personal/", + ) + .body(cal_query) + .send()?; + assert_eq!(resp.status(), 207); + let multistatus = dav_deserialize::>(&resp.text()?); + assert_eq!(multistatus.responses.len(), 1); + check_cal( + &multistatus, + ( + "/alice/calendar/Personal/rfc3.ics", + Some(obj3_etag.to_str().expect("etag header convertible to str")), + Some(ICAL_RFC3), + ), + ); - // 7.8.9. Example: Retrieval of All Pending To-Dos - // @TODO - // --- REPORT calendar-multiget --- + // 7.8.7. 
Example: Retrieval of Events by PARTSTAT let cal_query = r#" - - + + - /alice/calendar/Personal/rfc1.ics - /alice/calendar/Personal/rfc3.ics - "#; + + + + + mailto:lisa@example.com + + NEEDS-ACTION + + + + + + "#; let resp = http .request( reqwest::Method::from_bytes(b"REPORT")?, @@ -817,22 +906,21 @@ fn rfc4791_webdav_caldav() { .send()?; assert_eq!(resp.status(), 207); let multistatus = dav_deserialize::>(&resp.text()?); - assert_eq!(multistatus.responses.len(), 2); - [ - ("/alice/calendar/Personal/rfc1.ics", obj1_etag, ICAL_RFC1), - ("/alice/calendar/Personal/rfc3.ics", obj3_etag, ICAL_RFC3), - ] - .iter() - .for_each(|(ref_path, ref_etag, ref_ical)| { - check_cal( - &multistatus, - ( - ref_path, - Some(ref_etag.to_str().expect("etag header convertible to str")), - Some(ref_ical), - ), - ) - }); + assert_eq!(multistatus.responses.len(), 1); + check_cal( + &multistatus, + ( + "/alice/calendar/Personal/rfc7.ics", + Some(obj7_etag.to_str().expect("etag header convertible to str")), + Some(ICAL_RFC7), + ), + ); + + // 7.8.9. Example: Retrieval of All Pending To-Dos + // @TODO + + // --- REPORT calendar-query, with calendar-data tx --- + //@FIXME add support for calendar-data... Ok(()) }) diff --git a/aerogramme/tests/common/constants.rs b/aerogramme/tests/common/constants.rs index 91ee159..c04bae0 100644 --- a/aerogramme/tests/common/constants.rs +++ b/aerogramme/tests/common/constants.rs @@ -175,3 +175,41 @@ END:VALARM END:VTODO END:VCALENDAR "#; + +pub static ICAL_RFC7: &[u8] = br#"BEGIN:VCALENDAR +VERSION:2.0 +PRODID:-//Example Corp.//CalDAV Client//EN +BEGIN:VTIMEZONE +LAST-MODIFIED:20040110T032845Z +TZID:US/Eastern +BEGIN:DAYLIGHT +DTSTART:20000404T020000 +RRULE:FREQ=YEARLY;BYDAY=1SU;BYMONTH=4 +TZNAME:EDT +TZOFFSETFROM:-0500 +TZOFFSETTO:-0400 +END:DAYLIGHT +BEGIN:STANDARD +DTSTART:20001026T020000 +RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10 +TZNAME:EST +TZOFFSETFROM:-0400 +TZOFFSETTO:-0500 +END:STANDARD +END:VTIMEZONE +BEGIN:VEVENT +ATTENDEE;PARTSTAT=ACCEPTED;ROLE=CHAIR:mailto:cyrus@example.com +ATTENDEE;PARTSTAT=NEEDS-ACTION:mailto:lisa@example.com +DTSTAMP:20090206T001220Z +DTSTART;TZID=US/Eastern:20090104T100000 +DURATION:PT1H +LAST-MODIFIED:20090206T001330Z +ORGANIZER:mailto:cyrus@example.com +SEQUENCE:1 +STATUS:TENTATIVE +SUMMARY:Event #3 +UID:DC6C50A017428C5216A2F1CA@example.com +X-ABC-GUID:E1CX5Dr-0007ym-Hz@example.com +END:VEVENT +END:VCALENDAR +"#; -- cgit v1.2.3 From ac528d215646b1d82799fafc4211ade3558074ff Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Sun, 26 May 2024 11:07:24 +0200 Subject: test fetching pending VTODOs --- aerogramme/tests/behavior.rs | 38 +++++++++++++++++++++++++++++++++++++- 1 file changed, 37 insertions(+), 1 deletion(-) diff --git a/aerogramme/tests/behavior.rs b/aerogramme/tests/behavior.rs index ef58182..7b93d51 100644 --- a/aerogramme/tests/behavior.rs +++ b/aerogramme/tests/behavior.rs @@ -917,7 +917,43 @@ fn rfc4791_webdav_caldav() { ); // 7.8.9. 
Example: Retrieval of All Pending To-Dos - // @TODO + let cal_query = r#" + + + + + + + + + + + + + CANCELLED + + + + + "#; + let resp = http + .request( + reqwest::Method::from_bytes(b"REPORT")?, + "http://localhost:8087/alice/calendar/Personal/", + ) + .body(cal_query) + .send()?; + assert_eq!(resp.status(), 207); + let multistatus = dav_deserialize::>(&resp.text()?); + assert_eq!(multistatus.responses.len(), 1); + check_cal( + &multistatus, + ( + "/alice/calendar/Personal/rfc6.ics", + Some(obj6_etag.to_str().expect("etag header convertible to str")), + Some(ICAL_RFC6), + ), + ); // --- REPORT calendar-query, with calendar-data tx --- //@FIXME add support for calendar-data... -- cgit v1.2.3 From 68e08bed4f8f589d2e45bcd82a99090032a56b95 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Sun, 26 May 2024 15:31:12 +0200 Subject: add prune logic --- aero-ical/src/lib.rs | 1 + aero-ical/src/prune.rs | 40 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 41 insertions(+) create mode 100644 aero-ical/src/prune.rs diff --git a/aero-ical/src/lib.rs b/aero-ical/src/lib.rs index f6b4ad4..696010a 100644 --- a/aero-ical/src/lib.rs +++ b/aero-ical/src/lib.rs @@ -5,3 +5,4 @@ /// integrates into Aerogramme pub mod parser; pub mod query; +pub mod prune; diff --git a/aero-ical/src/prune.rs b/aero-ical/src/prune.rs new file mode 100644 index 0000000..d04f700 --- /dev/null +++ b/aero-ical/src/prune.rs @@ -0,0 +1,40 @@ +use icalendar::parser::{Component, Property}; +use aero_dav::caltypes as cal; + +pub fn component<'a>(src: &'a Component<'a>, prune: &cal::Comp) -> Option> { + if src.name.as_str() != prune.name.as_str() { + return None + } + + let name = src.name.clone(); + + let properties = match &prune.prop_kind { + None => vec![], + Some(cal::PropKind::AllProp) => src.properties.clone(), + Some(cal::PropKind::Prop(l)) => src.properties.iter().filter_map(|prop| { + let sel_filt = match l.iter().find(|filt| filt.name.0.as_str() == prop.name.as_str()) { + Some(v) => v, + None => return None + }; + + match sel_filt.novalue { + None | Some(false) => Some(prop.clone()), + Some(true) => Some(Property { + name: prop.name.clone(), + params: prop.params.clone(), + val: "".into() + }), + } + }).collect::>(), + }; + + let components = match &prune.comp_kind { + None => vec![], + Some(cal::CompKind::AllComp) => src.components.clone(), + Some(cal::CompKind::Comp(many_inner_prune)) => src.components.iter().filter_map(|src_component| { + many_inner_prune.iter().find_map(|inner_prune| component(src_component, inner_prune)) + }).collect::>(), + }; + + Some(Component { name, properties, components }) +} -- cgit v1.2.3 From 418adf92be86ea83008a145180837f1e0ad3018a Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Mon, 27 May 2024 08:03:21 +0200 Subject: debug support of calendar-data pruning --- aero-ical/src/lib.rs | 2 +- aero-ical/src/prune.rs | 63 ++++++++++++++++++++++-------------- aero-proto/src/dav/controller.rs | 1 - aero-proto/src/dav/resource.rs | 53 ++++++++++++++++++++++++------ aerogramme/tests/behavior.rs | 50 +++++++++++++++++++++++++++- aerogramme/tests/common/constants.rs | 28 ++++++++++++++++ 6 files changed, 160 insertions(+), 37 deletions(-) diff --git a/aero-ical/src/lib.rs b/aero-ical/src/lib.rs index 696010a..3f6f633 100644 --- a/aero-ical/src/lib.rs +++ b/aero-ical/src/lib.rs @@ -4,5 +4,5 @@ /// the goal will be to rewrite it in the end so it better /// integrates into Aerogramme pub mod parser; -pub mod query; pub mod prune; +pub mod query; diff --git a/aero-ical/src/prune.rs 
b/aero-ical/src/prune.rs index d04f700..3eb50ca 100644 --- a/aero-ical/src/prune.rs +++ b/aero-ical/src/prune.rs @@ -1,40 +1,55 @@ -use icalendar::parser::{Component, Property}; use aero_dav::caltypes as cal; +use icalendar::parser::{Component, Property}; pub fn component<'a>(src: &'a Component<'a>, prune: &cal::Comp) -> Option> { if src.name.as_str() != prune.name.as_str() { - return None + return None; } let name = src.name.clone(); let properties = match &prune.prop_kind { - None => vec![], - Some(cal::PropKind::AllProp) => src.properties.clone(), - Some(cal::PropKind::Prop(l)) => src.properties.iter().filter_map(|prop| { - let sel_filt = match l.iter().find(|filt| filt.name.0.as_str() == prop.name.as_str()) { - Some(v) => v, - None => return None - }; + Some(cal::PropKind::AllProp) | None => src.properties.clone(), + Some(cal::PropKind::Prop(l)) => src + .properties + .iter() + .filter_map(|prop| { + let sel_filt = match l + .iter() + .find(|filt| filt.name.0.as_str() == prop.name.as_str()) + { + Some(v) => v, + None => return None, + }; - match sel_filt.novalue { - None | Some(false) => Some(prop.clone()), - Some(true) => Some(Property { - name: prop.name.clone(), - params: prop.params.clone(), - val: "".into() - }), - } - }).collect::>(), + match sel_filt.novalue { + None | Some(false) => Some(prop.clone()), + Some(true) => Some(Property { + name: prop.name.clone(), + params: prop.params.clone(), + val: "".into(), + }), + } + }) + .collect::>(), }; let components = match &prune.comp_kind { - None => vec![], - Some(cal::CompKind::AllComp) => src.components.clone(), - Some(cal::CompKind::Comp(many_inner_prune)) => src.components.iter().filter_map(|src_component| { - many_inner_prune.iter().find_map(|inner_prune| component(src_component, inner_prune)) - }).collect::>(), + Some(cal::CompKind::AllComp) | None => src.components.clone(), + Some(cal::CompKind::Comp(many_inner_prune)) => src + .components + .iter() + .filter_map(|src_component| { + many_inner_prune + .iter() + .find_map(|inner_prune| component(src_component, inner_prune)) + }) + .collect::>(), }; - Some(Component { name, properties, components }) + Some(Component { + name, + properties, + components, + }) } diff --git a/aero-proto/src/dav/controller.rs b/aero-proto/src/dav/controller.rs index abf6a97..eeb6d43 100644 --- a/aero-proto/src/dav/controller.rs +++ b/aero-proto/src/dav/controller.rs @@ -333,7 +333,6 @@ impl<'a> Path<'a> { } } -//@FIXME move somewhere else //@FIXME naive implementation, must be refactored later use futures::stream::Stream; fn apply_filter<'a>( diff --git a/aero-proto/src/dav/resource.rs b/aero-proto/src/dav/resource.rs index d65ce38..04bae4f 100644 --- a/aero-proto/src/dav/resource.rs +++ b/aero-proto/src/dav/resource.rs @@ -565,17 +565,49 @@ impl DavNode for EventNode { dav::Property::GetEtag(etag) } dav::PropertyRequest::Extension(all::PropertyRequest::Cal( - cal::PropertyRequest::CalendarData(_req), - )) => { + cal::PropertyRequest::CalendarData(req), + )) => { let ics = String::from_utf8( this.col.get(this.blob_id).await.or(Err(n.clone()))?, - ) - .or(Err(n.clone()))?; + ) + .or(Err(n.clone()))?; + + let new_ics = match &req.comp { + None => ics, + Some(prune_comp) => { + // parse content + let ics = match icalendar::parser::read_calendar(&ics) { + Ok(v) => v, + Err(e) => { + tracing::warn!(err=?e, "Unable to parse ICS in calendar-query"); + return Err(n.clone()) + } + }; + + // build a fake vcal component for caldav compat + let fake_vcal_component = icalendar::parser::Component { + name: 
cal::Component::VCalendar.as_str().into(), + properties: ics.properties, + components: ics.components, + }; + + // rebuild component + let new_comp = match aero_ical::prune::component(&fake_vcal_component, prune_comp) { + Some(v) => v, + None => return Err(n.clone()), + }; + + // reserialize + format!("{}", icalendar::parser::Calendar { properties: new_comp.properties, components: new_comp.components }) + }, + }; + + dav::Property::Extension(all::Property::Cal( cal::Property::CalendarData(cal::CalendarDataPayload { mime: None, - payload: ics, + payload: new_ics, }), )) } @@ -634,14 +666,15 @@ impl DavNode for EventNode { // so we load everything in memory let calendar = self.col.clone(); let blob_id = self.blob_id.clone(); - let r = async move { - let content = calendar + let calblob = async move { + let raw_ics = calendar .get(blob_id) .await - .or(Err(std::io::Error::from(std::io::ErrorKind::Interrupted))); - Ok(hyper::body::Bytes::from(content?)) + .or(Err(std::io::Error::from(std::io::ErrorKind::Interrupted)))?; + + Ok(hyper::body::Bytes::from(raw_ics)) }; - futures::stream::once(Box::pin(r)).boxed() + futures::stream::once(Box::pin(calblob)).boxed() } fn content_type(&self) -> &str { diff --git a/aerogramme/tests/behavior.rs b/aerogramme/tests/behavior.rs index 7b93d51..c88583f 100644 --- a/aerogramme/tests/behavior.rs +++ b/aerogramme/tests/behavior.rs @@ -956,7 +956,55 @@ fn rfc4791_webdav_caldav() { ); // --- REPORT calendar-query, with calendar-data tx --- - //@FIXME add support for calendar-data... + let cal_query = r#" + + + + + + + + + + + + + + + + + + + + + + + + + + + + + "#; + + let resp = http + .request( + reqwest::Method::from_bytes(b"REPORT")?, + "http://localhost:8087/alice/calendar/Personal/", + ) + .body(cal_query) + .send()?; + assert_eq!(resp.status(), 207); + let multistatus = dav_deserialize::>(&resp.text()?); + assert_eq!(multistatus.responses.len(), 1); + check_cal( + &multistatus, + ( + "/alice/calendar/Personal/rfc3.ics", + Some(obj3_etag.to_str().expect("etag header convertible to str")), + Some(ICAL_RFC3_STRIPPED), + ), + ); Ok(()) }) diff --git a/aerogramme/tests/common/constants.rs b/aerogramme/tests/common/constants.rs index c04bae0..16daec6 100644 --- a/aerogramme/tests/common/constants.rs +++ b/aerogramme/tests/common/constants.rs @@ -125,6 +125,34 @@ END:VEVENT END:VCALENDAR "; +pub static ICAL_RFC3_STRIPPED: &[u8] = b"BEGIN:VCALENDAR\r +VERSION:2.0\r +BEGIN:VTIMEZONE\r +LAST-MODIFIED:20040110T032845Z\r +TZID:US/Eastern\r +BEGIN:DAYLIGHT\r +DTSTART:20000404T020000\r +RRULE:FREQ=YEARLY;BYDAY=1SU;BYMONTH=4\r +TZNAME:EDT\r +TZOFFSETFROM:-0500\r +TZOFFSETTO:-0400\r +END:DAYLIGHT\r +BEGIN:STANDARD\r +DTSTART:20001026T020000\r +RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10\r +TZNAME:EST\r +TZOFFSETFROM:-0400\r +TZOFFSETTO:-0500\r +END:STANDARD\r +END:VTIMEZONE\r +BEGIN:VEVENT\r +DTSTART;TZID=US/Eastern:20060104T100000\r +DURATION:PT1H\r +UID:DC6C50A017428C5216A2F1CD@example.com\r +END:VEVENT\r +END:VCALENDAR\r +"; + pub static ICAL_RFC4: &[u8] = br#"BEGIN:VCALENDAR VERSION:2.0 PRODID:-//Example Corp.//CalDAV Client//EN -- cgit v1.2.3 From 5b1da2a33b265b674a130a90377c289faea7a210 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Mon, 27 May 2024 18:16:53 +0200 Subject: webdav sync core codec --- aero-dav/src/caldecoder.rs | 2 +- aero-dav/src/calencoder.rs | 2 +- aero-dav/src/caltypes.rs | 2 +- aero-dav/src/lib.rs | 13 ++- aero-dav/src/realization.rs | 33 +++++++ aero-dav/src/syncdecoder.rs | 175 ++++++++++++++++++++++++++++++++++++++ aero-dav/src/syncencoder.rs 
| 144 +++++++++++++++++++++++++++++++ aero-dav/src/synctypes.rs | 68 +++++++++++++++ aero-dav/src/types.rs | 5 ++ aero-dav/src/versioningdecoder.rs | 62 ++++++++++++++ aero-dav/src/versioningencoder.rs | 81 ++++++++++++++++++ aero-dav/src/versioningtypes.rs | 36 ++++++++ aero-proto/src/dav/controller.rs | 16 ++-- aerogramme/tests/behavior.rs | 2 +- 14 files changed, 628 insertions(+), 13 deletions(-) create mode 100644 aero-dav/src/syncdecoder.rs create mode 100644 aero-dav/src/syncencoder.rs create mode 100644 aero-dav/src/synctypes.rs create mode 100644 aero-dav/src/versioningdecoder.rs create mode 100644 aero-dav/src/versioningencoder.rs diff --git a/aero-dav/src/caldecoder.rs b/aero-dav/src/caldecoder.rs index b6a843f..ff79845 100644 --- a/aero-dav/src/caldecoder.rs +++ b/aero-dav/src/caldecoder.rs @@ -25,7 +25,7 @@ impl QRead> for MkCalendarResponse { } } -impl QRead> for Report { +impl QRead> for ReportType { async fn qread(xml: &mut Reader) -> Result { match CalendarQuery::::qread(xml).await { Err(ParsingError::Recoverable) => (), diff --git a/aero-dav/src/calencoder.rs b/aero-dav/src/calencoder.rs index 723d95d..48c93d0 100644 --- a/aero-dav/src/calencoder.rs +++ b/aero-dav/src/calencoder.rs @@ -33,7 +33,7 @@ impl QWrite for MkCalendarResponse { } // ----------------------- REPORT METHOD ------------------------------------- -impl QWrite for Report { +impl QWrite for ReportType { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { match self { Self::Query(v) => v.qwrite(xml).await, diff --git a/aero-dav/src/caltypes.rs b/aero-dav/src/caltypes.rs index 50cdb92..a763653 100644 --- a/aero-dav/src/caltypes.rs +++ b/aero-dav/src/caltypes.rs @@ -51,7 +51,7 @@ pub struct MkCalendarResponse(pub Vec>); // --- (REPORT PART) --- #[derive(Debug, PartialEq, Clone)] -pub enum Report { +pub enum ReportType { Query(CalendarQuery), Multiget(CalendarMultiget), FreeBusy(FreeBusyQuery), diff --git a/aero-dav/src/lib.rs b/aero-dav/src/lib.rs index 7507ddc..64be929 100644 --- a/aero-dav/src/lib.rs +++ b/aero-dav/src/lib.rs @@ -16,13 +16,20 @@ pub mod caldecoder; pub mod calencoder; pub mod caltypes; -// acl (wip) +// acl (partial) pub mod acldecoder; pub mod aclencoder; pub mod acltypes; -// versioning (wip) -mod versioningtypes; +// versioning (partial) +pub mod versioningdecoder; +pub mod versioningencoder; +pub mod versioningtypes; + +// sync +pub mod syncdecoder; +pub mod syncencoder; +pub mod synctypes; // final type pub mod realization; diff --git a/aero-dav/src/realization.rs b/aero-dav/src/realization.rs index 7283e68..0f3aec4 100644 --- a/aero-dav/src/realization.rs +++ b/aero-dav/src/realization.rs @@ -1,6 +1,7 @@ use super::acltypes as acl; use super::caltypes as cal; use super::error; +use super::synctypes as sync; use super::types as dav; use super::xml; @@ -31,6 +32,7 @@ impl dav::Extension for Core { type Property = Disabled; type PropertyRequest = Disabled; type ResourceType = Disabled; + type ReportType = Disabled; } // WebDAV with the base Calendar implementation (RFC4791) @@ -41,6 +43,7 @@ impl dav::Extension for Calendar { type Property = cal::Property; type PropertyRequest = cal::PropertyRequest; type ResourceType = cal::ResourceType; + type ReportType = cal::ReportType; } // ACL @@ -51,6 +54,7 @@ impl dav::Extension for Acl { type Property = acl::Property; type PropertyRequest = acl::PropertyRequest; type ResourceType = acl::ResourceType; + type ReportType = Disabled; } // All merged @@ -61,6 +65,7 @@ impl dav::Extension for All { type Property = Property; type 
PropertyRequest = PropertyRequest; type ResourceType = ResourceType; + type ReportType = ReportType; } #[derive(Debug, PartialEq, Clone)] @@ -142,3 +147,31 @@ impl xml::QWrite for ResourceType { } } } + +#[derive(Debug, PartialEq, Clone)] +pub enum ReportType { + Cal(cal::ReportType), + Sync(sync::SyncCollection), +} +impl xml::QRead> for ReportType { + async fn qread( + xml: &mut xml::Reader, + ) -> Result, error::ParsingError> { + match cal::ReportType::qread(xml).await { + Err(error::ParsingError::Recoverable) => (), + otherwise => return otherwise.map(ReportType::Cal), + } + sync::SyncCollection::qread(xml).await.map(ReportType::Sync) + } +} +impl xml::QWrite for ReportType { + async fn qwrite( + &self, + xml: &mut xml::Writer, + ) -> Result<(), quick_xml::Error> { + match self { + Self::Cal(c) => c.qwrite(xml).await, + Self::Sync(s) => s.qwrite(xml).await, + } + } +} diff --git a/aero-dav/src/syncdecoder.rs b/aero-dav/src/syncdecoder.rs new file mode 100644 index 0000000..8e035ab --- /dev/null +++ b/aero-dav/src/syncdecoder.rs @@ -0,0 +1,175 @@ +use quick_xml::events::Event; + +use super::error::ParsingError; +use super::synctypes::*; +use super::types as dav; +use super::xml::{IRead, QRead, Reader, DAV_URN}; + +impl QRead> for SyncCollection { + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "sync-collection").await?; + let (mut sync_token, mut sync_level, mut limit, mut prop) = (None, None, None, None); + loop { + let mut dirty = false; + xml.maybe_read(&mut sync_token, &mut dirty).await?; + xml.maybe_read(&mut sync_level, &mut dirty).await?; + xml.maybe_read(&mut limit, &mut dirty).await?; + xml.maybe_read(&mut prop, &mut dirty).await?; + + if !dirty { + match xml.peek() { + Event::End(_) => break, + _ => xml.skip().await?, + }; + } + } + + xml.close().await?; + match (sync_token, sync_level, prop) { + (Some(sync_token), Some(sync_level), Some(prop)) => Ok(SyncCollection { + sync_token, + sync_level, + limit, + prop, + }), + _ => Err(ParsingError::MissingChild), + } + } +} + +impl QRead for SyncTokenRequest { + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "sync-token").await?; + let token = match xml.tag_string().await { + Ok(v) => SyncTokenRequest::IncrementalSync(v), + Err(ParsingError::Recoverable) => SyncTokenRequest::InitialSync, + Err(e) => return Err(e), + }; + xml.close().await?; + Ok(token) + } +} + +impl QRead for SyncToken { + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "sync-token").await?; + let token = xml.tag_string().await?; + xml.close().await?; + Ok(SyncToken(token)) + } +} + +impl QRead for SyncLevel { + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "sync-level").await?; + let lvl = match xml.tag_string().await?.to_lowercase().as_str() { + "1" => SyncLevel::One, + "infinite" => SyncLevel::Infinite, + _ => return Err(ParsingError::InvalidValue), + }; + xml.close().await?; + Ok(lvl) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::realization::All; + use crate::types as dav; + use crate::versioningtypes as vers; + use crate::xml::Node; + + async fn deserialize>(src: &str) -> T { + let mut rdr = Reader::new(quick_xml::NsReader::from_reader(src.as_bytes())) + .await + .unwrap(); + rdr.find().await.unwrap() + } + + #[tokio::test] + async fn sync_level() { + { + let expected = SyncLevel::One; + let src = r#"1"#; + let got = deserialize::(src).await; + assert_eq!(got, expected); + } + { + let expected = SyncLevel::Infinite; + let src = r#"infinite"#; + let got = 
deserialize::(src).await; + assert_eq!(got, expected); + } + } + + #[tokio::test] + async fn sync_token_request() { + { + let expected = SyncTokenRequest::InitialSync; + let src = r#""#; + let got = deserialize::(src).await; + assert_eq!(got, expected); + } + { + let expected = + SyncTokenRequest::IncrementalSync("http://example.com/ns/sync/1232".into()); + let src = + r#"http://example.com/ns/sync/1232"#; + let got = deserialize::(src).await; + assert_eq!(got, expected); + } + } + + #[tokio::test] + async fn sync_token() { + let expected = SyncToken("http://example.com/ns/sync/1232".into()); + let src = r#"http://example.com/ns/sync/1232"#; + let got = deserialize::(src).await; + assert_eq!(got, expected); + } + + #[tokio::test] + async fn sync_collection() { + { + let expected = SyncCollection:: { + sync_token: SyncTokenRequest::IncrementalSync( + "http://example.com/ns/sync/1232".into(), + ), + sync_level: SyncLevel::One, + limit: Some(vers::Limit(vers::NResults(100))), + prop: dav::PropName(vec![dav::PropertyRequest::GetEtag]), + }; + let src = r#" + http://example.com/ns/sync/1232 + 1 + + 100 + + + + + "#; + let got = deserialize::>(src).await; + assert_eq!(got, expected); + } + + { + let expected = SyncCollection:: { + sync_token: SyncTokenRequest::InitialSync, + sync_level: SyncLevel::Infinite, + limit: None, + prop: dav::PropName(vec![dav::PropertyRequest::GetEtag]), + }; + let src = r#" + + infinite + + + + "#; + let got = deserialize::>(src).await; + assert_eq!(got, expected); + } + } +} diff --git a/aero-dav/src/syncencoder.rs b/aero-dav/src/syncencoder.rs new file mode 100644 index 0000000..22b288b --- /dev/null +++ b/aero-dav/src/syncencoder.rs @@ -0,0 +1,144 @@ +use quick_xml::events::{BytesText, Event}; +use quick_xml::Error as QError; + +use super::synctypes::*; +use super::types::Extension; +use super::xml::{IWrite, QWrite, Writer}; + +impl QWrite for SyncCollection { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("sync-collection"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + self.sync_token.qwrite(xml).await?; + self.sync_level.qwrite(xml).await?; + if let Some(limit) = &self.limit { + limit.qwrite(xml).await?; + } + self.prop.qwrite(xml).await?; + xml.q.write_event_async(Event::End(end)).await + } +} + +impl QWrite for SyncTokenRequest { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("sync-token"); + + match self { + Self::InitialSync => xml.q.write_event_async(Event::Empty(start)).await, + Self::IncrementalSync(uri) => { + let end = start.to_end(); + xml.q.write_event_async(Event::Start(start.clone())).await?; + xml.q + .write_event_async(Event::Text(BytesText::new(uri.as_str()))) + .await?; + xml.q.write_event_async(Event::End(end)).await + } + } + } +} + +impl QWrite for SyncToken { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("sync-token"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + xml.q + .write_event_async(Event::Text(BytesText::new(self.0.as_str()))) + .await?; + xml.q.write_event_async(Event::End(end)).await + } +} + +impl QWrite for SyncLevel { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("sync-level"); + let end = start.to_end(); + let text = match self { + Self::One => "1", + Self::Infinite => "infinite", + }; + + 
xml.q.write_event_async(Event::Start(start.clone())).await?; + xml.q + .write_event_async(Event::Text(BytesText::new(text))) + .await?; + xml.q.write_event_async(Event::End(end)).await + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::realization::All; + use crate::types as dav; + use crate::versioningtypes as vers; + use crate::xml::Node; + use crate::xml::Reader; + use tokio::io::AsyncWriteExt; + + async fn serialize_deserialize>(src: &T) { + let mut buffer = Vec::new(); + let mut tokio_buffer = tokio::io::BufWriter::new(&mut buffer); + let q = quick_xml::writer::Writer::new_with_indent(&mut tokio_buffer, b' ', 4); + let ns_to_apply = vec![ + ("xmlns:D".into(), "DAV:".into()), + ("xmlns:C".into(), "urn:ietf:params:xml:ns:caldav".into()), + ]; + let mut writer = Writer { q, ns_to_apply }; + + src.qwrite(&mut writer).await.expect("xml serialization"); + tokio_buffer.flush().await.expect("tokio buffer flush"); + let got = std::str::from_utf8(buffer.as_slice()).unwrap(); + + // deserialize + let mut rdr = Reader::new(quick_xml::NsReader::from_reader(got.as_bytes())) + .await + .unwrap(); + let res = rdr.find().await.unwrap(); + + // check + assert_eq!(src, &res); + } + + #[tokio::test] + async fn sync_level() { + serialize_deserialize(&SyncLevel::One).await; + serialize_deserialize(&SyncLevel::Infinite).await; + } + + #[tokio::test] + async fn sync_token_request() { + serialize_deserialize(&SyncTokenRequest::InitialSync).await; + serialize_deserialize(&SyncTokenRequest::IncrementalSync( + "http://example.com/ns/sync/1232".into(), + )) + .await; + } + + #[tokio::test] + async fn sync_token() { + serialize_deserialize(&SyncToken("http://example.com/ns/sync/1232".into())).await; + } + + #[tokio::test] + async fn sync_collection() { + serialize_deserialize(&SyncCollection:: { + sync_token: SyncTokenRequest::IncrementalSync("http://example.com/ns/sync/1232".into()), + sync_level: SyncLevel::One, + limit: Some(vers::Limit(vers::NResults(100))), + prop: dav::PropName(vec![dav::PropertyRequest::GetEtag]), + }) + .await; + + serialize_deserialize(&SyncCollection:: { + sync_token: SyncTokenRequest::InitialSync, + sync_level: SyncLevel::Infinite, + limit: None, + prop: dav::PropName(vec![dav::PropertyRequest::GetEtag]), + }) + .await; + } +} diff --git a/aero-dav/src/synctypes.rs b/aero-dav/src/synctypes.rs new file mode 100644 index 0000000..a2f40bd --- /dev/null +++ b/aero-dav/src/synctypes.rs @@ -0,0 +1,68 @@ +use super::types as dav; +use super::versioningtypes as vers; + +// RFC 6578 +// https://datatracker.ietf.org/doc/html/rfc6578 + +//@FIXME add SyncTokenRequest to PropertyRequest +//@FIXME add SyncToken to Property +//@FIXME add SyncToken to Multistatus + +/// Name: sync-collection +/// +/// Namespace: DAV: +/// +/// Purpose: WebDAV report used to synchronize data between client and +/// server. +/// +/// Description: See Section 3. +/// +/// +/// +/// +/// + +#[derive(Debug, PartialEq, Clone)] +pub struct SyncCollection { + pub sync_token: SyncTokenRequest, + pub sync_level: SyncLevel, + pub limit: Option, + pub prop: dav::PropName, +} + +/// Name: sync-token +/// +/// Namespace: DAV: +/// +/// Purpose: The synchronization token provided by the server and +/// returned by the client. +/// +/// Description: See Section 3. 
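// A minimal sketch, not part of the committed patch: the request-side value a
// DAV sync-collection REPORT maps onto, mirroring the `sync_collection`
// round-trip test above. The `aero_dav::*` module paths are assumed here.
use aero_dav::realization::All;
use aero_dav::synctypes::{SyncCollection, SyncLevel, SyncTokenRequest};
use aero_dav::types as dav;
use aero_dav::versioningtypes as vers;

fn incremental_sync_request() -> SyncCollection<All> {
    // Equivalent to a <D:sync-collection> body carrying a previously returned
    // <D:sync-token>, a <D:sync-level> of 1, an optional <D:limit>/<D:nresults>,
    // and the <D:prop> list the client wants back (here only <D:getetag/>).
    SyncCollection::<All> {
        sync_token: SyncTokenRequest::IncrementalSync("http://example.com/ns/sync/1232".into()),
        sync_level: SyncLevel::One,
        limit: Some(vers::Limit(vers::NResults(100))),
        prop: dav::PropName(vec![dav::PropertyRequest::GetEtag]),
    }
}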
+/// +/// +/// +/// +/// Used by multistatus +#[derive(Debug, PartialEq, Clone)] +pub struct SyncToken(pub String); + +/// Used by propfind and report sync-collection +#[derive(Debug, PartialEq, Clone)] +pub enum SyncTokenRequest { + InitialSync, + IncrementalSync(String), +} + +/// Name: sync-level +/// +/// Namespace: DAV: +/// +/// Purpose: Indicates the "scope" of the synchronization report +/// request. +/// +/// Description: See Section 3.3. +#[derive(Debug, PartialEq, Clone)] +pub enum SyncLevel { + One, + Infinite, +} diff --git a/aero-dav/src/types.rs b/aero-dav/src/types.rs index d5466da..6039a26 100644 --- a/aero-dav/src/types.rs +++ b/aero-dav/src/types.rs @@ -11,6 +11,7 @@ pub trait Extension: std::fmt::Debug + PartialEq + Clone { type Property: xml::Node; type PropertyRequest: xml::Node; type ResourceType: xml::Node; + type ReportType: xml::Node; } /// 14.1. activelock XML Element @@ -328,6 +329,10 @@ pub enum LockType { /// response descriptions contained within the responses. /// /// +/// +/// In WebDAV sync (rfc6578), multistatus is extended: +/// +/// #[derive(Debug, PartialEq, Clone)] pub struct Multistatus { pub responses: Vec>, diff --git a/aero-dav/src/versioningdecoder.rs b/aero-dav/src/versioningdecoder.rs new file mode 100644 index 0000000..4816cf1 --- /dev/null +++ b/aero-dav/src/versioningdecoder.rs @@ -0,0 +1,62 @@ +use super::error::ParsingError; +use super::types as dav; +use super::versioningtypes::*; +use super::xml::{IRead, QRead, Reader, DAV_URN}; + +impl QRead> for Report { + async fn qread(xml: &mut Reader) -> Result { + //@FIXME VersionTree not implemented + //@FIXME ExpandTree not implemented + + E::ReportType::qread(xml).await.map(Report::Extension) + } +} + +impl QRead for Limit { + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "limit").await?; + let nres = xml.find().await?; + xml.close().await?; + Ok(Limit(nres)) + } +} + +impl QRead for NResults { + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "nresults").await?; + let sz = xml.tag_string().await?.parse::()?; + xml.close().await?; + Ok(NResults(sz)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::xml::Node; + + async fn deserialize>(src: &str) -> T { + let mut rdr = Reader::new(quick_xml::NsReader::from_reader(src.as_bytes())) + .await + .unwrap(); + rdr.find().await.unwrap() + } + + #[tokio::test] + async fn nresults() { + let expected = NResults(100); + let src = r#"100"#; + let got = deserialize::(src).await; + assert_eq!(got, expected); + } + + #[tokio::test] + async fn limit() { + let expected = Limit(NResults(1024)); + let src = r#" + 1024 + "#; + let got = deserialize::(src).await; + assert_eq!(got, expected); + } +} diff --git a/aero-dav/src/versioningencoder.rs b/aero-dav/src/versioningencoder.rs new file mode 100644 index 0000000..bd40f1b --- /dev/null +++ b/aero-dav/src/versioningencoder.rs @@ -0,0 +1,81 @@ +use quick_xml::events::{BytesText, Event}; +use quick_xml::Error as QError; + +use super::types::Extension; +use super::versioningtypes::*; +use super::xml::{IWrite, QWrite, Writer}; + +impl QWrite for Report { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + match self { + Report::VersionTree => unimplemented!(), + Report::ExpandProperty => unimplemented!(), + Report::Extension(inner) => inner.qwrite(xml).await, + } + } +} + +impl QWrite for Limit { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("limit"); + let end = start.to_end(); + + 
xml.q.write_event_async(Event::Start(start.clone())).await?; + self.0.qwrite(xml).await?; + xml.q.write_event_async(Event::End(end)).await + } +} + +impl QWrite for NResults { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("nresults"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + xml.q + .write_event_async(Event::Text(BytesText::new(&format!("{}", self.0)))) + .await?; + xml.q.write_event_async(Event::End(end)).await + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::xml::Node; + use crate::xml::Reader; + use tokio::io::AsyncWriteExt; + + async fn serialize_deserialize>(src: &T) -> T { + let mut buffer = Vec::new(); + let mut tokio_buffer = tokio::io::BufWriter::new(&mut buffer); + let q = quick_xml::writer::Writer::new_with_indent(&mut tokio_buffer, b' ', 4); + let ns_to_apply = vec![ + ("xmlns:D".into(), "DAV:".into()), + ("xmlns:C".into(), "urn:ietf:params:xml:ns:caldav".into()), + ]; + let mut writer = Writer { q, ns_to_apply }; + + src.qwrite(&mut writer).await.expect("xml serialization"); + tokio_buffer.flush().await.expect("tokio buffer flush"); + let got = std::str::from_utf8(buffer.as_slice()).unwrap(); + + // deserialize + let mut rdr = Reader::new(quick_xml::NsReader::from_reader(got.as_bytes())) + .await + .unwrap(); + rdr.find().await.unwrap() + } + + #[tokio::test] + async fn nresults() { + let orig = NResults(100); + assert_eq!(orig, serialize_deserialize(&orig).await); + } + + #[tokio::test] + async fn limit() { + let orig = Limit(NResults(1024)); + assert_eq!(orig, serialize_deserialize(&orig).await); + } +} diff --git a/aero-dav/src/versioningtypes.rs b/aero-dav/src/versioningtypes.rs index 6c1c204..ba64b05 100644 --- a/aero-dav/src/versioningtypes.rs +++ b/aero-dav/src/versioningtypes.rs @@ -1,3 +1,39 @@ +use super::types as dav; + //@FIXME required for a full DAV implementation // See section 7.1 of the CalDAV RFC // It seems it's mainly due to the fact that the REPORT method is re-used. +// https://datatracker.ietf.org/doc/html/rfc4791#section-7.1 +// +// Defines (required by CalDAV): +// - REPORT method +// - expand-property root report method +// +// Defines (required by Sync): +// - limit, nresults +// - supported-report-set + +// This property identifies the reports that are supported by the +// resource. 
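// A minimal sketch, not part of the committed patch: how a collection could
// advertise the sync-collection report through DAV:supported-report-set, using
// the Property/SupportedReport/ReportName types added later in this series.
// The `aero_dav::*` module paths are assumed here.
use aero_dav::realization::{self, All};
use aero_dav::synctypes as sync;
use aero_dav::versioningtypes as vers;

fn advertised_reports() -> vers::Property<All> {
    // Serializes roughly as:
    //   <D:supported-report-set>
    //     <D:supported-report><D:report><D:sync-collection/></D:report></D:supported-report>
    //   </D:supported-report-set>
    vers::Property::SupportedReportSet(vec![vers::SupportedReport(
        vers::ReportName::Extension(realization::ReportTypeName::Sync(
            sync::ReportTypeName::SyncCollection,
        )),
    )])
}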
+// +// +// +// +// ANY value: a report element type + +#[derive(Debug, PartialEq, Clone)] +pub enum Report { + VersionTree, // Not yet implemented + ExpandProperty, // Not yet implemented + Extension(E::ReportType), +} + +/// Limit +/// +#[derive(Debug, PartialEq, Clone)] +pub struct Limit(pub NResults); + +/// NResults +/// +#[derive(Debug, PartialEq, Clone)] +pub struct NResults(pub u64); diff --git a/aero-proto/src/dav/controller.rs b/aero-proto/src/dav/controller.rs index eeb6d43..1b7f7ee 100644 --- a/aero-proto/src/dav/controller.rs +++ b/aero-proto/src/dav/controller.rs @@ -9,8 +9,9 @@ use hyper::{body::Bytes, Request, Response}; use aero_collections::user::User; use aero_dav::caltypes as cal; -use aero_dav::realization::All; +use aero_dav::realization::{self, All}; use aero_dav::types as dav; +use aero_dav::versioningtypes as vers; use aero_ical::query::is_component_match; use crate::dav::codec; @@ -95,7 +96,7 @@ impl Controller { async fn report(self) -> Result { let status = hyper::StatusCode::from_u16(207)?; - let report = match deserialize::>(self.req).await { + let cal_report = match deserialize::>(self.req).await { Ok(v) => v, Err(e) => { tracing::error!(err=?e, "unable to decode REPORT body"); @@ -110,8 +111,8 @@ impl Controller { let calprop: Option>; // Extracting request information - match report { - cal::Report::Multiget(m) => { + match cal_report { + vers::Report::Extension(realization::ReportType::Cal(cal::ReportType::Multiget(m))) => { // Multiget is really like a propfind where Depth: 0|1|Infinity is replaced by an arbitrary // list of URLs // Getting the list of nodes @@ -136,13 +137,16 @@ impl Controller { } calprop = m.selector; } - cal::Report::Query(q) => { + vers::Report::Extension(realization::ReportType::Cal(cal::ReportType::Query(q))) => { calprop = q.selector; ok_node = apply_filter(self.node.children(&self.user).await, &q.filter) .try_collect() .await?; } - cal::Report::FreeBusy(_) => { + vers::Report::Extension(realization::ReportType::Sync(_sync_col)) => { + todo!() + } + _ => { return Ok(Response::builder() .status(501) .body(text_body("Not implemented"))?) diff --git a/aerogramme/tests/behavior.rs b/aerogramme/tests/behavior.rs index c88583f..1097fe7 100644 --- a/aerogramme/tests/behavior.rs +++ b/aerogramme/tests/behavior.rs @@ -691,7 +691,7 @@ fn rfc4791_webdav_caldav() { .send()?; //@FIXME not yet supported. returns DAV: 1 ; expects DAV: 1 calendar-access // Not used by any client I know, so not implementing it now. 
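// A minimal sketch, not part of the committed patch: after this change a REPORT
// handler branches on the unified `vers::Report<All>` instead of `cal::Report`,
// mirroring the dispatch in aero-proto/src/dav/controller.rs above. The module
// paths are assumed here.
use aero_dav::caltypes as cal;
use aero_dav::realization::{self, All};
use aero_dav::versioningtypes as vers;

fn describe_report(report: &vers::Report<All>) -> &'static str {
    match report {
        vers::Report::VersionTree | vers::Report::ExpandProperty => "not implemented",
        vers::Report::Extension(realization::ReportType::Cal(cal::ReportType::Query(_))) => {
            "calendar-query"
        }
        vers::Report::Extension(realization::ReportType::Cal(cal::ReportType::Multiget(_))) => {
            "calendar-multiget"
        }
        vers::Report::Extension(realization::ReportType::Cal(cal::ReportType::FreeBusy(_))) => {
            "free-busy-query"
        }
        vers::Report::Extension(realization::ReportType::Sync(_)) => "sync-collection",
    }
}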
- + // --- REPORT calendar-multiget --- let cal_query = r#" -- cgit v1.2.3 From 1c9d2eab6976993c85eaa3e8eb4f1c433258fd16 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Tue, 28 May 2024 12:38:22 +0200 Subject: parse property for sync + versioning --- Cargo.lock | 1 + aero-dav/Cargo.toml | 1 + aero-dav/src/caldecoder.rs | 22 +++++++++++ aero-dav/src/calencoder.rs | 19 +++++++++ aero-dav/src/caltypes.rs | 7 ++++ aero-dav/src/realization.rs | 73 +++++++++++++++++++++++++++++++---- aero-dav/src/syncdecoder.rs | 68 +++++++++++++++++++++++++++++++- aero-dav/src/syncencoder.rs | 57 ++++++++++++++++++++++++++- aero-dav/src/synctypes.rs | 17 +++++++- aero-dav/src/types.rs | 1 + aero-dav/src/versioningdecoder.rs | 81 +++++++++++++++++++++++++++++++++++++-- aero-dav/src/versioningencoder.rs | 62 ++++++++++++++++++++++++++++++ aero-dav/src/versioningtypes.rs | 20 ++++++++++ 13 files changed, 414 insertions(+), 15 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index d22a5fc..0a159ae 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -70,6 +70,7 @@ dependencies = [ "http 1.1.0", "quick-xml", "tokio", + "tracing", ] [[package]] diff --git a/aero-dav/Cargo.toml b/aero-dav/Cargo.toml index 92929b1..c847f68 100644 --- a/aero-dav/Cargo.toml +++ b/aero-dav/Cargo.toml @@ -12,3 +12,4 @@ http.workspace = true chrono.workspace = true tokio.workspace = true futures.workspace = true +tracing.workspace = true diff --git a/aero-dav/src/caldecoder.rs b/aero-dav/src/caldecoder.rs index ff79845..0df867f 100644 --- a/aero-dav/src/caldecoder.rs +++ b/aero-dav/src/caldecoder.rs @@ -104,6 +104,28 @@ impl QRead for FreeBusyQuery { } } +impl QRead for ReportTypeName { + async fn qread(xml: &mut Reader) -> Result { + if xml.maybe_open(DAV_URN, "calendar-query").await?.is_some() { + xml.close().await?; + return Ok(Self::Query); + } + if xml + .maybe_open(DAV_URN, "calendar-multiget") + .await? 
+ .is_some() + { + xml.close().await?; + return Ok(Self::Multiget); + } + if xml.maybe_open(DAV_URN, "free-busy-query").await?.is_some() { + xml.close().await?; + return Ok(Self::FreeBusy); + } + Err(ParsingError::Recoverable) + } +} + // ---- EXTENSIONS --- impl QRead for Violation { async fn qread(xml: &mut Reader) -> Result { diff --git a/aero-dav/src/calencoder.rs b/aero-dav/src/calencoder.rs index 48c93d0..a41747f 100644 --- a/aero-dav/src/calencoder.rs +++ b/aero-dav/src/calencoder.rs @@ -33,6 +33,25 @@ impl QWrite for MkCalendarResponse { } // ----------------------- REPORT METHOD ------------------------------------- +impl QWrite for ReportTypeName { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + match self { + Self::Query => { + let start = xml.create_dav_element("calendar-query"); + xml.q.write_event_async(Event::Empty(start)).await + } + Self::Multiget => { + let start = xml.create_dav_element("calendar-multiget"); + xml.q.write_event_async(Event::Empty(start)).await + } + Self::FreeBusy => { + let start = xml.create_dav_element("free-busy-query"); + xml.q.write_event_async(Event::Empty(start)).await + } + } + } +} + impl QWrite for ReportType { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { match self { diff --git a/aero-dav/src/caltypes.rs b/aero-dav/src/caltypes.rs index a763653..a4f6fef 100644 --- a/aero-dav/src/caltypes.rs +++ b/aero-dav/src/caltypes.rs @@ -50,6 +50,13 @@ pub struct MkCalendar(pub dav::Set); pub struct MkCalendarResponse(pub Vec>); // --- (REPORT PART) --- +#[derive(Debug, PartialEq, Clone)] +pub enum ReportTypeName { + Query, + Multiget, + FreeBusy, +} + #[derive(Debug, PartialEq, Clone)] pub enum ReportType { Query(CalendarQuery), diff --git a/aero-dav/src/realization.rs b/aero-dav/src/realization.rs index 0f3aec4..b37e0f1 100644 --- a/aero-dav/src/realization.rs +++ b/aero-dav/src/realization.rs @@ -3,6 +3,7 @@ use super::caltypes as cal; use super::error; use super::synctypes as sync; use super::types as dav; +use super::versioningtypes as vers; use super::xml; #[derive(Debug, PartialEq, Clone)] @@ -33,6 +34,7 @@ impl dav::Extension for Core { type PropertyRequest = Disabled; type ResourceType = Disabled; type ReportType = Disabled; + type ReportTypeName = Disabled; } // WebDAV with the base Calendar implementation (RFC4791) @@ -44,6 +46,7 @@ impl dav::Extension for Calendar { type PropertyRequest = cal::PropertyRequest; type ResourceType = cal::ResourceType; type ReportType = cal::ReportType; + type ReportTypeName = cal::ReportTypeName; } // ACL @@ -55,6 +58,7 @@ impl dav::Extension for Acl { type PropertyRequest = acl::PropertyRequest; type ResourceType = acl::ResourceType; type ReportType = Disabled; + type ReportTypeName = Disabled; } // All merged @@ -62,27 +66,38 @@ impl dav::Extension for Acl { pub struct All {} impl dav::Extension for All { type Error = cal::Violation; - type Property = Property; + type Property = Property; type PropertyRequest = PropertyRequest; type ResourceType = ResourceType; type ReportType = ReportType; + type ReportTypeName = ReportTypeName; } #[derive(Debug, PartialEq, Clone)] -pub enum Property { +pub enum Property { Cal(cal::Property), Acl(acl::Property), + Sync(sync::Property), + Vers(vers::Property), } -impl xml::QRead for Property { +impl xml::QRead> for Property { async fn qread(xml: &mut xml::Reader) -> Result { match cal::Property::qread(xml).await { Err(error::ParsingError::Recoverable) => (), - otherwise => return otherwise.map(Property::Cal), + otherwise => return 
otherwise.map(Property::::Cal), } - acl::Property::qread(xml).await.map(Property::Acl) + match acl::Property::qread(xml).await { + Err(error::ParsingError::Recoverable) => (), + otherwise => return otherwise.map(Property::Acl), + } + match sync::Property::qread(xml).await { + Err(error::ParsingError::Recoverable) => (), + otherwise => return otherwise.map(Property::Sync), + } + vers::Property::qread(xml).await.map(Property::Vers) } } -impl xml::QWrite for Property { +impl xml::QWrite for Property { async fn qwrite( &self, xml: &mut xml::Writer, @@ -90,6 +105,8 @@ impl xml::QWrite for Property { match self { Self::Cal(c) => c.qwrite(xml).await, Self::Acl(a) => a.qwrite(xml).await, + Self::Sync(s) => s.qwrite(xml).await, + Self::Vers(v) => v.qwrite(xml).await, } } } @@ -98,6 +115,8 @@ impl xml::QWrite for Property { pub enum PropertyRequest { Cal(cal::PropertyRequest), Acl(acl::PropertyRequest), + Sync(sync::PropertyRequest), + Vers(vers::PropertyRequest), } impl xml::QRead for PropertyRequest { async fn qread(xml: &mut xml::Reader) -> Result { @@ -105,9 +124,17 @@ impl xml::QRead for PropertyRequest { Err(error::ParsingError::Recoverable) => (), otherwise => return otherwise.map(PropertyRequest::Cal), } - acl::PropertyRequest::qread(xml) + match acl::PropertyRequest::qread(xml).await { + Err(error::ParsingError::Recoverable) => (), + otherwise => return otherwise.map(PropertyRequest::Acl), + } + match sync::PropertyRequest::qread(xml).await { + Err(error::ParsingError::Recoverable) => (), + otherwise => return otherwise.map(PropertyRequest::Sync), + } + vers::PropertyRequest::qread(xml) .await - .map(PropertyRequest::Acl) + .map(PropertyRequest::Vers) } } impl xml::QWrite for PropertyRequest { @@ -118,6 +145,8 @@ impl xml::QWrite for PropertyRequest { match self { Self::Cal(c) => c.qwrite(xml).await, Self::Acl(a) => a.qwrite(xml).await, + Self::Sync(s) => s.qwrite(xml).await, + Self::Vers(v) => v.qwrite(xml).await, } } } @@ -175,3 +204,31 @@ impl xml::QWrite for ReportType { } } } + +#[derive(Debug, PartialEq, Clone)] +pub enum ReportTypeName { + Cal(cal::ReportTypeName), + Sync(sync::ReportTypeName), +} +impl xml::QRead for ReportTypeName { + async fn qread(xml: &mut xml::Reader) -> Result { + match cal::ReportTypeName::qread(xml).await { + Err(error::ParsingError::Recoverable) => (), + otherwise => return otherwise.map(ReportTypeName::Cal), + } + sync::ReportTypeName::qread(xml) + .await + .map(ReportTypeName::Sync) + } +} +impl xml::QWrite for ReportTypeName { + async fn qwrite( + &self, + xml: &mut xml::Writer, + ) -> Result<(), quick_xml::Error> { + match self { + Self::Cal(c) => c.qwrite(xml).await, + Self::Sync(s) => s.qwrite(xml).await, + } + } +} diff --git a/aero-dav/src/syncdecoder.rs b/aero-dav/src/syncdecoder.rs index 8e035ab..d81f8d4 100644 --- a/aero-dav/src/syncdecoder.rs +++ b/aero-dav/src/syncdecoder.rs @@ -5,6 +5,34 @@ use super::synctypes::*; use super::types as dav; use super::xml::{IRead, QRead, Reader, DAV_URN}; +impl QRead for PropertyRequest { + async fn qread(xml: &mut Reader) -> Result { + let mut dirty = false; + let mut m_cdr = None; + xml.maybe_read(&mut m_cdr, &mut dirty).await?; + m_cdr.ok_or(ParsingError::Recoverable).map(Self::SyncToken) + } +} + +impl QRead for Property { + async fn qread(xml: &mut Reader) -> Result { + let mut dirty = false; + let mut m_cdr = None; + xml.maybe_read(&mut m_cdr, &mut dirty).await?; + m_cdr.ok_or(ParsingError::Recoverable).map(Self::SyncToken) + } +} + +impl QRead for ReportTypeName { + async fn qread(xml: &mut Reader) -> 
Result { + if xml.maybe_open(DAV_URN, "sync-collection").await?.is_some() { + xml.close().await?; + return Ok(Self::SyncCollection); + } + Err(ParsingError::Recoverable) + } +} + impl QRead> for SyncCollection { async fn qread(xml: &mut Reader) -> Result { xml.open(DAV_URN, "sync-collection").await?; @@ -75,7 +103,7 @@ impl QRead for SyncLevel { #[cfg(test)] mod tests { use super::*; - use crate::realization::All; + use crate::realization::{self, All}; use crate::types as dav; use crate::versioningtypes as vers; use crate::xml::Node; @@ -172,4 +200,42 @@ mod tests { assert_eq!(got, expected); } } + + #[tokio::test] + async fn prop_req() { + let expected = dav::PropName::(vec![dav::PropertyRequest::Extension( + realization::PropertyRequest::Sync(PropertyRequest::SyncToken( + SyncTokenRequest::InitialSync, + )), + )]); + let src = r#""#; + let got = deserialize::>(src).await; + assert_eq!(got, expected); + } + + #[tokio::test] + async fn prop_val() { + let expected = dav::PropValue::(vec![ + dav::Property::Extension(realization::Property::Sync(Property::SyncToken(SyncToken( + "http://example.com/ns/sync/1232".into(), + )))), + dav::Property::Extension(realization::Property::Vers( + vers::Property::SupportedReportSet(vec![vers::SupportedReport( + vers::ReportName::Extension(realization::ReportTypeName::Sync( + ReportTypeName::SyncCollection, + )), + )]), + )), + ]); + let src = r#" + http://example.com/ns/sync/1232 + + + + + + "#; + let got = deserialize::>(src).await; + assert_eq!(got, expected); + } } diff --git a/aero-dav/src/syncencoder.rs b/aero-dav/src/syncencoder.rs index 22b288b..59ad6cc 100644 --- a/aero-dav/src/syncencoder.rs +++ b/aero-dav/src/syncencoder.rs @@ -5,6 +5,33 @@ use super::synctypes::*; use super::types::Extension; use super::xml::{IWrite, QWrite, Writer}; +impl QWrite for Property { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + match self { + Self::SyncToken(token) => token.qwrite(xml).await, + } + } +} + +impl QWrite for PropertyRequest { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + match self { + Self::SyncToken(token) => token.qwrite(xml).await, + } + } +} + +impl QWrite for ReportTypeName { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + match self { + Self::SyncCollection => { + let start = xml.create_dav_element("sync-collection"); + xml.q.write_event_async(Event::Empty(start)).await + } + } + } +} + impl QWrite for SyncCollection { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { let start = xml.create_dav_element("sync-collection"); @@ -72,7 +99,7 @@ impl QWrite for SyncLevel { #[cfg(test)] mod tests { use super::*; - use crate::realization::All; + use crate::realization::{self, All}; use crate::types as dav; use crate::versioningtypes as vers; use crate::xml::Node; @@ -92,6 +119,7 @@ mod tests { src.qwrite(&mut writer).await.expect("xml serialization"); tokio_buffer.flush().await.expect("tokio buffer flush"); let got = std::str::from_utf8(buffer.as_slice()).unwrap(); + println!("{:?}", got); // deserialize let mut rdr = Reader::new(quick_xml::NsReader::from_reader(got.as_bytes())) @@ -141,4 +169,31 @@ mod tests { }) .await; } + + #[tokio::test] + async fn prop_req() { + serialize_deserialize(&dav::PropName::(vec![ + dav::PropertyRequest::Extension(realization::PropertyRequest::Sync( + PropertyRequest::SyncToken(SyncTokenRequest::InitialSync), + )), + ])) + .await; + } + + #[tokio::test] + async fn prop_val() { + serialize_deserialize(&dav::PropValue::(vec![ + 
dav::Property::Extension(realization::Property::Sync(Property::SyncToken(SyncToken( + "http://example.com/ns/sync/1232".into(), + )))), + dav::Property::Extension(realization::Property::Vers( + vers::Property::SupportedReportSet(vec![vers::SupportedReport( + vers::ReportName::Extension(realization::ReportTypeName::Sync( + ReportTypeName::SyncCollection, + )), + )]), + )), + ])) + .await; + } } diff --git a/aero-dav/src/synctypes.rs b/aero-dav/src/synctypes.rs index a2f40bd..c127962 100644 --- a/aero-dav/src/synctypes.rs +++ b/aero-dav/src/synctypes.rs @@ -4,8 +4,21 @@ use super::versioningtypes as vers; // RFC 6578 // https://datatracker.ietf.org/doc/html/rfc6578 -//@FIXME add SyncTokenRequest to PropertyRequest -//@FIXME add SyncToken to Property +#[derive(Debug, PartialEq, Clone)] +pub enum PropertyRequest { + SyncToken(SyncTokenRequest), +} + +#[derive(Debug, PartialEq, Clone)] +pub enum Property { + SyncToken(SyncToken), +} + +#[derive(Debug, PartialEq, Clone)] +pub enum ReportTypeName { + SyncCollection, +} + //@FIXME add SyncToken to Multistatus /// Name: sync-collection diff --git a/aero-dav/src/types.rs b/aero-dav/src/types.rs index 6039a26..92c63e0 100644 --- a/aero-dav/src/types.rs +++ b/aero-dav/src/types.rs @@ -12,6 +12,7 @@ pub trait Extension: std::fmt::Debug + PartialEq + Clone { type PropertyRequest: xml::Node; type ResourceType: xml::Node; type ReportType: xml::Node; + type ReportTypeName: xml::Node; } /// 14.1. activelock XML Element diff --git a/aero-dav/src/versioningdecoder.rs b/aero-dav/src/versioningdecoder.rs index 4816cf1..9e58d8c 100644 --- a/aero-dav/src/versioningdecoder.rs +++ b/aero-dav/src/versioningdecoder.rs @@ -3,12 +3,87 @@ use super::types as dav; use super::versioningtypes::*; use super::xml::{IRead, QRead, Reader, DAV_URN}; +// -- extensions --- +impl QRead for PropertyRequest { + async fn qread(xml: &mut Reader) -> Result { + if xml + .maybe_open(DAV_URN, "supported-report-set") + .await? + .is_some() + { + xml.close().await?; + return Ok(Self::SupportedReportSet); + } + return Err(ParsingError::Recoverable); + } +} + +impl QRead> for Property { + async fn qread(xml: &mut Reader) -> Result { + if xml + .maybe_open(DAV_URN, "supported-report-set") + .await? + .is_some() + { + let supported_reports = xml.collect().await?; + xml.close().await?; + return Ok(Property::SupportedReportSet(supported_reports)); + } + Err(ParsingError::Recoverable) + } +} + +impl QRead> for SupportedReport { + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "supported-report").await?; + let r = xml.find().await?; + xml.close().await?; + Ok(SupportedReport(r)) + } +} + +impl QRead> for ReportName { + async fn qread(xml: &mut Reader) -> Result { + xml.open(DAV_URN, "report").await?; + + let final_result = if xml.maybe_open(DAV_URN, "version-tree").await?.is_some() { + xml.close().await?; + Ok(ReportName::VersionTree) + } else if xml.maybe_open(DAV_URN, "expand-property").await?.is_some() { + xml.close().await?; + Ok(ReportName::ExpandProperty) + } else { + let x = match xml.maybe_find().await? 
{ + Some(v) => v, + None => return Err(ParsingError::MissingChild), + }; + Ok(ReportName::Extension(x)) + //E::ReportTypeName::qread(xml).await.map(ReportName::Extension) + }; + + xml.close().await?; + final_result + } +} + impl QRead> for Report { async fn qread(xml: &mut Reader) -> Result { - //@FIXME VersionTree not implemented - //@FIXME ExpandTree not implemented + xml.open(DAV_URN, "report").await?; - E::ReportType::qread(xml).await.map(Report::Extension) + let final_result = if xml.maybe_open(DAV_URN, "version-tree").await?.is_some() { + xml.close().await?; + tracing::warn!("version-tree is not implemented, skipping"); + Ok(Report::VersionTree) + } else if xml.maybe_open(DAV_URN, "expand-property").await?.is_some() { + xml.close().await?; + tracing::warn!("expand-property is not implemented, skipping"); + Ok(Report::ExpandProperty) + } else { + E::ReportType::qread(xml).await.map(Report::Extension) + }; + + xml.close().await?; + final_result } } diff --git a/aero-dav/src/versioningencoder.rs b/aero-dav/src/versioningencoder.rs index bd40f1b..c061f07 100644 --- a/aero-dav/src/versioningencoder.rs +++ b/aero-dav/src/versioningencoder.rs @@ -5,6 +5,67 @@ use super::types::Extension; use super::versioningtypes::*; use super::xml::{IWrite, QWrite, Writer}; +// --- extensions to PROP +impl QWrite for PropertyRequest { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + match self { + Self::SupportedReportSet => { + let start = xml.create_dav_element("supported-report-set"); + xml.q.write_event_async(Event::Empty(start)).await + } + } + } +} + +impl QWrite for Property { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + match self { + Self::SupportedReportSet(set) => { + let start = xml.create_dav_element("supported-report-set"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + for v in set.iter() { + v.qwrite(xml).await?; + } + xml.q.write_event_async(Event::End(end)).await + } + } + } +} + +impl QWrite for SupportedReport { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("supported-report"); + let end = start.to_end(); + xml.q.write_event_async(Event::Start(start.clone())).await?; + self.0.qwrite(xml).await?; + xml.q.write_event_async(Event::End(end)).await + } +} + +impl QWrite for ReportName { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + let start = xml.create_dav_element("report"); + let end = start.to_end(); + + xml.q.write_event_async(Event::Start(start.clone())).await?; + match self { + Self::VersionTree => { + let start = xml.create_dav_element("version-tree"); + xml.q.write_event_async(Event::Empty(start)).await?; + } + Self::ExpandProperty => { + let start = xml.create_dav_element("expand-property"); + xml.q.write_event_async(Event::Empty(start)).await?; + } + Self::Extension(ext) => ext.qwrite(xml).await?, + }; + xml.q.write_event_async(Event::End(end)).await + } +} + +// --- root REPORT object --- impl QWrite for Report { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { match self { @@ -15,6 +76,7 @@ impl QWrite for Report { } } +// --- limit REPORT parameter --- impl QWrite for Limit { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { let start = xml.create_dav_element("limit"); diff --git a/aero-dav/src/versioningtypes.rs b/aero-dav/src/versioningtypes.rs index ba64b05..1f8d1cf 100644 --- a/aero-dav/src/versioningtypes.rs +++ b/aero-dav/src/versioningtypes.rs @@ -21,6 +21,26 @@ use 
super::types as dav; // // ANY value: a report element type +#[derive(Debug, PartialEq, Clone)] +pub enum PropertyRequest { + SupportedReportSet, +} + +#[derive(Debug, PartialEq, Clone)] +pub enum Property { + SupportedReportSet(Vec>), +} + +#[derive(Debug, PartialEq, Clone)] +pub struct SupportedReport(pub ReportName); + +#[derive(Debug, PartialEq, Clone)] +pub enum ReportName { + VersionTree, + ExpandProperty, + Extension(E::ReportTypeName), +} + #[derive(Debug, PartialEq, Clone)] pub enum Report { VersionTree, // Not yet implemented -- cgit v1.2.3 From 10dac17ce173e584fc857f717f645c05b7cd42ac Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Tue, 28 May 2024 12:43:20 +0200 Subject: fix report --- aero-dav/src/versioningdecoder.rs | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/aero-dav/src/versioningdecoder.rs b/aero-dav/src/versioningdecoder.rs index 9e58d8c..c28c0d5 100644 --- a/aero-dav/src/versioningdecoder.rs +++ b/aero-dav/src/versioningdecoder.rs @@ -68,9 +68,7 @@ impl QRead> for ReportName { impl QRead> for Report { async fn qread(xml: &mut Reader) -> Result { - xml.open(DAV_URN, "report").await?; - - let final_result = if xml.maybe_open(DAV_URN, "version-tree").await?.is_some() { + if xml.maybe_open(DAV_URN, "version-tree").await?.is_some() { xml.close().await?; tracing::warn!("version-tree is not implemented, skipping"); Ok(Report::VersionTree) @@ -80,10 +78,7 @@ impl QRead> for Report { Ok(Report::ExpandProperty) } else { E::ReportType::qread(xml).await.map(Report::Extension) - }; - - xml.close().await?; - final_result + } } } -- cgit v1.2.3 From 410d663a5ecbb6ca46ac049227afdaaaf50ac27f Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Tue, 28 May 2024 13:04:46 +0200 Subject: add a multistatus extension entrypoint --- aero-dav/src/caldecoder.rs | 1 + aero-dav/src/calencoder.rs | 1 + aero-dav/src/decoder.rs | 5 +++++ aero-dav/src/encoder.rs | 7 +++++++ aero-dav/src/realization.rs | 4 ++++ aero-dav/src/types.rs | 2 ++ aero-proto/src/dav/controller.rs | 1 + 7 files changed, 21 insertions(+) diff --git a/aero-dav/src/caldecoder.rs b/aero-dav/src/caldecoder.rs index 0df867f..9ed783a 100644 --- a/aero-dav/src/caldecoder.rs +++ b/aero-dav/src/caldecoder.rs @@ -1198,6 +1198,7 @@ END:VCALENDAR]]> #[tokio::test] async fn rfc_calendar_query_res() { let expected = dav::Multistatus:: { + extension: None, responses: vec![ dav::Response { status_or_propstat: dav::StatusOrPropstat::PropStat( diff --git a/aero-dav/src/calencoder.rs b/aero-dav/src/calencoder.rs index a41747f..15df965 100644 --- a/aero-dav/src/calencoder.rs +++ b/aero-dav/src/calencoder.rs @@ -950,6 +950,7 @@ mod tests { #[tokio::test] async fn rfc_calendar_query1_res() { let got = serialize(&dav::Multistatus:: { + extension: None, responses: vec![ dav::Response { status_or_propstat: dav::StatusOrPropstat::PropStat( diff --git a/aero-dav/src/decoder.rs b/aero-dav/src/decoder.rs index bd724e8..bb64455 100644 --- a/aero-dav/src/decoder.rs +++ b/aero-dav/src/decoder.rs @@ -67,11 +67,13 @@ impl QRead> for Multistatus { xml.open(DAV_URN, "multistatus").await?; let mut responses = Vec::new(); let mut responsedescription = None; + let mut extension = None; loop { let mut dirty = false; xml.maybe_push(&mut responses, &mut dirty).await?; xml.maybe_read(&mut responsedescription, &mut dirty).await?; + xml.maybe_read(&mut extension, &mut dirty).await?; if !dirty { match xml.peek() { Event::End(_) => break, @@ -84,6 +86,7 @@ impl QRead> for Multistatus { Ok(Multistatus { responses, responsedescription, + 
extension, }) } } @@ -983,6 +986,7 @@ mod tests { }, ], responsedescription: None, + extension: None, } ); } @@ -1053,6 +1057,7 @@ mod tests { assert_eq!( got, Multistatus { + extension: None, responses: vec![ Response { status_or_propstat: StatusOrPropstat::PropStat( diff --git a/aero-dav/src/encoder.rs b/aero-dav/src/encoder.rs index fd87e95..6c77aa6 100644 --- a/aero-dav/src/encoder.rs +++ b/aero-dav/src/encoder.rs @@ -60,6 +60,9 @@ impl QWrite for Multistatus { if let Some(description) = &self.responsedescription { description.qwrite(xml).await?; } + if let Some(extension) = &self.extension { + extension.qwrite(xml).await?; + } xml.q.write_event_async(Event::End(end)).await?; Ok(()) @@ -752,6 +755,7 @@ mod tests { #[tokio::test] async fn basic_multistatus() { let orig = Multistatus:: { + extension: None, responses: vec![], responsedescription: Some(ResponseDescription("Hello world".into())), }; @@ -808,6 +812,7 @@ mod tests { #[tokio::test] async fn rfc_propname_res() { let orig = Multistatus:: { + extension: None, responses: vec![ Response { status_or_propstat: StatusOrPropstat::PropStat( @@ -916,6 +921,7 @@ mod tests { use chrono::{FixedOffset, TimeZone}; let orig = Multistatus:: { + extension: None, responses: vec![ Response { status_or_propstat: StatusOrPropstat::PropStat( @@ -1140,6 +1146,7 @@ mod tests { #[tokio::test] async fn rfc_delete_locked2() { let orig = Multistatus:: { + extension: None, responses: vec![Response { status_or_propstat: StatusOrPropstat::Status( vec![Href("http://www.example.com/container/resource3".into())], diff --git a/aero-dav/src/realization.rs b/aero-dav/src/realization.rs index b37e0f1..1e6f25e 100644 --- a/aero-dav/src/realization.rs +++ b/aero-dav/src/realization.rs @@ -35,6 +35,7 @@ impl dav::Extension for Core { type ResourceType = Disabled; type ReportType = Disabled; type ReportTypeName = Disabled; + type Multistatus = Disabled; } // WebDAV with the base Calendar implementation (RFC4791) @@ -47,6 +48,7 @@ impl dav::Extension for Calendar { type ResourceType = cal::ResourceType; type ReportType = cal::ReportType; type ReportTypeName = cal::ReportTypeName; + type Multistatus = Disabled; } // ACL @@ -59,6 +61,7 @@ impl dav::Extension for Acl { type ResourceType = acl::ResourceType; type ReportType = Disabled; type ReportTypeName = Disabled; + type Multistatus = Disabled; } // All merged @@ -71,6 +74,7 @@ impl dav::Extension for All { type ResourceType = ResourceType; type ReportType = ReportType; type ReportTypeName = ReportTypeName; + type Multistatus = Disabled; } #[derive(Debug, PartialEq, Clone)] diff --git a/aero-dav/src/types.rs b/aero-dav/src/types.rs index 92c63e0..61a6fe9 100644 --- a/aero-dav/src/types.rs +++ b/aero-dav/src/types.rs @@ -13,6 +13,7 @@ pub trait Extension: std::fmt::Debug + PartialEq + Clone { type ResourceType: xml::Node; type ReportType: xml::Node; type ReportTypeName: xml::Node; + type Multistatus: xml::Node; } /// 14.1. activelock XML Element @@ -338,6 +339,7 @@ pub enum LockType { pub struct Multistatus { pub responses: Vec>, pub responsedescription: Option, + pub extension: Option, } /// 14.17. 
owner XML Element diff --git a/aero-proto/src/dav/controller.rs b/aero-proto/src/dav/controller.rs index 1b7f7ee..4bae68a 100644 --- a/aero-proto/src/dav/controller.rs +++ b/aero-proto/src/dav/controller.rs @@ -309,6 +309,7 @@ impl Controller { dav::Multistatus:: { responses, responsedescription: None, + extension: None, } } } -- cgit v1.2.3 From 171a762768aabd799a1012d2fb939d869ff53f7b Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Tue, 28 May 2024 13:59:40 +0200 Subject: implement sync multistatus extension --- aero-dav/src/realization.rs | 24 +++++++++++++++++++++++- aero-dav/src/syncdecoder.rs | 9 +++++++++ aero-dav/src/syncencoder.rs | 26 ++++++++++++++++++++++++++ aero-dav/src/synctypes.rs | 5 +++++ 4 files changed, 63 insertions(+), 1 deletion(-) diff --git a/aero-dav/src/realization.rs b/aero-dav/src/realization.rs index 1e6f25e..76170f8 100644 --- a/aero-dav/src/realization.rs +++ b/aero-dav/src/realization.rs @@ -74,7 +74,7 @@ impl dav::Extension for All { type ResourceType = ResourceType; type ReportType = ReportType; type ReportTypeName = ReportTypeName; - type Multistatus = Disabled; + type Multistatus = Multistatus; } #[derive(Debug, PartialEq, Clone)] @@ -236,3 +236,25 @@ impl xml::QWrite for ReportTypeName { } } } + +#[derive(Debug, PartialEq, Clone)] +pub enum Multistatus { + Sync(sync::Multistatus), +} + +impl xml::QWrite for Multistatus { + async fn qwrite( + &self, + xml: &mut xml::Writer, + ) -> Result<(), quick_xml::Error> { + match self { + Self::Sync(s) => s.qwrite(xml).await, + } + } +} + +impl xml::QRead for Multistatus { + async fn qread(xml: &mut xml::Reader) -> Result { + sync::Multistatus::qread(xml).await.map(Self::Sync) + } +} diff --git a/aero-dav/src/syncdecoder.rs b/aero-dav/src/syncdecoder.rs index d81f8d4..be25b79 100644 --- a/aero-dav/src/syncdecoder.rs +++ b/aero-dav/src/syncdecoder.rs @@ -33,6 +33,14 @@ impl QRead for ReportTypeName { } } +impl QRead for Multistatus { + async fn qread(xml: &mut Reader) -> Result { + SyncToken::qread(xml) + .await + .map(|sync_token| Multistatus { sync_token }) + } +} + impl QRead> for SyncCollection { async fn qread(xml: &mut Reader) -> Result { xml.open(DAV_URN, "sync-collection").await?; @@ -80,6 +88,7 @@ impl QRead for SyncTokenRequest { impl QRead for SyncToken { async fn qread(xml: &mut Reader) -> Result { + println!("sync_token {:?}", xml.peek()); xml.open(DAV_URN, "sync-token").await?; let token = xml.tag_string().await?; xml.close().await?; diff --git a/aero-dav/src/syncencoder.rs b/aero-dav/src/syncencoder.rs index 59ad6cc..8badc92 100644 --- a/aero-dav/src/syncencoder.rs +++ b/aero-dav/src/syncencoder.rs @@ -32,6 +32,12 @@ impl QWrite for ReportTypeName { } } +impl QWrite for Multistatus { + async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { + self.sync_token.qwrite(xml).await + } +} + impl QWrite for SyncCollection { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { let start = xml.create_dav_element("sync-collection"); @@ -196,4 +202,24 @@ mod tests { ])) .await; } + + #[tokio::test] + async fn multistatus_ext() { + serialize_deserialize(&dav::Multistatus:: { + responses: vec![dav::Response { + status_or_propstat: dav::StatusOrPropstat::Status( + vec![dav::Href("/".into())], + dav::Status(http::status::StatusCode::OK), + ), + error: None, + location: None, + responsedescription: None, + }], + responsedescription: None, + extension: Some(realization::Multistatus::Sync(Multistatus { + sync_token: SyncToken("http://example.com/ns/sync/1232".into()), + })), + }) + .await; + } 
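+    // The round trip above should yield a multistatus whose last child is the
+    // sync token element, roughly (sketch only, namespaces abbreviated):
+    //
+    //   <D:multistatus xmlns:D="DAV:">
+    //     <D:response>...</D:response>
+    //     <D:sync-token>http://example.com/ns/sync/1232</D:sync-token>
+    //   </D:multistatus>
+    //
+    // This follows the encoder's write order: responses first, then the
+    // optional responsedescription, then the multistatus extension.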
} diff --git a/aero-dav/src/synctypes.rs b/aero-dav/src/synctypes.rs index c127962..cbd86b8 100644 --- a/aero-dav/src/synctypes.rs +++ b/aero-dav/src/synctypes.rs @@ -19,6 +19,11 @@ pub enum ReportTypeName { SyncCollection, } +#[derive(Debug, PartialEq, Clone)] +pub struct Multistatus { + pub sync_token: SyncToken, +} + //@FIXME add SyncToken to Multistatus /// Name: sync-collection -- cgit v1.2.3 From 18f2154151b2cf81e03bdda28fa2ea5d685e33d1 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Tue, 28 May 2024 16:03:25 +0200 Subject: implement propfind sync-token --- aero-collections/src/calendar/mod.rs | 14 ++++- aero-dav/src/syncdecoder.rs | 14 ++--- aero-dav/src/syncencoder.rs | 7 ++- aero-dav/src/synctypes.rs | 2 +- aero-dav/src/versioningdecoder.rs | 2 +- aero-proto/src/dav/resource.rs | 92 ++++++++++++++++++++++--------- aerogramme/tests/behavior.rs | 104 ++++++++++++++++++++++++++++++++++- 7 files changed, 195 insertions(+), 40 deletions(-) diff --git a/aero-collections/src/calendar/mod.rs b/aero-collections/src/calendar/mod.rs index cd05328..414426a 100644 --- a/aero-collections/src/calendar/mod.rs +++ b/aero-collections/src/calendar/mod.rs @@ -56,6 +56,11 @@ impl Calendar { self.internal.read().await.davdag.state().clone() } + /// Access the current token + pub async fn token(&self) -> Result { + self.internal.write().await.current_token().await + } + /// The diff API is a write API as we might need to push a merge node /// to get a new sync token pub async fn diff(&self, sync_token: Token) -> Result<(Token, Vec)> { @@ -174,6 +179,12 @@ impl CalendarInternal { .map(|s| s.clone()) .collect(); + let token = self.current_token().await?; + Ok((token, changes)) + } + + async fn current_token(&mut self) -> Result { + let davstate = self.davdag.state(); let heads = davstate.heads_vec(); let token = match heads.as_slice() { [token] => *token, @@ -184,7 +195,6 @@ impl CalendarInternal { token } }; - - Ok((token, changes)) + Ok(token) } } diff --git a/aero-dav/src/syncdecoder.rs b/aero-dav/src/syncdecoder.rs index be25b79..2a61dea 100644 --- a/aero-dav/src/syncdecoder.rs +++ b/aero-dav/src/syncdecoder.rs @@ -7,10 +7,11 @@ use super::xml::{IRead, QRead, Reader, DAV_URN}; impl QRead for PropertyRequest { async fn qread(xml: &mut Reader) -> Result { - let mut dirty = false; - let mut m_cdr = None; - xml.maybe_read(&mut m_cdr, &mut dirty).await?; - m_cdr.ok_or(ParsingError::Recoverable).map(Self::SyncToken) + if xml.maybe_open(DAV_URN, "sync-token").await?.is_some() { + xml.close().await?; + return Ok(Self::SyncToken); + } + return Err(ParsingError::Recoverable); } } @@ -88,7 +89,6 @@ impl QRead for SyncTokenRequest { impl QRead for SyncToken { async fn qread(xml: &mut Reader) -> Result { - println!("sync_token {:?}", xml.peek()); xml.open(DAV_URN, "sync-token").await?; let token = xml.tag_string().await?; xml.close().await?; @@ -213,9 +213,7 @@ mod tests { #[tokio::test] async fn prop_req() { let expected = dav::PropName::(vec![dav::PropertyRequest::Extension( - realization::PropertyRequest::Sync(PropertyRequest::SyncToken( - SyncTokenRequest::InitialSync, - )), + realization::PropertyRequest::Sync(PropertyRequest::SyncToken), )]); let src = r#""#; let got = deserialize::>(src).await; diff --git a/aero-dav/src/syncencoder.rs b/aero-dav/src/syncencoder.rs index 8badc92..2dd50eb 100644 --- a/aero-dav/src/syncencoder.rs +++ b/aero-dav/src/syncencoder.rs @@ -16,7 +16,10 @@ impl QWrite for Property { impl QWrite for PropertyRequest { async fn qwrite(&self, xml: &mut Writer) -> Result<(), QError> { 
match self { - Self::SyncToken(token) => token.qwrite(xml).await, + Self::SyncToken => { + let start = xml.create_dav_element("sync-token"); + xml.q.write_event_async(Event::Empty(start)).await + } } } } @@ -180,7 +183,7 @@ mod tests { async fn prop_req() { serialize_deserialize(&dav::PropName::(vec![ dav::PropertyRequest::Extension(realization::PropertyRequest::Sync( - PropertyRequest::SyncToken(SyncTokenRequest::InitialSync), + PropertyRequest::SyncToken, )), ])) .await; diff --git a/aero-dav/src/synctypes.rs b/aero-dav/src/synctypes.rs index cbd86b8..2a14221 100644 --- a/aero-dav/src/synctypes.rs +++ b/aero-dav/src/synctypes.rs @@ -6,7 +6,7 @@ use super::versioningtypes as vers; #[derive(Debug, PartialEq, Clone)] pub enum PropertyRequest { - SyncToken(SyncTokenRequest), + SyncToken, } #[derive(Debug, PartialEq, Clone)] diff --git a/aero-dav/src/versioningdecoder.rs b/aero-dav/src/versioningdecoder.rs index c28c0d5..a0a3ddf 100644 --- a/aero-dav/src/versioningdecoder.rs +++ b/aero-dav/src/versioningdecoder.rs @@ -21,7 +21,7 @@ impl QRead for PropertyRequest { impl QRead> for Property { async fn qread(xml: &mut Reader) -> Result { if xml - .maybe_open(DAV_URN, "supported-report-set") + .maybe_open_start(DAV_URN, "supported-report-set") .await? .is_some() { diff --git a/aero-proto/src/dav/resource.rs b/aero-proto/src/dav/resource.rs index 04bae4f..1ae766c 100644 --- a/aero-proto/src/dav/resource.rs +++ b/aero-proto/src/dav/resource.rs @@ -14,7 +14,9 @@ use aero_collections::{ use aero_dav::acltypes as acl; use aero_dav::caltypes as cal; use aero_dav::realization::{self as all, All}; +use aero_dav::synctypes as sync; use aero_dav::types as dav; +use aero_dav::versioningtypes as vers; use super::node::PropertyStream; use crate::dav::node::{Content, DavNode, PutPolicy}; @@ -431,38 +433,78 @@ impl DavNode for CalendarNode { dav::PropertyRequest::Extension(all::PropertyRequest::Cal( cal::PropertyRequest::SupportedCalendarComponentSet, )), + dav::PropertyRequest::Extension(all::PropertyRequest::Sync( + sync::PropertyRequest::SyncToken, + )), + dav::PropertyRequest::Extension(all::PropertyRequest::Vers( + vers::PropertyRequest::SupportedReportSet, + )), ]) } fn properties(&self, _user: &ArcUser, prop: dav::PropName) -> PropertyStream<'static> { let calname = self.calname.to_string(); + let col = self.col.clone(); futures::stream::iter(prop.0) - .map(move |n| { - let prop = match n { - dav::PropertyRequest::DisplayName => { - dav::Property::DisplayName(format!("{} calendar", calname)) - } - dav::PropertyRequest::ResourceType => dav::Property::ResourceType(vec![ - dav::ResourceType::Collection, - dav::ResourceType::Extension(all::ResourceType::Cal( - cal::ResourceType::Calendar, + .then(move |n| { + let calname = calname.clone(); + let col = col.clone(); + + async move { + let prop = match n { + dav::PropertyRequest::DisplayName => { + dav::Property::DisplayName(format!("{} calendar", calname)) + } + dav::PropertyRequest::ResourceType => dav::Property::ResourceType(vec![ + dav::ResourceType::Collection, + dav::ResourceType::Extension(all::ResourceType::Cal( + cal::ResourceType::Calendar, + )), + ]), + //dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("httpd/unix-directory".into())), + //@FIXME seems wrong but seems to be what Thunderbird expects... 
+ dav::PropertyRequest::GetContentType => { + dav::Property::GetContentType("text/calendar".into()) + } + dav::PropertyRequest::Extension(all::PropertyRequest::Cal( + cal::PropertyRequest::SupportedCalendarComponentSet, + )) => dav::Property::Extension(all::Property::Cal( + cal::Property::SupportedCalendarComponentSet(vec![ + cal::CompSupport(cal::Component::VEvent), + cal::CompSupport(cal::Component::VTodo), + cal::CompSupport(cal::Component::VJournal), + ]), )), - ]), - //dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("httpd/unix-directory".into())), - //@FIXME seems wrong but seems to be what Thunderbird expects... - dav::PropertyRequest::GetContentType => { - dav::Property::GetContentType("text/calendar".into()) - } - dav::PropertyRequest::Extension(all::PropertyRequest::Cal( - cal::PropertyRequest::SupportedCalendarComponentSet, - )) => dav::Property::Extension(all::Property::Cal( - cal::Property::SupportedCalendarComponentSet(vec![cal::CompSupport( - cal::Component::VEvent, - )]), - )), - v => return Err(v), - }; - Ok(prop) + dav::PropertyRequest::Extension(all::PropertyRequest::Sync( + sync::PropertyRequest::SyncToken, + )) => match col.token().await { + Ok(token) => dav::Property::Extension(all::Property::Sync( + sync::Property::SyncToken(sync::SyncToken(format!( + "https://aerogramme.0/sync/{}", + token + ))), + )), + _ => return Err(n.clone()), + }, + dav::PropertyRequest::Extension(all::PropertyRequest::Vers( + vers::PropertyRequest::SupportedReportSet, + )) => dav::Property::Extension(all::Property::Vers( + vers::Property::SupportedReportSet(vec![ + vers::SupportedReport(vers::ReportName::Extension( + all::ReportTypeName::Cal(cal::ReportTypeName::Multiget), + )), + vers::SupportedReport(vers::ReportName::Extension( + all::ReportTypeName::Cal(cal::ReportTypeName::Query), + )), + vers::SupportedReport(vers::ReportName::Extension( + all::ReportTypeName::Sync(sync::ReportTypeName::SyncCollection), + )), + ]), + )), + v => return Err(v), + }; + Ok(prop) + } }) .boxed() } diff --git a/aerogramme/tests/behavior.rs b/aerogramme/tests/behavior.rs index 1097fe7..1846c92 100644 --- a/aerogramme/tests/behavior.rs +++ b/aerogramme/tests/behavior.rs @@ -20,6 +20,7 @@ fn main() { rfc4918_webdav_core(); rfc5397_webdav_principal(); rfc4791_webdav_caldav(); + rfc6578_webdav_sync(); println!("✅ SUCCESS 🌟🚀🥳🙏🥹"); } @@ -365,7 +366,9 @@ fn rfc5819_imapext_liststatus() { use aero_dav::acltypes as acl; use aero_dav::caltypes as cal; use aero_dav::realization::{self, All}; +use aero_dav::synctypes as sync; use aero_dav::types as dav; +use aero_dav::versioningtypes as vers; use crate::common::dav_deserialize; @@ -1011,4 +1014,103 @@ fn rfc4791_webdav_caldav() { .expect("test fully run") } -// @TODO SYNC +fn rfc6578_webdav_sync() { + println!("🧪 rfc6578_webdav_sync"); + common::aerogramme_provider_daemon_dev(|_imap, _lmtp, http| { + // propname on a calendar node must return + (2nd element is theoretically from versioning) + let propfind_req = r#""#; + let body = http.request(reqwest::Method::from_bytes(b"PROPFIND")?, "http://localhost:8087/alice/calendar/Personal/").body(propfind_req).send()?.text()?; + let multistatus = dav_deserialize::>(&body); + let root_propstats = multistatus.responses.iter() + .find_map(|v| match &v.status_or_propstat { + dav::StatusOrPropstat::PropStat(dav::Href(p), x) if p.as_str() == "/alice/calendar/Personal/" => Some(x), + _ => None, + }) + .expect("propstats for target must exist"); + let root_success = 
root_propstats.iter().find(|p| p.status.0.as_u16() == 200).expect("some propstats for root must be 200"); + assert!(root_success.prop.0.iter().find(|p| matches!(p, dav::AnyProperty::Request(dav::PropertyRequest::Extension( + realization::PropertyRequest::Sync(sync::PropertyRequest::SyncToken) + )))).is_some()); + assert!(root_success.prop.0.iter().find(|p| matches!(p, dav::AnyProperty::Request(dav::PropertyRequest::Extension( + realization::PropertyRequest::Vers(vers::PropertyRequest::SupportedReportSet) + )))).is_some()); + + // synctoken and supported report set must contains a meaningful value when queried + let propfind_req = r#""#; + let body = http.request(reqwest::Method::from_bytes(b"PROPFIND")?, "http://localhost:8087/alice/calendar/Personal/").body(propfind_req).send()?.text()?; + let multistatus = dav_deserialize::>(&body); + let root_propstats = multistatus.responses.iter() + .find_map(|v| match &v.status_or_propstat { + dav::StatusOrPropstat::PropStat(dav::Href(p), x) if p.as_str() == "/alice/calendar/Personal/" => Some(x), + _ => None, + }) + .expect("propstats for target must exist"); + + let root_success = root_propstats.iter().find(|p| p.status.0.as_u16() == 200).expect("some propstats for root must be 200"); + + let init_sync_token = root_success.prop.0.iter().find_map(|p| match p { + dav::AnyProperty::Value(dav::Property::Extension(realization::Property::Sync(sync::Property::SyncToken(st)))) => Some(st), + _ => None, + }).expect("sync_token exists"); + + let supported = root_success.prop.0.iter().find_map(|p| match p { + dav::AnyProperty::Value(dav::Property::Extension(realization::Property::Vers(vers::Property::SupportedReportSet(s)))) => Some(s), + _ => None + }).expect("supported report set exists"); + assert_eq!(&supported[..], &[ + vers::SupportedReport(vers::ReportName::Extension(realization::ReportTypeName::Cal(cal::ReportTypeName::Multiget))), + vers::SupportedReport(vers::ReportName::Extension(realization::ReportTypeName::Cal(cal::ReportTypeName::Query))), + vers::SupportedReport(vers::ReportName::Extension(realization::ReportTypeName::Sync(sync::ReportTypeName::SyncCollection))), + ]); + + + // synctoken must change if we add a file + let resp = http + .put("http://localhost:8087/alice/calendar/Personal/rfc1.ics") + .header("If-None-Match", "*") + .body(ICAL_RFC1) + .send()?; + assert_eq!(resp.status(), 201); + + let body = http.request(reqwest::Method::from_bytes(b"PROPFIND")?, "http://localhost:8087/alice/calendar/Personal/").body(propfind_req).send()?.text()?; + let multistatus = dav_deserialize::>(&body); + + let root_propstats = multistatus.responses.iter() + .find_map(|v| match &v.status_or_propstat { + dav::StatusOrPropstat::PropStat(dav::Href(p), x) if p.as_str() == "/alice/calendar/Personal/" => Some(x), + _ => None, + }) + .expect("propstats for target must exist"); + let root_success = root_propstats.iter().find(|p| p.status.0.as_u16() == 200).expect("some propstats for root must be 200"); + let rfc1_sync_token = root_success.prop.0.iter().find_map(|p| match p { + dav::AnyProperty::Value(dav::Property::Extension(realization::Property::Sync(sync::Property::SyncToken(st)))) => Some(st), + _ => None, + }).expect("sync_token exists"); + assert!(init_sync_token != rfc1_sync_token); + + + // synctoken must change if we delete a file + let resp = http.delete("http://localhost:8087/alice/calendar/Personal/rfc1.ics").send()?; + assert_eq!(resp.status(), 204); + + let body = http.request(reqwest::Method::from_bytes(b"PROPFIND")?, 
"http://localhost:8087/alice/calendar/Personal/").body(propfind_req).send()?.text()?; + let multistatus = dav_deserialize::>(&body); + + let root_propstats = multistatus.responses.iter() + .find_map(|v| match &v.status_or_propstat { + dav::StatusOrPropstat::PropStat(dav::Href(p), x) if p.as_str() == "/alice/calendar/Personal/" => Some(x), + _ => None, + }) + .expect("propstats for target must exist"); + let root_success = root_propstats.iter().find(|p| p.status.0.as_u16() == 200).expect("some propstats for root must be 200"); + let del_sync_token = root_success.prop.0.iter().find_map(|p| match p { + dav::AnyProperty::Value(dav::Property::Extension(realization::Property::Sync(sync::Property::SyncToken(st)))) => Some(st), + _ => None, + }).expect("sync_token exists"); + assert!(init_sync_token != del_sync_token); + assert!(rfc1_sync_token != del_sync_token); + + Ok(()) + }) + .expect("test fully run") +} -- cgit v1.2.3 From a2f5b451bd32780d60be69c6412cb351a54b765b Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Tue, 28 May 2024 17:21:30 +0200 Subject: initial implementation of sync-collection --- aero-collections/src/davdag.rs | 5 ++- aero-proto/src/dav/controller.rs | 56 ++++++++++++++++++++--- aero-proto/src/dav/node.rs | 12 +++-- aero-proto/src/dav/resource.rs | 96 ++++++++++++++++++++++++++++++++++++++-- 4 files changed, 154 insertions(+), 15 deletions(-) diff --git a/aero-collections/src/davdag.rs b/aero-collections/src/davdag.rs index 36a9016..74e745f 100644 --- a/aero-collections/src/davdag.rs +++ b/aero-collections/src/davdag.rs @@ -42,7 +42,7 @@ pub struct DavDag { #[derive(Clone, Debug)] pub enum SyncChange { - Ok(FileName), + Ok((FileName, BlobId)), NotFound(FileName), } @@ -150,7 +150,8 @@ impl DavDag { // Record the change in the ephemeral synchronization map if let Some(sync_token) = sync_token { - self.change.insert(sync_token, SyncChange::Ok(filename)); + self.change + .insert(sync_token, SyncChange::Ok((filename, blob_id))); } } diff --git a/aero-proto/src/dav/controller.rs b/aero-proto/src/dav/controller.rs index 4bae68a..7e1f416 100644 --- a/aero-proto/src/dav/controller.rs +++ b/aero-proto/src/dav/controller.rs @@ -7,9 +7,10 @@ use hyper::body::Frame; use hyper::body::Incoming; use hyper::{body::Bytes, Request, Response}; -use aero_collections::user::User; +use aero_collections::{davdag::Token, user::User}; use aero_dav::caltypes as cal; use aero_dav::realization::{self, All}; +use aero_dav::synctypes as sync; use aero_dav::types as dav; use aero_dav::versioningtypes as vers; use aero_ical::query::is_component_match; @@ -17,7 +18,7 @@ use aero_ical::query::is_component_match; use crate::dav::codec; use crate::dav::codec::{depth, deserialize, serialize, text_body}; use crate::dav::node::DavNode; -use crate::dav::resource::RootNode; +use crate::dav::resource::{RootNode, BASE_TOKEN_URI}; pub(super) type ArcUser = std::sync::Arc; pub(super) type HttpResponse = Response>; @@ -109,6 +110,7 @@ impl Controller { // Internal representation that will handle processed request let (mut ok_node, mut not_found) = (Vec::new(), Vec::new()); let calprop: Option>; + let extension: Option; // Extracting request information match cal_report { @@ -136,15 +138,54 @@ impl Controller { }; } calprop = m.selector; + extension = None; } vers::Report::Extension(realization::ReportType::Cal(cal::ReportType::Query(q))) => { calprop = q.selector; + extension = None; ok_node = apply_filter(self.node.children(&self.user).await, &q.filter) .try_collect() .await?; } - 
vers::Report::Extension(realization::ReportType::Sync(_sync_col)) => { - todo!() + vers::Report::Extension(realization::ReportType::Sync(sync_col)) => { + calprop = Some(cal::CalendarSelector::Prop(sync_col.prop)); + + if sync_col.limit.is_some() { + tracing::warn!("limit is not supported, ignoring"); + } + if matches!(sync_col.sync_level, sync::SyncLevel::Infinite) { + tracing::debug!("aerogramme calendar collections are not nested"); + } + + let token = match sync_col.sync_token { + sync::SyncTokenRequest::InitialSync => None, + sync::SyncTokenRequest::IncrementalSync(token_raw) => { + // parse token + if token_raw.len() != BASE_TOKEN_URI.len() + 48 { + anyhow::bail!("invalid token length") + } + let token = token_raw[BASE_TOKEN_URI.len()..] + .parse() + .or(Err(anyhow::anyhow!("can't parse token")))?; + Some(token) + } + }; + // do the diff + let new_token: Token; + (new_token, ok_node, not_found) = match self.node.diff(token).await { + Ok(t) => t, + Err(e) => match e.kind() { + std::io::ErrorKind::NotFound => return Ok(Response::builder() + .status(410) + .body(text_body("Diff failed, token might be expired"))?), + _ => return Ok(Response::builder() + .status(500) + .body(text_body("Server error, maybe this operation is not supported on this collection"))?), + }, + }; + extension = Some(realization::Multistatus::Sync(sync::Multistatus { + sync_token: sync::SyncToken(new_token.to_string()), + })); } _ => { return Ok(Response::builder() @@ -162,7 +203,7 @@ impl Controller { serialize( status, - Self::multistatus(&self.user, ok_node, not_found, props).await, + Self::multistatus(&self.user, ok_node, not_found, props, extension).await, ) } @@ -208,7 +249,7 @@ impl Controller { let not_found = vec![]; serialize( status, - Self::multistatus(&self.user, nodes, not_found, propname).await, + Self::multistatus(&self.user, nodes, not_found, propname, None).await, ) } @@ -277,6 +318,7 @@ impl Controller { nodes: Vec>, not_found: Vec, props: Option>, + extension: Option, ) -> dav::Multistatus { // Collect properties on existing objects let mut responses: Vec> = match props { @@ -309,7 +351,7 @@ impl Controller { dav::Multistatus:: { responses, responsedescription: None, - extension: None, + extension, } } } diff --git a/aero-proto/src/dav/node.rs b/aero-proto/src/dav/node.rs index 877342a..0a83f8c 100644 --- a/aero-proto/src/dav/node.rs +++ b/aero-proto/src/dav/node.rs @@ -3,7 +3,7 @@ use futures::future::{BoxFuture, FutureExt}; use futures::stream::{BoxStream, StreamExt}; use hyper::body::Bytes; -use aero_collections::davdag::Etag; +use aero_collections::davdag::{Etag, Token}; use aero_dav::realization::All; use aero_dav::types as dav; @@ -55,8 +55,14 @@ pub(crate) trait DavNode: Send { fn content<'a>(&self) -> Content<'a>; /// Delete fn delete(&self) -> BoxFuture>; - - //@FIXME maybe add etag, maybe add a way to set content + /// Sync + fn diff<'a>( + &self, + sync_token: Option, + ) -> BoxFuture< + 'a, + std::result::Result<(Token, Vec>, Vec), std::io::Error>, + >; /// Utility function to get a propname response from a node fn response_propname(&self, user: &ArcUser) -> dav::Response { diff --git a/aero-proto/src/dav/resource.rs b/aero-proto/src/dav/resource.rs index 1ae766c..297a1c1 100644 --- a/aero-proto/src/dav/resource.rs +++ b/aero-proto/src/dav/resource.rs @@ -8,7 +8,7 @@ use futures::{future::BoxFuture, future::FutureExt}; use aero_collections::{ calendar::Calendar, - davdag::{BlobId, Etag}, + davdag::{BlobId, Etag, SyncChange, Token}, user::User, }; use aero_dav::acltypes as acl; @@ 
-21,6 +21,8 @@ use aero_dav::versioningtypes as vers; use super::node::PropertyStream; use crate::dav::node::{Content, DavNode, PutPolicy}; +pub const BASE_TOKEN_URI: &str = "https://aerogramme.0/sync/"; + #[derive(Clone)] pub(crate) struct RootNode {} impl DavNode for RootNode { @@ -117,6 +119,16 @@ impl DavNode for RootNode { fn delete(&self) -> BoxFuture> { async { Err(std::io::Error::from(std::io::ErrorKind::PermissionDenied)) }.boxed() } + + fn diff<'a>( + &self, + _sync_token: Option, + ) -> BoxFuture< + 'a, + std::result::Result<(Token, Vec>, Vec), std::io::Error>, + > { + async { Err(std::io::Error::from(std::io::ErrorKind::Unsupported)) }.boxed() + } } #[derive(Clone)] @@ -229,6 +241,15 @@ impl DavNode for HomeNode { fn delete(&self) -> BoxFuture> { async { Err(std::io::Error::from(std::io::ErrorKind::PermissionDenied)) }.boxed() } + fn diff<'a>( + &self, + _sync_token: Option, + ) -> BoxFuture< + 'a, + std::result::Result<(Token, Vec>, Vec), std::io::Error>, + > { + async { Err(std::io::Error::from(std::io::ErrorKind::Unsupported)) }.boxed() + } } #[derive(Clone)] @@ -353,6 +374,15 @@ impl DavNode for CalendarListNode { fn delete(&self) -> BoxFuture> { async { Err(std::io::Error::from(std::io::ErrorKind::PermissionDenied)) }.boxed() } + fn diff<'a>( + &self, + _sync_token: Option, + ) -> BoxFuture< + 'a, + std::result::Result<(Token, Vec>, Vec), std::io::Error>, + > { + async { Err(std::io::Error::from(std::io::ErrorKind::Unsupported)) }.boxed() + } } #[derive(Clone)] @@ -480,8 +510,8 @@ impl DavNode for CalendarNode { )) => match col.token().await { Ok(token) => dav::Property::Extension(all::Property::Sync( sync::Property::SyncToken(sync::SyncToken(format!( - "https://aerogramme.0/sync/{}", - token + "{}{}", + BASE_TOKEN_URI, token ))), )), _ => return Err(n.clone()), @@ -535,6 +565,48 @@ impl DavNode for CalendarNode { fn delete(&self) -> BoxFuture> { async { Err(std::io::Error::from(std::io::ErrorKind::PermissionDenied)) }.boxed() } + fn diff<'a>( + &self, + sync_token: Option, + ) -> BoxFuture< + 'a, + std::result::Result<(Token, Vec>, Vec), std::io::Error>, + > { + let col = self.col.clone(); + let calname = self.calname.clone(); + async move { + let sync_token = sync_token.unwrap(); + let (new_token, listed_changes) = match col.diff(sync_token).await { + Ok(v) => v, + Err(e) => { + tracing::info!(err=?e, "token resolution failed, maybe a forgotten token"); + return Err(std::io::Error::from(std::io::ErrorKind::NotFound)); + } + }; + + let mut ok_nodes: Vec> = vec![]; + let mut rm_nodes: Vec = vec![]; + for change in listed_changes.into_iter() { + match change { + SyncChange::Ok((filename, blob_id)) => { + let child = Box::new(EventNode { + col: col.clone(), + calname: calname.clone(), + filename, + blob_id, + }); + ok_nodes.push(child); + } + SyncChange::NotFound(filename) => { + rm_nodes.push(dav::Href(filename)); + } + } + } + + Ok((new_token, ok_nodes, rm_nodes)) + } + .boxed() + } } #[derive(Clone)] @@ -757,6 +829,15 @@ impl DavNode for EventNode { } .boxed() } + fn diff<'a>( + &self, + _sync_token: Option, + ) -> BoxFuture< + 'a, + std::result::Result<(Token, Vec>, Vec), std::io::Error>, + > { + async { Err(std::io::Error::from(std::io::ErrorKind::Unsupported)) }.boxed() + } } #[derive(Clone)] @@ -849,4 +930,13 @@ impl DavNode for CreateEventNode { // Nothing to delete async { Ok(()) }.boxed() } + fn diff<'a>( + &self, + _sync_token: Option, + ) -> BoxFuture< + 'a, + std::result::Result<(Token, Vec>, Vec), std::io::Error>, + > { + async { 
Err(std::io::Error::from(std::io::ErrorKind::Unsupported)) }.boxed() + } } -- cgit v1.2.3 From f9fab60e5ee77c0cf57744e39b5685902189a38b Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Wed, 29 May 2024 08:47:56 +0200 Subject: test report sync-collection --- aero-collections/src/calendar/mod.rs | 4 + aero-dav/src/syncencoder.rs | 1 - aero-proto/src/dav/controller.rs | 2 +- aero-proto/src/dav/resource.rs | 36 ++++++- aerogramme/tests/behavior.rs | 188 +++++++++++++++++++++++++++++++++-- aerogramme/tests/common/mod.rs | 19 +++- 6 files changed, 240 insertions(+), 10 deletions(-) diff --git a/aero-collections/src/calendar/mod.rs b/aero-collections/src/calendar/mod.rs index 414426a..ac07842 100644 --- a/aero-collections/src/calendar/mod.rs +++ b/aero-collections/src/calendar/mod.rs @@ -177,6 +177,10 @@ impl CalendarInternal { .iter() .filter_map(|t: &Token| davstate.change.get(t)) .map(|s| s.clone()) + .filter(|s| match s { + SyncChange::Ok((filename, _)) => davstate.idx_by_filename.get(filename).is_some(), + SyncChange::NotFound(filename) => davstate.idx_by_filename.get(filename).is_none(), + }) .collect(); let token = self.current_token().await?; diff --git a/aero-dav/src/syncencoder.rs b/aero-dav/src/syncencoder.rs index 2dd50eb..55f7ad6 100644 --- a/aero-dav/src/syncencoder.rs +++ b/aero-dav/src/syncencoder.rs @@ -128,7 +128,6 @@ mod tests { src.qwrite(&mut writer).await.expect("xml serialization"); tokio_buffer.flush().await.expect("tokio buffer flush"); let got = std::str::from_utf8(buffer.as_slice()).unwrap(); - println!("{:?}", got); // deserialize let mut rdr = Reader::new(quick_xml::NsReader::from_reader(got.as_bytes())) diff --git a/aero-proto/src/dav/controller.rs b/aero-proto/src/dav/controller.rs index 7e1f416..76dd15d 100644 --- a/aero-proto/src/dav/controller.rs +++ b/aero-proto/src/dav/controller.rs @@ -184,7 +184,7 @@ impl Controller { }, }; extension = Some(realization::Multistatus::Sync(sync::Multistatus { - sync_token: sync::SyncToken(new_token.to_string()), + sync_token: sync::SyncToken(format!("{}{}", BASE_TOKEN_URI, new_token)), })); } _ => { diff --git a/aero-proto/src/dav/resource.rs b/aero-proto/src/dav/resource.rs index 297a1c1..b6c0ed5 100644 --- a/aero-proto/src/dav/resource.rs +++ b/aero-proto/src/dav/resource.rs @@ -21,6 +21,16 @@ use aero_dav::versioningtypes as vers; use super::node::PropertyStream; use crate::dav::node::{Content, DavNode, PutPolicy}; +/// Why "https://aerogramme.0"? +/// Because tokens must be valid URI. +/// And numeric TLD are ~mostly valid in URI (check the .42 TLD experience) +/// and at the same time, they are not used sold by the ICANN and there is no plan to use them. +/// So I am sure that the URL remains invalid, avoiding leaking requests to an hardcoded URL in the +/// future. +/// The best option would be to make it configurable ofc, so someone can put a domain name +/// that they control, it would probably improve compatibility (maybe some WebDAV spec tells us +/// how to handle/resolve this URI but I am not aware of that...). But that's not the plan for +/// now. So here we are: https://aerogramme.0. 
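+/// A full sync token is this base URI followed by a 48-character collection
+/// token (the REPORT handler rejects any other length), e.g. the value used
+/// by the behavior tests:
+/// https://aerogramme.0/sync/000000000000000000000000000000000000000000000000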
pub const BASE_TOKEN_URI: &str = "https://aerogramme.0/sync/"; #[derive(Clone)] @@ -575,7 +585,31 @@ impl DavNode for CalendarNode { let col = self.col.clone(); let calname = self.calname.clone(); async move { - let sync_token = sync_token.unwrap(); + let sync_token = match sync_token { + Some(v) => v, + None => { + let token = col + .token() + .await + .or(Err(std::io::Error::from(std::io::ErrorKind::Interrupted)))?; + let ok_nodes = col + .dag() + .await + .idx_by_filename + .iter() + .map(|(filename, blob_id)| { + Box::new(EventNode { + col: col.clone(), + calname: calname.clone(), + filename: filename.to_string(), + blob_id: *blob_id, + }) as Box + }) + .collect(); + + return Ok((token, ok_nodes, vec![])); + } + }; let (new_token, listed_changes) = match col.diff(sync_token).await { Ok(v) => v, Err(e) => { diff --git a/aerogramme/tests/behavior.rs b/aerogramme/tests/behavior.rs index 1846c92..d7fb6e9 100644 --- a/aerogramme/tests/behavior.rs +++ b/aerogramme/tests/behavior.rs @@ -370,7 +370,7 @@ use aero_dav::synctypes as sync; use aero_dav::types as dav; use aero_dav::versioningtypes as vers; -use crate::common::dav_deserialize; +use crate::common::{dav_deserialize, dav_serialize}; fn rfc4918_webdav_core() { println!("🧪 rfc4918_webdav_core"); @@ -435,6 +435,7 @@ fn rfc4918_webdav_core() { assert!(root_success.prop.0.iter().find(|p| matches!(p, dav::AnyProperty::Value(dav::Property::GetContentType(_)))).is_none()); assert!(root_not_found.prop.0.iter().find(|p| matches!(p, dav::AnyProperty::Request(dav::PropertyRequest::GetContentLength))).is_some()); + // -- HIERARCHY EXPLORATION WITH THE DEPTH: X HEADER FIELD -- // depth 1 / -> /alice/ let body = http.request(reqwest::Method::from_bytes(b"PROPFIND")?, "http://localhost:8087").header("Depth", "1").send()?.text()?; let multistatus = dav_deserialize::>(&body); @@ -470,7 +471,7 @@ fn rfc4918_webdav_core() { let multistatus = dav_deserialize::>(&body); assert_eq!(multistatus.responses.len(), 1); - // --- PUT --- + // --- PUT (add objets) --- // first object let resp = http.put("http://localhost:8087/alice/calendar/Personal/rfc2.ics").header("If-None-Match", "*").body(ICAL_RFC2).send()?; let obj1_etag = resp.headers().get("etag").expect("etag must be set"); @@ -496,14 +497,14 @@ fn rfc4918_webdav_core() { let resp = http.put("http://localhost:8087/alice/calendar/Personal/rfc2.ics").header("If-Match", obj1_etag).body(ICAL_RFC1).send()?; assert_eq!(resp.status(), 201); - // --- GET --- + // --- GET (fetch objects) --- let body = http.get("http://localhost:8087/alice/calendar/Personal/rfc2.ics").send()?.text()?; assert_eq!(body.as_bytes(), ICAL_RFC1); let body = http.get("http://localhost:8087/alice/calendar/Personal/rfc3.ics").send()?.text()?; assert_eq!(body.as_bytes(), ICAL_RFC3); - // --- DELETE --- + // --- DELETE (delete objects) --- // delete 1st object let resp = http.delete("http://localhost:8087/alice/calendar/Personal/rfc2.ics").send()?; assert_eq!(resp.status(), 204); @@ -528,7 +529,7 @@ fn rfc4918_webdav_core() { fn rfc5397_webdav_principal() { println!("🧪 rfc5397_webdav_principal"); common::aerogramme_provider_daemon_dev(|_imap, _lmtp, http| { - // Find principal + // -- AUTODISCOVERY: FIND "PRINCIPAL" AS DEFINED IN WEBDAV ACL (~USER'S HOME) -- let propfind_req = r#""#; let body = http.request(reqwest::Method::from_bytes(b"PROPFIND")?, "http://localhost:8087").body(propfind_req).send()?.text()?; let multistatus = dav_deserialize::>(&body); @@ -1017,7 +1018,8 @@ fn rfc4791_webdav_caldav() { fn rfc6578_webdav_sync() { println!("🧪 
rfc6578_webdav_sync"); common::aerogramme_provider_daemon_dev(|_imap, _lmtp, http| { - // propname on a calendar node must return + (2nd element is theoretically from versioning) + // -- PROPFIND -- + // propname must return sync-token & supported-report-set (from webdav versioning) let propfind_req = r#""#; let body = http.request(reqwest::Method::from_bytes(b"PROPFIND")?, "http://localhost:8087/alice/calendar/Personal/").body(propfind_req).send()?.text()?; let multistatus = dav_deserialize::>(&body); @@ -1110,6 +1112,180 @@ fn rfc6578_webdav_sync() { assert!(init_sync_token != del_sync_token); assert!(rfc1_sync_token != del_sync_token); + // -- TEST SYNC CUSTOM REPORT: SYNC-COLLECTION -- + // 3.8. Example: Initial DAV:sync-collection Report + // Part 1: check the empty case + let sync_query = r#" + + + 1 + + + + + "#; + let resp = http + .request( + reqwest::Method::from_bytes(b"REPORT")?, + "http://localhost:8087/alice/calendar/Personal/", + ) + .body(sync_query) + .send()?; + assert_eq!(resp.status(), 207); + let multistatus = dav_deserialize::>(&resp.text()?); + assert_eq!(multistatus.responses.len(), 0); + let empty_token = match &multistatus.extension { + Some(realization::Multistatus::Sync(sync::Multistatus { sync_token: sync::SyncToken(x) } )) => x, + _ => anyhow::bail!("wrong content"), + }; + + // Part 2: check with one file + let resp = http + .put("http://localhost:8087/alice/calendar/Personal/rfc1.ics") + .header("If-None-Match", "*") + .body(ICAL_RFC1) + .send()?; + assert_eq!(resp.status(), 201); + + let resp = http + .request( + reqwest::Method::from_bytes(b"REPORT")?, + "http://localhost:8087/alice/calendar/Personal/", + ) + .body(sync_query) + .send()?; + assert_eq!(resp.status(), 207); + let multistatus = dav_deserialize::>(&resp.text()?); + assert_eq!(multistatus.responses.len(), 1); + let initial_one_file_token = match &multistatus.extension { + Some(realization::Multistatus::Sync(sync::Multistatus { sync_token: sync::SyncToken(x) } )) => x, + _ => anyhow::bail!("wrong content"), + }; + assert!(empty_token != initial_one_file_token); + + // 3.9. 
Example: DAV:sync-collection Report with Token + // Part 1: nothing changed, response must be empty + let sync_query = |token: &str| vers::Report::::Extension(realization::ReportType::Sync(sync::SyncCollection { + sync_token: sync::SyncTokenRequest::IncrementalSync(token.into()), + sync_level: sync::SyncLevel::One, + limit: None, + prop: dav::PropName(vec![ + dav::PropertyRequest::GetEtag, + ]), + })); + let resp = http + .request( + reqwest::Method::from_bytes(b"REPORT")?, + "http://localhost:8087/alice/calendar/Personal/", + ) + .body(dav_serialize(&sync_query(initial_one_file_token))) + .send()?; + assert_eq!(resp.status(), 207); + let multistatus = dav_deserialize::>(&resp.text()?); + assert_eq!(multistatus.responses.len(), 0); + let no_change = match &multistatus.extension { + Some(realization::Multistatus::Sync(sync::Multistatus { sync_token: sync::SyncToken(x) } )) => x, + _ => anyhow::bail!("wrong content"), + }; + assert_eq!(initial_one_file_token, no_change); + + // Part 2: add a new node (rfc2) + remove a node (rfc1) + // add rfc2 + let resp = http + .put("http://localhost:8087/alice/calendar/Personal/rfc2.ics") + .header("If-None-Match", "*") + .body(ICAL_RFC2) + .send()?; + assert_eq!(resp.status(), 201); + + // delete rfc1 + let resp = http.delete("http://localhost:8087/alice/calendar/Personal/rfc1.ics").send()?; + assert_eq!(resp.status(), 204); + + // call REPORT + let resp = http + .request( + reqwest::Method::from_bytes(b"REPORT")?, + "http://localhost:8087/alice/calendar/Personal/", + ) + .body(dav_serialize(&sync_query(initial_one_file_token))) + .send()?; + assert_eq!(resp.status(), 207); + let multistatus = dav_deserialize::>(&resp.text()?); + assert_eq!(multistatus.responses.len(), 2); + let token_addrm = match &multistatus.extension { + Some(realization::Multistatus::Sync(sync::Multistatus { sync_token: sync::SyncToken(x) } )) => x, + _ => anyhow::bail!("wrong content"), + }; + assert!(initial_one_file_token != token_addrm); + + // Part 3: remove a node (rfc2) and add it again with new content + // delete rfc2 + let resp = http.delete("http://localhost:8087/alice/calendar/Personal/rfc2.ics").send()?; + assert_eq!(resp.status(), 204); + + // add rfc2 with ICAL_RFC3 content + let resp = http + .put("http://localhost:8087/alice/calendar/Personal/rfc2.ics") + .header("If-None-Match", "*") + .body(ICAL_RFC3) + .send()?; + let rfc2_etag = resp.headers().get("etag").expect("etag must be set"); + assert_eq!(resp.status(), 201); + + // call REPORT + let resp = http + .request( + reqwest::Method::from_bytes(b"REPORT")?, + "http://localhost:8087/alice/calendar/Personal/", + ) + .body(dav_serialize(&sync_query(token_addrm))) + .send()?; + assert_eq!(resp.status(), 207); + let multistatus = dav_deserialize::>(&resp.text()?); + assert_eq!(multistatus.responses.len(), 1); + let token_addrm_same = match &multistatus.extension { + Some(realization::Multistatus::Sync(sync::Multistatus { sync_token: sync::SyncToken(x) } )) => x, + _ => anyhow::bail!("wrong content"), + }; + assert!(token_addrm_same != token_addrm); + + // Part 4: overwrite an event (rfc1) with new content + let resp = http + .put("http://localhost:8087/alice/calendar/Personal/rfc1.ics") + .header("If-Match", rfc2_etag) + .body(ICAL_RFC4) + .send()?; + assert_eq!(resp.status(), 201); + + // call REPORT + let resp = http + .request( + reqwest::Method::from_bytes(b"REPORT")?, + "http://localhost:8087/alice/calendar/Personal/", + ) + .body(dav_serialize(&sync_query(token_addrm_same))) + .send()?; + 
assert_eq!(resp.status(), 207); + let multistatus = dav_deserialize::>(&resp.text()?); + assert_eq!(multistatus.responses.len(), 1); + let token_addrm_same = match &multistatus.extension { + Some(realization::Multistatus::Sync(sync::Multistatus { sync_token: sync::SyncToken(x) } )) => x, + _ => anyhow::bail!("wrong content"), + }; + assert!(token_addrm_same != token_addrm); + + // Unknown token must return 410 GONE. + // Token can be forgotten as we garbage collect the DAG. + let resp = http + .request( + reqwest::Method::from_bytes(b"REPORT")?, + "http://localhost:8087/alice/calendar/Personal/", + ) + .body(dav_serialize(&sync_query("https://aerogramme.0/sync/000000000000000000000000000000000000000000000000"))) + .send()?; + assert_eq!(resp.status(), 410); + Ok(()) }) .expect("test fully run") diff --git a/aerogramme/tests/common/mod.rs b/aerogramme/tests/common/mod.rs index 12f2764..bc65305 100644 --- a/aerogramme/tests/common/mod.rs +++ b/aerogramme/tests/common/mod.rs @@ -108,7 +108,8 @@ pub fn read_first_u32(inp: &str) -> Result { .parse::()?) } -use aero_dav::xml::{Node, Reader}; +use aero_dav::xml::{Node, Reader, Writer}; +use tokio::io::AsyncWriteExt; pub fn dav_deserialize>(src: &str) -> T { futures::executor::block_on(async { let mut rdr = Reader::new(quick_xml::NsReader::from_reader(src.as_bytes())) @@ -117,3 +118,19 @@ pub fn dav_deserialize>(src: &str) -> T { rdr.find().await.expect("parse XML") }) } +pub fn dav_serialize>(src: &T) -> String { + futures::executor::block_on(async { + let mut buffer = Vec::new(); + let mut tokio_buffer = tokio::io::BufWriter::new(&mut buffer); + let q = quick_xml::writer::Writer::new_with_indent(&mut tokio_buffer, b' ', 4); + let ns_to_apply = vec![ + ("xmlns:D".into(), "DAV:".into()), + ("xmlns:C".into(), "urn:ietf:params:xml:ns:caldav".into()), + ]; + let mut writer = Writer { q, ns_to_apply }; + + src.qwrite(&mut writer).await.expect("xml serialization"); + tokio_buffer.flush().await.expect("tokio buffer flush"); + std::str::from_utf8(buffer.as_slice()).unwrap().into() + }) +} -- cgit v1.2.3 From 3a8b45a0b1d96fb404267956c3cb723ad7d99339 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Wed, 29 May 2024 08:49:56 +0200 Subject: re-enable imap behavior tests --- aerogramme/tests/behavior.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/aerogramme/tests/behavior.rs b/aerogramme/tests/behavior.rs index d7fb6e9..f8d609a 100644 --- a/aerogramme/tests/behavior.rs +++ b/aerogramme/tests/behavior.rs @@ -6,7 +6,7 @@ use crate::common::fragments::*; fn main() { // IMAP - /*rfc3501_imap4rev1_base(); + rfc3501_imap4rev1_base(); rfc6851_imapext_move(); rfc4551_imapext_condstore(); rfc2177_imapext_idle(); @@ -14,7 +14,7 @@ fn main() { rfc3691_imapext_unselect(); rfc7888_imapext_literal(); rfc4315_imapext_uidplus(); - rfc5819_imapext_liststatus();*/ + rfc5819_imapext_liststatus(); // WebDAV rfc4918_webdav_core(); -- cgit v1.2.3 From 06a24bb5598226edcedaaa1e3ba122ef6a40a793 Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Wed, 29 May 2024 09:57:34 +0200 Subject: fix DAV header for iOS --- aero-proto/src/dav/controller.rs | 14 +++++++++----- aero-proto/src/dav/mod.rs | 9 ++++++--- aero-proto/src/dav/node.rs | 4 +++- aero-proto/src/dav/resource.rs | 23 +++++++++++++++++++++++ 4 files changed, 41 insertions(+), 9 deletions(-) diff --git a/aero-proto/src/dav/controller.rs b/aero-proto/src/dav/controller.rs index 76dd15d..8c53c6b 100644 --- a/aero-proto/src/dav/controller.rs +++ b/aero-proto/src/dav/controller.rs @@ -61,18 +61,19 @@ 
impl Controller { } }; + let dav_hdrs = node.dav_header(); let ctrl = Self { node, user, req }; match method.as_str() { "OPTIONS" => Ok(Response::builder() .status(200) - .header("DAV", "1") + .header("DAV", dav_hdrs) .header("Allow", "HEAD,GET,PUT,OPTIONS,DELETE,PROPFIND,PROPPATCH,MKCOL,COPY,MOVE,LOCK,UNLOCK,MKCALENDAR,REPORT") .body(codec::text_body(""))?), "HEAD" => { - tracing::warn!("HEAD not correctly implemented"); + tracing::warn!("HEAD might not correctly implemented: should return ETags & co"); Ok(Response::builder() - .status(404) + .status(200) .body(codec::text_body(""))?) }, "GET" => ctrl.get().await, @@ -348,11 +349,14 @@ impl Controller { } // Build response - dav::Multistatus:: { + let multistatus = dav::Multistatus:: { responses, responsedescription: None, extension, - } + }; + + tracing::debug!(multistatus=?multistatus, "multistatus response"); + multistatus } } diff --git a/aero-proto/src/dav/mod.rs b/aero-proto/src/dav/mod.rs index 43de3a5..a3dd58d 100644 --- a/aero-proto/src/dav/mod.rs +++ b/aero-proto/src/dav/mod.rs @@ -98,7 +98,7 @@ impl Server { let conn = tokio::spawn(async move { //@FIXME should create a generic "public web" server on which "routers" could be //abitrarily bound - //@FIXME replace with a handler supporting http2 and TLS + //@FIXME replace with a handler supporting http2 match http::Builder::new() .serve_connection( @@ -106,8 +106,9 @@ impl Server { service_fn(|req: Request| { let login = login.clone(); tracing::info!("{:?} {:?}", req.method(), req.uri()); + tracing::debug!(req=?req, "full request"); async { - match middleware::auth(login, req, |user, request| { + let response = match middleware::auth(login, req, |user, request| { async { Controller::route(user, request).await }.boxed() }) .await @@ -119,7 +120,9 @@ impl Server { .status(500) .body(codec::text_body("Internal error")) } - } + }; + tracing::debug!(resp=?response, "full response"); + response } }), ) diff --git a/aero-proto/src/dav/node.rs b/aero-proto/src/dav/node.rs index 0a83f8c..3af3b81 100644 --- a/aero-proto/src/dav/node.rs +++ b/aero-proto/src/dav/node.rs @@ -40,7 +40,9 @@ pub(crate) trait DavNode: Send { fn supported_properties(&self, user: &ArcUser) -> dav::PropName; /// Get the values for the given properties fn properties(&self, user: &ArcUser, prop: dav::PropName) -> PropertyStream<'static>; - //fn properties(&self, user: &ArcUser, prop: dav::PropName) -> Vec>; + /// Get the value of the DAV header to return + fn dav_header(&self) -> String; + /// Put an element (create or update) fn put<'a>( &'a self, diff --git a/aero-proto/src/dav/resource.rs b/aero-proto/src/dav/resource.rs index b6c0ed5..b5ae029 100644 --- a/aero-proto/src/dav/resource.rs +++ b/aero-proto/src/dav/resource.rs @@ -139,6 +139,10 @@ impl DavNode for RootNode { > { async { Err(std::io::Error::from(std::io::ErrorKind::Unsupported)) }.boxed() } + + fn dav_header(&self) -> String { + "1".into() + } } #[derive(Clone)] @@ -260,6 +264,10 @@ impl DavNode for HomeNode { > { async { Err(std::io::Error::from(std::io::ErrorKind::Unsupported)) }.boxed() } + + fn dav_header(&self) -> String { + "1, access-control, calendar-access".into() + } } #[derive(Clone)] @@ -393,6 +401,10 @@ impl DavNode for CalendarListNode { > { async { Err(std::io::Error::from(std::io::ErrorKind::Unsupported)) }.boxed() } + + fn dav_header(&self) -> String { + "1, access-control, calendar-access".into() + } } #[derive(Clone)] @@ -641,6 +653,9 @@ impl DavNode for CalendarNode { } .boxed() } + fn dav_header(&self) -> String { + "1, 
access-control, calendar-access".into() + } } #[derive(Clone)] @@ -872,6 +887,10 @@ impl DavNode for EventNode { > { async { Err(std::io::Error::from(std::io::ErrorKind::Unsupported)) }.boxed() } + + fn dav_header(&self) -> String { + "1, access-control".into() + } } #[derive(Clone)] @@ -973,4 +992,8 @@ impl DavNode for CreateEventNode { > { async { Err(std::io::Error::from(std::io::ErrorKind::Unsupported)) }.boxed() } + + fn dav_header(&self) -> String { + "1, access-control".into() + } } -- cgit v1.2.3 From 5954de6efbb040b8b47daf0c7663a60f3db1da6e Mon Sep 17 00:00:00 2001 From: Quentin Dufour Date: Wed, 29 May 2024 10:14:16 +0200 Subject: upgrade cargo2nix --- Cargo.nix | 1538 +++++++++++++++++++++++++++++++++---------------------------- README.md | 6 +- 2 files changed, 844 insertions(+), 700 deletions(-) diff --git a/Cargo.nix b/Cargo.nix index 7c184ab..a1776cf 100644 --- a/Cargo.nix +++ b/Cargo.nix @@ -4,6 +4,14 @@ args@{ release ? true, rootFeatures ? [ + "aero-user/default" + "aero-bayou/default" + "aero-sasl/default" + "aero-dav/default" + "aerogramme-fuzz/default" + "aero-collections/default" + "aero-proto/default" + "aero-ical/default" "aerogramme/default" ], rustPackages, @@ -23,7 +31,7 @@ args@{ ignoreLockHash, }: let - nixifiedLockHash = "1636bcbca38619e40eeddcb9e9c0af1240654ca176fe1a6fb3444b21526b53c2"; + nixifiedLockHash = "a4bc3889db1e92c4aa5331faefe47cd3fc7925ec548a168e1555b3a8c3eebf08"; workspaceSrc = if args.workspaceSrc == null then ./. else args.workspaceSrc; currentLockHash = builtins.hashFile "sha256" (workspaceSrc + /Cargo.lock); lockHashIgnored = if ignoreLockHash @@ -45,7 +53,15 @@ in { cargo2nixVersion = "0.11.0"; workspace = { - aerogramme = rustPackages.unknown.aerogramme."0.2.2"; + aero-user = rustPackages.unknown.aero-user."0.3.0"; + aero-bayou = rustPackages.unknown.aero-bayou."0.3.0"; + aero-sasl = rustPackages.unknown.aero-sasl."0.3.0"; + aero-dav = rustPackages.unknown.aero-dav."0.3.0"; + aerogramme-fuzz = rustPackages.unknown.aerogramme-fuzz."0.0.0"; + aero-collections = rustPackages.unknown.aero-collections."0.3.0"; + aero-proto = rustPackages.unknown.aero-proto."0.3.0"; + aero-ical = rustPackages.unknown.aero-ical."0.3.0"; + aerogramme = rustPackages.unknown.aerogramme."0.3.0"; }; "registry+https://github.com/rust-lang/crates.io-index".abnf-core."0.6.0" = overridableMkRustCrate (profileName: rec { name = "abnf-core"; @@ -57,13 +73,13 @@ in }; }); - "registry+https://github.com/rust-lang/crates.io-index".addr2line."0.21.0" = overridableMkRustCrate (profileName: rec { + "registry+https://github.com/rust-lang/crates.io-index".addr2line."0.15.2" = overridableMkRustCrate (profileName: rec { name = "addr2line"; - version = "0.21.0"; + version = "0.15.2"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb"; }; + src = fetchCratesIo { inherit name version; sha256 = "e7a2e47a1fbe209ee101dd6d61285226744c6c8d3c21c8dc878ba6cb9f467f3a"; }; dependencies = { - gimli = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".gimli."0.28.1" { inherit profileName; }).out; + gimli = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".gimli."0.24.0" { inherit profileName; }).out; }; }); @@ -74,72 +90,202 @@ in src = fetchCratesIo { inherit name version; sha256 = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"; }; }); - "unknown".aerogramme."0.2.2" = overridableMkRustCrate (profileName: 
rec { - name = "aerogramme"; - version = "0.2.2"; + "unknown".aero-bayou."0.3.0" = overridableMkRustCrate (profileName: rec { + name = "aero-bayou"; + version = "0.3.0"; registry = "unknown"; - src = fetchCrateLocal workspaceSrc; + src = fetchCrateLocal (workspaceSrc + "/aero-bayou"); dependencies = { + aero_user = (rustPackages."unknown".aero-user."0.3.0" { inherit profileName; }).out; + anyhow = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".anyhow."1.0.79" { inherit profileName; }).out; + hex = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hex."0.4.3" { inherit profileName; }).out; + log = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".log."0.4.20" { inherit profileName; }).out; + rand = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rand."0.8.5" { inherit profileName; }).out; + serde = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".serde."1.0.195" { inherit profileName; }).out; + tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.36.0" { inherit profileName; }).out; + tracing = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing."0.1.40" { inherit profileName; }).out; + }; + }); + + "unknown".aero-collections."0.3.0" = overridableMkRustCrate (profileName: rec { + name = "aero-collections"; + version = "0.3.0"; + registry = "unknown"; + src = fetchCrateLocal (workspaceSrc + "/aero-collections"); + dependencies = { + aero_bayou = (rustPackages."unknown".aero-bayou."0.3.0" { inherit profileName; }).out; + aero_user = (rustPackages."unknown".aero-user."0.3.0" { inherit profileName; }).out; + anyhow = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".anyhow."1.0.79" { inherit profileName; }).out; + base64 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".base64."0.21.7" { inherit profileName; }).out; + eml_codec = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".eml-codec."0.1.2" { inherit profileName; }).out; + futures = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures."0.3.30" { inherit profileName; }).out; + hex = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hex."0.4.3" { inherit profileName; }).out; + icalendar = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".icalendar."0.16.1" { inherit profileName; }).out; + im = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".im."15.1.0" { inherit profileName; }).out; + lazy_static = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".lazy_static."1.4.0" { inherit profileName; }).out; + rand = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rand."0.8.5" { inherit profileName; }).out; + serde = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".serde."1.0.195" { inherit profileName; }).out; + sodiumoxide = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".sodiumoxide."0.2.7" { inherit profileName; }).out; + tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.36.0" { inherit profileName; }).out; + tracing = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing."0.1.40" { inherit profileName; }).out; + }; + }); + + "unknown".aero-dav."0.3.0" = overridableMkRustCrate (profileName: rec { + name = "aero-dav"; + version = "0.3.0"; + registry = "unknown"; + src = fetchCrateLocal 
(workspaceSrc + "/aero-dav"); + dependencies = { + chrono = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".chrono."0.4.38" { inherit profileName; }).out; + futures = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures."0.3.30" { inherit profileName; }).out; + http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."1.1.0" { inherit profileName; }).out; + quick_xml = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".quick-xml."0.31.0" { inherit profileName; }).out; + tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.36.0" { inherit profileName; }).out; + tracing = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing."0.1.40" { inherit profileName; }).out; + }; + }); + + "unknown".aero-ical."0.3.0" = overridableMkRustCrate (profileName: rec { + name = "aero-ical"; + version = "0.3.0"; + registry = "unknown"; + src = fetchCrateLocal (workspaceSrc + "/aero-ical"); + dependencies = { + aero_dav = (rustPackages."unknown".aero-dav."0.3.0" { inherit profileName; }).out; + chrono = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".chrono."0.4.38" { inherit profileName; }).out; + icalendar = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".icalendar."0.16.1" { inherit profileName; }).out; + nom = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".nom."7.1.3" { inherit profileName; }).out; + tracing = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing."0.1.40" { inherit profileName; }).out; + }; + }); + + "unknown".aero-proto."0.3.0" = overridableMkRustCrate (profileName: rec { + name = "aero-proto"; + version = "0.3.0"; + registry = "unknown"; + src = fetchCrateLocal (workspaceSrc + "/aero-proto"); + dependencies = { + aero_collections = (rustPackages."unknown".aero-collections."0.3.0" { inherit profileName; }).out; + aero_dav = (rustPackages."unknown".aero-dav."0.3.0" { inherit profileName; }).out; + aero_ical = (rustPackages."unknown".aero-ical."0.3.0" { inherit profileName; }).out; + aero_sasl = (rustPackages."unknown".aero-sasl."0.3.0" { inherit profileName; }).out; + aero_user = (rustPackages."unknown".aero-user."0.3.0" { inherit profileName; }).out; anyhow = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".anyhow."1.0.79" { inherit profileName; }).out; - argon2 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".argon2."0.5.2" { inherit profileName; }).out; async_trait = (buildRustPackages."registry+https://github.com/rust-lang/crates.io-index".async-trait."0.1.77" { profileName = "__noProfile"; }).out; - aws_config = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-config."1.1.6" { inherit profileName; }).out; - aws_sdk_s3 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-sdk-s3."1.16.0" { inherit profileName; }).out; - aws_smithy_runtime = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-runtime."1.1.7" { inherit profileName; }).out; - aws_smithy_runtime_api = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-runtime-api."1.1.7" { inherit profileName; }).out; - backtrace = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".backtrace."0.3.69" { inherit profileName; }).out; base64 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".base64."0.21.7" { inherit 
profileName; }).out; - chrono = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".chrono."0.4.31" { inherit profileName; }).out; - clap = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".clap."3.2.25" { inherit profileName; }).out; - console_subscriber = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".console-subscriber."0.2.0" { inherit profileName; }).out; + chrono = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".chrono."0.4.38" { inherit profileName; }).out; duplexify = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".duplexify."1.2.2" { inherit profileName; }).out; eml_codec = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".eml-codec."0.1.2" { inherit profileName; }).out; futures = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures."0.3.30" { inherit profileName; }).out; - hex = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hex."0.4.3" { inherit profileName; }).out; - hyper_rustls = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyper-rustls."0.26.0" { inherit profileName; }).out; + http_body_util = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http-body-util."0.1.1" { inherit profileName; }).out; + hyper = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyper."1.2.0" { inherit profileName; }).out; hyper_util = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyper-util."0.1.3" { inherit profileName; }).out; - im = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".im."15.1.0" { inherit profileName; }).out; + icalendar = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".icalendar."0.16.1" { inherit profileName; }).out; imap_codec = (rustPackages."git+https://github.com/superboum/imap-codec".imap-codec."2.0.0" { inherit profileName; }).out; imap_flow = (rustPackages."git+https://github.com/duesee/imap-flow.git".imap-flow."0.1.0" { inherit profileName; }).out; - itertools = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".itertools."0.10.5" { inherit profileName; }).out; + quick_xml = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".quick-xml."0.31.0" { inherit profileName; }).out; + rustls = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rustls."0.22.2" { inherit profileName; }).out; + rustls_pemfile = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rustls-pemfile."2.1.1" { inherit profileName; }).out; + smtp_message = (rustPackages."git+http://github.com/Alexis211/kannader".smtp-message."0.1.0" { inherit profileName; }).out; + smtp_server = (rustPackages."git+http://github.com/Alexis211/kannader".smtp-server."0.1.0" { inherit profileName; }).out; + thiserror = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".thiserror."1.0.56" { inherit profileName; }).out; + tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.36.0" { inherit profileName; }).out; + tokio_rustls = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio-rustls."0.25.0" { inherit profileName; }).out; + tokio_stream = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio-stream."0.1.14" { inherit profileName; }).out; + tokio_util = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio-util."0.7.10" { inherit 
profileName; }).out; + tracing = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing."0.1.40" { inherit profileName; }).out; + }; + }); + + "unknown".aero-sasl."0.3.0" = overridableMkRustCrate (profileName: rec { + name = "aero-sasl"; + version = "0.3.0"; + registry = "unknown"; + src = fetchCrateLocal (workspaceSrc + "/aero-sasl"); + dependencies = { + anyhow = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".anyhow."1.0.79" { inherit profileName; }).out; + base64 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".base64."0.21.7" { inherit profileName; }).out; + futures = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures."0.3.30" { inherit profileName; }).out; + hex = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hex."0.4.3" { inherit profileName; }).out; + nom = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".nom."7.1.3" { inherit profileName; }).out; + rand = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rand."0.8.5" { inherit profileName; }).out; + tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.36.0" { inherit profileName; }).out; + tokio_util = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio-util."0.7.10" { inherit profileName; }).out; + tracing = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing."0.1.40" { inherit profileName; }).out; + }; + }); + + "unknown".aero-user."0.3.0" = overridableMkRustCrate (profileName: rec { + name = "aero-user"; + version = "0.3.0"; + registry = "unknown"; + src = fetchCrateLocal (workspaceSrc + "/aero-user"); + dependencies = { + anyhow = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".anyhow."1.0.79" { inherit profileName; }).out; + argon2 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".argon2."0.5.3" { inherit profileName; }).out; + async_trait = (buildRustPackages."registry+https://github.com/rust-lang/crates.io-index".async-trait."0.1.77" { profileName = "__noProfile"; }).out; + aws_config = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-config."1.1.7" { inherit profileName; }).out; + aws_sdk_s3 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-sdk-s3."1.17.0" { inherit profileName; }).out; + aws_smithy_runtime = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-runtime."1.1.7" { inherit profileName; }).out; + aws_smithy_runtime_api = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-runtime-api."1.1.7" { inherit profileName; }).out; + base64 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".base64."0.21.7" { inherit profileName; }).out; + hyper_rustls = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyper-rustls."0.26.0" { inherit profileName; }).out; + hyper_util = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyper-util."0.1.3" { inherit profileName; }).out; k2v_client = (rustPackages."git+https://git.deuxfleurs.fr/Deuxfleurs/garage.git".k2v-client."0.0.4" { inherit profileName; }).out; - lazy_static = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".lazy_static."1.4.0" { inherit profileName; }).out; ldap3 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".ldap3."0.10.6" { inherit profileName; }).out; log = 
(rustPackages."registry+https://github.com/rust-lang/crates.io-index".log."0.4.20" { inherit profileName; }).out; - nix = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".nix."0.27.1" { inherit profileName; }).out; - nom = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".nom."7.1.3" { inherit profileName; }).out; rand = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rand."0.8.5" { inherit profileName; }).out; rmp_serde = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rmp-serde."0.15.5" { inherit profileName; }).out; - rpassword = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rpassword."7.3.1" { inherit profileName; }).out; - rustls = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rustls."0.22.2" { inherit profileName; }).out; - rustls_pemfile = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rustls-pemfile."2.0.0" { inherit profileName; }).out; serde = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".serde."1.0.195" { inherit profileName; }).out; - smtp_message = (rustPackages."git+http://github.com/Alexis211/kannader".smtp-message."0.1.0" { inherit profileName; }).out; - smtp_server = (rustPackages."git+http://github.com/Alexis211/kannader".smtp-server."0.1.0" { inherit profileName; }).out; sodiumoxide = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".sodiumoxide."0.2.7" { inherit profileName; }).out; - thiserror = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".thiserror."1.0.56" { inherit profileName; }).out; - tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.35.1" { inherit profileName; }).out; - tokio_rustls = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio-rustls."0.25.0" { inherit profileName; }).out; - tokio_util = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio-util."0.7.10" { inherit profileName; }).out; + tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.36.0" { inherit profileName; }).out; toml = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".toml."0.5.11" { inherit profileName; }).out; tracing = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing."0.1.40" { inherit profileName; }).out; - tracing_subscriber = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing-subscriber."0.3.18" { inherit profileName; }).out; zstd = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".zstd."0.9.2+zstd.1.5.1" { inherit profileName; }).out; }; }); - "registry+https://github.com/rust-lang/crates.io-index".aho-corasick."1.1.2" = overridableMkRustCrate (profileName: rec { - name = "aho-corasick"; - version = "1.1.2"; - registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0"; }; + "unknown".aerogramme."0.3.0" = overridableMkRustCrate (profileName: rec { + name = "aerogramme"; + version = "0.3.0"; + registry = "unknown"; + src = fetchCrateLocal (workspaceSrc + "/aerogramme"); + dependencies = { + aero_proto = (rustPackages."unknown".aero-proto."0.3.0" { inherit profileName; }).out; + aero_user = (rustPackages."unknown".aero-user."0.3.0" { inherit profileName; }).out; + anyhow = 
(rustPackages."registry+https://github.com/rust-lang/crates.io-index".anyhow."1.0.79" { inherit profileName; }).out; + backtrace = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".backtrace."0.3.59" { inherit profileName; }).out; + clap = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".clap."3.2.25" { inherit profileName; }).out; + futures = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures."0.3.30" { inherit profileName; }).out; + log = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".log."0.4.20" { inherit profileName; }).out; + nix = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".nix."0.27.1" { inherit profileName; }).out; + rpassword = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rpassword."7.3.1" { inherit profileName; }).out; + tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.36.0" { inherit profileName; }).out; + tracing = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing."0.1.40" { inherit profileName; }).out; + tracing_subscriber = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing-subscriber."0.3.18" { inherit profileName; }).out; + }; + devDependencies = { + aero_dav = (rustPackages."unknown".aero-dav."0.3.0" { inherit profileName; }).out; + quick_xml = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".quick-xml."0.31.0" { inherit profileName; }).out; + reqwest = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".reqwest."0.12.4" { inherit profileName; }).out; + }; + }); + + "unknown".aerogramme-fuzz."0.0.0" = overridableMkRustCrate (profileName: rec { + name = "aerogramme-fuzz"; + version = "0.0.0"; + registry = "unknown"; + src = fetchCrateLocal (workspaceSrc + "/aero-dav/fuzz"); features = builtins.concatLists [ - [ "default" ] - [ "perf-literal" ] - [ "std" ] + (lib.optional (rootFeatures' ? "aerogramme-fuzz/arbitrary") "arbitrary") ]; dependencies = { - memchr = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".memchr."2.7.1" { inherit profileName; }).out; + aero_dav = (rustPackages."unknown".aero-dav."0.3.0" { inherit profileName; }).out; + ${ if rootFeatures' ? 
"aerogramme-fuzz/arbitrary" then "arbitrary" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".arbitrary."1.3.2" { inherit profileName; }).out; + libfuzzer_sys = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".libfuzzer-sys."0.4.7" { inherit profileName; }).out; + quick_xml = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".quick-xml."0.31.0" { inherit profileName; }).out; + tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.36.0" { inherit profileName; }).out; }; }); @@ -171,11 +317,25 @@ in ]; }); - "registry+https://github.com/rust-lang/crates.io-index".argon2."0.5.2" = overridableMkRustCrate (profileName: rec { + "registry+https://github.com/rust-lang/crates.io-index".arbitrary."1.3.2" = overridableMkRustCrate (profileName: rec { + name = "arbitrary"; + version = "1.3.2"; + registry = "registry+https://github.com/rust-lang/crates.io-index"; + src = fetchCratesIo { inherit name version; sha256 = "7d5a26814d8dcb93b0e5a0ff3c6d80a8843bafb21b39e8e18a6f05471870e110"; }; + features = builtins.concatLists [ + [ "derive" ] + [ "derive_arbitrary" ] + ]; + dependencies = { + derive_arbitrary = (buildRustPackages."registry+https://github.com/rust-lang/crates.io-index".derive_arbitrary."1.3.2" { profileName = "__noProfile"; }).out; + }; + }); + + "registry+https://github.com/rust-lang/crates.io-index".argon2."0.5.3" = overridableMkRustCrate (profileName: rec { name = "argon2"; - version = "0.5.2"; + version = "0.5.3"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "17ba4cac0a46bc1d2912652a751c47f2a9f3a7fe89bcae2275d418f5270402f9"; }; + src = fetchCratesIo { inherit name version; sha256 = "3c3610892ee6e0cbce8ae2700349fcf8f98adb0dbfbee85aec3c9179d29cc072"; }; features = builtins.concatLists [ [ "alloc" ] [ "default" ] @@ -260,19 +420,19 @@ in }; }); - "registry+https://github.com/rust-lang/crates.io-index".async-channel."2.1.1" = overridableMkRustCrate (profileName: rec { + "registry+https://github.com/rust-lang/crates.io-index".async-channel."2.2.0" = overridableMkRustCrate (profileName: rec { name = "async-channel"; - version = "2.1.1"; + version = "2.2.0"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "1ca33f4bc4ed1babef42cad36cc1f51fa88be00420404e5b1e80ab1b18f7678c"; }; + src = fetchCratesIo { inherit name version; sha256 = "f28243a43d821d11341ab73c80bed182dc015c514b951616cf79bd4af39af0c3"; }; features = builtins.concatLists [ [ "default" ] [ "std" ] ]; dependencies = { concurrent_queue = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".concurrent-queue."2.4.0" { inherit profileName; }).out; - event_listener = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".event-listener."4.0.3" { inherit profileName; }).out; - event_listener_strategy = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".event-listener-strategy."0.4.0" { inherit profileName; }).out; + event_listener = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".event-listener."5.2.0" { inherit profileName; }).out; + event_listener_strategy = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".event-listener-strategy."0.5.0" { inherit profileName; }).out; futures_core = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-core."0.3.30" { inherit profileName; 
}).out; pin_project_lite = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project-lite."0.2.13" { inherit profileName; }).out; }; @@ -318,9 +478,9 @@ in [ "default" ] ]; dependencies = { - async_channel = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".async-channel."2.1.1" { inherit profileName; }).out; + async_channel = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".async-channel."2.2.0" { inherit profileName; }).out; async_executor = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".async-executor."1.8.0" { inherit profileName; }).out; - async_io = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".async-io."2.3.0" { inherit profileName; }).out; + async_io = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".async-io."2.3.1" { inherit profileName; }).out; async_lock = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".async-lock."3.3.0" { inherit profileName; }).out; blocking = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".blocking."1.5.1" { inherit profileName; }).out; futures_lite = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-lite."2.2.0" { inherit profileName; }).out; @@ -351,11 +511,11 @@ in }; }); - "registry+https://github.com/rust-lang/crates.io-index".async-io."2.3.0" = overridableMkRustCrate (profileName: rec { + "registry+https://github.com/rust-lang/crates.io-index".async-io."2.3.1" = overridableMkRustCrate (profileName: rec { name = "async-io"; - version = "2.3.0"; + version = "2.3.1"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "fb41eb19024a91746eba0773aa5e16036045bbf45733766661099e182ea6a744"; }; + src = fetchCratesIo { inherit name version; sha256 = "8f97ab0c5b00a7cdbe5a371b9a782ee7be1316095885c8a4ea1daf490eb0ef65"; }; dependencies = { async_lock = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".async-lock."3.3.0" { inherit profileName; }).out; cfg_if = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".cfg-if."1.0.0" { inherit profileName; }).out; @@ -363,8 +523,8 @@ in futures_io = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-io."0.3.30" { inherit profileName; }).out; futures_lite = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-lite."2.2.0" { inherit profileName; }).out; parking = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".parking."2.2.0" { inherit profileName; }).out; - polling = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".polling."3.3.2" { inherit profileName; }).out; - rustix = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rustix."0.38.30" { inherit profileName; }).out; + polling = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".polling."3.5.0" { inherit profileName; }).out; + rustix = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rustix."0.38.31" { inherit profileName; }).out; slab = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".slab."0.4.9" { inherit profileName; }).out; tracing = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing."0.1.40" { inherit profileName; }).out; ${ if hostPlatform.isWindows then "windows_sys" else null } = 
(rustPackages."registry+https://github.com/rust-lang/crates.io-index".windows-sys."0.52.0" { inherit profileName; }).out; @@ -422,7 +582,7 @@ in cfg_if = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".cfg-if."1.0.0" { inherit profileName; }).out; event_listener = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".event-listener."3.1.0" { inherit profileName; }).out; futures_lite = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-lite."1.13.0" { inherit profileName; }).out; - ${ if hostPlatform.isUnix then "rustix" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rustix."0.38.30" { inherit profileName; }).out; + ${ if hostPlatform.isUnix then "rustix" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rustix."0.38.31" { inherit profileName; }).out; ${ if hostPlatform.isWindows then "windows_sys" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".windows-sys."0.48.0" { inherit profileName; }).out; }; }); @@ -433,13 +593,13 @@ in registry = "registry+https://github.com/rust-lang/crates.io-index"; src = fetchCratesIo { inherit name version; sha256 = "9e47d90f65a225c4527103a8d747001fc56e375203592b25ad103e1ca13124c5"; }; dependencies = { - ${ if hostPlatform.isUnix then "async_io" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".async-io."2.3.0" { inherit profileName; }).out; + ${ if hostPlatform.isUnix then "async_io" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".async-io."2.3.1" { inherit profileName; }).out; ${ if hostPlatform.isWindows then "async_lock" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".async-lock."2.8.0" { inherit profileName; }).out; ${ if hostPlatform.isWindows then "atomic_waker" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".atomic-waker."1.1.2" { inherit profileName; }).out; cfg_if = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".cfg-if."1.0.0" { inherit profileName; }).out; futures_core = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-core."0.3.30" { inherit profileName; }).out; ${ if hostPlatform.isUnix then "futures_io" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-io."0.3.30" { inherit profileName; }).out; - ${ if hostPlatform.isUnix then "rustix" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rustix."0.38.30" { inherit profileName; }).out; + ${ if hostPlatform.isUnix then "rustix" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rustix."0.38.31" { inherit profileName; }).out; ${ if hostPlatform.isUnix then "signal_hook_registry" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".signal-hook-registry."1.4.1" { inherit profileName; }).out; ${ if hostPlatform.isWindows then "slab" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".slab."0.4.9" { inherit profileName; }).out; ${ if hostPlatform.isWindows then "windows_sys" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".windows-sys."0.48.0" { inherit profileName; }).out; @@ -487,7 +647,7 @@ in ${ if hostPlatform.parsed.cpu.name == "wasm32" then "gloo_timers" else null } = 
(rustPackages."registry+https://github.com/rust-lang/crates.io-index".gloo-timers."0.2.6" { inherit profileName; }).out; kv_log_macro = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".kv-log-macro."1.0.7" { inherit profileName; }).out; log = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".log."0.4.20" { inherit profileName; }).out; - memchr = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".memchr."2.7.1" { inherit profileName; }).out; + memchr = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".memchr."2.3.4" { inherit profileName; }).out; once_cell = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".once_cell."1.19.0" { inherit profileName; }).out; pin_project_lite = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project-lite."0.2.13" { inherit profileName; }).out; pin_utils = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-utils."0.1.0" { inherit profileName; }).out; @@ -496,30 +656,6 @@ in }; }); - "registry+https://github.com/rust-lang/crates.io-index".async-stream."0.3.5" = overridableMkRustCrate (profileName: rec { - name = "async-stream"; - version = "0.3.5"; - registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "cd56dd203fef61ac097dd65721a419ddccb106b2d2b70ba60a6b529f03961a51"; }; - dependencies = { - async_stream_impl = (buildRustPackages."registry+https://github.com/rust-lang/crates.io-index".async-stream-impl."0.3.5" { profileName = "__noProfile"; }).out; - futures_core = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-core."0.3.30" { inherit profileName; }).out; - pin_project_lite = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project-lite."0.2.13" { inherit profileName; }).out; - }; - }); - - "registry+https://github.com/rust-lang/crates.io-index".async-stream-impl."0.3.5" = overridableMkRustCrate (profileName: rec { - name = "async-stream-impl"; - version = "0.3.5"; - registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193"; }; - dependencies = { - proc_macro2 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".proc-macro2."1.0.76" { inherit profileName; }).out; - quote = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".quote."1.0.35" { inherit profileName; }).out; - syn = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".syn."2.0.48" { inherit profileName; }).out; - }; - }); - "registry+https://github.com/rust-lang/crates.io-index".async-task."4.7.0" = overridableMkRustCrate (profileName: rec { name = "async-task"; version = "4.7.0"; @@ -615,11 +751,11 @@ in src = fetchCratesIo { inherit name version; sha256 = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"; }; }); - "registry+https://github.com/rust-lang/crates.io-index".aws-config."1.1.6" = overridableMkRustCrate (profileName: rec { + "registry+https://github.com/rust-lang/crates.io-index".aws-config."1.1.7" = overridableMkRustCrate (profileName: rec { name = "aws-config"; - version = "1.1.6"; + version = "1.1.7"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "3182c19847238b50b62ae0383a6dbfc14514e552eb5e307e1ea83ccf5840b8a6"; }; + src = fetchCratesIo { 
inherit name version; sha256 = "0b96342ea8948ab9bef3e6234ea97fc32e2d8a88d8fb6a084e52267317f94b6b"; }; features = builtins.concatLists [ [ "behavior-version-latest" ] [ "client-hyper" ] @@ -630,36 +766,36 @@ in [ "sso" ] ]; dependencies = { - aws_credential_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-credential-types."1.1.6" { inherit profileName; }).out; - aws_runtime = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-runtime."1.1.6" { inherit profileName; }).out; - aws_sdk_sso = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-sdk-sso."1.14.0" { inherit profileName; }).out; - aws_sdk_ssooidc = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-sdk-ssooidc."1.14.0" { inherit profileName; }).out; - aws_sdk_sts = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-sdk-sts."1.14.0" { inherit profileName; }).out; + aws_credential_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-credential-types."1.1.7" { inherit profileName; }).out; + aws_runtime = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-runtime."1.1.7" { inherit profileName; }).out; + aws_sdk_sso = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-sdk-sso."1.15.0" { inherit profileName; }).out; + aws_sdk_ssooidc = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-sdk-ssooidc."1.15.0" { inherit profileName; }).out; + aws_sdk_sts = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-sdk-sts."1.15.0" { inherit profileName; }).out; aws_smithy_async = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-async."1.1.7" { inherit profileName; }).out; aws_smithy_http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-http."0.60.6" { inherit profileName; }).out; aws_smithy_json = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-json."0.60.6" { inherit profileName; }).out; aws_smithy_runtime = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-runtime."1.1.7" { inherit profileName; }).out; aws_smithy_runtime_api = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-runtime-api."1.1.7" { inherit profileName; }).out; aws_smithy_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-types."1.1.7" { inherit profileName; }).out; - aws_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-types."1.1.6" { inherit profileName; }).out; + aws_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-types."1.1.7" { inherit profileName; }).out; bytes = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".bytes."1.5.0" { inherit profileName; }).out; fastrand = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".fastrand."2.0.1" { inherit profileName; }).out; hex = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hex."0.4.3" { inherit profileName; }).out; - http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.11" { inherit profileName; }).out; + http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.12" { inherit profileName; }).out; hyper = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyper."0.14.28" { inherit profileName; 
}).out; ring = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".ring."0.17.7" { inherit profileName; }).out; time = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".time."0.3.31" { inherit profileName; }).out; - tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.35.1" { inherit profileName; }).out; + tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.36.0" { inherit profileName; }).out; tracing = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing."0.1.40" { inherit profileName; }).out; zeroize = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".zeroize."1.7.0" { inherit profileName; }).out; }; }); - "registry+https://github.com/rust-lang/crates.io-index".aws-credential-types."1.1.6" = overridableMkRustCrate (profileName: rec { + "registry+https://github.com/rust-lang/crates.io-index".aws-credential-types."1.1.7" = overridableMkRustCrate (profileName: rec { name = "aws-credential-types"; - version = "1.1.6"; + version = "1.1.7"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "e5635d8707f265c773282a22abe1ecd4fbe96a8eb2f0f14c0796f8016f11a41a"; }; + src = fetchCratesIo { inherit name version; sha256 = "273fa47dafc9ef14c2c074ddddbea4561ff01b7f68d5091c0e9737ced605c01d"; }; features = builtins.concatLists [ [ "test-util" ] ]; @@ -671,28 +807,28 @@ in }; }); - "registry+https://github.com/rust-lang/crates.io-index".aws-runtime."1.1.6" = overridableMkRustCrate (profileName: rec { + "registry+https://github.com/rust-lang/crates.io-index".aws-runtime."1.1.7" = overridableMkRustCrate (profileName: rec { name = "aws-runtime"; - version = "1.1.6"; + version = "1.1.7"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "6f82b9ae2adfd9d6582440d0eeb394c07f74d21b4c0cc72bdb73735c9e1a9c0e"; }; + src = fetchCratesIo { inherit name version; sha256 = "6e38bab716c8bf07da24be07ecc02e0f5656ce8f30a891322ecdcb202f943b85"; }; features = builtins.concatLists [ [ "event-stream" ] [ "http-02x" ] [ "sigv4a" ] ]; dependencies = { - aws_credential_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-credential-types."1.1.6" { inherit profileName; }).out; - aws_sigv4 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-sigv4."1.1.6" { inherit profileName; }).out; + aws_credential_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-credential-types."1.1.7" { inherit profileName; }).out; + aws_sigv4 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-sigv4."1.1.7" { inherit profileName; }).out; aws_smithy_async = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-async."1.1.7" { inherit profileName; }).out; aws_smithy_eventstream = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-eventstream."0.60.4" { inherit profileName; }).out; aws_smithy_http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-http."0.60.6" { inherit profileName; }).out; aws_smithy_runtime_api = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-runtime-api."1.1.7" { inherit profileName; }).out; aws_smithy_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-types."1.1.7" { inherit 
profileName; }).out; - aws_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-types."1.1.6" { inherit profileName; }).out; + aws_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-types."1.1.7" { inherit profileName; }).out; bytes = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".bytes."1.5.0" { inherit profileName; }).out; fastrand = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".fastrand."2.0.1" { inherit profileName; }).out; - http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.11" { inherit profileName; }).out; + http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.12" { inherit profileName; }).out; http_body = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http-body."0.4.6" { inherit profileName; }).out; percent_encoding = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".percent-encoding."2.3.1" { inherit profileName; }).out; pin_project_lite = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project-lite."0.2.13" { inherit profileName; }).out; @@ -701,39 +837,39 @@ in }; }); - "registry+https://github.com/rust-lang/crates.io-index".aws-sdk-config."1.15.0" = overridableMkRustCrate (profileName: rec { + "registry+https://github.com/rust-lang/crates.io-index".aws-sdk-config."1.16.0" = overridableMkRustCrate (profileName: rec { name = "aws-sdk-config"; - version = "1.15.0"; + version = "1.16.0"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "0cb71960e3e197c3f512f3bf0f47f444acd708db59733416107ec2ff161ff5c4"; }; + src = fetchCratesIo { inherit name version; sha256 = "07979fd68679736ba306d6ea2a4dc2fd835ac4d454942c5d8920ef83ed2f979f"; }; features = builtins.concatLists [ [ "default" ] [ "rt-tokio" ] [ "rustls" ] ]; dependencies = { - aws_credential_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-credential-types."1.1.6" { inherit profileName; }).out; - aws_runtime = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-runtime."1.1.6" { inherit profileName; }).out; + aws_credential_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-credential-types."1.1.7" { inherit profileName; }).out; + aws_runtime = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-runtime."1.1.7" { inherit profileName; }).out; aws_smithy_async = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-async."1.1.7" { inherit profileName; }).out; aws_smithy_http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-http."0.60.6" { inherit profileName; }).out; aws_smithy_json = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-json."0.60.6" { inherit profileName; }).out; aws_smithy_runtime = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-runtime."1.1.7" { inherit profileName; }).out; aws_smithy_runtime_api = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-runtime-api."1.1.7" { inherit profileName; }).out; aws_smithy_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-types."1.1.7" { inherit profileName; }).out; - aws_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-types."1.1.6" { 
inherit profileName; }).out; + aws_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-types."1.1.7" { inherit profileName; }).out; bytes = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".bytes."1.5.0" { inherit profileName; }).out; - http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.11" { inherit profileName; }).out; + http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.12" { inherit profileName; }).out; once_cell = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".once_cell."1.19.0" { inherit profileName; }).out; regex_lite = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".regex-lite."0.1.5" { inherit profileName; }).out; tracing = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing."0.1.40" { inherit profileName; }).out; }; }); - "registry+https://github.com/rust-lang/crates.io-index".aws-sdk-s3."1.16.0" = overridableMkRustCrate (profileName: rec { + "registry+https://github.com/rust-lang/crates.io-index".aws-sdk-s3."1.17.0" = overridableMkRustCrate (profileName: rec { name = "aws-sdk-s3"; - version = "1.16.0"; + version = "1.17.0"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "5076637347e7d0218e61facae853110682ae58efabd2f4e2a9e530c203d5fa7b"; }; + src = fetchCratesIo { inherit name version; sha256 = "93d35d39379445970fc3e4ddf7559fff2c32935ce0b279f9cb27080d6b7c6d94"; }; features = builtins.concatLists [ [ "default" ] [ "rt-tokio" ] @@ -741,9 +877,9 @@ in [ "sigv4a" ] ]; dependencies = { - aws_credential_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-credential-types."1.1.6" { inherit profileName; }).out; - aws_runtime = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-runtime."1.1.6" { inherit profileName; }).out; - aws_sigv4 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-sigv4."1.1.6" { inherit profileName; }).out; + aws_credential_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-credential-types."1.1.7" { inherit profileName; }).out; + aws_runtime = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-runtime."1.1.7" { inherit profileName; }).out; + aws_sigv4 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-sigv4."1.1.7" { inherit profileName; }).out; aws_smithy_async = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-async."1.1.7" { inherit profileName; }).out; aws_smithy_checksums = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-checksums."0.60.6" { inherit profileName; }).out; aws_smithy_eventstream = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-eventstream."0.60.4" { inherit profileName; }).out; @@ -753,9 +889,9 @@ in aws_smithy_runtime_api = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-runtime-api."1.1.7" { inherit profileName; }).out; aws_smithy_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-types."1.1.7" { inherit profileName; }).out; aws_smithy_xml = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-xml."0.60.6" { inherit profileName; }).out; - aws_types = 
(rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-types."1.1.6" { inherit profileName; }).out; + aws_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-types."1.1.7" { inherit profileName; }).out; bytes = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".bytes."1.5.0" { inherit profileName; }).out; - http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.11" { inherit profileName; }).out; + http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.12" { inherit profileName; }).out; http_body = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http-body."0.4.6" { inherit profileName; }).out; once_cell = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".once_cell."1.19.0" { inherit profileName; }).out; percent_encoding = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".percent-encoding."2.3.1" { inherit profileName; }).out; @@ -765,60 +901,60 @@ in }; }); - "registry+https://github.com/rust-lang/crates.io-index".aws-sdk-sso."1.14.0" = overridableMkRustCrate (profileName: rec { + "registry+https://github.com/rust-lang/crates.io-index".aws-sdk-sso."1.15.0" = overridableMkRustCrate (profileName: rec { name = "aws-sdk-sso"; - version = "1.14.0"; + version = "1.15.0"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "ca7e8097448832fcd22faf6bb227e97d76b40e354509d1307653a885811c7151"; }; + src = fetchCratesIo { inherit name version; sha256 = "d84bd3925a17c9adbf6ec65d52104a44a09629d8f70290542beeee69a95aee7f"; }; dependencies = { - aws_credential_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-credential-types."1.1.6" { inherit profileName; }).out; - aws_runtime = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-runtime."1.1.6" { inherit profileName; }).out; + aws_credential_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-credential-types."1.1.7" { inherit profileName; }).out; + aws_runtime = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-runtime."1.1.7" { inherit profileName; }).out; aws_smithy_async = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-async."1.1.7" { inherit profileName; }).out; aws_smithy_http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-http."0.60.6" { inherit profileName; }).out; aws_smithy_json = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-json."0.60.6" { inherit profileName; }).out; aws_smithy_runtime = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-runtime."1.1.7" { inherit profileName; }).out; aws_smithy_runtime_api = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-runtime-api."1.1.7" { inherit profileName; }).out; aws_smithy_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-types."1.1.7" { inherit profileName; }).out; - aws_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-types."1.1.6" { inherit profileName; }).out; + aws_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-types."1.1.7" { inherit profileName; }).out; bytes = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".bytes."1.5.0" { inherit 
profileName; }).out; - http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.11" { inherit profileName; }).out; + http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.12" { inherit profileName; }).out; once_cell = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".once_cell."1.19.0" { inherit profileName; }).out; regex_lite = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".regex-lite."0.1.5" { inherit profileName; }).out; tracing = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing."0.1.40" { inherit profileName; }).out; }; }); - "registry+https://github.com/rust-lang/crates.io-index".aws-sdk-ssooidc."1.14.0" = overridableMkRustCrate (profileName: rec { + "registry+https://github.com/rust-lang/crates.io-index".aws-sdk-ssooidc."1.15.0" = overridableMkRustCrate (profileName: rec { name = "aws-sdk-ssooidc"; - version = "1.14.0"; + version = "1.15.0"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "a75073590e23d63044606771afae309fada8eb10ded54a1ce4598347221d3fef"; }; + src = fetchCratesIo { inherit name version; sha256 = "2c2dae39e997f58bc4d6292e6244b26ba630c01ab671b6f9f44309de3eb80ab8"; }; dependencies = { - aws_credential_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-credential-types."1.1.6" { inherit profileName; }).out; - aws_runtime = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-runtime."1.1.6" { inherit profileName; }).out; + aws_credential_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-credential-types."1.1.7" { inherit profileName; }).out; + aws_runtime = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-runtime."1.1.7" { inherit profileName; }).out; aws_smithy_async = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-async."1.1.7" { inherit profileName; }).out; aws_smithy_http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-http."0.60.6" { inherit profileName; }).out; aws_smithy_json = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-json."0.60.6" { inherit profileName; }).out; aws_smithy_runtime = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-runtime."1.1.7" { inherit profileName; }).out; aws_smithy_runtime_api = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-runtime-api."1.1.7" { inherit profileName; }).out; aws_smithy_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-types."1.1.7" { inherit profileName; }).out; - aws_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-types."1.1.6" { inherit profileName; }).out; + aws_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-types."1.1.7" { inherit profileName; }).out; bytes = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".bytes."1.5.0" { inherit profileName; }).out; - http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.11" { inherit profileName; }).out; + http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.12" { inherit profileName; }).out; once_cell = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".once_cell."1.19.0" { inherit 
profileName; }).out; regex_lite = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".regex-lite."0.1.5" { inherit profileName; }).out; tracing = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing."0.1.40" { inherit profileName; }).out; }; }); - "registry+https://github.com/rust-lang/crates.io-index".aws-sdk-sts."1.14.0" = overridableMkRustCrate (profileName: rec { + "registry+https://github.com/rust-lang/crates.io-index".aws-sdk-sts."1.15.0" = overridableMkRustCrate (profileName: rec { name = "aws-sdk-sts"; - version = "1.14.0"; + version = "1.15.0"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "650e4aaae41547151dea4d8142f7ffcc8ab8ba76d5dccc8933936ef2102c3356"; }; + src = fetchCratesIo { inherit name version; sha256 = "17fd9a53869fee17cea77e352084e1aa71e2c5e323d974c13a9c2bcfd9544c7f"; }; dependencies = { - aws_credential_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-credential-types."1.1.6" { inherit profileName; }).out; - aws_runtime = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-runtime."1.1.6" { inherit profileName; }).out; + aws_credential_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-credential-types."1.1.7" { inherit profileName; }).out; + aws_runtime = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-runtime."1.1.7" { inherit profileName; }).out; aws_smithy_async = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-async."1.1.7" { inherit profileName; }).out; aws_smithy_http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-http."0.60.6" { inherit profileName; }).out; aws_smithy_json = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-json."0.60.6" { inherit profileName; }).out; @@ -827,19 +963,19 @@ in aws_smithy_runtime_api = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-runtime-api."1.1.7" { inherit profileName; }).out; aws_smithy_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-types."1.1.7" { inherit profileName; }).out; aws_smithy_xml = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-xml."0.60.6" { inherit profileName; }).out; - aws_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-types."1.1.6" { inherit profileName; }).out; - http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.11" { inherit profileName; }).out; + aws_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-types."1.1.7" { inherit profileName; }).out; + http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.12" { inherit profileName; }).out; once_cell = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".once_cell."1.19.0" { inherit profileName; }).out; regex_lite = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".regex-lite."0.1.5" { inherit profileName; }).out; tracing = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing."0.1.40" { inherit profileName; }).out; }; }); - "registry+https://github.com/rust-lang/crates.io-index".aws-sigv4."1.1.6" = overridableMkRustCrate (profileName: rec { + "registry+https://github.com/rust-lang/crates.io-index".aws-sigv4."1.1.7" = 
overridableMkRustCrate (profileName: rec { name = "aws-sigv4"; - version = "1.1.6"; + version = "1.1.7"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "404c64a104188ac70dd1684718765cb5559795458e446480e41984e68e57d888"; }; + src = fetchCratesIo { inherit name version; sha256 = "8ada00a4645d7d89f296fe0ddbc3fe3554f03035937c849a05d37ddffc1f29a1"; }; features = builtins.concatLists [ [ "default" ] [ "http0-compat" ] @@ -849,7 +985,7 @@ in [ "sigv4a" ] ]; dependencies = { - aws_credential_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-credential-types."1.1.6" { inherit profileName; }).out; + aws_credential_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-credential-types."1.1.7" { inherit profileName; }).out; aws_smithy_eventstream = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-eventstream."0.60.4" { inherit profileName; }).out; aws_smithy_http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-http."0.60.6" { inherit profileName; }).out; aws_smithy_runtime_api = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-runtime-api."1.1.7" { inherit profileName; }).out; @@ -859,8 +995,8 @@ in form_urlencoded = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".form_urlencoded."1.2.1" { inherit profileName; }).out; hex = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hex."0.4.3" { inherit profileName; }).out; hmac = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hmac."0.12.1" { inherit profileName; }).out; - http0 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.11" { inherit profileName; }).out; - http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."1.0.0" { inherit profileName; }).out; + http0 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.12" { inherit profileName; }).out; + http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."1.1.0" { inherit profileName; }).out; once_cell = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".once_cell."1.19.0" { inherit profileName; }).out; p256 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".p256."0.11.1" { inherit profileName; }).out; percent_encoding = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".percent-encoding."2.3.1" { inherit profileName; }).out; @@ -884,7 +1020,7 @@ in dependencies = { futures_util = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-util."0.3.30" { inherit profileName; }).out; pin_project_lite = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project-lite."0.2.13" { inherit profileName; }).out; - tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.35.1" { inherit profileName; }).out; + tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.36.0" { inherit profileName; }).out; }; }); @@ -898,9 +1034,9 @@ in aws_smithy_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-types."1.1.7" { inherit profileName; }).out; bytes = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".bytes."1.5.0" { inherit profileName; }).out; crc32c = 
(rustPackages."registry+https://github.com/rust-lang/crates.io-index".crc32c."0.6.5" { inherit profileName; }).out; - crc32fast = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".crc32fast."1.3.2" { inherit profileName; }).out; + crc32fast = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".crc32fast."1.4.0" { inherit profileName; }).out; hex = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hex."0.4.3" { inherit profileName; }).out; - http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.11" { inherit profileName; }).out; + http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.12" { inherit profileName; }).out; http_body = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http-body."0.4.6" { inherit profileName; }).out; md5 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".md-5."0.10.6" { inherit profileName; }).out; pin_project_lite = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project-lite."0.2.13" { inherit profileName; }).out; @@ -918,7 +1054,7 @@ in dependencies = { aws_smithy_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-types."1.1.7" { inherit profileName; }).out; bytes = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".bytes."1.5.0" { inherit profileName; }).out; - crc32fast = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".crc32fast."1.3.2" { inherit profileName; }).out; + crc32fast = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".crc32fast."1.4.0" { inherit profileName; }).out; }; }); @@ -938,7 +1074,7 @@ in bytes = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".bytes."1.5.0" { inherit profileName; }).out; bytes_utils = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".bytes-utils."0.1.4" { inherit profileName; }).out; futures_core = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-core."0.3.30" { inherit profileName; }).out; - http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.11" { inherit profileName; }).out; + http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.12" { inherit profileName; }).out; http_body = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http-body."0.4.6" { inherit profileName; }).out; once_cell = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".once_cell."1.19.0" { inherit profileName; }).out; percent_encoding = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".percent-encoding."2.3.1" { inherit profileName; }).out; @@ -988,7 +1124,7 @@ in bytes = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".bytes."1.5.0" { inherit profileName; }).out; fastrand = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".fastrand."2.0.1" { inherit profileName; }).out; h2 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".h2."0.3.24" { inherit profileName; }).out; - http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.11" { inherit profileName; }).out; + http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.12" { inherit profileName; }).out; http_body_0_4 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http-body."0.4.6" { 
inherit profileName; }).out; hyper_0_14 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyper."0.14.28" { inherit profileName; }).out; hyper_rustls = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyper-rustls."0.24.2" { inherit profileName; }).out; @@ -996,7 +1132,7 @@ in pin_project_lite = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project-lite."0.2.13" { inherit profileName; }).out; pin_utils = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-utils."0.1.0" { inherit profileName; }).out; rustls = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rustls."0.21.10" { inherit profileName; }).out; - tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.35.1" { inherit profileName; }).out; + tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.36.0" { inherit profileName; }).out; tracing = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing."0.1.40" { inherit profileName; }).out; }; }); @@ -1016,10 +1152,10 @@ in aws_smithy_async = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-async."1.1.7" { inherit profileName; }).out; aws_smithy_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-types."1.1.7" { inherit profileName; }).out; bytes = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".bytes."1.5.0" { inherit profileName; }).out; - http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.11" { inherit profileName; }).out; - http1 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."1.0.0" { inherit profileName; }).out; + http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.12" { inherit profileName; }).out; + http1 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."1.1.0" { inherit profileName; }).out; pin_project_lite = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project-lite."0.2.13" { inherit profileName; }).out; - tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.35.1" { inherit profileName; }).out; + tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.36.0" { inherit profileName; }).out; tracing = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing."0.1.40" { inherit profileName; }).out; zeroize = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".zeroize."1.7.0" { inherit profileName; }).out; }; @@ -1040,16 +1176,16 @@ in bytes = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".bytes."1.5.0" { inherit profileName; }).out; bytes_utils = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".bytes-utils."0.1.4" { inherit profileName; }).out; futures_core = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-core."0.3.30" { inherit profileName; }).out; - http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.11" { inherit profileName; }).out; + http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.12" { inherit profileName; }).out; http_body_0_4 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http-body."0.4.6" { inherit profileName; }).out; itoa = 
(rustPackages."registry+https://github.com/rust-lang/crates.io-index".itoa."1.0.10" { inherit profileName; }).out; num_integer = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".num-integer."0.1.45" { inherit profileName; }).out; pin_project_lite = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project-lite."0.2.13" { inherit profileName; }).out; pin_utils = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-utils."0.1.0" { inherit profileName; }).out; - ryu = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".ryu."1.0.16" { inherit profileName; }).out; + ryu = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".ryu."1.0.17" { inherit profileName; }).out; ${ if false then "serde" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".serde."1.0.195" { inherit profileName; }).out; time = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".time."0.3.31" { inherit profileName; }).out; - tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.35.1" { inherit profileName; }).out; + tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.36.0" { inherit profileName; }).out; tokio_util = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio-util."0.7.10" { inherit profileName; }).out; }; }); @@ -1064,17 +1200,17 @@ in }; }); - "registry+https://github.com/rust-lang/crates.io-index".aws-types."1.1.6" = overridableMkRustCrate (profileName: rec { + "registry+https://github.com/rust-lang/crates.io-index".aws-types."1.1.7" = overridableMkRustCrate (profileName: rec { name = "aws-types"; - version = "1.1.6"; + version = "1.1.7"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "8fbb5d48aae496f628e7aa2e41991dd4074f606d9e3ade1ce1059f293d40f9a2"; }; + src = fetchCratesIo { inherit name version; sha256 = "d07c63521aa1ea9a9f92a701f1a08ce3fd20b46c6efc0d5c8947c1fd879e3df1"; }; dependencies = { - aws_credential_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-credential-types."1.1.6" { inherit profileName; }).out; + aws_credential_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-credential-types."1.1.7" { inherit profileName; }).out; aws_smithy_async = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-async."1.1.7" { inherit profileName; }).out; aws_smithy_runtime_api = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-runtime-api."1.1.7" { inherit profileName; }).out; aws_smithy_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-smithy-types."1.1.7" { inherit profileName; }).out; - http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.11" { inherit profileName; }).out; + http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.12" { inherit profileName; }).out; tracing = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing."0.1.40" { inherit profileName; }).out; }; buildDependencies = { @@ -1082,72 +1218,21 @@ in }; }); - "registry+https://github.com/rust-lang/crates.io-index".axum."0.6.20" = overridableMkRustCrate (profileName: rec { - name = "axum"; - version = "0.6.20"; - registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = 
fetchCratesIo { inherit name version; sha256 = "3b829e4e32b91e643de6eafe82b1d90675f5874230191a4ffbc1b336dec4d6bf"; }; - dependencies = { - async_trait = (buildRustPackages."registry+https://github.com/rust-lang/crates.io-index".async-trait."0.1.77" { profileName = "__noProfile"; }).out; - axum_core = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".axum-core."0.3.4" { inherit profileName; }).out; - bitflags = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".bitflags."1.3.2" { inherit profileName; }).out; - bytes = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".bytes."1.5.0" { inherit profileName; }).out; - futures_util = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-util."0.3.30" { inherit profileName; }).out; - http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.11" { inherit profileName; }).out; - http_body = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http-body."0.4.6" { inherit profileName; }).out; - hyper = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyper."0.14.28" { inherit profileName; }).out; - itoa = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".itoa."1.0.10" { inherit profileName; }).out; - matchit = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".matchit."0.7.3" { inherit profileName; }).out; - memchr = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".memchr."2.7.1" { inherit profileName; }).out; - mime = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".mime."0.3.17" { inherit profileName; }).out; - percent_encoding = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".percent-encoding."2.3.1" { inherit profileName; }).out; - pin_project_lite = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project-lite."0.2.13" { inherit profileName; }).out; - serde = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".serde."1.0.195" { inherit profileName; }).out; - sync_wrapper = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".sync_wrapper."0.1.2" { inherit profileName; }).out; - tower = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tower."0.4.13" { inherit profileName; }).out; - tower_layer = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tower-layer."0.3.2" { inherit profileName; }).out; - tower_service = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tower-service."0.3.2" { inherit profileName; }).out; - }; - buildDependencies = { - rustversion = (buildRustPackages."registry+https://github.com/rust-lang/crates.io-index".rustversion."1.0.14" { profileName = "__noProfile"; }).out; - }; - }); - - "registry+https://github.com/rust-lang/crates.io-index".axum-core."0.3.4" = overridableMkRustCrate (profileName: rec { - name = "axum-core"; - version = "0.3.4"; - registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c"; }; - dependencies = { - async_trait = (buildRustPackages."registry+https://github.com/rust-lang/crates.io-index".async-trait."0.1.77" { profileName = "__noProfile"; }).out; - bytes = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".bytes."1.5.0" { inherit profileName; }).out; - futures_util = 
(rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-util."0.3.30" { inherit profileName; }).out; - http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.11" { inherit profileName; }).out; - http_body = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http-body."0.4.6" { inherit profileName; }).out; - mime = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".mime."0.3.17" { inherit profileName; }).out; - tower_layer = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tower-layer."0.3.2" { inherit profileName; }).out; - tower_service = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tower-service."0.3.2" { inherit profileName; }).out; - }; - buildDependencies = { - rustversion = (buildRustPackages."registry+https://github.com/rust-lang/crates.io-index".rustversion."1.0.14" { profileName = "__noProfile"; }).out; - }; - }); - - "registry+https://github.com/rust-lang/crates.io-index".backtrace."0.3.69" = overridableMkRustCrate (profileName: rec { + "registry+https://github.com/rust-lang/crates.io-index".backtrace."0.3.59" = overridableMkRustCrate (profileName: rec { name = "backtrace"; - version = "0.3.69"; + version = "0.3.59"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "2089b7e3f35b9dd2d0ed921ead4f6d318c27680d4a5bd167b3ee120edb105837"; }; + src = fetchCratesIo { inherit name version; sha256 = "4717cfcbfaa661a0fd48f8453951837ae7e8f81e481fbb136e3202d72805a744"; }; features = builtins.concatLists [ [ "default" ] [ "std" ] ]; dependencies = { - ${ if !(hostPlatform.isWindows && hostPlatform.parsed.abi.name == "msvc" && !(hostPlatform.parsed.vendor.name == "uwp")) then "addr2line" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".addr2line."0.21.0" { inherit profileName; }).out; + addr2line = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".addr2line."0.15.2" { inherit profileName; }).out; cfg_if = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".cfg-if."1.0.0" { inherit profileName; }).out; - ${ if !(hostPlatform.isWindows && hostPlatform.parsed.abi.name == "msvc" && !(hostPlatform.parsed.vendor.name == "uwp")) then "libc" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".libc."0.2.152" { inherit profileName; }).out; - ${ if !(hostPlatform.isWindows && hostPlatform.parsed.abi.name == "msvc" && !(hostPlatform.parsed.vendor.name == "uwp")) then "miniz_oxide" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".miniz_oxide."0.7.1" { inherit profileName; }).out; - ${ if !(hostPlatform.isWindows && hostPlatform.parsed.abi.name == "msvc" && !(hostPlatform.parsed.vendor.name == "uwp")) then "object" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".object."0.32.2" { inherit profileName; }).out; + libc = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".libc."0.2.152" { inherit profileName; }).out; + miniz_oxide = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".miniz_oxide."0.4.4" { inherit profileName; }).out; + object = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".object."0.24.0" { inherit profileName; }).out; rustc_demangle = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rustc-demangle."0.1.23" { inherit profileName; 
}).out; }; buildDependencies = { @@ -1188,6 +1273,18 @@ in ]; }); + "registry+https://github.com/rust-lang/crates.io-index".base64."0.22.1" = overridableMkRustCrate (profileName: rec { + name = "base64"; + version = "0.22.1"; + registry = "registry+https://github.com/rust-lang/crates.io-index"; + src = fetchCratesIo { inherit name version; sha256 = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6"; }; + features = builtins.concatLists [ + [ "alloc" ] + [ "default" ] + [ "std" ] + ]; + }); + "registry+https://github.com/rust-lang/crates.io-index".base64-simd."0.8.0" = overridableMkRustCrate (profileName: rec { name = "base64-simd"; version = "0.8.0"; @@ -1292,7 +1389,7 @@ in registry = "registry+https://github.com/rust-lang/crates.io-index"; src = fetchCratesIo { inherit name version; sha256 = "6a37913e8dc4ddcc604f0c6d3bf2887c995153af3611de9e23c352b44c1b9118"; }; dependencies = { - async_channel = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".async-channel."2.1.1" { inherit profileName; }).out; + async_channel = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".async-channel."2.2.0" { inherit profileName; }).out; async_lock = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".async-lock."3.3.0" { inherit profileName; }).out; async_task = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".async-task."4.7.0" { inherit profileName; }).out; fastrand = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".fastrand."2.0.1" { inherit profileName; }).out; @@ -1376,7 +1473,7 @@ in ]; dependencies = { bytes = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".bytes."1.5.0" { inherit profileName; }).out; - either = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".either."1.9.0" { inherit profileName; }).out; + either = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".either."1.10.0" { inherit profileName; }).out; }; }); @@ -1402,11 +1499,11 @@ in src = fetchCratesIo { inherit name version; sha256 = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"; }; }); - "registry+https://github.com/rust-lang/crates.io-index".chrono."0.4.31" = overridableMkRustCrate (profileName: rec { + "registry+https://github.com/rust-lang/crates.io-index".chrono."0.4.38" = overridableMkRustCrate (profileName: rec { name = "chrono"; - version = "0.4.31"; + version = "0.4.38"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "7f2c685bad3eb3d45a01354cedb7d5faa66194d1d58ba6e267a8de788f79db38"; }; + src = fetchCratesIo { inherit name version; sha256 = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401"; }; features = builtins.concatLists [ [ "alloc" ] [ "android-tzdata" ] @@ -1414,6 +1511,7 @@ in [ "default" ] [ "iana-time-zone" ] [ "js-sys" ] + [ "now" ] [ "oldtime" ] [ "std" ] [ "wasm-bindgen" ] @@ -1423,11 +1521,11 @@ in ]; dependencies = { ${ if hostPlatform.parsed.kernel.name == "android" then "android_tzdata" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".android-tzdata."0.1.1" { inherit profileName; }).out; - ${ if hostPlatform.isUnix then "iana_time_zone" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".iana-time-zone."0.1.59" { inherit profileName; }).out; + ${ if hostPlatform.isUnix then "iana_time_zone" else null } = 
(rustPackages."registry+https://github.com/rust-lang/crates.io-index".iana-time-zone."0.1.60" { inherit profileName; }).out; ${ if hostPlatform.parsed.cpu.name == "wasm32" && !(hostPlatform.parsed.kernel.name == "emscripten" || hostPlatform.parsed.kernel.name == "wasi") then "js_sys" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".js-sys."0.3.67" { inherit profileName; }).out; num_traits = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".num-traits."0.2.17" { inherit profileName; }).out; ${ if hostPlatform.parsed.cpu.name == "wasm32" && !(hostPlatform.parsed.kernel.name == "emscripten" || hostPlatform.parsed.kernel.name == "wasi") then "wasm_bindgen" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".wasm-bindgen."0.2.90" { inherit profileName; }).out; - ${ if hostPlatform.isWindows then "windows_targets" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".windows-targets."0.48.5" { inherit profileName; }).out; + ${ if hostPlatform.isWindows then "windows_targets" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".windows-targets."0.52.0" { inherit profileName; }).out; }; }); @@ -1458,7 +1556,7 @@ in once_cell = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".once_cell."1.19.0" { inherit profileName; }).out; strsim = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".strsim."0.10.0" { inherit profileName; }).out; termcolor = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".termcolor."1.4.1" { inherit profileName; }).out; - textwrap = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".textwrap."0.16.0" { inherit profileName; }).out; + textwrap = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".textwrap."0.16.1" { inherit profileName; }).out; }; }); @@ -1503,52 +1601,6 @@ in }; }); - "registry+https://github.com/rust-lang/crates.io-index".console-api."0.6.0" = overridableMkRustCrate (profileName: rec { - name = "console-api"; - version = "0.6.0"; - registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "fd326812b3fd01da5bb1af7d340d0d555fd3d4b641e7f1dfcf5962a902952787"; }; - features = builtins.concatLists [ - [ "transport" ] - ]; - dependencies = { - futures_core = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-core."0.3.30" { inherit profileName; }).out; - prost = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".prost."0.12.3" { inherit profileName; }).out; - prost_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".prost-types."0.12.3" { inherit profileName; }).out; - tonic = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tonic."0.10.2" { inherit profileName; }).out; - tracing_core = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing-core."0.1.32" { inherit profileName; }).out; - }; - }); - - "registry+https://github.com/rust-lang/crates.io-index".console-subscriber."0.2.0" = overridableMkRustCrate (profileName: rec { - name = "console-subscriber"; - version = "0.2.0"; - registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "7481d4c57092cd1c19dd541b92bdce883de840df30aa5d03fd48a3935c01842e"; }; - features = builtins.concatLists [ - [ "default" ] - [ "env-filter" ] - ]; - 
dependencies = { - console_api = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".console-api."0.6.0" { inherit profileName; }).out; - crossbeam_channel = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".crossbeam-channel."0.5.11" { inherit profileName; }).out; - crossbeam_utils = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".crossbeam-utils."0.8.19" { inherit profileName; }).out; - futures_task = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-task."0.3.30" { inherit profileName; }).out; - hdrhistogram = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hdrhistogram."7.5.4" { inherit profileName; }).out; - humantime = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".humantime."2.1.0" { inherit profileName; }).out; - prost_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".prost-types."0.12.3" { inherit profileName; }).out; - serde = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".serde."1.0.195" { inherit profileName; }).out; - serde_json = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".serde_json."1.0.111" { inherit profileName; }).out; - thread_local = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".thread_local."1.1.7" { inherit profileName; }).out; - tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.35.1" { inherit profileName; }).out; - tokio_stream = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio-stream."0.1.14" { inherit profileName; }).out; - tonic = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tonic."0.10.2" { inherit profileName; }).out; - tracing = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing."0.1.40" { inherit profileName; }).out; - tracing_core = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing-core."0.1.32" { inherit profileName; }).out; - tracing_subscriber = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing-subscriber."0.3.18" { inherit profileName; }).out; - }; - }); - "registry+https://github.com/rust-lang/crates.io-index".const-oid."0.9.6" = overridableMkRustCrate (profileName: rec { name = "const-oid"; version = "0.9.6"; @@ -1602,11 +1654,11 @@ in }; }); - "registry+https://github.com/rust-lang/crates.io-index".crc32fast."1.3.2" = overridableMkRustCrate (profileName: rec { + "registry+https://github.com/rust-lang/crates.io-index".crc32fast."1.4.0" = overridableMkRustCrate (profileName: rec { name = "crc32fast"; - version = "1.3.2"; + version = "1.4.0"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d"; }; + src = fetchCratesIo { inherit name version; sha256 = "b3855a8a784b474f333699ef2bbca9db2c4a1f6d9088a90a2d25b1eb53111eaa"; }; features = builtins.concatLists [ [ "default" ] [ "std" ] @@ -1616,20 +1668,6 @@ in }; }); - "registry+https://github.com/rust-lang/crates.io-index".crossbeam-channel."0.5.11" = overridableMkRustCrate (profileName: rec { - name = "crossbeam-channel"; - version = "0.5.11"; - registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "176dc175b78f56c0f321911d9c8eb2b77a78a4860b9c19db83835fea1a46649b"; }; - features = builtins.concatLists [ - [ 
"default" ] - [ "std" ] - ]; - dependencies = { - crossbeam_utils = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".crossbeam-utils."0.8.19" { inherit profileName; }).out; - }; - }); - "registry+https://github.com/rust-lang/crates.io-index".crossbeam-utils."0.8.19" = overridableMkRustCrate (profileName: rec { name = "crossbeam-utils"; version = "0.8.19"; @@ -1754,6 +1792,18 @@ in }; }); + "registry+https://github.com/rust-lang/crates.io-index".derive_arbitrary."1.3.2" = overridableMkRustCrate (profileName: rec { + name = "derive_arbitrary"; + version = "1.3.2"; + registry = "registry+https://github.com/rust-lang/crates.io-index"; + src = fetchCratesIo { inherit name version; sha256 = "67e77553c4162a157adbf834ebae5b415acbecbeafc7a74b0e886657506a7611"; }; + dependencies = { + proc_macro2 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".proc-macro2."1.0.76" { inherit profileName; }).out; + quote = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".quote."1.0.35" { inherit profileName; }).out; + syn = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".syn."2.0.48" { inherit profileName; }).out; + }; + }); + "registry+https://github.com/rust-lang/crates.io-index".derive_utils."0.11.2" = overridableMkRustCrate (profileName: rec { name = "derive_utils"; version = "0.11.2"; @@ -1849,14 +1899,11 @@ in }; }); - "registry+https://github.com/rust-lang/crates.io-index".either."1.9.0" = overridableMkRustCrate (profileName: rec { + "registry+https://github.com/rust-lang/crates.io-index".either."1.10.0" = overridableMkRustCrate (profileName: rec { name = "either"; - version = "1.9.0"; + version = "1.10.0"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07"; }; - features = builtins.concatLists [ - [ "use_std" ] - ]; + src = fetchCratesIo { inherit name version; sha256 = "11157ac094ffbdde99aa67b23417ebdd801842852b500e395a45a9c0aac03e4a"; }; }); "registry+https://github.com/rust-lang/crates.io-index".elliptic-curve."0.12.3" = overridableMkRustCrate (profileName: rec { @@ -1898,7 +1945,7 @@ in src = fetchCratesIo { inherit name version; sha256 = "d4499124d87abce26a57ef96ece800fa8babc38fbedd81c607c340ae83d46d2e"; }; dependencies = { base64 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".base64."0.21.7" { inherit profileName; }).out; - chrono = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".chrono."0.4.31" { inherit profileName; }).out; + chrono = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".chrono."0.4.38" { inherit profileName; }).out; encoding_rs = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".encoding_rs."0.8.33" { inherit profileName; }).out; nom = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".nom."7.1.3" { inherit profileName; }).out; }; @@ -1979,6 +2026,22 @@ in }; }); + "registry+https://github.com/rust-lang/crates.io-index".event-listener."5.2.0" = overridableMkRustCrate (profileName: rec { + name = "event-listener"; + version = "5.2.0"; + registry = "registry+https://github.com/rust-lang/crates.io-index"; + src = fetchCratesIo { inherit name version; sha256 = "2b5fb89194fa3cad959b833185b3063ba881dbfc7030680b314250779fb4cc91"; }; + features = builtins.concatLists [ + [ "parking" ] + [ "std" ] + ]; + dependencies = { + concurrent_queue = 
(rustPackages."registry+https://github.com/rust-lang/crates.io-index".concurrent-queue."2.4.0" { inherit profileName; }).out; + parking = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".parking."2.2.0" { inherit profileName; }).out; + pin_project_lite = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project-lite."0.2.13" { inherit profileName; }).out; + }; + }); + "registry+https://github.com/rust-lang/crates.io-index".event-listener-strategy."0.4.0" = overridableMkRustCrate (profileName: rec { name = "event-listener-strategy"; version = "0.4.0"; @@ -1993,6 +2056,20 @@ in }; }); + "registry+https://github.com/rust-lang/crates.io-index".event-listener-strategy."0.5.0" = overridableMkRustCrate (profileName: rec { + name = "event-listener-strategy"; + version = "0.5.0"; + registry = "registry+https://github.com/rust-lang/crates.io-index"; + src = fetchCratesIo { inherit name version; sha256 = "feedafcaa9b749175d5ac357452a9d41ea2911da598fde46ce1fe02c37751291"; }; + features = builtins.concatLists [ + [ "std" ] + ]; + dependencies = { + event_listener = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".event-listener."5.2.0" { inherit profileName; }).out; + pin_project_lite = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project-lite."0.2.13" { inherit profileName; }).out; + }; + }); + "registry+https://github.com/rust-lang/crates.io-index".fastrand."1.9.0" = overridableMkRustCrate (profileName: rec { name = "fastrand"; version = "1.9.0"; @@ -2026,23 +2103,6 @@ in }; }); - "registry+https://github.com/rust-lang/crates.io-index".flate2."1.0.28" = overridableMkRustCrate (profileName: rec { - name = "flate2"; - version = "1.0.28"; - registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "46303f565772937ffe1d394a4fac6f411c6013172fadde9dcdb1e147a086940e"; }; - features = builtins.concatLists [ - [ "any_impl" ] - [ "default" ] - [ "miniz_oxide" ] - [ "rust_backend" ] - ]; - dependencies = { - crc32fast = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".crc32fast."1.3.2" { inherit profileName; }).out; - miniz_oxide = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".miniz_oxide."0.7.1" { inherit profileName; }).out; - }; - }); - "registry+https://github.com/rust-lang/crates.io-index".fnv."1.0.7" = overridableMkRustCrate (profileName: rec { name = "fnv"; version = "1.0.7"; @@ -2054,6 +2114,23 @@ in ]; }); + "registry+https://github.com/rust-lang/crates.io-index".foreign-types."0.3.2" = overridableMkRustCrate (profileName: rec { + name = "foreign-types"; + version = "0.3.2"; + registry = "registry+https://github.com/rust-lang/crates.io-index"; + src = fetchCratesIo { inherit name version; sha256 = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1"; }; + dependencies = { + foreign_types_shared = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".foreign-types-shared."0.1.1" { inherit profileName; }).out; + }; + }); + + "registry+https://github.com/rust-lang/crates.io-index".foreign-types-shared."0.1.1" = overridableMkRustCrate (profileName: rec { + name = "foreign-types-shared"; + version = "0.1.1"; + registry = "registry+https://github.com/rust-lang/crates.io-index"; + src = fetchCratesIo { inherit name version; sha256 = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b"; }; + }); + 
"registry+https://github.com/rust-lang/crates.io-index".form_urlencoded."1.2.1" = overridableMkRustCrate (profileName: rec { name = "form_urlencoded"; version = "1.2.1"; @@ -2176,7 +2253,7 @@ in fastrand = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".fastrand."1.9.0" { inherit profileName; }).out; futures_core = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-core."0.3.30" { inherit profileName; }).out; futures_io = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-io."0.3.30" { inherit profileName; }).out; - memchr = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".memchr."2.7.1" { inherit profileName; }).out; + memchr = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".memchr."2.3.4" { inherit profileName; }).out; parking = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".parking."2.2.0" { inherit profileName; }).out; pin_project_lite = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project-lite."0.2.13" { inherit profileName; }).out; waker_fn = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".waker-fn."1.1.1" { inherit profileName; }).out; @@ -2270,7 +2347,7 @@ in futures_macro = (buildRustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-macro."0.3.30" { profileName = "__noProfile"; }).out; futures_sink = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-sink."0.3.30" { inherit profileName; }).out; futures_task = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-task."0.3.30" { inherit profileName; }).out; - memchr = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".memchr."2.7.1" { inherit profileName; }).out; + memchr = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".memchr."2.3.4" { inherit profileName; }).out; pin_project_lite = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project-lite."0.2.13" { inherit profileName; }).out; pin_utils = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-utils."0.1.0" { inherit profileName; }).out; slab = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".slab."0.4.9" { inherit profileName; }).out; @@ -2299,23 +2376,27 @@ in registry = "registry+https://github.com/rust-lang/crates.io-index"; src = fetchCratesIo { inherit name version; sha256 = "190092ea657667030ac6a35e305e62fc4dd69fd98ac98631e5d3a2b1575a12b5"; }; features = builtins.concatLists [ + [ "js" ] + [ "js-sys" ] [ "std" ] + [ "wasm-bindgen" ] ]; dependencies = { cfg_if = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".cfg-if."1.0.0" { inherit profileName; }).out; + ${ if (hostPlatform.parsed.cpu.name == "wasm32" || hostPlatform.parsed.cpu.name == "wasm64") && hostPlatform.parsed.kernel.name == "unknown" then "js_sys" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".js-sys."0.3.67" { inherit profileName; }).out; ${ if hostPlatform.isUnix then "libc" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".libc."0.2.152" { inherit profileName; }).out; ${ if hostPlatform.parsed.kernel.name == "wasi" then "wasi" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".wasi."0.11.0+wasi-snapshot-preview1" { inherit profileName; }).out; + ${ if (hostPlatform.parsed.cpu.name == "wasm32" || 
hostPlatform.parsed.cpu.name == "wasm64") && hostPlatform.parsed.kernel.name == "unknown" then "wasm_bindgen" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".wasm-bindgen."0.2.90" { inherit profileName; }).out; }; }); - "registry+https://github.com/rust-lang/crates.io-index".gimli."0.28.1" = overridableMkRustCrate (profileName: rec { + "registry+https://github.com/rust-lang/crates.io-index".gimli."0.24.0" = overridableMkRustCrate (profileName: rec { name = "gimli"; - version = "0.28.1"; + version = "0.24.0"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253"; }; + src = fetchCratesIo { inherit name version; sha256 = "0e4075386626662786ddb0ec9081e7c7eeb1ba31951f447ca780ef9f5d568189"; }; features = builtins.concatLists [ [ "read" ] - [ "read-core" ] ]; }); @@ -2361,10 +2442,10 @@ in futures_core = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-core."0.3.30" { inherit profileName; }).out; futures_sink = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-sink."0.3.30" { inherit profileName; }).out; futures_util = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-util."0.3.30" { inherit profileName; }).out; - http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.11" { inherit profileName; }).out; - indexmap = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".indexmap."2.1.0" { inherit profileName; }).out; + http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.12" { inherit profileName; }).out; + indexmap = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".indexmap."2.2.5" { inherit profileName; }).out; slab = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".slab."0.4.9" { inherit profileName; }).out; - tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.35.1" { inherit profileName; }).out; + tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.36.0" { inherit profileName; }).out; tokio_util = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio-util."0.7.10" { inherit profileName; }).out; tracing = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing."0.1.40" { inherit profileName; }).out; }; @@ -2381,10 +2462,10 @@ in futures_core = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-core."0.3.30" { inherit profileName; }).out; futures_sink = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-sink."0.3.30" { inherit profileName; }).out; futures_util = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-util."0.3.30" { inherit profileName; }).out; - http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."1.0.0" { inherit profileName; }).out; - indexmap = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".indexmap."2.1.0" { inherit profileName; }).out; + http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."1.1.0" { inherit profileName; }).out; + indexmap = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".indexmap."2.2.5" { inherit profileName; }).out; slab = 
(rustPackages."registry+https://github.com/rust-lang/crates.io-index".slab."0.4.9" { inherit profileName; }).out; - tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.35.1" { inherit profileName; }).out; + tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.36.0" { inherit profileName; }).out; tokio_util = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio-util."0.7.10" { inherit profileName; }).out; tracing = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing."0.1.40" { inherit profileName; }).out; }; @@ -2410,26 +2491,6 @@ in ]; }); - "registry+https://github.com/rust-lang/crates.io-index".hdrhistogram."7.5.4" = overridableMkRustCrate (profileName: rec { - name = "hdrhistogram"; - version = "7.5.4"; - registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "765c9198f173dd59ce26ff9f95ef0aafd0a0fe01fb9d72841bc5066a4c06511d"; }; - features = builtins.concatLists [ - [ "base64" ] - [ "flate2" ] - [ "nom" ] - [ "serialization" ] - ]; - dependencies = { - base64 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".base64."0.21.7" { inherit profileName; }).out; - byteorder = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".byteorder."1.5.0" { inherit profileName; }).out; - flate2 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".flate2."1.0.28" { inherit profileName; }).out; - nom = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".nom."7.1.3" { inherit profileName; }).out; - num_traits = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".num-traits."0.2.17" { inherit profileName; }).out; - }; - }); - "registry+https://github.com/rust-lang/crates.io-index".heck."0.4.1" = overridableMkRustCrate (profileName: rec { name = "heck"; version = "0.4.1"; @@ -2488,11 +2549,11 @@ in }; }); - "registry+https://github.com/rust-lang/crates.io-index".http."0.2.11" = overridableMkRustCrate (profileName: rec { + "registry+https://github.com/rust-lang/crates.io-index".http."0.2.12" = overridableMkRustCrate (profileName: rec { name = "http"; - version = "0.2.11"; + version = "0.2.12"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "8947b1a6fad4393052c7ba1f4cd97bed3e953a95c79c92ad9b051a04611d9fbb"; }; + src = fetchCratesIo { inherit name version; sha256 = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1"; }; dependencies = { bytes = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".bytes."1.5.0" { inherit profileName; }).out; fnv = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".fnv."1.0.7" { inherit profileName; }).out; @@ -2500,11 +2561,11 @@ in }; }); - "registry+https://github.com/rust-lang/crates.io-index".http."1.0.0" = overridableMkRustCrate (profileName: rec { + "registry+https://github.com/rust-lang/crates.io-index".http."1.1.0" = overridableMkRustCrate (profileName: rec { name = "http"; - version = "1.0.0"; + version = "1.1.0"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "b32afd38673a8016f7c9ae69e5af41a58f81b1d31689040f2f1959594ce194ea"; }; + src = fetchCratesIo { inherit name version; sha256 = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258"; }; features = builtins.concatLists [ [ 
"default" ] [ "std" ] @@ -2523,7 +2584,7 @@ in src = fetchCratesIo { inherit name version; sha256 = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2"; }; dependencies = { bytes = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".bytes."1.5.0" { inherit profileName; }).out; - http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.11" { inherit profileName; }).out; + http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.12" { inherit profileName; }).out; pin_project_lite = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project-lite."0.2.13" { inherit profileName; }).out; }; }); @@ -2535,19 +2596,19 @@ in src = fetchCratesIo { inherit name version; sha256 = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643"; }; dependencies = { bytes = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".bytes."1.5.0" { inherit profileName; }).out; - http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."1.0.0" { inherit profileName; }).out; + http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."1.1.0" { inherit profileName; }).out; }; }); - "registry+https://github.com/rust-lang/crates.io-index".http-body-util."0.1.0" = overridableMkRustCrate (profileName: rec { + "registry+https://github.com/rust-lang/crates.io-index".http-body-util."0.1.1" = overridableMkRustCrate (profileName: rec { name = "http-body-util"; - version = "0.1.0"; + version = "0.1.1"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "41cb79eb393015dadd30fc252023adb0b2400a0caee0fa2a077e6e21a551e840"; }; + src = fetchCratesIo { inherit name version; sha256 = "0475f8b2ac86659c21b64320d5d653f9efe42acd2a4e560073ec61a155a34f1d"; }; dependencies = { bytes = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".bytes."1.5.0" { inherit profileName; }).out; - futures_util = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-util."0.3.30" { inherit profileName; }).out; - http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."1.0.0" { inherit profileName; }).out; + futures_core = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-core."0.3.30" { inherit profileName; }).out; + http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."1.1.0" { inherit profileName; }).out; http_body = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http-body."1.0.0" { inherit profileName; }).out; pin_project_lite = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project-lite."0.2.13" { inherit profileName; }).out; }; @@ -2571,13 +2632,6 @@ in src = fetchCratesIo { inherit name version; sha256 = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9"; }; }); - "registry+https://github.com/rust-lang/crates.io-index".humantime."2.1.0" = overridableMkRustCrate (profileName: rec { - name = "humantime"; - version = "2.1.0"; - registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"; }; - }); - "registry+https://github.com/rust-lang/crates.io-index".hyper."0.14.28" = overridableMkRustCrate (profileName: rec { name = "hyper"; version = "0.14.28"; @@ -2585,8 +2639,6 @@ in src = 
fetchCratesIo { inherit name version; sha256 = "bf96e135eb83a2a8ddf766e426a841d8ddd7449d5f00d34ea02b41d2f19eef80"; }; features = builtins.concatLists [ [ "client" ] - [ "default" ] - [ "full" ] [ "h2" ] [ "http1" ] [ "http2" ] @@ -2602,14 +2654,14 @@ in futures_core = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-core."0.3.30" { inherit profileName; }).out; futures_util = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-util."0.3.30" { inherit profileName; }).out; h2 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".h2."0.3.24" { inherit profileName; }).out; - http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.11" { inherit profileName; }).out; + http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.12" { inherit profileName; }).out; http_body = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http-body."0.4.6" { inherit profileName; }).out; httparse = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".httparse."1.8.0" { inherit profileName; }).out; httpdate = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".httpdate."1.0.3" { inherit profileName; }).out; itoa = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".itoa."1.0.10" { inherit profileName; }).out; pin_project_lite = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project-lite."0.2.13" { inherit profileName; }).out; socket2 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".socket2."0.5.5" { inherit profileName; }).out; - tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.35.1" { inherit profileName; }).out; + tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.36.0" { inherit profileName; }).out; tower_service = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tower-service."0.3.2" { inherit profileName; }).out; tracing = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing."0.1.40" { inherit profileName; }).out; want = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".want."0.3.1" { inherit profileName; }).out; @@ -2633,14 +2685,14 @@ in futures_channel = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-channel."0.3.30" { inherit profileName; }).out; futures_util = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-util."0.3.30" { inherit profileName; }).out; h2 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".h2."0.4.2" { inherit profileName; }).out; - http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."1.0.0" { inherit profileName; }).out; + http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."1.1.0" { inherit profileName; }).out; http_body = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http-body."1.0.0" { inherit profileName; }).out; httparse = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".httparse."1.8.0" { inherit profileName; }).out; httpdate = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".httpdate."1.0.3" { inherit profileName; }).out; itoa = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".itoa."1.0.10" { inherit profileName; }).out; pin_project_lite = 
(rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project-lite."0.2.13" { inherit profileName; }).out; smallvec = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".smallvec."1.13.1" { inherit profileName; }).out; - tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.35.1" { inherit profileName; }).out; + tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.36.0" { inherit profileName; }).out; want = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".want."0.3.1" { inherit profileName; }).out; }; }); @@ -2664,12 +2716,12 @@ in ]; dependencies = { futures_util = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-util."0.3.30" { inherit profileName; }).out; - http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.11" { inherit profileName; }).out; + http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.12" { inherit profileName; }).out; hyper = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyper."0.14.28" { inherit profileName; }).out; log = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".log."0.4.20" { inherit profileName; }).out; rustls = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rustls."0.21.10" { inherit profileName; }).out; rustls_native_certs = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rustls-native-certs."0.6.3" { inherit profileName; }).out; - tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.35.1" { inherit profileName; }).out; + tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.36.0" { inherit profileName; }).out; tokio_rustls = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio-rustls."0.24.1" { inherit profileName; }).out; }; }); @@ -2692,29 +2744,33 @@ in ]; dependencies = { futures_util = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-util."0.3.30" { inherit profileName; }).out; - http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."1.0.0" { inherit profileName; }).out; + http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."1.1.0" { inherit profileName; }).out; hyper = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyper."1.2.0" { inherit profileName; }).out; hyper_util = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyper-util."0.1.3" { inherit profileName; }).out; log = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".log."0.4.20" { inherit profileName; }).out; rustls = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rustls."0.22.2" { inherit profileName; }).out; rustls_native_certs = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rustls-native-certs."0.7.0" { inherit profileName; }).out; - pki_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rustls-pki-types."1.1.0" { inherit profileName; }).out; - tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.35.1" { inherit profileName; }).out; + pki_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rustls-pki-types."1.3.1" { inherit profileName; }).out; + tokio = 
(rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.36.0" { inherit profileName; }).out; tokio_rustls = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio-rustls."0.25.0" { inherit profileName; }).out; tower_service = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tower-service."0.3.2" { inherit profileName; }).out; }; }); - "registry+https://github.com/rust-lang/crates.io-index".hyper-timeout."0.4.1" = overridableMkRustCrate (profileName: rec { - name = "hyper-timeout"; - version = "0.4.1"; + "registry+https://github.com/rust-lang/crates.io-index".hyper-tls."0.6.0" = overridableMkRustCrate (profileName: rec { + name = "hyper-tls"; + version = "0.6.0"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1"; }; + src = fetchCratesIo { inherit name version; sha256 = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0"; }; dependencies = { - hyper = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyper."0.14.28" { inherit profileName; }).out; - pin_project_lite = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project-lite."0.2.13" { inherit profileName; }).out; - tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.35.1" { inherit profileName; }).out; - tokio_io_timeout = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio-io-timeout."1.2.0" { inherit profileName; }).out; + bytes = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".bytes."1.5.0" { inherit profileName; }).out; + http_body_util = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http-body-util."0.1.1" { inherit profileName; }).out; + hyper = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyper."1.2.0" { inherit profileName; }).out; + hyper_util = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyper-util."0.1.3" { inherit profileName; }).out; + native_tls = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".native-tls."0.2.11" { inherit profileName; }).out; + tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.36.0" { inherit profileName; }).out; + tokio_native_tls = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio-native-tls."0.3.1" { inherit profileName; }).out; + tower_service = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tower-service."0.3.2" { inherit profileName; }).out; }; }); @@ -2739,23 +2795,23 @@ in bytes = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".bytes."1.5.0" { inherit profileName; }).out; futures_channel = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-channel."0.3.30" { inherit profileName; }).out; futures_util = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-util."0.3.30" { inherit profileName; }).out; - http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."1.0.0" { inherit profileName; }).out; + http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."1.1.0" { inherit profileName; }).out; http_body = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http-body."1.0.0" { inherit profileName; }).out; hyper = 
(rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyper."1.2.0" { inherit profileName; }).out; pin_project_lite = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project-lite."0.2.13" { inherit profileName; }).out; socket2 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".socket2."0.5.5" { inherit profileName; }).out; - tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.35.1" { inherit profileName; }).out; + tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.36.0" { inherit profileName; }).out; tower = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tower."0.4.13" { inherit profileName; }).out; tower_service = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tower-service."0.3.2" { inherit profileName; }).out; tracing = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing."0.1.40" { inherit profileName; }).out; }; }); - "registry+https://github.com/rust-lang/crates.io-index".iana-time-zone."0.1.59" = overridableMkRustCrate (profileName: rec { + "registry+https://github.com/rust-lang/crates.io-index".iana-time-zone."0.1.60" = overridableMkRustCrate (profileName: rec { name = "iana-time-zone"; - version = "0.1.59"; + version = "0.1.60"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "b6a67363e2aa4443928ce15e57ebae94fd8949958fd1223c4cfc0cd473ad7539"; }; + src = fetchCratesIo { inherit name version; sha256 = "e7ffbb5a1b541ea2561f8c41c087286cc091e21e556a4f09a8f6cbf17b69b141"; }; features = builtins.concatLists [ [ "fallback" ] ]; @@ -2779,6 +2835,24 @@ in }; }); + "registry+https://github.com/rust-lang/crates.io-index".icalendar."0.16.1" = overridableMkRustCrate (profileName: rec { + name = "icalendar"; + version = "0.16.1"; + registry = "registry+https://github.com/rust-lang/crates.io-index"; + src = fetchCratesIo { inherit name version; sha256 = "cd83e81e8a329918d84e49032f8e596f4f079380942d172724cea3599a80807e"; }; + features = builtins.concatLists [ + [ "default" ] + [ "nom" ] + [ "parser" ] + ]; + dependencies = { + chrono = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".chrono."0.4.38" { inherit profileName; }).out; + iso8601 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".iso8601."0.6.1" { inherit profileName; }).out; + nom = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".nom."7.1.3" { inherit profileName; }).out; + ${ if !(hostPlatform.parsed.cpu.name == "wasm32") || hostPlatform.parsed.cpu.name == "wasm32" then "uuid" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".uuid."1.7.0" { inherit profileName; }).out; + }; + }); + "registry+https://github.com/rust-lang/crates.io-index".idna."0.2.3" = overridableMkRustCrate (profileName: rec { name = "idna"; version = "0.2.3"; @@ -2846,7 +2920,7 @@ in abnf_core = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".abnf-core."0.6.0" { inherit profileName; }).out; base64 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".base64."0.21.7" { inherit profileName; }).out; bounded_static = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".bounded-static."0.5.0" { inherit profileName; }).out; - chrono = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".chrono."0.4.31" { inherit profileName; 
}).out; + chrono = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".chrono."0.4.38" { inherit profileName; }).out; imap_types = (rustPackages."git+https://github.com/superboum/imap-codec".imap-types."2.0.0" { inherit profileName; }).out; log = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".log."0.4.20" { inherit profileName; }).out; nom = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".nom."7.1.3" { inherit profileName; }).out; @@ -2862,7 +2936,7 @@ in url = https://github.com/duesee/imap-flow.git; name = "imap-flow"; version = "0.1.0"; - rev = "68c1da5d1c56dbe543d9736de9683259d1d28191"; + rev = "dce759a8531f317e8d7311fb032b366db6698e38"; ref = "main";}; dependencies = { bounded_static = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".bounded-static."0.5.0" { inherit profileName; }).out; @@ -2870,7 +2944,7 @@ in imap_codec = (rustPackages."git+https://github.com/superboum/imap-codec".imap-codec."2.0.0" { inherit profileName; }).out; imap_types = (rustPackages."git+https://github.com/superboum/imap-codec".imap-types."2.0.0" { inherit profileName; }).out; thiserror = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".thiserror."1.0.56" { inherit profileName; }).out; - tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.35.1" { inherit profileName; }).out; + tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.36.0" { inherit profileName; }).out; tracing = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing."0.1.40" { inherit profileName; }).out; }; }); @@ -2893,7 +2967,7 @@ in dependencies = { base64 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".base64."0.21.7" { inherit profileName; }).out; bounded_static = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".bounded-static."0.5.0" { inherit profileName; }).out; - chrono = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".chrono."0.4.31" { inherit profileName; }).out; + chrono = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".chrono."0.4.38" { inherit profileName; }).out; thiserror = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".thiserror."1.0.56" { inherit profileName; }).out; }; }); @@ -2914,11 +2988,11 @@ in }; }); - "registry+https://github.com/rust-lang/crates.io-index".indexmap."2.1.0" = overridableMkRustCrate (profileName: rec { + "registry+https://github.com/rust-lang/crates.io-index".indexmap."2.2.5" = overridableMkRustCrate (profileName: rec { name = "indexmap"; - version = "2.1.0"; + version = "2.2.5"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f"; }; + src = fetchCratesIo { inherit name version; sha256 = "7b0b929d511467233429c45a44ac1dcaa21ba0f5ba11e4879e6ed28ddb4f9df4"; }; features = builtins.concatLists [ [ "default" ] [ "std" ] @@ -2950,25 +3024,35 @@ in [ "libc" ] [ "windows-sys" ] ]; - dependencies = { - ${ if hostPlatform.parsed.kernel.name == "hermit" then "hermit_abi" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hermit-abi."0.3.4" { inherit profileName; }).out; - ${ if !hostPlatform.isWindows then "libc" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".libc."0.2.152" { inherit profileName; }).out; - ${ 
if hostPlatform.isWindows then "windows_sys" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".windows-sys."0.48.0" { inherit profileName; }).out; - }; + dependencies = { + ${ if hostPlatform.parsed.kernel.name == "hermit" then "hermit_abi" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hermit-abi."0.3.4" { inherit profileName; }).out; + ${ if !hostPlatform.isWindows then "libc" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".libc."0.2.152" { inherit profileName; }).out; + ${ if hostPlatform.isWindows then "windows_sys" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".windows-sys."0.48.0" { inherit profileName; }).out; + }; + }); + + "registry+https://github.com/rust-lang/crates.io-index".ipnet."2.9.0" = overridableMkRustCrate (profileName: rec { + name = "ipnet"; + version = "2.9.0"; + registry = "registry+https://github.com/rust-lang/crates.io-index"; + src = fetchCratesIo { inherit name version; sha256 = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3"; }; + features = builtins.concatLists [ + [ "default" ] + [ "std" ] + ]; }); - "registry+https://github.com/rust-lang/crates.io-index".itertools."0.10.5" = overridableMkRustCrate (profileName: rec { - name = "itertools"; - version = "0.10.5"; + "registry+https://github.com/rust-lang/crates.io-index".iso8601."0.6.1" = overridableMkRustCrate (profileName: rec { + name = "iso8601"; + version = "0.6.1"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473"; }; + src = fetchCratesIo { inherit name version; sha256 = "924e5d73ea28f59011fec52a0d12185d496a9b075d360657aed2a5707f701153"; }; features = builtins.concatLists [ [ "default" ] - [ "use_alloc" ] - [ "use_std" ] + [ "std" ] ]; dependencies = { - either = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".either."1.9.0" { inherit profileName; }).out; + nom = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".nom."7.1.3" { inherit profileName; }).out; }; }); @@ -3010,22 +3094,22 @@ in rev = "8b35a946d9f6b31b26b9783acbfab984316051f4"; ref = "k2v/shared_http_client";}; dependencies = { - aws_sdk_config = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-sdk-config."1.15.0" { inherit profileName; }).out; - aws_sigv4 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-sigv4."1.1.6" { inherit profileName; }).out; + aws_sdk_config = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-sdk-config."1.16.0" { inherit profileName; }).out; + aws_sigv4 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aws-sigv4."1.1.7" { inherit profileName; }).out; base64 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".base64."0.21.7" { inherit profileName; }).out; hex = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hex."0.4.3" { inherit profileName; }).out; - http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."1.0.0" { inherit profileName; }).out; - http_body_util = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http-body-util."0.1.0" { inherit profileName; }).out; + http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."1.1.0" { inherit profileName; }).out; + http_body_util 
= (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http-body-util."0.1.1" { inherit profileName; }).out; hyper = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyper."1.2.0" { inherit profileName; }).out; hyper_rustls = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyper-rustls."0.26.0" { inherit profileName; }).out; hyper_util = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyper-util."0.1.3" { inherit profileName; }).out; log = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".log."0.4.20" { inherit profileName; }).out; percent_encoding = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".percent-encoding."2.3.1" { inherit profileName; }).out; serde = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".serde."1.0.195" { inherit profileName; }).out; - serde_json = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".serde_json."1.0.111" { inherit profileName; }).out; + serde_json = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".serde_json."1.0.114" { inherit profileName; }).out; sha2 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".sha2."0.10.8" { inherit profileName; }).out; thiserror = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".thiserror."1.0.56" { inherit profileName; }).out; - tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.35.1" { inherit profileName; }).out; + tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.36.0" { inherit profileName; }).out; }; }); @@ -3085,7 +3169,7 @@ in rustls = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rustls."0.20.9" { inherit profileName; }).out; rustls_native_certs = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rustls-native-certs."0.6.3" { inherit profileName; }).out; thiserror = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".thiserror."1.0.56" { inherit profileName; }).out; - tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.35.1" { inherit profileName; }).out; + tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.36.0" { inherit profileName; }).out; tokio_rustls = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio-rustls."0.23.4" { inherit profileName; }).out; tokio_stream = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio-stream."0.1.14" { inherit profileName; }).out; tokio_util = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio-util."0.7.10" { inherit profileName; }).out; @@ -3112,7 +3196,7 @@ in arrayvec = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".arrayvec."0.5.2" { inherit profileName; }).out; bitflags = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".bitflags."1.3.2" { inherit profileName; }).out; cfg_if = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".cfg-if."1.0.0" { inherit profileName; }).out; - ryu = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".ryu."1.0.16" { inherit profileName; }).out; + ryu = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".ryu."1.0.17" { inherit profileName; }).out; static_assertions = 
(rustPackages."registry+https://github.com/rust-lang/crates.io-index".static_assertions."1.1.0" { inherit profileName; }).out; }; }); @@ -3129,6 +3213,25 @@ in ]; }); + "registry+https://github.com/rust-lang/crates.io-index".libfuzzer-sys."0.4.7" = overridableMkRustCrate (profileName: rec { + name = "libfuzzer-sys"; + version = "0.4.7"; + registry = "registry+https://github.com/rust-lang/crates.io-index"; + src = fetchCratesIo { inherit name version; sha256 = "a96cfd5557eb82f2b83fed4955246c988d331975a002961b07c81584d107e7f7"; }; + features = builtins.concatLists [ + [ "arbitrary-derive" ] + [ "default" ] + [ "link_libfuzzer" ] + ]; + dependencies = { + arbitrary = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".arbitrary."1.3.2" { inherit profileName; }).out; + once_cell = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".once_cell."1.19.0" { inherit profileName; }).out; + }; + buildDependencies = { + cc = (buildRustPackages."registry+https://github.com/rust-lang/crates.io-index".cc."1.0.83" { profileName = "__noProfile"; }).out; + }; + }); + "registry+https://github.com/rust-lang/crates.io-index".libsodium-sys."0.2.7" = overridableMkRustCrate (profileName: rec { name = "libsodium-sys"; version = "0.2.7"; @@ -3188,17 +3291,7 @@ in [ "value-bag" ] ]; dependencies = { - value_bag = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".value-bag."1.6.0" { inherit profileName; }).out; - }; - }); - - "registry+https://github.com/rust-lang/crates.io-index".matchers."0.1.0" = overridableMkRustCrate (profileName: rec { - name = "matchers"; - version = "0.1.0"; - registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"; }; - dependencies = { - regex_automata = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".regex-automata."0.1.10" { inherit profileName; }).out; + value_bag = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".value-bag."1.7.0" { inherit profileName; }).out; }; }); @@ -3209,16 +3302,6 @@ in src = fetchCratesIo { inherit name version; sha256 = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5"; }; }); - "registry+https://github.com/rust-lang/crates.io-index".matchit."0.7.3" = overridableMkRustCrate (profileName: rec { - name = "matchit"; - version = "0.7.3"; - registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94"; }; - features = builtins.concatLists [ - [ "default" ] - ]; - }); - "registry+https://github.com/rust-lang/crates.io-index".md-5."0.10.6" = overridableMkRustCrate (profileName: rec { name = "md-5"; version = "0.10.6"; @@ -3234,13 +3317,12 @@ in }; }); - "registry+https://github.com/rust-lang/crates.io-index".memchr."2.7.1" = overridableMkRustCrate (profileName: rec { + "registry+https://github.com/rust-lang/crates.io-index".memchr."2.3.4" = overridableMkRustCrate (profileName: rec { name = "memchr"; - version = "2.7.1"; + version = "2.3.4"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149"; }; + src = fetchCratesIo { inherit name version; sha256 = "0ee1c47aaa256ecabcaea351eae4a9b01ef39ed810004e298d2511ed284b1525"; }; features = builtins.concatLists [ - [ 
"alloc" ] [ "default" ] [ "std" ] [ "use_std" ] @@ -3264,17 +3346,17 @@ in ]; }); - "registry+https://github.com/rust-lang/crates.io-index".miniz_oxide."0.7.1" = overridableMkRustCrate (profileName: rec { + "registry+https://github.com/rust-lang/crates.io-index".miniz_oxide."0.4.4" = overridableMkRustCrate (profileName: rec { name = "miniz_oxide"; - version = "0.7.1"; + version = "0.4.4"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7"; }; - features = builtins.concatLists [ - [ "with-alloc" ] - ]; + src = fetchCratesIo { inherit name version; sha256 = "a92518e98c078586bc6c934028adcca4c92a53d6a958196de835170a01d84e4b"; }; dependencies = { adler = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".adler."1.0.2" { inherit profileName; }).out; }; + buildDependencies = { + autocfg = (buildRustPackages."registry+https://github.com/rust-lang/crates.io-index".autocfg."1.1.0" { profileName = "__noProfile"; }).out; + }; }); "registry+https://github.com/rust-lang/crates.io-index".mio."0.8.10" = overridableMkRustCrate (profileName: rec { @@ -3294,6 +3376,25 @@ in }; }); + "registry+https://github.com/rust-lang/crates.io-index".native-tls."0.2.11" = overridableMkRustCrate (profileName: rec { + name = "native-tls"; + version = "0.2.11"; + registry = "registry+https://github.com/rust-lang/crates.io-index"; + src = fetchCratesIo { inherit name version; sha256 = "07226173c32f2926027b63cce4bcd8076c3552846cbe7925f3aaffeac0a3b92e"; }; + dependencies = { + ${ if hostPlatform.parsed.kernel.name == "darwin" || hostPlatform.parsed.kernel.name == "ios" then "lazy_static" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".lazy_static."1.4.0" { inherit profileName; }).out; + ${ if hostPlatform.parsed.kernel.name == "darwin" || hostPlatform.parsed.kernel.name == "ios" then "libc" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".libc."0.2.152" { inherit profileName; }).out; + ${ if !(hostPlatform.parsed.kernel.name == "windows" || hostPlatform.parsed.kernel.name == "darwin" || hostPlatform.parsed.kernel.name == "ios") then "log" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".log."0.4.20" { inherit profileName; }).out; + ${ if !(hostPlatform.parsed.kernel.name == "windows" || hostPlatform.parsed.kernel.name == "darwin" || hostPlatform.parsed.kernel.name == "ios") then "openssl" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".openssl."0.10.64" { inherit profileName; }).out; + ${ if !(hostPlatform.parsed.kernel.name == "windows" || hostPlatform.parsed.kernel.name == "darwin" || hostPlatform.parsed.kernel.name == "ios") then "openssl_probe" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".openssl-probe."0.1.5" { inherit profileName; }).out; + ${ if !(hostPlatform.parsed.kernel.name == "windows" || hostPlatform.parsed.kernel.name == "darwin" || hostPlatform.parsed.kernel.name == "ios") then "openssl_sys" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".openssl-sys."0.9.102" { inherit profileName; }).out; + ${ if hostPlatform.parsed.kernel.name == "windows" then "schannel" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".schannel."0.1.23" { inherit profileName; }).out; + ${ if hostPlatform.parsed.kernel.name == "darwin" || 
hostPlatform.parsed.kernel.name == "ios" then "security_framework" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".security-framework."2.9.2" { inherit profileName; }).out; + ${ if hostPlatform.parsed.kernel.name == "darwin" || hostPlatform.parsed.kernel.name == "ios" then "security_framework_sys" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".security-framework-sys."2.9.1" { inherit profileName; }).out; + ${ if hostPlatform.parsed.kernel.name == "darwin" || hostPlatform.parsed.kernel.name == "ios" then "tempfile" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tempfile."3.10.1" { inherit profileName; }).out; + }; + }); + "registry+https://github.com/rust-lang/crates.io-index".nix."0.27.1" = overridableMkRustCrate (profileName: rec { name = "nix"; version = "0.27.1"; @@ -3322,11 +3423,11 @@ in ]; }); - "registry+https://github.com/rust-lang/crates.io-index".nom."6.1.2" = overridableMkRustCrate (profileName: rec { + "registry+https://github.com/rust-lang/crates.io-index".nom."6.2.2" = overridableMkRustCrate (profileName: rec { name = "nom"; - version = "6.1.2"; + version = "6.2.2"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "e7413f999671bd4745a7b624bd370a569fb6bc574b23c83a3c5ed2e453f3d5e2"; }; + src = fetchCratesIo { inherit name version; sha256 = "c6a7a9657c84d5814c6196b68bb4429df09c18b1573806259fba397ea4ad0d44"; }; features = builtins.concatLists [ [ "alloc" ] [ "bitvec" ] @@ -3340,7 +3441,7 @@ in bitvec = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".bitvec."0.19.6" { inherit profileName; }).out; funty = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".funty."1.1.0" { inherit profileName; }).out; lexical_core = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".lexical-core."0.7.6" { inherit profileName; }).out; - memchr = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".memchr."2.7.1" { inherit profileName; }).out; + memchr = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".memchr."2.3.4" { inherit profileName; }).out; }; buildDependencies = { version_check = (buildRustPackages."registry+https://github.com/rust-lang/crates.io-index".version_check."0.9.4" { profileName = "__noProfile"; }).out; @@ -3358,7 +3459,7 @@ in [ "std" ] ]; dependencies = { - memchr = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".memchr."2.7.1" { inherit profileName; }).out; + memchr = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".memchr."2.3.4" { inherit profileName; }).out; minimal_lexical = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".minimal-lexical."0.2.1" { inherit profileName; }).out; }; }); @@ -3436,11 +3537,11 @@ in }; }); - "registry+https://github.com/rust-lang/crates.io-index".object."0.32.2" = overridableMkRustCrate (profileName: rec { + "registry+https://github.com/rust-lang/crates.io-index".object."0.24.0" = overridableMkRustCrate (profileName: rec { name = "object"; - version = "0.32.2"; + version = "0.24.0"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441"; }; + src = fetchCratesIo { inherit name version; sha256 = "1a5b3dd1c072ee7963717671d1ca129f1048fda25edea6b752bfc71ac8854170"; }; features = 
builtins.concatLists [ [ "archive" ] [ "coff" ] @@ -3450,9 +3551,6 @@ in [ "read_core" ] [ "unaligned" ] ]; - dependencies = { - memchr = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".memchr."2.7.1" { inherit profileName; }).out; - }; }); "registry+https://github.com/rust-lang/crates.io-index".oid-registry."0.4.0" = overridableMkRustCrate (profileName: rec { @@ -3491,6 +3589,37 @@ in ]; }); + "registry+https://github.com/rust-lang/crates.io-index".openssl."0.10.64" = overridableMkRustCrate (profileName: rec { + name = "openssl"; + version = "0.10.64"; + registry = "registry+https://github.com/rust-lang/crates.io-index"; + src = fetchCratesIo { inherit name version; sha256 = "95a0481286a310808298130d22dd1fef0fa571e05a8f44ec801801e84b216b1f"; }; + features = builtins.concatLists [ + [ "default" ] + ]; + dependencies = { + bitflags = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".bitflags."2.4.2" { inherit profileName; }).out; + cfg_if = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".cfg-if."1.0.0" { inherit profileName; }).out; + foreign_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".foreign-types."0.3.2" { inherit profileName; }).out; + libc = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".libc."0.2.152" { inherit profileName; }).out; + once_cell = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".once_cell."1.19.0" { inherit profileName; }).out; + openssl_macros = (buildRustPackages."registry+https://github.com/rust-lang/crates.io-index".openssl-macros."0.1.1" { profileName = "__noProfile"; }).out; + ffi = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".openssl-sys."0.9.102" { inherit profileName; }).out; + }; + }); + + "registry+https://github.com/rust-lang/crates.io-index".openssl-macros."0.1.1" = overridableMkRustCrate (profileName: rec { + name = "openssl-macros"; + version = "0.1.1"; + registry = "registry+https://github.com/rust-lang/crates.io-index"; + src = fetchCratesIo { inherit name version; sha256 = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c"; }; + dependencies = { + proc_macro2 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".proc-macro2."1.0.76" { inherit profileName; }).out; + quote = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".quote."1.0.35" { inherit profileName; }).out; + syn = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".syn."2.0.48" { inherit profileName; }).out; + }; + }); + "registry+https://github.com/rust-lang/crates.io-index".openssl-probe."0.1.5" = overridableMkRustCrate (profileName: rec { name = "openssl-probe"; version = "0.1.5"; @@ -3498,6 +3627,21 @@ in src = fetchCratesIo { inherit name version; sha256 = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf"; }; }); + "registry+https://github.com/rust-lang/crates.io-index".openssl-sys."0.9.102" = overridableMkRustCrate (profileName: rec { + name = "openssl-sys"; + version = "0.9.102"; + registry = "registry+https://github.com/rust-lang/crates.io-index"; + src = fetchCratesIo { inherit name version; sha256 = "c597637d56fbc83893a35eb0dd04b2b8e7a50c91e64e9493e398b5df4fb45fa2"; }; + dependencies = { + libc = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".libc."0.2.152" { inherit profileName; }).out; + }; + buildDependencies = { + cc = (buildRustPackages."registry+https://github.com/rust-lang/crates.io-index".cc."1.0.83" 
{ profileName = "__noProfile"; }).out; + pkg_config = (buildRustPackages."registry+https://github.com/rust-lang/crates.io-index".pkg-config."0.3.29" { profileName = "__noProfile"; }).out; + vcpkg = (buildRustPackages."registry+https://github.com/rust-lang/crates.io-index".vcpkg."0.2.15" { profileName = "__noProfile"; }).out; + }; + }); + "registry+https://github.com/rust-lang/crates.io-index".os_str_bytes."6.6.1" = overridableMkRustCrate (profileName: rec { name = "os_str_bytes"; version = "6.6.1"; @@ -3588,21 +3732,21 @@ in ]; }); - "registry+https://github.com/rust-lang/crates.io-index".pin-project."1.1.3" = overridableMkRustCrate (profileName: rec { + "registry+https://github.com/rust-lang/crates.io-index".pin-project."1.1.5" = overridableMkRustCrate (profileName: rec { name = "pin-project"; - version = "1.1.3"; + version = "1.1.5"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "fda4ed1c6c173e3fc7a83629421152e01d7b1f9b7f65fb301e490e8cfc656422"; }; + src = fetchCratesIo { inherit name version; sha256 = "b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3"; }; dependencies = { - pin_project_internal = (buildRustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project-internal."1.1.3" { profileName = "__noProfile"; }).out; + pin_project_internal = (buildRustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project-internal."1.1.5" { profileName = "__noProfile"; }).out; }; }); - "registry+https://github.com/rust-lang/crates.io-index".pin-project-internal."1.1.3" = overridableMkRustCrate (profileName: rec { + "registry+https://github.com/rust-lang/crates.io-index".pin-project-internal."1.1.5" = overridableMkRustCrate (profileName: rec { name = "pin-project-internal"; - version = "1.1.3"; + version = "1.1.5"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405"; }; + src = fetchCratesIo { inherit name version; sha256 = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965"; }; dependencies = { proc_macro2 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".proc-macro2."1.0.76" { inherit profileName; }).out; quote = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".quote."1.0.35" { inherit profileName; }).out; @@ -3692,16 +3836,16 @@ in }; }); - "registry+https://github.com/rust-lang/crates.io-index".polling."3.3.2" = overridableMkRustCrate (profileName: rec { + "registry+https://github.com/rust-lang/crates.io-index".polling."3.5.0" = overridableMkRustCrate (profileName: rec { name = "polling"; - version = "3.3.2"; + version = "3.5.0"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "545c980a3880efd47b2e262f6a4bb6daad6555cf3367aa9c4e52895f69537a41"; }; + src = fetchCratesIo { inherit name version; sha256 = "24f040dee2588b4963afb4e420540439d126f73fdacf4a9c486a96d840bac3c9"; }; dependencies = { cfg_if = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".cfg-if."1.0.0" { inherit profileName; }).out; ${ if hostPlatform.isWindows then "concurrent_queue" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".concurrent-queue."2.4.0" { inherit profileName; }).out; ${ if hostPlatform.isWindows then "pin_project_lite" else null } = 
(rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project-lite."0.2.13" { inherit profileName; }).out; - ${ if hostPlatform.isUnix || hostPlatform.parsed.kernel.name == "fuchsia" || hostPlatform.parsed.kernel.name == "vxworks" then "rustix" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rustix."0.38.30" { inherit profileName; }).out; + ${ if hostPlatform.isUnix || hostPlatform.parsed.kernel.name == "fuchsia" || hostPlatform.parsed.kernel.name == "vxworks" then "rustix" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rustix."0.38.31" { inherit profileName; }).out; tracing = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing."0.1.40" { inherit profileName; }).out; ${ if hostPlatform.isWindows then "windows_sys" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".windows-sys."0.52.0" { inherit profileName; }).out; }; @@ -3774,47 +3918,19 @@ in }; }); - "registry+https://github.com/rust-lang/crates.io-index".prost."0.12.3" = overridableMkRustCrate (profileName: rec { - name = "prost"; - version = "0.12.3"; - registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "146c289cda302b98a28d40c8b3b90498d6e526dd24ac2ecea73e4e491685b94a"; }; - features = builtins.concatLists [ - [ "default" ] - [ "prost-derive" ] - [ "std" ] - ]; - dependencies = { - bytes = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".bytes."1.5.0" { inherit profileName; }).out; - prost_derive = (buildRustPackages."registry+https://github.com/rust-lang/crates.io-index".prost-derive."0.12.3" { profileName = "__noProfile"; }).out; - }; - }); - - "registry+https://github.com/rust-lang/crates.io-index".prost-derive."0.12.3" = overridableMkRustCrate (profileName: rec { - name = "prost-derive"; - version = "0.12.3"; - registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "efb6c9a1dd1def8e2124d17e83a20af56f1570d6c2d2bd9e266ccb768df3840e"; }; - dependencies = { - anyhow = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".anyhow."1.0.79" { inherit profileName; }).out; - itertools = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".itertools."0.10.5" { inherit profileName; }).out; - proc_macro2 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".proc-macro2."1.0.76" { inherit profileName; }).out; - quote = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".quote."1.0.35" { inherit profileName; }).out; - syn = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".syn."2.0.48" { inherit profileName; }).out; - }; - }); - - "registry+https://github.com/rust-lang/crates.io-index".prost-types."0.12.3" = overridableMkRustCrate (profileName: rec { - name = "prost-types"; - version = "0.12.3"; + "registry+https://github.com/rust-lang/crates.io-index".quick-xml."0.31.0" = overridableMkRustCrate (profileName: rec { + name = "quick-xml"; + version = "0.31.0"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "193898f59edcf43c26227dcd4c8427f00d99d61e95dcde58dabd49fa291d470e"; }; + src = fetchCratesIo { inherit name version; sha256 = "1004a344b30a54e2ee58d66a71b32d2db2feb0a31f9a2d302bf0536f15de2a33"; }; features = builtins.concatLists [ + [ "async-tokio" ] [ "default" ] - [ "std" ] 
+ [ "tokio" ] ]; dependencies = { - prost = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".prost."0.12.3" { inherit profileName; }).out; + memchr = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".memchr."2.3.4" { inherit profileName; }).out; + tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.36.0" { inherit profileName; }).out; }; }); @@ -3850,7 +3966,6 @@ in [ "getrandom" ] [ "libc" ] [ "rand_chacha" ] - [ "small_rng" ] [ "std" ] [ "std_rng" ] ]; @@ -3900,24 +4015,6 @@ in }; }); - "registry+https://github.com/rust-lang/crates.io-index".regex."1.10.2" = overridableMkRustCrate (profileName: rec { - name = "regex"; - version = "1.10.2"; - registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "380b951a9c5e80ddfd6136919eef32310721aa4aacd4889a8d39124b026ab343"; }; - features = builtins.concatLists [ - [ "std" ] - [ "unicode-case" ] - [ "unicode-perl" ] - ]; - dependencies = { - aho_corasick = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aho-corasick."1.1.2" { inherit profileName; }).out; - memchr = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".memchr."2.7.1" { inherit profileName; }).out; - regex_automata = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".regex-automata."0.4.3" { inherit profileName; }).out; - regex_syntax = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".regex-syntax."0.8.2" { inherit profileName; }).out; - }; - }); - "registry+https://github.com/rust-lang/crates.io-index".regex-automata."0.1.10" = overridableMkRustCrate (profileName: rec { name = "regex-automata"; version = "0.1.10"; @@ -3933,29 +4030,6 @@ in }; }); - "registry+https://github.com/rust-lang/crates.io-index".regex-automata."0.4.3" = overridableMkRustCrate (profileName: rec { - name = "regex-automata"; - version = "0.4.3"; - registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "5f804c7828047e88b2d32e2d7fe5a105da8ee3264f01902f796c8e067dc2483f"; }; - features = builtins.concatLists [ - [ "alloc" ] - [ "meta" ] - [ "nfa-pikevm" ] - [ "nfa-thompson" ] - [ "std" ] - [ "syntax" ] - [ "unicode-case" ] - [ "unicode-perl" ] - [ "unicode-word-boundary" ] - ]; - dependencies = { - aho_corasick = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".aho-corasick."1.1.2" { inherit profileName; }).out; - memchr = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".memchr."2.7.1" { inherit profileName; }).out; - regex_syntax = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".regex-syntax."0.8.2" { inherit profileName; }).out; - }; - }); - "registry+https://github.com/rust-lang/crates.io-index".regex-lite."0.1.5" = overridableMkRustCrate (profileName: rec { name = "regex-lite"; version = "0.1.5"; @@ -3986,16 +4060,59 @@ in ]; }); - "registry+https://github.com/rust-lang/crates.io-index".regex-syntax."0.8.2" = overridableMkRustCrate (profileName: rec { - name = "regex-syntax"; - version = "0.8.2"; + "registry+https://github.com/rust-lang/crates.io-index".reqwest."0.12.4" = overridableMkRustCrate (profileName: rec { + name = "reqwest"; + version = "0.12.4"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f"; }; + src = 
fetchCratesIo { inherit name version; sha256 = "566cafdd92868e0939d3fb961bd0dc25fcfaaed179291093b3d43e6b3150ea10"; }; features = builtins.concatLists [ - [ "std" ] - [ "unicode-case" ] - [ "unicode-perl" ] + [ "__tls" ] + [ "blocking" ] + [ "charset" ] + [ "default" ] + [ "default-tls" ] + [ "futures-channel" ] + [ "h2" ] + [ "http2" ] + [ "macos-system-configuration" ] ]; + dependencies = { + base64 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".base64."0.22.1" { inherit profileName; }).out; + bytes = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".bytes."1.5.0" { inherit profileName; }).out; + ${ if !(hostPlatform.parsed.cpu.name == "wasm32") then "encoding_rs" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".encoding_rs."0.8.33" { inherit profileName; }).out; + ${ if !(hostPlatform.parsed.cpu.name == "wasm32") then "futures_channel" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-channel."0.3.30" { inherit profileName; }).out; + futures_core = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-core."0.3.30" { inherit profileName; }).out; + futures_util = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-util."0.3.30" { inherit profileName; }).out; + ${ if !(hostPlatform.parsed.cpu.name == "wasm32") then "h2" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".h2."0.4.2" { inherit profileName; }).out; + http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."1.1.0" { inherit profileName; }).out; + ${ if !(hostPlatform.parsed.cpu.name == "wasm32") then "http_body" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http-body."1.0.0" { inherit profileName; }).out; + ${ if !(hostPlatform.parsed.cpu.name == "wasm32") then "http_body_util" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http-body-util."0.1.1" { inherit profileName; }).out; + ${ if !(hostPlatform.parsed.cpu.name == "wasm32") then "hyper" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyper."1.2.0" { inherit profileName; }).out; + ${ if !(hostPlatform.parsed.cpu.name == "wasm32") then "hyper_tls" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyper-tls."0.6.0" { inherit profileName; }).out; + ${ if !(hostPlatform.parsed.cpu.name == "wasm32") then "hyper_util" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyper-util."0.1.3" { inherit profileName; }).out; + ${ if !(hostPlatform.parsed.cpu.name == "wasm32") then "ipnet" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".ipnet."2.9.0" { inherit profileName; }).out; + ${ if hostPlatform.parsed.cpu.name == "wasm32" then "js_sys" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".js-sys."0.3.67" { inherit profileName; }).out; + ${ if !(hostPlatform.parsed.cpu.name == "wasm32") then "log" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".log."0.4.20" { inherit profileName; }).out; + ${ if !(hostPlatform.parsed.cpu.name == "wasm32") then "mime" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".mime."0.3.17" { inherit profileName; }).out; + ${ if !(hostPlatform.parsed.cpu.name == "wasm32") then "native_tls_crate" else null } = 
(rustPackages."registry+https://github.com/rust-lang/crates.io-index".native-tls."0.2.11" { inherit profileName; }).out; + ${ if !(hostPlatform.parsed.cpu.name == "wasm32") then "once_cell" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".once_cell."1.19.0" { inherit profileName; }).out; + ${ if !(hostPlatform.parsed.cpu.name == "wasm32") then "percent_encoding" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".percent-encoding."2.3.1" { inherit profileName; }).out; + ${ if !(hostPlatform.parsed.cpu.name == "wasm32") then "pin_project_lite" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project-lite."0.2.13" { inherit profileName; }).out; + ${ if !(hostPlatform.parsed.cpu.name == "wasm32") then "rustls_pemfile" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rustls-pemfile."2.1.1" { inherit profileName; }).out; + serde = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".serde."1.0.195" { inherit profileName; }).out; + ${ if hostPlatform.parsed.cpu.name == "wasm32" then "serde_json" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".serde_json."1.0.114" { inherit profileName; }).out; + serde_urlencoded = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".serde_urlencoded."0.7.1" { inherit profileName; }).out; + sync_wrapper = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".sync_wrapper."0.1.2" { inherit profileName; }).out; + ${ if hostPlatform.parsed.kernel.name == "darwin" then "system_configuration" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".system-configuration."0.5.1" { inherit profileName; }).out; + ${ if !(hostPlatform.parsed.cpu.name == "wasm32") then "tokio" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.36.0" { inherit profileName; }).out; + ${ if !(hostPlatform.parsed.cpu.name == "wasm32") then "tokio_native_tls" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio-native-tls."0.3.1" { inherit profileName; }).out; + tower_service = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tower-service."0.3.2" { inherit profileName; }).out; + url = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".url."2.5.0" { inherit profileName; }).out; + ${ if hostPlatform.parsed.cpu.name == "wasm32" then "wasm_bindgen" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".wasm-bindgen."0.2.90" { inherit profileName; }).out; + ${ if hostPlatform.parsed.cpu.name == "wasm32" then "wasm_bindgen_futures" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".wasm-bindgen-futures."0.4.40" { inherit profileName; }).out; + ${ if hostPlatform.parsed.cpu.name == "wasm32" then "web_sys" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".web-sys."0.3.67" { inherit profileName; }).out; + ${ if hostPlatform.isWindows then "winreg" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".winreg."0.52.0" { inherit profileName; }).out; + }; }); "registry+https://github.com/rust-lang/crates.io-index".rfc6979."0.3.1" = overridableMkRustCrate (profileName: rec { @@ -4120,7 +4237,7 @@ in registry = "registry+https://github.com/rust-lang/crates.io-index"; src = fetchCratesIo { inherit name version; sha256 = 
"bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366"; }; dependencies = { - semver = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".semver."1.0.21" { inherit profileName; }).out; + semver = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".semver."1.0.22" { inherit profileName; }).out; }; }); @@ -4154,13 +4271,14 @@ in }; }); - "registry+https://github.com/rust-lang/crates.io-index".rustix."0.38.30" = overridableMkRustCrate (profileName: rec { + "registry+https://github.com/rust-lang/crates.io-index".rustix."0.38.31" = overridableMkRustCrate (profileName: rec { name = "rustix"; - version = "0.38.30"; + version = "0.38.31"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "322394588aaf33c24007e8bb3238ee3e4c5c09c084ab32bc73890b99ff326bca"; }; + src = fetchCratesIo { inherit name version; sha256 = "6ea3e1a662af26cd7a3ba09c0297a31af215563ecf42817c98df621387f4e949"; }; features = builtins.concatLists [ [ "alloc" ] + [ "default" ] [ "event" ] [ "fs" ] [ "net" ] @@ -4168,6 +4286,7 @@ in [ "process" ] [ "std" ] [ "time" ] + [ "use-libc-auxv" ] ]; dependencies = { bitflags = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".bitflags."2.4.2" { inherit profileName; }).out; @@ -4232,8 +4351,8 @@ in dependencies = { log = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".log."0.4.20" { inherit profileName; }).out; ring = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".ring."0.17.7" { inherit profileName; }).out; - pki_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rustls-pki-types."1.1.0" { inherit profileName; }).out; - webpki = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rustls-webpki."0.102.1" { inherit profileName; }).out; + pki_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rustls-pki-types."1.3.1" { inherit profileName; }).out; + webpki = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rustls-webpki."0.102.2" { inherit profileName; }).out; subtle = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".subtle."2.5.0" { inherit profileName; }).out; zeroize = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".zeroize."1.7.0" { inherit profileName; }).out; }; @@ -4259,8 +4378,8 @@ in src = fetchCratesIo { inherit name version; sha256 = "8f1fb85efa936c42c6d5fc28d2629bb51e4b2f4b8a5211e297d599cc5a093792"; }; dependencies = { ${ if hostPlatform.isUnix && !(hostPlatform.parsed.kernel.name == "darwin") then "openssl_probe" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".openssl-probe."0.1.5" { inherit profileName; }).out; - rustls_pemfile = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rustls-pemfile."2.0.0" { inherit profileName; }).out; - pki_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rustls-pki-types."1.1.0" { inherit profileName; }).out; + rustls_pemfile = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rustls-pemfile."2.1.1" { inherit profileName; }).out; + pki_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rustls-pki-types."1.3.1" { inherit profileName; }).out; ${ if hostPlatform.isWindows then "schannel" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".schannel."0.1.23" { inherit 
profileName; }).out; ${ if hostPlatform.parsed.kernel.name == "darwin" then "security_framework" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".security-framework."2.9.2" { inherit profileName; }).out; }; @@ -4276,26 +4395,26 @@ in }; }); - "registry+https://github.com/rust-lang/crates.io-index".rustls-pemfile."2.0.0" = overridableMkRustCrate (profileName: rec { + "registry+https://github.com/rust-lang/crates.io-index".rustls-pemfile."2.1.1" = overridableMkRustCrate (profileName: rec { name = "rustls-pemfile"; - version = "2.0.0"; + version = "2.1.1"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "35e4980fa29e4c4b212ffb3db068a564cbf560e51d3944b7c88bd8bf5bec64f4"; }; + src = fetchCratesIo { inherit name version; sha256 = "f48172685e6ff52a556baa527774f61fcaa884f59daf3375c62a3f1cd2549dab"; }; features = builtins.concatLists [ [ "default" ] [ "std" ] ]; dependencies = { base64 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".base64."0.21.7" { inherit profileName; }).out; - pki_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rustls-pki-types."1.1.0" { inherit profileName; }).out; + pki_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rustls-pki-types."1.3.1" { inherit profileName; }).out; }; }); - "registry+https://github.com/rust-lang/crates.io-index".rustls-pki-types."1.1.0" = overridableMkRustCrate (profileName: rec { + "registry+https://github.com/rust-lang/crates.io-index".rustls-pki-types."1.3.1" = overridableMkRustCrate (profileName: rec { name = "rustls-pki-types"; - version = "1.1.0"; + version = "1.3.1"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "9e9d979b3ce68192e42760c7810125eb6cf2ea10efae545a156063e61f314e2a"; }; + src = fetchCratesIo { inherit name version; sha256 = "5ede67b28608b4c60685c7d54122d4400d90f62b40caee7700e700380a390fa8"; }; features = builtins.concatLists [ [ "alloc" ] [ "default" ] @@ -4319,11 +4438,11 @@ in }; }); - "registry+https://github.com/rust-lang/crates.io-index".rustls-webpki."0.102.1" = overridableMkRustCrate (profileName: rec { + "registry+https://github.com/rust-lang/crates.io-index".rustls-webpki."0.102.2" = overridableMkRustCrate (profileName: rec { name = "rustls-webpki"; - version = "0.102.1"; + version = "0.102.2"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "ef4ca26037c909dedb327b48c3327d0ba91d3dd3c4e05dad328f210ffb68e95b"; }; + src = fetchCratesIo { inherit name version; sha256 = "faaa0a62740bedb9b2ef5afa303da42764c012f743917351dc9a237ea1663610"; }; features = builtins.concatLists [ [ "alloc" ] [ "ring" ] @@ -4331,23 +4450,16 @@ in ]; dependencies = { ring = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".ring."0.17.7" { inherit profileName; }).out; - pki_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rustls-pki-types."1.1.0" { inherit profileName; }).out; + pki_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rustls-pki-types."1.3.1" { inherit profileName; }).out; untrusted = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".untrusted."0.9.0" { inherit profileName; }).out; }; }); - "registry+https://github.com/rust-lang/crates.io-index".rustversion."1.0.14" = overridableMkRustCrate (profileName: rec 
{ - name = "rustversion"; - version = "1.0.14"; - registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4"; }; - }); - - "registry+https://github.com/rust-lang/crates.io-index".ryu."1.0.16" = overridableMkRustCrate (profileName: rec { + "registry+https://github.com/rust-lang/crates.io-index".ryu."1.0.17" = overridableMkRustCrate (profileName: rec { name = "ryu"; - version = "1.0.16"; + version = "1.0.17"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "f98d2aa92eebf49b69786be48e4477826b256916e84a57ff2a4f21923b48eb4c"; }; + src = fetchCratesIo { inherit name version; sha256 = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1"; }; }); "registry+https://github.com/rust-lang/crates.io-index".same-file."1.0.6" = overridableMkRustCrate (profileName: rec { @@ -4432,6 +4544,7 @@ in src = fetchCratesIo { inherit name version; sha256 = "e932934257d3b408ed8f30db49d85ea163bfe74961f017f405b025af298f0c7a"; }; features = builtins.concatLists [ [ "OSX_10_9" ] + [ "default" ] ]; dependencies = { core_foundation_sys = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".core-foundation-sys."0.8.6" { inherit profileName; }).out; @@ -4439,11 +4552,11 @@ in }; }); - "registry+https://github.com/rust-lang/crates.io-index".semver."1.0.21" = overridableMkRustCrate (profileName: rec { + "registry+https://github.com/rust-lang/crates.io-index".semver."1.0.22" = overridableMkRustCrate (profileName: rec { name = "semver"; - version = "1.0.21"; + version = "1.0.22"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "b97ed7a9823b74f99c7742f5336af7be5ecd3eeafcb1507d1fa93347b1d589b0"; }; + src = fetchCratesIo { inherit name version; sha256 = "92d43fe69e652f3df9bdc2b85b2854a0825b86e4fb76bc44d945137d053639ca"; }; features = builtins.concatLists [ [ "default" ] [ "std" ] @@ -4483,18 +4596,31 @@ in }; }); - "registry+https://github.com/rust-lang/crates.io-index".serde_json."1.0.111" = overridableMkRustCrate (profileName: rec { + "registry+https://github.com/rust-lang/crates.io-index".serde_json."1.0.114" = overridableMkRustCrate (profileName: rec { name = "serde_json"; - version = "1.0.111"; + version = "1.0.114"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "176e46fa42316f18edd598015a5166857fc835ec732f5215eac6b7bdbf0a84f4"; }; + src = fetchCratesIo { inherit name version; sha256 = "c5f09b1bd632ef549eaa9f60a1f8de742bdbc698e6cee2095fc84dde5f549ae0"; }; features = builtins.concatLists [ [ "default" ] [ "std" ] ]; dependencies = { itoa = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".itoa."1.0.10" { inherit profileName; }).out; - ryu = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".ryu."1.0.16" { inherit profileName; }).out; + ryu = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".ryu."1.0.17" { inherit profileName; }).out; + serde = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".serde."1.0.195" { inherit profileName; }).out; + }; + }); + + "registry+https://github.com/rust-lang/crates.io-index".serde_urlencoded."0.7.1" = overridableMkRustCrate (profileName: rec { + name = "serde_urlencoded"; + version = "0.7.1"; + registry = 
"registry+https://github.com/rust-lang/crates.io-index"; + src = fetchCratesIo { inherit name version; sha256 = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd"; }; + dependencies = { + form_urlencoded = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".form_urlencoded."1.2.1" { inherit profileName; }).out; + itoa = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".itoa."1.0.10" { inherit profileName; }).out; + ryu = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".ryu."1.0.17" { inherit profileName; }).out; serde = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".serde."1.0.195" { inherit profileName; }).out; }; }); @@ -4646,8 +4772,8 @@ in futures = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures."0.3.30" { inherit profileName; }).out; idna = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".idna."0.2.3" { inherit profileName; }).out; lazy_static = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".lazy_static."1.4.0" { inherit profileName; }).out; - nom = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".nom."6.1.2" { inherit profileName; }).out; - pin_project = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project."1.1.3" { inherit profileName; }).out; + nom = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".nom."6.2.2" { inherit profileName; }).out; + pin_project = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project."1.1.5" { inherit profileName; }).out; regex_automata = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".regex-automata."0.1.10" { inherit profileName; }).out; serde = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".serde."1.0.195" { inherit profileName; }).out; }; @@ -4665,7 +4791,7 @@ in ref = "feature/lmtp";}; dependencies = { async_trait = (buildRustPackages."registry+https://github.com/rust-lang/crates.io-index".async-trait."0.1.77" { profileName = "__noProfile"; }).out; - chrono = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".chrono."0.4.31" { inherit profileName; }).out; + chrono = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".chrono."0.4.38" { inherit profileName; }).out; duplexify = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".duplexify."1.2.2" { inherit profileName; }).out; futures = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures."0.3.30" { inherit profileName; }).out; smol = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".smol."1.3.0" { inherit profileName; }).out; @@ -4868,6 +4994,29 @@ in }; }); + "registry+https://github.com/rust-lang/crates.io-index".system-configuration."0.5.1" = overridableMkRustCrate (profileName: rec { + name = "system-configuration"; + version = "0.5.1"; + registry = "registry+https://github.com/rust-lang/crates.io-index"; + src = fetchCratesIo { inherit name version; sha256 = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7"; }; + dependencies = { + bitflags = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".bitflags."1.3.2" { inherit profileName; }).out; + core_foundation = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".core-foundation."0.9.4" { inherit profileName; }).out; + system_configuration_sys = 
(rustPackages."registry+https://github.com/rust-lang/crates.io-index".system-configuration-sys."0.5.0" { inherit profileName; }).out; + }; + }); + + "registry+https://github.com/rust-lang/crates.io-index".system-configuration-sys."0.5.0" = overridableMkRustCrate (profileName: rec { + name = "system-configuration-sys"; + version = "0.5.0"; + registry = "registry+https://github.com/rust-lang/crates.io-index"; + src = fetchCratesIo { inherit name version; sha256 = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9"; }; + dependencies = { + core_foundation_sys = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".core-foundation-sys."0.8.6" { inherit profileName; }).out; + libc = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".libc."0.2.152" { inherit profileName; }).out; + }; + }); + "registry+https://github.com/rust-lang/crates.io-index".tap."1.0.1" = overridableMkRustCrate (profileName: rec { name = "tap"; version = "1.0.1"; @@ -4875,6 +5024,19 @@ in src = fetchCratesIo { inherit name version; sha256 = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"; }; }); + "registry+https://github.com/rust-lang/crates.io-index".tempfile."3.10.1" = overridableMkRustCrate (profileName: rec { + name = "tempfile"; + version = "3.10.1"; + registry = "registry+https://github.com/rust-lang/crates.io-index"; + src = fetchCratesIo { inherit name version; sha256 = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1"; }; + dependencies = { + cfg_if = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".cfg-if."1.0.0" { inherit profileName; }).out; + fastrand = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".fastrand."2.0.1" { inherit profileName; }).out; + ${ if hostPlatform.isUnix || hostPlatform.parsed.kernel.name == "wasi" then "rustix" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rustix."0.38.31" { inherit profileName; }).out; + ${ if hostPlatform.isWindows then "windows_sys" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".windows-sys."0.52.0" { inherit profileName; }).out; + }; + }); + "registry+https://github.com/rust-lang/crates.io-index".termcolor."1.4.1" = overridableMkRustCrate (profileName: rec { name = "termcolor"; version = "1.4.1"; @@ -4885,11 +5047,11 @@ in }; }); - "registry+https://github.com/rust-lang/crates.io-index".textwrap."0.16.0" = overridableMkRustCrate (profileName: rec { + "registry+https://github.com/rust-lang/crates.io-index".textwrap."0.16.1" = overridableMkRustCrate (profileName: rec { name = "textwrap"; - version = "0.16.0"; + version = "0.16.1"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d"; }; + src = fetchCratesIo { inherit name version; sha256 = "23d434d3f8967a09480fb04132ebe0a3e088c173e6d0ee7897abbdf4eab0f8b9"; }; }); "registry+https://github.com/rust-lang/crates.io-index".thiserror."1.0.56" = overridableMkRustCrate (profileName: rec { @@ -4914,11 +5076,11 @@ in }; }); - "registry+https://github.com/rust-lang/crates.io-index".thread_local."1.1.7" = overridableMkRustCrate (profileName: rec { + "registry+https://github.com/rust-lang/crates.io-index".thread_local."1.1.8" = overridableMkRustCrate (profileName: rec { name = "thread_local"; - version = "1.1.7"; + version = "1.1.8"; registry = 
"registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152"; }; + src = fetchCratesIo { inherit name version; sha256 = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c"; }; dependencies = { cfg_if = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".cfg-if."1.0.0" { inherit profileName; }).out; once_cell = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".once_cell."1.19.0" { inherit profileName; }).out; @@ -4991,11 +5153,11 @@ in src = fetchCratesIo { inherit name version; sha256 = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"; }; }); - "registry+https://github.com/rust-lang/crates.io-index".tokio."1.35.1" = overridableMkRustCrate (profileName: rec { + "registry+https://github.com/rust-lang/crates.io-index".tokio."1.36.0" = overridableMkRustCrate (profileName: rec { name = "tokio"; - version = "1.35.1"; + version = "1.36.0"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "c89b4efa943be685f629b149f53829423f8f5531ea21249408e8e2f8671ec104"; }; + src = fetchCratesIo { inherit name version; sha256 = "61285f6515fa018fb2d1e46eb21223fff441ee8db5d0f1435e8ab4f5cdb80931"; }; features = builtins.concatLists [ [ "bytes" ] [ "default" ] @@ -5016,11 +5178,10 @@ in [ "sync" ] [ "time" ] [ "tokio-macros" ] - [ "tracing" ] [ "windows-sys" ] ]; dependencies = { - ${ if false then "backtrace" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".backtrace."0.3.69" { inherit profileName; }).out; + ${ if false then "backtrace" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".backtrace."0.3.59" { inherit profileName; }).out; bytes = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".bytes."1.5.0" { inherit profileName; }).out; ${ if hostPlatform.isUnix then "libc" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".libc."0.2.152" { inherit profileName; }).out; mio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".mio."0.8.10" { inherit profileName; }).out; @@ -5029,22 +5190,10 @@ in ${ if hostPlatform.isUnix then "signal_hook_registry" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".signal-hook-registry."1.4.1" { inherit profileName; }).out; socket2 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".socket2."0.5.5" { inherit profileName; }).out; tokio_macros = (buildRustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio-macros."2.2.0" { profileName = "__noProfile"; }).out; - ${ if false then "tracing" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing."0.1.40" { inherit profileName; }).out; ${ if hostPlatform.isWindows then "windows_sys" else null } = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".windows-sys."0.48.0" { inherit profileName; }).out; }; }); - "registry+https://github.com/rust-lang/crates.io-index".tokio-io-timeout."1.2.0" = overridableMkRustCrate (profileName: rec { - name = "tokio-io-timeout"; - version = "1.2.0"; - registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "30b74022ada614a1b4834de765f9bb43877f910cc8ce4be40e89042c9223a8bf"; }; - dependencies = { - pin_project_lite = 
(rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project-lite."0.2.13" { inherit profileName; }).out; - tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.35.1" { inherit profileName; }).out; - }; - }); - "registry+https://github.com/rust-lang/crates.io-index".tokio-macros."2.2.0" = overridableMkRustCrate (profileName: rec { name = "tokio-macros"; version = "2.2.0"; @@ -5057,6 +5206,17 @@ in }; }); + "registry+https://github.com/rust-lang/crates.io-index".tokio-native-tls."0.3.1" = overridableMkRustCrate (profileName: rec { + name = "tokio-native-tls"; + version = "0.3.1"; + registry = "registry+https://github.com/rust-lang/crates.io-index"; + src = fetchCratesIo { inherit name version; sha256 = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2"; }; + dependencies = { + native_tls = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".native-tls."0.2.11" { inherit profileName; }).out; + tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.36.0" { inherit profileName; }).out; + }; + }); + "registry+https://github.com/rust-lang/crates.io-index".tokio-rustls."0.23.4" = overridableMkRustCrate (profileName: rec { name = "tokio-rustls"; version = "0.23.4"; @@ -5070,7 +5230,7 @@ in ]; dependencies = { rustls = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rustls."0.20.9" { inherit profileName; }).out; - tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.35.1" { inherit profileName; }).out; + tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.36.0" { inherit profileName; }).out; webpki = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".webpki."0.22.4" { inherit profileName; }).out; }; }); @@ -5086,7 +5246,7 @@ in ]; dependencies = { rustls = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rustls."0.21.10" { inherit profileName; }).out; - tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.35.1" { inherit profileName; }).out; + tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.36.0" { inherit profileName; }).out; }; }); @@ -5103,8 +5263,8 @@ in ]; dependencies = { rustls = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rustls."0.22.2" { inherit profileName; }).out; - pki_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rustls-pki-types."1.1.0" { inherit profileName; }).out; - tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.35.1" { inherit profileName; }).out; + pki_types = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rustls-pki-types."1.3.1" { inherit profileName; }).out; + tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.36.0" { inherit profileName; }).out; }; }); @@ -5115,13 +5275,12 @@ in src = fetchCratesIo { inherit name version; sha256 = "397c988d37662c7dda6d2208364a706264bf3d6138b11d436cbac0ad38832842"; }; features = builtins.concatLists [ [ "default" ] - [ "net" ] [ "time" ] ]; dependencies = { futures_core = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-core."0.3.30" { inherit profileName; }).out; pin_project_lite = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project-lite."0.2.13" { inherit profileName; }).out; - tokio = 
(rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.35.1" { inherit profileName; }).out; + tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.36.0" { inherit profileName; }).out; }; }); @@ -5144,7 +5303,7 @@ in futures_io = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-io."0.3.30" { inherit profileName; }).out; futures_sink = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-sink."0.3.30" { inherit profileName; }).out; pin_project_lite = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project-lite."0.2.13" { inherit profileName; }).out; - tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.35.1" { inherit profileName; }).out; + tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.36.0" { inherit profileName; }).out; tracing = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing."0.1.40" { inherit profileName; }).out; }; }); @@ -5162,41 +5321,6 @@ in }; }); - "registry+https://github.com/rust-lang/crates.io-index".tonic."0.10.2" = overridableMkRustCrate (profileName: rec { - name = "tonic"; - version = "0.10.2"; - registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "d560933a0de61cf715926b9cac824d4c883c2c43142f787595e48280c40a1d0e"; }; - features = builtins.concatLists [ - [ "channel" ] - [ "codegen" ] - [ "default" ] - [ "prost" ] - [ "transport" ] - ]; - dependencies = { - async_stream = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".async-stream."0.3.5" { inherit profileName; }).out; - async_trait = (buildRustPackages."registry+https://github.com/rust-lang/crates.io-index".async-trait."0.1.77" { profileName = "__noProfile"; }).out; - axum = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".axum."0.6.20" { inherit profileName; }).out; - base64 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".base64."0.21.7" { inherit profileName; }).out; - bytes = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".bytes."1.5.0" { inherit profileName; }).out; - h2 = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".h2."0.3.24" { inherit profileName; }).out; - http = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http."0.2.11" { inherit profileName; }).out; - http_body = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".http-body."0.4.6" { inherit profileName; }).out; - hyper = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyper."0.14.28" { inherit profileName; }).out; - hyper_timeout = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".hyper-timeout."0.4.1" { inherit profileName; }).out; - percent_encoding = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".percent-encoding."2.3.1" { inherit profileName; }).out; - pin_project = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project."1.1.3" { inherit profileName; }).out; - prost = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".prost."0.12.3" { inherit profileName; }).out; - tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.35.1" { inherit profileName; }).out; - tokio_stream = 
(rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio-stream."0.1.14" { inherit profileName; }).out; - tower = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tower."0.4.13" { inherit profileName; }).out; - tower_layer = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tower-layer."0.3.2" { inherit profileName; }).out; - tower_service = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tower-service."0.3.2" { inherit profileName; }).out; - tracing = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing."0.1.40" { inherit profileName; }).out; - }; - }); - "registry+https://github.com/rust-lang/crates.io-index".tower."0.4.13" = overridableMkRustCrate (profileName: rec { name = "tower"; version = "0.4.13"; @@ -5204,38 +5328,23 @@ in src = fetchCratesIo { inherit name version; sha256 = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c"; }; features = builtins.concatLists [ [ "__common" ] - [ "balance" ] - [ "buffer" ] [ "default" ] - [ "discover" ] [ "futures-core" ] [ "futures-util" ] - [ "indexmap" ] - [ "limit" ] - [ "load" ] [ "log" ] [ "make" ] [ "pin-project" ] [ "pin-project-lite" ] - [ "rand" ] - [ "ready-cache" ] - [ "slab" ] - [ "timeout" ] [ "tokio" ] - [ "tokio-util" ] [ "tracing" ] [ "util" ] ]; dependencies = { futures_core = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-core."0.3.30" { inherit profileName; }).out; futures_util = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".futures-util."0.3.30" { inherit profileName; }).out; - indexmap = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".indexmap."1.9.3" { inherit profileName; }).out; - pin_project = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project."1.1.3" { inherit profileName; }).out; + pin_project = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project."1.1.5" { inherit profileName; }).out; pin_project_lite = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".pin-project-lite."0.2.13" { inherit profileName; }).out; - rand = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".rand."0.8.5" { inherit profileName; }).out; - slab = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".slab."0.4.9" { inherit profileName; }).out; - tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.35.1" { inherit profileName; }).out; - tokio_util = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio-util."0.7.10" { inherit profileName; }).out; + tokio = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tokio."1.36.0" { inherit profileName; }).out; tower_layer = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tower-layer."0.3.2" { inherit profileName; }).out; tower_service = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tower-service."0.3.2" { inherit profileName; }).out; tracing = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing."0.1.40" { inherit profileName; }).out; @@ -5330,29 +5439,20 @@ in [ "alloc" ] [ "ansi" ] [ "default" ] - [ "env-filter" ] [ "fmt" ] - [ "matchers" ] [ "nu-ansi-term" ] - [ "once_cell" ] - [ "regex" ] [ "registry" ] [ "sharded-slab" ] [ "smallvec" ] [ "std" ] [ "thread_local" ] - [ "tracing" ] [ "tracing-log" ] ]; dependencies = { - matchers = 
(rustPackages."registry+https://github.com/rust-lang/crates.io-index".matchers."0.1.0" { inherit profileName; }).out; nu_ansi_term = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".nu-ansi-term."0.46.0" { inherit profileName; }).out; - once_cell = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".once_cell."1.19.0" { inherit profileName; }).out; - regex = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".regex."1.10.2" { inherit profileName; }).out; sharded_slab = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".sharded-slab."0.1.7" { inherit profileName; }).out; smallvec = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".smallvec."1.13.1" { inherit profileName; }).out; - thread_local = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".thread_local."1.1.7" { inherit profileName; }).out; - tracing = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing."0.1.40" { inherit profileName; }).out; + thread_local = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".thread_local."1.1.8" { inherit profileName; }).out; tracing_core = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing-core."0.1.32" { inherit profileName; }).out; tracing_log = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".tracing-log."0.2.0" { inherit profileName; }).out; }; @@ -5458,8 +5558,15 @@ in src = fetchCratesIo { inherit name version; sha256 = "f00cc9702ca12d3c81455259621e676d0f7251cec66a21e98fe2e9a37db93b2a"; }; features = builtins.concatLists [ [ "default" ] + [ "js" ] + [ "rng" ] [ "std" ] + [ "v4" ] ]; + dependencies = { + getrandom = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".getrandom."0.2.12" { inherit profileName; }).out; + wasm_bindgen = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".wasm-bindgen."0.2.90" { inherit profileName; }).out; + }; }); "registry+https://github.com/rust-lang/crates.io-index".valuable."0.1.0" = overridableMkRustCrate (profileName: rec { @@ -5473,11 +5580,18 @@ in ]; }); - "registry+https://github.com/rust-lang/crates.io-index".value-bag."1.6.0" = overridableMkRustCrate (profileName: rec { + "registry+https://github.com/rust-lang/crates.io-index".value-bag."1.7.0" = overridableMkRustCrate (profileName: rec { name = "value-bag"; - version = "1.6.0"; + version = "1.7.0"; registry = "registry+https://github.com/rust-lang/crates.io-index"; - src = fetchCratesIo { inherit name version; sha256 = "7cdbaf5e132e593e9fc1de6a15bbec912395b11fb9719e061cf64f804524c503"; }; + src = fetchCratesIo { inherit name version; sha256 = "126e423afe2dd9ac52142e7e9d5ce4135d7e13776c529d27fd6bc49f19e3280b"; }; + }); + + "registry+https://github.com/rust-lang/crates.io-index".vcpkg."0.2.15" = overridableMkRustCrate (profileName: rec { + name = "vcpkg"; + version = "0.2.15"; + registry = "registry+https://github.com/rust-lang/crates.io-index"; + src = fetchCratesIo { inherit name version; sha256 = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"; }; }); "registry+https://github.com/rust-lang/crates.io-index".version_check."0.9.4" = overridableMkRustCrate (profileName: rec { @@ -5630,12 +5744,27 @@ in registry = "registry+https://github.com/rust-lang/crates.io-index"; src = fetchCratesIo { inherit name version; sha256 = "58cd2333b6e0be7a39605f0e255892fd7418a682d8da8fe042fe25128794d2ed"; }; features = builtins.concatLists [ + [ "AbortController" 
] + [ "AbortSignal" ] + [ "Blob" ] + [ "BlobPropertyBag" ] [ "Crypto" ] [ "Event" ] [ "EventTarget" ] + [ "File" ] + [ "FormData" ] + [ "Headers" ] [ "MessageEvent" ] + [ "ReadableStream" ] + [ "Request" ] + [ "RequestCredentials" ] + [ "RequestInit" ] + [ "RequestMode" ] + [ "Response" ] + [ "ServiceWorkerGlobalScope" ] [ "Window" ] [ "Worker" ] + [ "WorkerGlobalScope" ] ]; dependencies = { js_sys = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".js-sys."0.3.67" { inherit profileName; }).out; @@ -5742,11 +5871,15 @@ in [ "Win32_Storage_FileSystem" ] [ "Win32_System" ] [ "Win32_System_Console" ] + [ "Win32_System_Diagnostics" ] + [ "Win32_System_Diagnostics_Debug" ] [ "Win32_System_IO" ] [ "Win32_System_LibraryLoader" ] [ "Win32_System_Pipes" ] + [ "Win32_System_Registry" ] [ "Win32_System_SystemServices" ] [ "Win32_System_Threading" ] + [ "Win32_System_Time" ] [ "Win32_System_WindowsProgramming" ] [ "default" ] ]; @@ -5923,6 +6056,17 @@ in src = fetchCratesIo { inherit name version; sha256 = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04"; }; }); + "registry+https://github.com/rust-lang/crates.io-index".winreg."0.52.0" = overridableMkRustCrate (profileName: rec { + name = "winreg"; + version = "0.52.0"; + registry = "registry+https://github.com/rust-lang/crates.io-index"; + src = fetchCratesIo { inherit name version; sha256 = "a277a57398d4bfa075df44f501a17cfdf8542d224f0d36095a2adc7aee4ef0a5"; }; + dependencies = { + cfg_if = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".cfg-if."1.0.0" { inherit profileName; }).out; + windows_sys = (rustPackages."registry+https://github.com/rust-lang/crates.io-index".windows-sys."0.48.0" { inherit profileName; }).out; + }; + }); + "registry+https://github.com/rust-lang/crates.io-index".wyz."0.2.0" = overridableMkRustCrate (profileName: rec { name = "wyz"; version = "0.2.0"; diff --git a/README.md b/README.md index b15330d..2778ffa 100644 --- a/README.md +++ b/README.md @@ -18,9 +18,9 @@ A resilient & standards-compliant open-source IMAP server with built-in encrypti ## Roadmap - - ✅ 0.1 Better emails parsing (july '23, see [eml-codec](https://git.deuxfleurs.fr/Deuxfleurs/eml-codec)). - - ✅ 0.2 Support of IMAP4. (~january '24). - - ⌛0.3 CalDAV support. (~february '24). + - ✅ 0.1 Better emails parsing. + - ✅ 0.2 Support of IMAP4.. + - ✅ 0.3 CalDAV support. - ⌛0.4 CardDAV support. - ⌛0.5 Public beta. -- cgit v1.2.3