author     Quentin <quentin@dufour.io>    2023-11-29 16:09:56 +0000
committer  Quentin <quentin@dufour.io>    2023-11-29 16:09:56 +0000
commit     b04c2bfb0a5776ddcf8c0bab23dd1f800e3a0df3 (patch)
tree       8b0eda30cba5b2c62810a2cd71b7a93095e20205 /src
parent     14325395f6b059a317df738657edec47599c291f (diff)
parent     b76b6dcbcc47ebc61848389a6b0d5d4e8d8cde48 (diff)
download   tricot-b04c2bfb0a5776ddcf8c0bab23dd1f800e3a0df3.tar.gz
           tricot-b04c2bfb0a5776ddcf8c0bab23dd1f800e3a0df3.zip
Merge pull request 'New directive `tricot-add-redirect <match-prefix> <redirect-prefix> [301|302|303|307]`' (#10) from redirect into main
Reviewed-on: https://git.deuxfleurs.fr/Deuxfleurs/tricot/pulls/10
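
For illustration, a minimal standalone Rust sketch of the prefix rewrite performed by the new directive is shown below. The hostnames, paths, and the `rewrite` helper are hypothetical and not part of this patch; the actual implementation is the `try_redirect` function added to src/https.rs in the diff that follows.

    // Sketch only: mirrors the rewrite done for a rule declared via a Consul tag such as
    // `tricot-add-redirect old.example.com/doc new.example.com/documentation 301`.
    fn rewrite(req_host: &str, req_path: &str) -> Option<String> {
        let (src_host, src_prefix) = ("old.example.com", "/doc");
        let (dst_host, dst_prefix) = ("new.example.com", "/documentation");

        if req_host == src_host && req_path.starts_with(src_prefix) {
            // Keep whatever follows the matched prefix and graft it onto the target prefix.
            let suffix = &req_path[src_prefix.len()..];
            Some(format!("https://{}{}{}", dst_host, dst_prefix, suffix))
        } else {
            None
        }
    }

    fn main() {
        assert_eq!(
            rewrite("old.example.com", "/doc/setup.html").as_deref(),
            Some("https://new.example.com/documentation/setup.html")
        );
    }

As the diff below shows, the optional status code falls back to 302 when it is missing or not one of 301, 302, 303, or 307.
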
Diffstat (limited to 'src')
-rw-r--r--  src/cert_store.rs      4
-rw-r--r--  src/https.rs          72
-rw-r--r--  src/main.rs            2
-rw-r--r--  src/proxy_config.rs  305
4 files changed, 290 insertions, 93 deletions
diff --git a/src/cert_store.rs b/src/cert_store.rs
index a8072a2..0ced178 100644
--- a/src/cert_store.rs
+++ b/src/cert_store.rs
@@ -78,7 +78,7 @@ impl CertStore {
let proxy_config: Arc<ProxyConfig> = rx_proxy_config.borrow().clone();
for ent in proxy_config.entries.iter() {
- if let HostDescription::Hostname(domain) = &ent.host {
+ if let HostDescription::Hostname(domain) = &ent.url_prefix.host {
if let Some((host, _port)) = domain.split_once(':') {
domains.insert(host.to_string());
} else {
@@ -121,7 +121,7 @@ impl CertStore {
.borrow()
.entries
.iter()
- .any(|ent| ent.host.matches(domain))
+ .any(|ent| ent.url_prefix.host.matches(domain))
{
bail!("Domain {} should not have a TLS certificate.", domain);
}
diff --git a/src/https.rs b/src/https.rs
index ed98ae1..9d92470 100644
--- a/src/https.rs
+++ b/src/https.rs
@@ -24,7 +24,7 @@ use tokio_util::io::{ReaderStream, StreamReader};
use opentelemetry::{metrics, KeyValue};
use crate::cert_store::{CertStore, StoreResolver};
-use crate::proxy_config::{ProxyConfig, ProxyEntry};
+use crate::proxy_config::{HostDescription, ProxyConfig, ProxyEntry};
use crate::reverse_proxy;
const MAX_CONNECTION_LIFETIME: Duration = Duration::from_secs(24 * 3600);
@@ -234,8 +234,9 @@ async fn select_target_and_proxy(
.iter()
.filter(|ent| {
ent.flags.healthy
- && ent.host.matches(host)
+ && ent.url_prefix.host.matches(host)
&& ent
+ .url_prefix
.path_prefix
.as_ref()
.map(|prefix| path.starts_with(prefix))
@@ -244,7 +245,8 @@ async fn select_target_and_proxy(
.max_by_key(|ent| {
(
ent.priority,
- ent.path_prefix
+ ent.url_prefix
+ .path_prefix
.as_ref()
.map(|x| x.len() as i32)
.unwrap_or(0),
@@ -270,15 +272,22 @@ async fn select_target_and_proxy(
);
proxy_to.calls_in_progress.fetch_add(1, Ordering::SeqCst);
+ // Forward to backend
debug!("{}{} -> {}", host, path, proxy_to);
trace!("Request: {:?}", req);
- let response = match do_proxy(https_config, remote_addr, req, proxy_to).await {
- Ok(resp) => resp,
- Err(e) => Response::builder()
- .status(StatusCode::BAD_GATEWAY)
- .body(Body::from(format!("Proxy error: {}", e)))
- .unwrap(),
+ let response = if let Some(http_res) = try_redirect(host, path, proxy_to) {
+ // redirection middleware
+ http_res
+ } else {
+ // proxying to backend
+ match do_proxy(https_config, remote_addr, req, proxy_to).await {
+ Ok(resp) => resp,
+ Err(e) => Response::builder()
+ .status(StatusCode::BAD_GATEWAY)
+ .body(Body::from(format!("Proxy error: {}", e)))
+ .unwrap(),
+ }
};
proxy_to.calls_in_progress.fetch_sub(1, Ordering::SeqCst);
@@ -300,6 +309,51 @@ async fn select_target_and_proxy(
}
}
+fn try_redirect(req_host: &str, req_path: &str, proxy_to: &ProxyEntry) -> Option<Response<Body>> {
+ let maybe_redirect = proxy_to.redirects.iter().find(|(src, _, _)| {
+ let mut matched: bool = src.host.matches(req_host);
+
+ if let Some(path) = &src.path_prefix {
+ matched &= req_path.starts_with(path);
+ }
+
+ matched
+ });
+
+ let (src_prefix, dst_prefix, code) = match maybe_redirect {
+ None => return None,
+ Some(redirect) => redirect,
+ };
+
+ let new_host = match &dst_prefix.host {
+ HostDescription::Hostname(h) => h,
+ _ => unreachable!(), // checked when ProxyEntry is created
+ };
+
+ let new_prefix = dst_prefix.path_prefix.as_deref().unwrap_or("");
+ let original_prefix = src_prefix.path_prefix.as_deref().unwrap_or("");
+ let suffix = &req_path[original_prefix.len()..];
+
+ let uri = format!("https://{}{}{}", new_host, new_prefix, suffix);
+
+ let status = match StatusCode::from_u16(*code) {
+ Err(e) => {
+ warn!(
+                "Couldn't redirect {}{} to {} as code {} is invalid: {}",
+ req_host, req_path, uri, code, e
+ );
+ return None;
+ }
+ Ok(sc) => sc,
+ };
+
+ Response::builder()
+ .header("Location", uri.clone())
+ .status(status)
+ .body(Body::from(uri))
+ .ok()
+}
+
async fn do_proxy(
https_config: &HttpsConfig,
remote_addr: SocketAddr,
diff --git a/src/main.rs b/src/main.rs
index 2e08495..93abcca 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -239,7 +239,7 @@ async fn dump_config_on_change(
let mut cfg_map = BTreeMap::<_, Vec<_>>::new();
for ent in cfg.entries.iter() {
cfg_map
- .entry((&ent.host, &ent.path_prefix))
+ .entry((&ent.url_prefix.host, &ent.url_prefix.path_prefix))
.or_default()
.push(ent);
}
diff --git a/src/proxy_config.rs b/src/proxy_config.rs
index dab4d98..8381de2 100644
--- a/src/proxy_config.rs
+++ b/src/proxy_config.rs
@@ -13,7 +13,7 @@ use crate::consul;
// ---- Extract proxy config from Consul catalog ----
-#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
+#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub enum HostDescription {
Hostname(String),
Pattern(glob::Pattern),
@@ -45,12 +45,48 @@ impl std::fmt::Display for HostDescription {
}
}
-#[derive(Debug)]
-pub struct ProxyEntry {
+#[derive(Debug, Clone)]
+pub struct UrlPrefix {
/// Publicly exposed TLS hostnames for matching this rule
pub host: HostDescription,
+
/// Path prefix for matching this rule
pub path_prefix: Option<String>,
+}
+
+impl PartialEq for UrlPrefix {
+ fn eq(&self, other: &Self) -> bool {
+ self.host == other.host && self.path_prefix == other.path_prefix
+ }
+}
+impl Eq for UrlPrefix {}
+
+impl UrlPrefix {
+ fn new(raw_prefix: &str) -> Option<Self> {
+ let (raw_host, path_prefix) = match raw_prefix.find('/') {
+ Some(i) => {
+ let (host, pp) = raw_prefix.split_at(i);
+ (host, Some(pp.to_string()))
+ }
+ None => (raw_prefix, None),
+ };
+
+ let host = match HostDescription::new(raw_host) {
+ Ok(h) => h,
+ Err(e) => {
+ warn!("Invalid hostname pattern {}: {}", raw_host, e);
+ return None;
+ }
+ };
+
+ Some(Self { host, path_prefix })
+ }
+}
+
+#[derive(Debug)]
+pub struct ProxyEntry {
+    /// A URL prefix is made of a host and, optionally, a path prefix
+ pub url_prefix: UrlPrefix,
/// Priority with which this rule is considered (highest first)
pub priority: u32,
@@ -68,6 +104,10 @@ pub struct ProxyEntry {
/// when matching this rule
pub add_headers: Vec<(String, String)>,
+    /// Try to match all these redirections before forwarding to the backend
+ /// when matching this rule
+ pub redirects: Vec<(UrlPrefix, UrlPrefix, u16)>,
+
/// Number of calls in progress, used to deprioritize slow back-ends
pub calls_in_progress: atomic::AtomicI64,
/// Time of last call, used for round-robin selection
@@ -76,8 +116,7 @@ pub struct ProxyEntry {
impl PartialEq for ProxyEntry {
fn eq(&self, other: &Self) -> bool {
- self.host == other.host
- && self.path_prefix == other.path_prefix
+ self.url_prefix == other.url_prefix
&& self.priority == other.priority
&& self.service_name == other.service_name
&& self.target_addr == other.target_addr
@@ -88,6 +127,52 @@ impl PartialEq for ProxyEntry {
}
impl Eq for ProxyEntry {}
+impl ProxyEntry {
+ fn new(
+ service_name: String,
+ frontend: MatchTag,
+ target_addr: SocketAddr,
+ middleware: &[ConfigTag],
+ flags: ProxyEntryFlags,
+ ) -> Self {
+ let (url_prefix, priority, https_target) = match frontend {
+ MatchTag::Http(u, p) => (u, p, false),
+ MatchTag::HttpWithTls(u, p) => (u, p, true),
+ };
+
+ let mut add_headers = vec![];
+ let mut redirects = vec![];
+ for mid in middleware.into_iter() {
+ match mid {
+ ConfigTag::AddHeader(k, v) => add_headers.push((k.to_string(), v.clone())),
+ ConfigTag::AddRedirect(m, r, c) => redirects.push(((*m).clone(), (*r).clone(), *c)),
+ ConfigTag::LocalLb | ConfigTag::GlobalLb => {
+                    /* handled in the parent function */
+ ()
+ }
+ };
+ }
+
+ ProxyEntry {
+ // id
+ service_name,
+ // frontend
+ url_prefix,
+ priority,
+ // backend
+ target_addr,
+ https_target,
+ // middleware
+ flags,
+ add_headers,
+ redirects,
+ // internal
+ last_call: atomic::AtomicI64::from(0),
+ calls_in_progress: atomic::AtomicI64::from(0),
+ }
+ }
+}
+
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
pub struct ProxyEntryFlags {
/// Is the target healthy?
@@ -115,8 +200,8 @@ impl std::fmt::Display for ProxyEntry {
write!(
f,
"{}{} {}",
- self.host,
- self.path_prefix.as_deref().unwrap_or_default(),
+ self.url_prefix.host,
+ self.url_prefix.path_prefix.as_deref().unwrap_or_default(),
self.priority
)?;
if !self.flags.healthy {
@@ -144,62 +229,105 @@ pub struct ProxyConfig {
pub entries: Vec<ProxyEntry>,
}
-fn parse_tricot_tag(
- service_name: String,
- tag: &str,
- target_addr: SocketAddr,
- add_headers: &[(String, String)],
- flags: ProxyEntryFlags,
-) -> Option<ProxyEntry> {
- let splits = tag.split(' ').collect::<Vec<_>>();
- if (splits.len() != 2 && splits.len() != 3)
- || (splits[0] != "tricot" && splits[0] != "tricot-https")
- {
- return None;
- }
+#[derive(Debug)]
+enum ParsedTag<'a> {
+ Frontend(MatchTag),
+ Middleware(ConfigTag<'a>),
+}
+
+#[derive(Debug)]
+enum MatchTag {
+ /// HTTP backend (plain text)
+ Http(UrlPrefix, u32),
+ /// HTTPS backend (TLS encrypted)
+ HttpWithTls(UrlPrefix, u32),
+}
+
+#[derive(Debug)]
+enum ConfigTag<'a> {
+ AddHeader(&'a str, String),
+ AddRedirect(UrlPrefix, UrlPrefix, u16),
+ GlobalLb,
+ LocalLb,
+}
- let (host, path_prefix) = match splits[1].find('/') {
- Some(i) => {
- let (host, pp) = splits[1].split_at(i);
- (host, Some(pp.to_string()))
+fn parse_tricot_tags(tag: &str) -> Option<ParsedTag> {
+ let splits = tag.splitn(4, ' ').collect::<Vec<_>>();
+ let parsed_tag = match splits.as_slice() {
+ ["tricot", raw_prefix, maybe_priority @ ..] => {
+ // priority is set to 100 when value is invalid or missing
+ let priority: u32 = maybe_priority
+ .iter()
+ .next()
+ .map_or(Ok(100), |x| x.parse::<u32>())
+ .unwrap_or(100);
+ UrlPrefix::new(raw_prefix)
+ .map(|prefix| ParsedTag::Frontend(MatchTag::Http(prefix, priority)))
}
- None => (splits[1], None),
- };
+ ["tricot-https", raw_prefix, maybe_priority @ ..] => {
+ // priority is set to 100 when value is invalid or missing
+ let priority: u32 = maybe_priority
+ .iter()
+ .next()
+ .map_or(Ok(100), |x| x.parse::<u32>())
+ .unwrap_or(100);
+ UrlPrefix::new(raw_prefix)
+ .map(|prefix| ParsedTag::Frontend(MatchTag::HttpWithTls(prefix, priority)))
+ }
+ ["tricot-add-header", header_key, header_values @ ..] => Some(ParsedTag::Middleware(
+ ConfigTag::AddHeader(header_key, header_values.join(" ")),
+ )),
+ ["tricot-add-redirect", raw_match, raw_replace, maybe_raw_code @ ..] => {
+ let (p_match, p_replace) =
+ match (UrlPrefix::new(raw_match), UrlPrefix::new(raw_replace)) {
+ (Some(m), Some(r)) => (m, r),
+ _ => {
+ debug!(
+                            "tag {} is ignored, one of the URL prefixes can't be parsed",
+ tag
+ );
+ return None;
+ }
+ };
- let priority = match splits.len() {
- 3 => splits[2].parse().ok()?,
- _ => 100,
- };
+ if matches!(p_replace.host, HostDescription::Pattern(_)) {
+ debug!(
+                    "tag {} ignored: redirecting to a glob pattern is not supported",
+ tag
+ );
+ return None;
+ }
- let host = match HostDescription::new(host) {
- Ok(h) => h,
- Err(e) => {
- warn!("Invalid hostname pattern {}: {}", host, e);
- return None;
+ let maybe_parsed_code = maybe_raw_code
+ .iter()
+ .next()
+ .map(|c| c.parse::<u16>().ok())
+ .flatten();
+ let http_code = match maybe_parsed_code {
+ Some(301) => 301,
+ Some(302) => 302,
+ Some(303) => 303,
+ Some(307) => 307,
+ _ => {
+ debug!(
+ "tag {} has a missing or invalid http code, setting it to 302",
+ tag
+ );
+ 302
+ }
+ };
+
+ Some(ParsedTag::Middleware(ConfigTag::AddRedirect(
+ p_match, p_replace, http_code,
+ )))
}
+ ["tricot-global-lb", ..] => Some(ParsedTag::Middleware(ConfigTag::GlobalLb)),
+ ["tricot-local-lb", ..] => Some(ParsedTag::Middleware(ConfigTag::LocalLb)),
+ _ => None,
};
- Some(ProxyEntry {
- service_name,
- target_addr,
- https_target: (splits[0] == "tricot-https"),
- host,
- flags,
- path_prefix,
- priority,
- add_headers: add_headers.to_vec(),
- last_call: atomic::AtomicI64::from(0),
- calls_in_progress: atomic::AtomicI64::from(0),
- })
-}
-
-fn parse_tricot_add_header_tag(tag: &str) -> Option<(String, String)> {
- let splits = tag.splitn(3, ' ').collect::<Vec<_>>();
- if splits.len() == 3 && splits[0] == "tricot-add-header" {
- Some((splits[1].to_string(), splits[2].to_string()))
- } else {
- None
- }
+ trace!("tag {} parsed as {:?}", tag, parsed_tag);
+ parsed_tag
}
fn parse_consul_service(
@@ -208,8 +336,6 @@ fn parse_consul_service(
) -> Vec<ProxyEntry> {
trace!("Parsing service: {:#?}", s);
- let mut entries = vec![];
-
let ip_addr = match s.service.address.parse() {
Ok(ip) => ip,
_ => match s.node.address.parse() {
@@ -225,31 +351,48 @@ fn parse_consul_service(
};
let addr = SocketAddr::new(ip_addr, s.service.port);
- if s.service.tags.contains(&"tricot-global-lb".into()) {
- flags.global_lb = true;
- } else if s.service.tags.contains(&"tricot-site-lb".into()) {
- flags.site_lb = true;
- };
-
- let mut add_headers = vec![];
+ // tag parsing
+ let mut collected_middleware = vec![];
+ let mut collected_frontends = vec![];
for tag in s.service.tags.iter() {
- if let Some(pair) = parse_tricot_add_header_tag(tag) {
- add_headers.push(pair);
+ match parse_tricot_tags(tag) {
+ Some(ParsedTag::Frontend(x)) => collected_frontends.push(x),
+ Some(ParsedTag::Middleware(y)) => collected_middleware.push(y),
+ _ => trace!(
+ "service {}: tag '{}' could not be parsed",
+ s.service.service,
+ tag
+ ),
}
}
- for tag in s.service.tags.iter() {
- if let Some(ent) = parse_tricot_tag(
- s.service.service.clone(),
- tag,
- addr,
- &add_headers[..],
- flags,
- ) {
- entries.push(ent);
- }
+ // some legacy processing that would need a refactor later
+ for mid in collected_middleware.iter() {
+ match mid {
+ ConfigTag::AddHeader(_, _) | ConfigTag::AddRedirect(_, _, _) =>
+ /* not handled here */
+ {
+ ()
+ }
+ ConfigTag::GlobalLb => flags.global_lb = true,
+ ConfigTag::LocalLb => flags.site_lb = true,
+ };
}
+ // build proxy entries
+ let entries = collected_frontends
+ .into_iter()
+ .map(|frt| {
+ ProxyEntry::new(
+ s.service.service.clone(),
+ frt,
+ addr,
+ collected_middleware.as_ref(),
+ flags,
+ )
+ })
+ .collect::<Vec<_>>();
+
trace!("Result of parsing service:");
for ent in entries.iter() {
trace!(" {}", ent);
@@ -347,8 +490,8 @@ impl ProxyConfigMetrics {
let mut patterns = HashMap::new();
for ent in rx.borrow().entries.iter() {
let attrs = (
- ent.host.to_string(),
- ent.path_prefix.clone().unwrap_or_default(),
+ ent.url_prefix.host.to_string(),
+ ent.url_prefix.path_prefix.clone().unwrap_or_default(),
ent.service_name.clone(),
);
*patterns.entry(attrs).or_default() += 1;
@@ -378,8 +521,8 @@ mod tests {
#[test]
fn test_parse_tricot_add_header_tag() {
- match parse_tricot_add_header_tag("tricot-add-header Content-Security-Policy default-src 'none'; img-src 'self'; script-src 'self'; style-src 'self'") {
- Some((name, value)) => {
+ match parse_tricot_tags("tricot-add-header Content-Security-Policy default-src 'none'; img-src 'self'; script-src 'self'; style-src 'self'") {
+ Some(ParsedTag::Middleware(ConfigTag::AddHeader(name, value))) => {
assert_eq!(name, "Content-Security-Policy");
assert_eq!(value, "default-src 'none'; img-src 'self'; script-src 'self'; style-src 'self'");
}