author     Alex Auvolat <alex@adnab.me>    2023-01-26 16:20:41 +0100
committer  Alex Auvolat <alex@adnab.me>    2023-01-26 16:20:41 +0100
commit     94d559ae00bdb899c4463667a9d950b27e5bb23c (patch)
tree       a734a4d05cbedfaaff4f3bf63a0b659499b797bb /src
parent     c7d0ad0aa0e492b913c5dda8ff1a7ad5a579fb3a (diff)
parent     5fb383fe4c248181e27df12a57849886e50bacb7 (diff)
download   garage-94d559ae00bdb899c4463667a9d950b27e5bb23c.tar.gz
           garage-94d559ae00bdb899c4463667a9d950b27e5bb23c.zip
Merge branch 'main' into report-disk-usage
Diffstat (limited to 'src')
-rw-r--r--  src/api/Cargo.toml             |  12
-rw-r--r--  src/api/k2v/batch.rs           |  11
-rw-r--r--  src/api/k2v/item.rs            |   3
-rw-r--r--  src/api/s3/list.rs             |  11
-rw-r--r--  src/api/s3/post_object.rs      |   5
-rw-r--r--  src/api/s3/put.rs              |   3
-rw-r--r--  src/block/Cargo.toml           |   4
-rw-r--r--  src/db/Cargo.toml              |   8
-rw-r--r--  src/garage/Cargo.toml          |   8
-rw-r--r--  src/garage/tests/k2v/batch.rs  | 103
-rw-r--r--  src/garage/tests/k2v/item.rs   |  37
-rw-r--r--  src/k2v-client/Cargo.toml      |  14
-rw-r--r--  src/model/Cargo.toml           |   8
-rw-r--r--  src/model/k2v/causality.rs     |   7
-rw-r--r--  src/model/k2v/item_table.rs    |   4
-rw-r--r--  src/rpc/Cargo.toml             |   2
-rw-r--r--  src/table/Cargo.toml           |   2
-rw-r--r--  src/util/Cargo.toml            |   8
-rw-r--r--  src/util/data.rs               |   4
-rw-r--r--  src/util/time.rs               |   2
-rw-r--r--  src/web/Cargo.toml             |   2
21 files changed, 143 insertions, 115 deletions
diff --git a/src/api/Cargo.toml b/src/api/Cargo.toml
index dba0bbef..24c48604 100644
--- a/src/api/Cargo.toml
+++ b/src/api/Cargo.toml
@@ -21,28 +21,28 @@ garage_util = { version = "0.8.1", path = "../util" }
garage_rpc = { version = "0.8.1", path = "../rpc" }
async-trait = "0.1.7"
-base64 = "0.13"
+base64 = "0.21"
bytes = "1.0"
chrono = "0.4"
crypto-common = "0.1"
err-derive = "0.3"
hex = "0.4"
hmac = "0.12"
-idna = "0.2"
-tracing = "0.1.30"
+idna = "0.3"
+tracing = "0.1"
md-5 = "0.10"
nom = "7.1"
sha2 = "0.10"
futures = "0.3"
futures-util = "0.3"
-pin-project = "1.0.11"
+pin-project = "1.0.12"
tokio = { version = "1.0", default-features = false, features = ["rt", "rt-multi-thread", "io-util", "net", "time", "macros", "sync", "signal", "fs"] }
tokio-stream = "0.1"
form_urlencoded = "1.0.0"
http = "0.2"
-httpdate = "0.3"
+httpdate = "1.0"
http-range = "0.1"
hyper = { version = "0.14", features = ["server", "http1", "runtime", "tcp", "stream"] }
multer = "2.0"
@@ -52,7 +52,7 @@ serde = { version = "1.0", features = ["derive"] }
serde_bytes = "0.11"
serde_json = "1.0"
quick-xml = { version = "0.21", features = [ "serialize" ] }
-url = "2.1"
+url = "2.3"
opentelemetry = "0.17"
opentelemetry-prometheus = { version = "0.10", optional = true }
diff --git a/src/api/k2v/batch.rs b/src/api/k2v/batch.rs
index 78035362..82b4f7e3 100644
--- a/src/api/k2v/batch.rs
+++ b/src/api/k2v/batch.rs
@@ -1,5 +1,6 @@
use std::sync::Arc;
+use base64::prelude::*;
use hyper::{Body, Request, Response, StatusCode};
use serde::{Deserialize, Serialize};
@@ -31,9 +32,11 @@ pub async fn handle_insert_batch(
.transpose()
.ok_or_bad_request("Invalid causality token")?;
let v = match it.v {
-Some(vs) => {
-DvvsValue::Value(base64::decode(vs).ok_or_bad_request("Invalid base64 value")?)
-}
+Some(vs) => DvvsValue::Value(
+BASE64_STANDARD
+.decode(vs)
+.ok_or_bad_request("Invalid base64 value")?,
+),
None => DvvsValue::Deleted,
};
items2.push((it.pk, it.sk, ct, v));
@@ -322,7 +325,7 @@ impl ReadBatchResponseItem {
.values()
.iter()
.map(|v| match v {
-DvvsValue::Value(x) => Some(base64::encode(x)),
+DvvsValue::Value(x) => Some(BASE64_STANDARD.encode(x)),
DvvsValue::Deleted => None,
})
.collect::<Vec<_>>();
diff --git a/src/api/k2v/item.rs b/src/api/k2v/item.rs
index f85138c7..041382c0 100644
--- a/src/api/k2v/item.rs
+++ b/src/api/k2v/item.rs
@@ -1,5 +1,6 @@
use std::sync::Arc;
+use base64::prelude::*;
use http::header;
use hyper::{Body, Request, Response, StatusCode};
@@ -81,7 +82,7 @@ impl ReturnFormat {
.iter()
.map(|v| match v {
DvvsValue::Deleted => serde_json::Value::Null,
-DvvsValue::Value(v) => serde_json::Value::String(base64::encode(v)),
+DvvsValue::Value(v) => serde_json::Value::String(BASE64_STANDARD.encode(v)),
})
.collect::<Vec<_>>();
let json_body =
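The k2v handlers above show the core of this merge's base64 migration: base64 0.21 removes the free `base64::encode`/`base64::decode` functions of 0.13 in favor of explicit engines imported from `base64::prelude`. A minimal before/after sketch, assuming base64 0.21 (the function itself is illustrative, not part of Garage):

```rust
use base64::prelude::*; // BASE64_STANDARD, BASE64_URL_SAFE_NO_PAD, and the Engine trait

fn roundtrip(data: &[u8]) -> Result<Vec<u8>, base64::DecodeError> {
    // base64 0.13: let encoded = base64::encode(data);
    let encoded = BASE64_STANDARD.encode(data);
    // base64 0.13: base64::decode(&encoded)
    BASE64_STANDARD.decode(encoded)
}
```

The rest of the base64 churn in this diff is the same mechanical substitution.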
diff --git a/src/api/s3/list.rs b/src/api/s3/list.rs
index e5f486c8..5cb0d65a 100644
--- a/src/api/s3/list.rs
+++ b/src/api/s3/list.rs
@@ -3,6 +3,7 @@ use std::collections::{BTreeMap, BTreeSet};
use std::iter::{Iterator, Peekable};
use std::sync::Arc;
+use base64::prelude::*;
use hyper::{Body, Response};
use garage_util::data::*;
@@ -129,11 +130,11 @@ pub async fn handle_list(
next_continuation_token: match (query.is_v2, &pagination) {
(true, Some(RangeBegin::AfterKey { key })) => Some(s3_xml::Value(format!(
"]{}",
-base64::encode(key.as_bytes())
+BASE64_STANDARD.encode(key.as_bytes())
))),
(true, Some(RangeBegin::IncludingKey { key, .. })) => Some(s3_xml::Value(format!(
"[{}",
-base64::encode(key.as_bytes())
+BASE64_STANDARD.encode(key.as_bytes())
))),
_ => None,
},
@@ -583,14 +584,16 @@ impl ListObjectsQuery {
(Some(token), _) => match &token[..1] {
"[" => Ok(RangeBegin::IncludingKey {
key: String::from_utf8(
-base64::decode(token[1..].as_bytes())
+BASE64_STANDARD
+.decode(token[1..].as_bytes())
.ok_or_bad_request("Invalid continuation token")?,
)?,
fallback_key: None,
}),
"]" => Ok(RangeBegin::AfterKey {
key: String::from_utf8(
-base64::decode(token[1..].as_bytes())
+BASE64_STANDARD
+.decode(token[1..].as_bytes())
.ok_or_bad_request("Invalid continuation token")?,
)?,
}),
diff --git a/src/api/s3/post_object.rs b/src/api/s3/post_object.rs
index d063faa4..da542526 100644
--- a/src/api/s3/post_object.rs
+++ b/src/api/s3/post_object.rs
@@ -4,6 +4,7 @@ use std::ops::RangeInclusive;
use std::sync::Arc;
use std::task::{Context, Poll};
+use base64::prelude::*;
use bytes::Bytes;
use chrono::{DateTime, Duration, Utc};
use futures::{Stream, StreamExt};
@@ -138,7 +139,9 @@ pub async fn handle_post_object(
.get_existing_bucket(bucket_id)
.await?;
-let decoded_policy = base64::decode(&policy).ok_or_bad_request("Invalid policy")?;
+let decoded_policy = BASE64_STANDARD
+.decode(&policy)
+.ok_or_bad_request("Invalid policy")?;
let decoded_policy: Policy = serde_json::from_slice(&decoded_policy).ok_or_bad_request("Invalid policy")?;
diff --git a/src/api/s3/put.rs b/src/api/s3/put.rs
index c08fe40a..350ab884 100644
--- a/src/api/s3/put.rs
+++ b/src/api/s3/put.rs
@@ -1,6 +1,7 @@
use std::collections::{BTreeMap, BTreeSet, HashMap};
use std::sync::Arc;
+use base64::prelude::*;
use futures::prelude::*;
use hyper::body::{Body, Bytes};
use hyper::header::{HeaderMap, HeaderValue};
@@ -207,7 +208,7 @@ fn ensure_checksum_matches(
}
}
if let Some(expected_md5) = content_md5 {
-if expected_md5.trim_matches('"') != base64::encode(data_md5sum) {
+if expected_md5.trim_matches('"') != BASE64_STANDARD.encode(data_md5sum) {
return Err(Error::bad_request("Unable to validate content-md5"));
} else {
trace!("Successfully validated content-md5");
diff --git a/src/block/Cargo.toml b/src/block/Cargo.toml
index 1e4eb64e..a7e8bc2c 100644
--- a/src/block/Cargo.toml
+++ b/src/block/Cargo.toml
@@ -25,11 +25,11 @@ arc-swap = "1.5"
async-trait = "0.1.7"
bytes = "1.0"
hex = "0.4"
-tracing = "0.1.30"
+tracing = "0.1"
rand = "0.8"
async-compression = { version = "0.3", features = ["tokio", "zstd"] }
-zstd = { version = "0.9", default-features = false }
+zstd = { version = "0.12", default-features = false }
serde = { version = "1.0", default-features = false, features = ["derive", "rc"] }
serde_bytes = "0.11"
diff --git a/src/db/Cargo.toml b/src/db/Cargo.toml
index c479d9d1..95bde6d5 100644
--- a/src/db/Cargo.toml
+++ b/src/db/Cargo.toml
@@ -19,18 +19,18 @@ required-features = ["cli"]
[dependencies]
err-derive = "0.3"
hexdump = "0.1"
-tracing = "0.1.30"
+tracing = "0.1"
heed = { version = "0.11", default-features = false, features = ["lmdb"], optional = true }
-rusqlite = { version = "0.27", optional = true }
+rusqlite = { version = "0.28", optional = true }
sled = { version = "0.34", optional = true }
# cli deps
-clap = { version = "3.1.18", optional = true, features = ["derive", "env"] }
+clap = { version = "4.1", optional = true, features = ["derive", "env"] }
pretty_env_logger = { version = "0.4", optional = true }
[dev-dependencies]
-mktemp = "0.4"
+mktemp = "0.5"
[features]
default = [ "sled" ]
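For context, the s3/list.rs hunks above keep the existing continuation-token scheme, a `[` or `]` prefix (resume at, or after, a key) followed by the base64-encoded key, and only swap the encode/decode calls to the new engine. A rough standalone sketch of that scheme under base64 0.21; the type and function names are simplified stand-ins for the real ListObjectsQuery code:

```rust
use base64::prelude::*;

enum RangeBegin {
    IncludingKey { key: String },
    AfterKey { key: String },
}

// Build a v2 continuation token: '[' = resume at this key, ']' = resume after it.
fn encode_token(begin: &RangeBegin) -> String {
    match begin {
        RangeBegin::IncludingKey { key } => format!("[{}", BASE64_STANDARD.encode(key.as_bytes())),
        RangeBegin::AfterKey { key } => format!("]{}", BASE64_STANDARD.encode(key.as_bytes())),
    }
}

// Parse a token back into a range start, rejecting anything malformed.
fn decode_token(token: &str) -> Result<RangeBegin, String> {
    let mut chars = token.chars();
    let prefix = chars
        .next()
        .ok_or_else(|| "empty continuation token".to_string())?;
    let key = String::from_utf8(
        BASE64_STANDARD
            .decode(chars.as_str())
            .map_err(|_| "invalid continuation token".to_string())?,
    )
    .map_err(|_| "invalid continuation token".to_string())?;
    match prefix {
        '[' => Ok(RangeBegin::IncludingKey { key }),
        ']' => Ok(RangeBegin::AfterKey { key }),
        _ => Err("invalid continuation token".into()),
    }
}
```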
diff --git a/src/garage/Cargo.toml b/src/garage/Cargo.toml
index b43b0242..f938f356 100644
--- a/src/garage/Cargo.toml
+++ b/src/garage/Cargo.toml
@@ -33,10 +33,10 @@ garage_web = { version = "0.8.1", path = "../web" }
backtrace = "0.3"
bytes = "1.0"
bytesize = "1.1"
-timeago = "0.3"
+timeago = "0.4"
parse_duration = "2.1"
hex = "0.4"
-tracing = { version = "0.1.30" }
+tracing = { version = "0.1" }
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
rand = "0.8"
async-trait = "0.1.7"
@@ -45,7 +45,7 @@ sodiumoxide = { version = "0.2.5-0", package = "kuska-sodiumoxide" }
serde = { version = "1.0", default-features = false, features = ["derive", "rc"] }
serde_bytes = "0.11"
structopt = { version = "0.3", default-features = false }
-toml = "0.5"
+toml = "0.6"
futures = "0.3"
futures-util = "0.3"
@@ -69,7 +69,7 @@ sha2 = "0.10"
static_init = "1.0"
assert-json-diff = "2.0"
serde_json = "1.0"
-base64 = "0.13"
+base64 = "0.21"
[features]
diff --git a/src/garage/tests/k2v/batch.rs b/src/garage/tests/k2v/batch.rs
index 6abba1c5..595d0ba8 100644
--- a/src/garage/tests/k2v/batch.rs
+++ b/src/garage/tests/k2v/batch.rs
@@ -3,6 +3,7 @@ use std::collections::HashMap;
use crate::common;
use assert_json_diff::assert_json_eq;
+use base64::prelude::*;
use serde_json::json;
use super::json_body;
@@ -36,12 +37,12 @@ async fn test_batch() {
{{"pk": "root", "sk": "d.2", "ct": null, "v": "{}"}},
{{"pk": "root", "sk": "e", "ct": null, "v": "{}"}}
]"#,
-base64::encode(values.get(&"a").unwrap()),
-base64::encode(values.get(&"b").unwrap()),
-base64::encode(values.get(&"c").unwrap()),
-base64::encode(values.get(&"d.1").unwrap()),
-base64::encode(values.get(&"d.2").unwrap()),
-base64::encode(values.get(&"e").unwrap()),
+BASE64_STANDARD.encode(values.get(&"a").unwrap()),
+BASE64_STANDARD.encode(values.get(&"b").unwrap()),
+BASE64_STANDARD.encode(values.get(&"c").unwrap()),
+BASE64_STANDARD.encode(values.get(&"d.1").unwrap()),
+BASE64_STANDARD.encode(values.get(&"d.2").unwrap()),
+BASE64_STANDARD.encode(values.get(&"e").unwrap()),
)
.into_bytes(),
)
@@ -120,12 +121,12 @@ async fn test_batch() {
"tombstones": false,
"singleItem": false,
"items": [
-{"sk": "a", "ct": ct.get("a").unwrap(), "v": [base64::encode(values.get("a").unwrap())]},
-{"sk": "b", "ct": ct.get("b").unwrap(), "v": [base64::encode(values.get("b").unwrap())]},
-{"sk": "c", "ct": ct.get("c").unwrap(), "v": [base64::encode(values.get("c").unwrap())]},
-{"sk": "d.1", "ct": ct.get("d.1").unwrap(), "v": [base64::encode(values.get("d.1").unwrap())]},
-{"sk": "d.2", "ct": ct.get("d.2").unwrap(), "v": [base64::encode(values.get("d.2").unwrap())]},
-{"sk": "e", "ct": ct.get("e").unwrap(), "v": [base64::encode(values.get("e").unwrap())]}
+{"sk": "a", "ct": ct.get("a").unwrap(), "v": [BASE64_STANDARD.encode(values.get("a").unwrap())]},
+{"sk": "b", "ct": ct.get("b").unwrap(), "v": [BASE64_STANDARD.encode(values.get("b").unwrap())]},
+{"sk": "c", "ct": ct.get("c").unwrap(), "v": [BASE64_STANDARD.encode(values.get("c").unwrap())]},
+{"sk": "d.1", "ct": ct.get("d.1").unwrap(), "v": [BASE64_STANDARD.encode(values.get("d.1").unwrap())]},
+{"sk": "d.2", "ct": ct.get("d.2").unwrap(), "v": [BASE64_STANDARD.encode(values.get("d.2").unwrap())]},
+{"sk": "e", "ct": ct.get("e").unwrap(), "v": [BASE64_STANDARD.encode(values.get("e").unwrap())]}
],
"more": false,
"nextStart": null,
@@ -141,10 +142,10 @@ async fn test_batch() {
"tombstones": false,
"singleItem": false,
"items": [
-{"sk": "c", "ct": ct.get("c").unwrap(), "v": [base64::encode(values.get("c").unwrap())]},
-{"sk": "d.1", "ct": ct.get("d.1").unwrap(), "v": [base64::encode(values.get("d.1").unwrap())]},
-{"sk": "d.2", "ct": ct.get("d.2").unwrap(), "v": [base64::encode(values.get("d.2").unwrap())]},
-{"sk": "e", "ct": ct.get("e").unwrap(), "v": [base64::encode(values.get("e").unwrap())]}
+{"sk": "c", "ct": ct.get("c").unwrap(), "v": [BASE64_STANDARD.encode(values.get("c").unwrap())]},
+{"sk": "d.1", "ct": ct.get("d.1").unwrap(), "v": [BASE64_STANDARD.encode(values.get("d.1").unwrap())]},
+{"sk": "d.2", "ct": ct.get("d.2").unwrap(), "v": [BASE64_STANDARD.encode(values.get("d.2").unwrap())]},
+{"sk": "e", "ct": ct.get("e").unwrap(), "v": [BASE64_STANDARD.encode(values.get("e").unwrap())]}
],
"more": false,
"nextStart": null,
@@ -160,9 +161,9 @@ async fn test_batch() {
"tombstones": false,
"singleItem": false,
"items": [
-{"sk": "c", "ct": ct.get("c").unwrap(), "v": [base64::encode(values.get("c").unwrap())]},
-{"sk": "d.1", "ct": ct.get("d.1").unwrap(), "v": [base64::encode(values.get("d.1").unwrap())]},
-{"sk": "d.2", "ct": ct.get("d.2").unwrap(), "v": [base64::encode(values.get("d.2").unwrap())]},
+{"sk": "c", "ct": ct.get("c").unwrap(), "v": [BASE64_STANDARD.encode(values.get("c").unwrap())]},
+{"sk": "d.1", "ct": ct.get("d.1").unwrap(), "v": [BASE64_STANDARD.encode(values.get("d.1").unwrap())]},
+{"sk": "d.2", "ct": ct.get("d.2").unwrap(), "v": [BASE64_STANDARD.encode(values.get("d.2").unwrap())]},
],
"more": false,
"nextStart": null,
@@ -178,8 +179,8 @@ async fn test_batch() {
"tombstones": false,
"singleItem": false,
"items": [
-{"sk": "c", "ct": ct.get("c").unwrap(), "v": [base64::encode(values.get("c").unwrap())]},
-{"sk": "b", "ct": ct.get("b").unwrap(), "v": [base64::encode(values.get("b").unwrap())]},
+{"sk": "c", "ct": ct.get("c").unwrap(), "v": [BASE64_STANDARD.encode(values.get("c").unwrap())]},
+{"sk": "b", "ct": ct.get("b").unwrap(), "v": [BASE64_STANDARD.encode(values.get("b").unwrap())]},
],
"more": false,
"nextStart": null,
@@ -195,8 +196,8 @@ async fn test_batch() {
"tombstones": false,
"singleItem": false,
"items": [
-{"sk": "c", "ct": ct.get("c").unwrap(), "v": [base64::encode(values.get("c").unwrap())]},
-{"sk": "b", "ct": ct.get("b").unwrap(), "v": [base64::encode(values.get("b").unwrap())]},
+{"sk": "c", "ct": ct.get("c").unwrap(), "v": [BASE64_STANDARD.encode(values.get("c").unwrap())]},
+{"sk": "b", "ct": ct.get("b").unwrap(), "v": [BASE64_STANDARD.encode(values.get("b").unwrap())]},
],
"more": false,
"nextStart": null,
@@ -212,7 +213,7 @@ async fn test_batch() {
"tombstones": false,
"singleItem": false,
"items": [
-{"sk": "a", "ct": ct.get("a").unwrap(), "v": [base64::encode(values.get("a").unwrap())]}
+{"sk": "a", "ct": ct.get("a").unwrap(), "v": [BASE64_STANDARD.encode(values.get("a").unwrap())]}
],
"more": true,
"nextStart": "b",
@@ -228,8 +229,8 @@ async fn test_batch() {
"tombstones": false,
"singleItem": false,
"items": [
-{"sk": "d.1", "ct": ct.get("d.1").unwrap(), "v": [base64::encode(values.get("d.1").unwrap())]},
-{"sk": "d.2", "ct": ct.get("d.2").unwrap(), "v": [base64::encode(values.get("d.2").unwrap())]}
+{"sk": "d.1", "ct": ct.get("d.1").unwrap(), "v": [BASE64_STANDARD.encode(values.get("d.1").unwrap())]},
+{"sk": "d.2", "ct": ct.get("d.2").unwrap(), "v": [BASE64_STANDARD.encode(values.get("d.2").unwrap())]}
],
"more": false,
"nextStart": null,
@@ -255,10 +256,10 @@ async fn test_batch() {
{{"pk": "root", "sk": "d.2", "ct": null, "v": "{}"}}
]"#,
ct.get(&"b").unwrap(),
-base64::encode(values.get(&"c'").unwrap()),
+BASE64_STANDARD.encode(values.get(&"c'").unwrap()),
ct.get(&"d.1").unwrap(),
-base64::encode(values.get(&"d.1'").unwrap()),
-base64::encode(values.get(&"d.2'").unwrap()),
+BASE64_STANDARD.encode(values.get(&"d.1'").unwrap()),
+BASE64_STANDARD.encode(values.get(&"d.2'").unwrap()),
)
.into_bytes(),
)
@@ -333,11 +334,11 @@ async fn test_batch() {
"tombstones": false,
"singleItem": false,
"items": [
-{"sk": "a", "ct": ct.get("a").unwrap(), "v": [base64::encode(values.get("a").unwrap())]},
-{"sk": "c", "ct": ct.get("c").unwrap(), "v": [base64::encode(values.get("c").unwrap()), base64::encode(values.get("c'").unwrap())]},
-{"sk": "d.1", "ct": ct.get("d.1").unwrap(), "v": [base64::encode(values.get("d.1'").unwrap())]},
-{"sk": "d.2", "ct": ct.get("d.2").unwrap(), "v": [base64::encode(values.get("d.2").unwrap()), base64::encode(values.get("d.2'").unwrap())]},
-{"sk": "e", "ct": ct.get("e").unwrap(), "v": [base64::encode(values.get("e").unwrap())]}
+{"sk": "a", "ct": ct.get("a").unwrap(), "v": [BASE64_STANDARD.encode(values.get("a").unwrap())]},
+{"sk": "c", "ct": ct.get("c").unwrap(), "v": [BASE64_STANDARD.encode(values.get("c").unwrap()), BASE64_STANDARD.encode(values.get("c'").unwrap())]},
+{"sk": "d.1", "ct": ct.get("d.1").unwrap(), "v": [BASE64_STANDARD.encode(values.get("d.1'").unwrap())]},
+{"sk": "d.2", "ct": ct.get("d.2").unwrap(), "v": [BASE64_STANDARD.encode(values.get("d.2").unwrap()), BASE64_STANDARD.encode(values.get("d.2'").unwrap())]},
+{"sk": "e", "ct": ct.get("e").unwrap(), "v": [BASE64_STANDARD.encode(values.get("e").unwrap())]}
],
"more": false,
"nextStart": null,
@@ -353,8 +354,8 @@ async fn test_batch() {
"tombstones": false,
"singleItem": false,
"items": [
-{"sk": "d.1", "ct": ct.get("d.1").unwrap(), "v": [base64::encode(values.get("d.1'").unwrap())]},
-{"sk": "d.2", "ct": ct.get("d.2").unwrap(), "v": [base64::encode(values.get("d.2").unwrap()), base64::encode(values.get("d.2'").unwrap())]},
+{"sk": "d.1", "ct": ct.get("d.1").unwrap(), "v": [BASE64_STANDARD.encode(values.get("d.1'").unwrap())]},
+{"sk": "d.2", "ct": ct.get("d.2").unwrap(), "v": [BASE64_STANDARD.encode(values.get("d.2").unwrap()), BASE64_STANDARD.encode(values.get("d.2'").unwrap())]},
],
"more": false,
"nextStart": null,
@@ -370,7 +371,7 @@ async fn test_batch() {
"tombstones": false,
"singleItem": false,
"items": [
-{"sk": "d.1", "ct": ct.get("d.1").unwrap(), "v": [base64::encode(values.get("d.1'").unwrap())]},
+{"sk": "d.1", "ct": ct.get("d.1").unwrap(), "v": [BASE64_STANDARD.encode(values.get("d.1'").unwrap())]},
],
"more": false,
"nextStart": null,
@@ -386,7 +387,7 @@ async fn test_batch() {
"tombstones": false,
"singleItem": false,
"items": [
-{"sk": "d.1", "ct": ct.get("d.1").unwrap(), "v": [base64::encode(values.get("d.1'").unwrap())]},
+{"sk": "d.1", "ct": ct.get("d.1").unwrap(), "v": [BASE64_STANDARD.encode(values.get("d.1'").unwrap())]},
],
"more": true,
"nextStart": "d.2",
@@ -402,7 +403,7 @@ async fn test_batch() {
"tombstones": false,
"singleItem": false,
"items": [
-{"sk": "d.2", "ct": ct.get("d.2").unwrap(), "v": [base64::encode(values.get("d.2").unwrap()), base64::encode(values.get("d.2'").unwrap())]},
+{"sk": "d.2", "ct": ct.get("d.2").unwrap(), "v": [BASE64_STANDARD.encode(values.get("d.2").unwrap()), BASE64_STANDARD.encode(values.get("d.2'").unwrap())]},
],
"more": false,
"nextStart": null,
@@ -418,8 +419,8 @@ async fn test_batch() {
"tombstones": false,
"singleItem": false,
"items": [
-{"sk": "d.2", "ct": ct.get("d.2").unwrap(), "v": [base64::encode(values.get("d.2").unwrap()), base64::encode(values.get("d.2'").unwrap())]},
-{"sk": "d.1", "ct": ct.get("d.1").unwrap(), "v": [base64::encode(values.get("d.1'").unwrap())]},
+{"sk": "d.2", "ct": ct.get("d.2").unwrap(), "v": [BASE64_STANDARD.encode(values.get("d.2").unwrap()), BASE64_STANDARD.encode(values.get("d.2'").unwrap())]},
+{"sk": "d.1", "ct": ct.get("d.1").unwrap(), "v": [BASE64_STANDARD.encode(values.get("d.1'").unwrap())]},
],
"more": false,
"nextStart": null,
@@ -435,8 +436,8 @@ async fn test_batch() {
"tombstones": false,
"singleItem": false,
"items": [
-{"sk": "d.2", "ct": ct.get("d.2").unwrap(), "v": [base64::encode(values.get("d.2").unwrap()), base64::encode(values.get("d.2'").unwrap())]},
-{"sk": "d.1", "ct": ct.get("d.1").unwrap(), "v": [base64::encode(values.get("d.1'").unwrap())]},
+{"sk": "d.2", "ct": ct.get("d.2").unwrap(), "v": [BASE64_STANDARD.encode(values.get("d.2").unwrap()), BASE64_STANDARD.encode(values.get("d.2'").unwrap())]},
+{"sk": "d.1", "ct": ct.get("d.1").unwrap(), "v": [BASE64_STANDARD.encode(values.get("d.1'").unwrap())]},
],
"more": false,
"nextStart": null,
@@ -452,8 +453,8 @@ async fn test_batch() {
"tombstones": false,
"singleItem": false,
"items": [
-{"sk": "d.1", "ct": ct.get("d.1").unwrap(), "v": [base64::encode(values.get("d.1'").unwrap())]},
-{"sk": "d.2", "ct": ct.get("d.2").unwrap(), "v": [base64::encode(values.get("d.2").unwrap()), base64::encode(values.get("d.2'").unwrap())]},
+{"sk": "d.1", "ct": ct.get("d.1").unwrap(), "v": [BASE64_STANDARD.encode(values.get("d.1'").unwrap())]},
+{"sk": "d.2", "ct": ct.get("d.2").unwrap(), "v": [BASE64_STANDARD.encode(values.get("d.2").unwrap()), BASE64_STANDARD.encode(values.get("d.2'").unwrap())]},
],
"more": false,
"nextStart": null,
@@ -563,8 +564,8 @@ async fn test_batch() {
"tombstones": false,
"singleItem": false,
"items": [
-{"sk": "c", "ct": ct.get("c").unwrap(), "v": [base64::encode(values.get("c").unwrap()), base64::encode(values.get("c'").unwrap())]},
-{"sk": "e", "ct": ct.get("e").unwrap(), "v": [base64::encode(values.get("e").unwrap())]}
+{"sk": "c", "ct": ct.get("c").unwrap(), "v": [BASE64_STANDARD.encode(values.get("c").unwrap()), BASE64_STANDARD.encode(values.get("c'").unwrap())]},
+{"sk": "e", "ct": ct.get("e").unwrap(), "v": [BASE64_STANDARD.encode(values.get("e").unwrap())]}
],
"more": false,
"nextStart": null,
@@ -580,8 +581,8 @@ async fn test_batch() {
"tombstones": false,
"singleItem": false,
"items": [
-{"sk": "e", "ct": ct.get("e").unwrap(), "v": [base64::encode(values.get("e").unwrap())]},
-{"sk": "c", "ct": ct.get("c").unwrap(), "v": [base64::encode(values.get("c").unwrap()), base64::encode(values.get("c'").unwrap())]},
+{"sk": "e", "ct": ct.get("e").unwrap(), "v": [BASE64_STANDARD.encode(values.get("e").unwrap())]},
+{"sk": "c", "ct": ct.get("c").unwrap(), "v": [BASE64_STANDARD.encode(values.get("c").unwrap()), BASE64_STANDARD.encode(values.get("c'").unwrap())]},
],
"more": false,
"nextStart": null,
@@ -599,10 +600,10 @@ async fn test_batch() {
"items": [
{"sk": "a", "ct": ct.get("a").unwrap(), "v": [null]},
{"sk": "b", "ct": ct.get("b").unwrap(), "v": [null]},
-{"sk": "c", "ct": ct.get("c").unwrap(), "v": [base64::encode(values.get("c").unwrap()), base64::encode(values.get("c'").unwrap())]},
+{"sk": "c", "ct": ct.get("c").unwrap(), "v": [BASE64_STANDARD.encode(values.get("c").unwrap()), BASE64_STANDARD.encode(values.get("c'").unwrap())]},
{"sk": "d.1", "ct": ct.get("d.1").unwrap(), "v": [null]},
{"sk": "d.2", "ct": ct.get("d.2").unwrap(), "v": [null]},
-{"sk": "e", "ct": ct.get("e").unwrap(), "v": [base64::encode(values.get("e").unwrap())]},
+{"sk": "e", "ct": ct.get("e").unwrap(), "v": [BASE64_STANDARD.encode(values.get("e").unwrap())]},
],
"more": false,
"nextStart": null,
diff --git a/src/garage/tests/k2v/item.rs b/src/garage/tests/k2v/item.rs
index 2641386f..588836c7 100644
--- a/src/garage/tests/k2v/item.rs
+++ b/src/garage/tests/k2v/item.rs
@@ -3,6 +3,7 @@ use std::time::Duration;
use crate::common;
use assert_json_diff::assert_json_eq;
+use base64::prelude::*;
use serde_json::json;
use super::json_body;
@@ -222,7 +223,10 @@ async fn test_items_and_indices() {
let res_json = json_body(res).await;
assert_json_eq!(
res_json,
-[base64::encode(&content2), base64::encode(&content3)]
+[
+BASE64_STANDARD.encode(&content2),
+BASE64_STANDARD.encode(&content3)
+]
);
// ReadIndex -- now there should be some stuff
@@ -411,7 +415,7 @@ async fn test_item_return_format() {
"application/json"
);
let res_body = json_body(res).await;
-assert_json_eq!(res_body, json!([base64::encode(&single_value)]));
+assert_json_eq!(res_body, json!([BASE64_STANDARD.encode(&single_value)]));
// f2: binary
let res = ctx
@@ -452,7 +456,7 @@ async fn test_item_return_format() {
"application/json"
);
let res_body = json_body(res).await;
-assert_json_eq!(res_body, json!([base64::encode(&single_value)]));
+assert_json_eq!(res_body, json!([BASE64_STANDARD.encode(&single_value)]));
// -- Test with a second, concurrent value --
let res = ctx
@@ -488,8 +492,8 @@ async fn test_item_return_format() {
assert_json_eq!(
res_body,
json!([
-base64::encode(&single_value),
-base64::encode(&concurrent_value)
+BASE64_STANDARD.encode(&single_value),
+BASE64_STANDARD.encode(&concurrent_value)
])
);
@@ -512,8 +516,8 @@ async fn test_item_return_format() {
assert_json_eq!(
res_body,
json!([
-base64::encode(&single_value),
-base64::encode(&concurrent_value)
+BASE64_STANDARD.encode(&single_value),
+BASE64_STANDARD.encode(&concurrent_value)
])
);
@@ -550,8 +554,8 @@ async fn test_item_return_format() {
assert_json_eq!(
res_body,
json!([
-base64::encode(&single_value),
-base64::encode(&concurrent_value)
+BASE64_STANDARD.encode(&single_value),
+BASE64_STANDARD.encode(&concurrent_value)
])
);
@@ -587,7 +591,10 @@ async fn test_item_return_format() {
"application/json"
);
let res_body = json_body(res).await;
-assert_json_eq!(res_body, json!([base64::encode(&concurrent_value), null]));
+assert_json_eq!(
+res_body,
+json!([BASE64_STANDARD.encode(&concurrent_value), null])
+);
// f1: not specified
let res = ctx
@@ -612,7 +619,10 @@ async fn test_item_return_format() {
.unwrap()
.to_string();
let res_body = json_body(res).await;
-assert_json_eq!(res_body, json!([base64::encode(&concurrent_value), null]));
+assert_json_eq!(
+res_body,
+json!([BASE64_STANDARD.encode(&concurrent_value), null])
+);
// f2: binary
let res = ctx
@@ -644,7 +654,10 @@ async fn test_item_return_format() {
"application/json"
);
let res_body = json_body(res).await;
-assert_json_eq!(res_body, json!([base64::encode(&concurrent_value), null]));
+assert_json_eq!(
+res_body,
+json!([BASE64_STANDARD.encode(&concurrent_value), null])
+);
// -- Delete everything --
let res = ctx
"0.2.6" +base64 = "0.21" +http = "0.2" log = "0.4" rusoto_core = { version = "0.48.0", default-features = false, features = ["rustls"] } rusoto_credential = "0.48.0" rusoto_signature = "0.48.0" -serde = "1.0.137" -serde_json = "1.0.81" -thiserror = "1.0.31" -tokio = "1.17.0" +serde = "1.0" +serde_json = "1.0" +thiserror = "1.0" +tokio = "1.24" # cli deps -clap = { version = "3.1.18", optional = true, features = ["derive", "env"] } +clap = { version = "4.1", optional = true, features = ["derive", "env"] } garage_util = { version = "0.8.1", path = "../util", optional = true } diff --git a/src/model/Cargo.toml b/src/model/Cargo.toml index 323c2d64..d1c7cd29 100644 --- a/src/model/Cargo.toml +++ b/src/model/Cargo.toml @@ -22,13 +22,13 @@ garage_util = { version = "0.8.1", path = "../util" } async-trait = "0.1.7" arc-swap = "1.0" -blake2 = "0.9" +blake2 = "0.10" err-derive = "0.3" hex = "0.4" -base64 = "0.13" -tracing = "0.1.30" +base64 = "0.21" +tracing = "0.1" rand = "0.8" -zstd = { version = "0.9", default-features = false } +zstd = { version = "0.12", default-features = false } serde = { version = "1.0", default-features = false, features = ["derive", "rc"] } serde_bytes = "0.11" diff --git a/src/model/k2v/causality.rs b/src/model/k2v/causality.rs index 9a692870..62488d53 100644 --- a/src/model/k2v/causality.rs +++ b/src/model/k2v/causality.rs @@ -1,3 +1,5 @@ +use base64::prelude::*; + use std::collections::BTreeMap; use std::convert::TryInto; @@ -41,11 +43,12 @@ impl CausalContext { bytes.extend(u64::to_be_bytes(i)); } - base64::encode_config(bytes, base64::URL_SAFE_NO_PAD) + BASE64_URL_SAFE_NO_PAD.encode(bytes) } /// Parse from base64-encoded binary representation pub fn parse(s: &str) -> Result<Self, String> { - let bytes = base64::decode_config(s, base64::URL_SAFE_NO_PAD) + let bytes = BASE64_URL_SAFE_NO_PAD + .decode(s) .map_err(|e| format!("bad causality token base64: {}", e))?; if bytes.len() % 16 != 8 || bytes.len() < 8 { return Err("bad causality token length".into()); diff --git a/src/model/k2v/item_table.rs b/src/model/k2v/item_table.rs index ce3e4129..9955a9cd 100644 --- a/src/model/k2v/item_table.rs +++ b/src/model/k2v/item_table.rs @@ -173,9 +173,9 @@ impl Crdt for DvvsEntry { impl PartitionKey for K2VItemPartition { fn hash(&self) -> Hash { - use blake2::{Blake2b, Digest}; + use blake2::{Blake2b512, Digest}; - let mut hasher = Blake2b::new(); + let mut hasher = Blake2b512::new(); hasher.update(self.bucket_id.as_slice()); hasher.update(self.partition_key.as_bytes()); let mut hash = [0u8; 32]; diff --git a/src/rpc/Cargo.toml b/src/rpc/Cargo.toml index 2daa0176..87ae15ac 100644 --- a/src/rpc/Cargo.toml +++ b/src/rpc/Cargo.toml @@ -20,7 +20,7 @@ arc-swap = "1.0" bytes = "1.0" gethostname = "0.2" hex = "0.4" -tracing = "0.1.30" +tracing = "0.1" rand = "0.8" sodiumoxide = { version = "0.2.5-0", package = "kuska-sodiumoxide" } systemstat = "0.2.3" diff --git a/src/table/Cargo.toml b/src/table/Cargo.toml index 3911c945..a8127f50 100644 --- a/src/table/Cargo.toml +++ b/src/table/Cargo.toml @@ -25,7 +25,7 @@ arc-swap = "1.0" bytes = "1.0" hex = "0.4" hexdump = "0.1" -tracing = "0.1.30" +tracing = "0.1" rand = "0.8" serde = { version = "1.0", default-features = false, features = ["derive", "rc"] } diff --git a/src/util/Cargo.toml b/src/util/Cargo.toml index 1017b1ce..abeccbbd 100644 --- a/src/util/Cargo.toml +++ b/src/util/Cargo.toml @@ -18,7 +18,7 @@ garage_db = { version = "0.8.1", path = "../db" } arc-swap = "1.0" async-trait = "0.1" -blake2 = "0.9" +blake2 = "0.10" bytes = "1.0" 
digest = "0.10" err-derive = "0.3" @@ -27,7 +27,7 @@ hexdump = "0.1" xxhash-rust = { version = "0.8", default-features = false, features = ["xxh3"] } hex = "0.4" lazy_static = "1.4" -tracing = "0.1.30" +tracing = "0.1" rand = "0.8" sha2 = "0.10" @@ -35,7 +35,7 @@ chrono = "0.4" rmp-serde = "0.15" serde = { version = "1.0", default-features = false, features = ["derive", "rc"] } serde_json = "1.0" -toml = "0.5" +toml = "0.6" futures = "0.3" tokio = { version = "1.0", default-features = false, features = ["rt", "rt-multi-thread", "io-util", "net", "time", "macros", "sync", "signal", "fs"] } @@ -48,7 +48,7 @@ hyper = "0.14" opentelemetry = { version = "0.17", features = [ "rt-tokio", "metrics", "trace" ] } [dev-dependencies] -mktemp = "0.4" +mktemp = "0.5" [features] k2v = [] diff --git a/src/util/data.rs b/src/util/data.rs index 3f61e301..bdd8daee 100644 --- a/src/util/data.rs +++ b/src/util/data.rs @@ -115,9 +115,9 @@ pub fn sha256sum(data: &[u8]) -> Hash { /// Compute the blake2 of a slice pub fn blake2sum(data: &[u8]) -> Hash { - use blake2::{Blake2b, Digest}; + use blake2::{Blake2b512, Digest}; - let mut hasher = Blake2b::new(); + let mut hasher = Blake2b512::new(); hasher.update(data); let mut hash = [0u8; 32]; hash.copy_from_slice(&hasher.finalize()[..32]); diff --git a/src/util/time.rs b/src/util/time.rs index 257b4d2a..42f41a44 100644 --- a/src/util/time.rs +++ b/src/util/time.rs @@ -25,6 +25,6 @@ pub fn increment_logical_clock_2(prev: u64, prev2: u64) -> u64 { pub fn msec_to_rfc3339(msecs: u64) -> String { let secs = msecs as i64 / 1000; let nanos = (msecs as i64 % 1000) as u32 * 1_000_000; - let timestamp = Utc.timestamp(secs, nanos); + let timestamp = Utc.timestamp_opt(secs, nanos).unwrap(); timestamp.to_rfc3339_opts(SecondsFormat::Millis, true) } diff --git a/src/web/Cargo.toml b/src/web/Cargo.toml index dbc5e5fb..19eaed17 100644 --- a/src/web/Cargo.toml +++ b/src/web/Cargo.toml @@ -20,7 +20,7 @@ garage_util = { version = "0.8.1", path = "../util" } garage_table = { version = "0.8.1", path = "../table" } err-derive = "0.3" -tracing = "0.1.30" +tracing = "0.1" percent-encoding = "2.1.0" futures = "0.3" |