Compare commits


No commits in common. "1152f775b9a9c8246863791f925ad439f846e872" and "81604a7e94f20ec51e3f6b3b21ad4b347aedd347" have entirely different histories.

11 changed files with 327 additions and 446 deletions

Cargo.lock (generated)

File diff suppressed because it is too large.

Cargo.toml

@@ -22,7 +22,7 @@ debug = 1
 [dependencies]
 # Pin because we're using unstable versions
-actix-web = { version = "4", features = [ "rustls" ] }
+actix-web = { version = "=4.0.0-beta.18", features = [ "rustls" ] }
 arc-swap = "1"
 async-trait = "0.1"
 base64 = "0.13"

src/cache/compat.rs (vendored)

@@ -97,7 +97,7 @@ mod parse {
 metadata.content_type.map(|v| v.0),
 Some(ImageContentType::Jpeg)
 );
-assert_eq!(metadata.size, Some(117_888));
+assert_eq!(metadata.size, Some(117888));
 assert_eq!(
 metadata.last_modified.map(|v| v.0),
 Some(DateTime::parse_from_rfc2822(

src/cache/disk.rs (vendored)

@@ -462,7 +462,7 @@ mod remove_file_handler {
 use tempfile::tempdir;
 use tokio::fs::{create_dir_all, remove_dir_all};
-use super::{remove_file_handler, File};
+use super::*;
 #[tokio::test]
 async fn should_not_panic_on_invalid_path() {

@@ -568,7 +568,7 @@ mod db {
 use sqlx::{Connection, Row, SqliteConnection};
 use std::error::Error;
-use super::{handle_db_get, handle_db_put, DiskCache, FromStr, Ordering, PathBuf, StreamExt};
+use super::*;
 #[tokio::test]
 #[cfg_attr(miri, ignore)]

@@ -639,7 +639,7 @@ mod db {
 #[cfg(test)]
 mod md5_hash {
-use super::{Digest, GenericArray, Md5, Md5Hash, Path, PathBuf, TryFrom};
+use super::*;
 #[test]
 fn to_cache_path() {

src/cache/fs.rs (vendored)

@@ -469,10 +469,7 @@ mod read_file_compat {
 mod metadata_future {
 use std::{collections::VecDeque, io::ErrorKind};
-use super::{
-    AsyncBufRead, AsyncRead, AsyncReadExt, BufReader, Context, Error, MetadataFuture, Pin,
-    Poll, ReadBuf,
-};
+use super::*;
 use crate::cache::ImageContentType;
 use chrono::DateTime;

@@ -522,6 +519,7 @@ mod metadata_future {
 match pinned.fill_buf_events.pop_front() {
 Some(Poll::Ready(Ok(bytes))) => {
 pinned.buffer.extend_from_slice(bytes);
+String::from_utf8_lossy(&pinned.buffer);
 return Poll::Ready(Ok(pinned.buffer.as_ref()));
 }
 Some(res @ Poll::Ready(_)) => res,

@@ -552,7 +550,7 @@ mod metadata_future {
 })?;
 assert_eq!(metadata.content_type, Some(ImageContentType::Png));
-assert_eq!(metadata.content_length, Some(708_370));
+assert_eq!(metadata.content_length, Some(708370));
 assert_eq!(
 metadata.last_modified,
 Some(DateTime::parse_from_rfc3339("2021-04-13T04:37:41+00:00")?)

@@ -581,7 +579,7 @@ mod metadata_future {
 })?;
 assert_eq!(metadata.content_type, Some(ImageContentType::Png));
-assert_eq!(metadata.content_length, Some(708_370));
+assert_eq!(metadata.content_length, Some(708370));
 assert_eq!(
 metadata.last_modified,
 Some(DateTime::parse_from_rfc3339("2021-04-13T04:37:41+00:00")?)

@@ -613,7 +611,7 @@ mod metadata_future {
 })?;
 assert_eq!(metadata.content_type, Some(ImageContentType::Png));
-assert_eq!(metadata.content_length, Some(708_370));
+assert_eq!(metadata.content_length, Some(708370));
 assert_eq!(
 metadata.last_modified,
 Some(DateTime::parse_from_rfc3339("2021-04-13T04:37:41+00:00")?)

src/cache/mem.rs (vendored)

@@ -367,7 +367,7 @@ mod test_util {
 metadata: ImageMetadata,
 on_complete: Sender<CacheEntry>,
 ) -> Result<(), CacheError> {
-self.put(key.clone(), data.clone(), metadata)
+self.put(key.clone(), data.clone(), metadata.clone())
 .await?;
 let on_disk_size = data.len() as u64;
 let _ = on_complete

@@ -438,7 +438,7 @@ mod cache_ops {
 last_modified: None,
 };
 let bytes = Bytes::from_static(b"abcd");
-let value = CacheValue::new(bytes.clone(), metadata, 34);
+let value = CacheValue::new(bytes.clone(), metadata.clone(), 34);
 // Populate the cache, need to drop the lock else it's considered locked
 // when we actually call the cache

@@ -478,7 +478,7 @@ mod cache_ops {
 {
 let cache = &mut cache.inner;
 cache
-.put(key.clone(), bytes.clone(), metadata)
+.put(key.clone(), bytes.clone(), metadata.clone())
 .await?;
 }

@@ -511,7 +511,7 @@ mod cache_ops {
 {
 let cache = &mut cache.inner;
 cache
-.put(key.clone(), bytes.clone(), metadata)
+.put(key.clone(), bytes.clone(), metadata.clone())
 .await?;
 }

@@ -557,7 +557,7 @@ mod cache_ops {
 let bytes_len = bytes.len() as u64;
 cache
-.put(key.clone(), bytes.clone(), metadata)
+.put(key.clone(), bytes.clone(), metadata.clone())
 .await?;
 // Because the callback is supposed to let the memory cache insert the

@@ -667,7 +667,7 @@ mod db_listener {
 };
 let bytes = Bytes::from_static(b"abcde");
-cache.put(key_0, bytes.clone(), metadata).await?;
+cache.put(key_0, bytes.clone(), metadata.clone()).await?;
 cache.put(key_1, bytes.clone(), metadata).await?;
 // let the listener run first

@@ -717,6 +717,6 @@ mod mem_threshold {
 #[test]
 fn large_amount_cannot_overflow() {
-assert_eq!(mem_threshold(&Bytes(usize::MAX)), 17_524_406_870_024_074_020);
+assert_eq!(mem_threshold(&Bytes(usize::MAX)), 17524406870024074020);
 }
 }
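Aside on the mem_threshold assertion just above: the expected constant is roughly 95% of usize::MAX, so the function evidently scales a byte count by a fixed ratio. The sketch below shows one overflow-safe way to do that kind of scaling by widening to u128 before multiplying; it is an illustrative stand-in with a hypothetical name, not the crate's actual mem_threshold, and its rounding does not exactly match the asserted constant.

// Hypothetical stand-in for an overflow-safe "about 95% of capacity" calculation.
// Widening to u128 keeps the multiplication from overflowing even at usize::MAX.
fn scale_95_percent(bytes: usize) -> u64 {
    ((bytes as u128 * 95) / 100) as u64
}

fn main() {
    // On a 64-bit target this prints 17524406870024074034, in the same ballpark
    // as the constant asserted in the test above (the crate's exact arithmetic
    // evidently rounds slightly differently).
    println!("{}", scale_95_percent(usize::MAX));
}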


@@ -280,7 +280,6 @@ impl Display for InvalidCombination {
 impl Error for InvalidCombination {}
 #[cfg(not(tarpaulin_include))]
-#[allow(clippy::cognitive_complexity)]
 fn print_preamble_and_warnings(args: &Config) -> Result<(), Box<dyn Error>> {
 let build_string = option_env!("VERGEN_GIT_SHA_SHORT")
 .map(|git_sha| format!(" ({})", git_sha))


@@ -199,13 +199,14 @@ pub async fn update_server_state(
 }
 if let Some(key) = resp.token_key {
-base64::decode(&key)
+if let Some(key) = base64::decode(&key)
 .ok()
 .and_then(|k| PrecomputedKey::from_slice(&k))
-.map_or_else(
-|| error!("Failed to parse token key: got {}", key),
-|key| write_guard.precomputed_key = key,
-);
+{
+write_guard.precomputed_key = key;
+} else {
+error!("Failed to parse token key: got {}", key);
+}
 }
 if let Some(tls) = resp.tls {
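The update_server_state hunk above replaces an Option::map_or_else chain with an if let / else; the two forms do the same thing, the latter just reads more directly. Below is a standalone sketch of the same pattern, using the base64 0.13 decode API from this project's dependencies; stored_key and the surrounding main are illustrative stand-ins, not the crate's own code.

// Illustrative only: the same Option-handling pattern as in the hunk above,
// shown both ways. `stored_key` stands in for write_guard.precomputed_key.
fn main() {
    let encoded = "aGVsbG8gd29ybGQ="; // base64 for "hello world"

    // Closure-based form (the removed lines):
    let mut stored_key: Option<Vec<u8>> = None;
    base64::decode(encoded).ok().map_or_else(
        || eprintln!("Failed to parse token key: got {}", encoded),
        |key| stored_key = Some(key),
    );
    println!("closure form stored {:?} bytes", stored_key.map(|k| k.len()));

    // if let / else form (the added lines):
    let mut stored_key: Option<Vec<u8>> = None;
    if let Some(key) = base64::decode(encoded).ok() {
        stored_key = Some(key);
    } else {
        eprintln!("Failed to parse token key: got {}", encoded);
    }
    println!("if-let form stored {:?} bytes", stored_key.map(|k| k.len()));
}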


@@ -299,7 +299,7 @@ pub fn construct_response(
 #[cfg(test)]
 mod token_validation {
-use super::{BASE64_CONFIG, DecodeError, PrecomputedKey, TokenValidationError, Utc, validate_token};
+use super::*;
 use sodiumoxide::crypto::box_::precompute;
 use sodiumoxide::crypto::box_::seal_precomputed;
 use sodiumoxide::crypto::box_::{gen_keypair, gen_nonce, PRECOMPUTEDKEYBYTES};

@@ -348,7 +348,7 @@ mod token_validation {
 // Seal with precomputed_2, open with precomputed_1
 let data = seal_precomputed(b"hello world", &nonce, &precomputed_2);
-let data: Vec<u8> = nonce.as_ref().iter().copied().chain(data).collect();
+let data: Vec<u8> = nonce.as_ref().into_iter().copied().chain(data).collect();
 let data = base64::encode_config(data, BASE64_CONFIG);
 let res = validate_token(&precomputed_1, data, "b");

@@ -364,7 +364,7 @@ mod token_validation {
 let nonce = gen_nonce();
 let data = seal_precomputed(b"hello world", &nonce, &precomputed);
-let data: Vec<u8> = nonce.as_ref().iter().copied().chain(data).collect();
+let data: Vec<u8> = nonce.as_ref().into_iter().copied().chain(data).collect();
 let data = base64::encode_config(data, BASE64_CONFIG);
 let res = validate_token(&precomputed, data, "b");

@@ -390,7 +390,7 @@ mod token_validation {
 &nonce,
 &precomputed,
 );
-let data: Vec<u8> = nonce.as_ref().iter().copied().chain(data).collect();
+let data: Vec<u8> = nonce.as_ref().into_iter().copied().chain(data).collect();
 let data = base64::encode_config(data, BASE64_CONFIG);
 let res = validate_token(&precomputed, data, "b");

@@ -416,7 +416,7 @@ mod token_validation {
 &nonce,
 &precomputed,
 );
-let data: Vec<u8> = nonce.as_ref().iter().copied().chain(data).collect();
+let data: Vec<u8> = nonce.as_ref().into_iter().copied().chain(data).collect();
 let data = base64::encode_config(data, BASE64_CONFIG);
 let res = validate_token(&precomputed, data, "");

@@ -442,7 +442,7 @@ mod token_validation {
 &nonce,
 &precomputed,
 );
-let data: Vec<u8> = nonce.as_ref().iter().copied().chain(data).collect();
+let data: Vec<u8> = nonce.as_ref().into_iter().copied().chain(data).collect();
 let data = base64::encode_config(data, BASE64_CONFIG);
 let res = validate_token(&precomputed, data, "b");
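All of the token_validation tests above build their input the same way: the raw nonce bytes are prepended to the sealed box, and the result is base64-encoded before being passed to validate_token. The sketch below reproduces that layout with the same sodiumoxide primitives the tests import; validate_token and the crate's BASE64_CONFIG are not shown here, so plain base64::encode stands in for base64::encode_config.

use sodiumoxide::crypto::box_::{gen_keypair, gen_nonce, precompute, seal_precomputed};

fn main() {
    sodiumoxide::init().expect("libsodium init failed");

    // Two keypairs and a shared precomputed key, as in the tests above.
    let (public, _secret) = gen_keypair();
    let (_other_public, other_secret) = gen_keypair();
    let precomputed = precompute(&public, &other_secret);

    // Token layout exercised by the tests: nonce || ciphertext, base64-encoded.
    let nonce = gen_nonce();
    let sealed = seal_precomputed(b"hello world", &nonce, &precomputed);
    let token: Vec<u8> = nonce.as_ref().iter().copied().chain(sealed).collect();
    let encoded = base64::encode(&token);
    println!("{}", encoded);
}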


@@ -89,6 +89,7 @@ impl ServerState {
 if let Some(ref override_url) = config.override_upstream {
 resp.image_server = override_url.clone();
 warn!("Upstream URL overridden to: {}", resp.image_server);
+} else {
 }
 info!("This client's URL has been set to {}", resp.url);


@@ -44,7 +44,7 @@ pub struct Mebibytes(usize);
 impl Mebibytes {
 #[cfg(test)]
 pub fn new(size: usize) -> Self {
-Self(size)
+Mebibytes(size)
 }
 }