commit e404d144a2
parent bc6ed7d07e
Author: Edward Shen
Date: 2022-03-26 16:28:58 -07:00
Signed by: edward (GPG key ID: 19182661E818369F)
9 changed files with 28 additions and 29 deletions

src/cache/compat.rs vendored (2 changes)

@@ -97,7 +97,7 @@ mod parse {
             metadata.content_type.map(|v| v.0),
             Some(ImageContentType::Jpeg)
         );
-        assert_eq!(metadata.size, Some(117888));
+        assert_eq!(metadata.size, Some(117_888));
         assert_eq!(
             metadata.last_modified.map(|v| v.0),
             Some(DateTime::parse_from_rfc2822(

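Note: the literal rewrite above (117888 to 117_888, and the similar changes further down) uses Rust's underscore digit separators, the style clippy's unreadable_literal lint suggests. A minimal, self-contained sketch of the idea (the values are illustrative, not tied to this repository):

    fn main() {
        // Underscores in numeric literals are ignored by the compiler; they
        // exist purely for readability, so both spellings denote the same value.
        assert_eq!(117888, 117_888);
        assert_eq!(17524406870024074020_u64, 17_524_406_870_024_074_020_u64);
        println!("separated literals compare equal");
    }
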
src/cache/disk.rs vendored (6 changes)

@@ -462,7 +462,7 @@ mod remove_file_handler {
     use tempfile::tempdir;
     use tokio::fs::{create_dir_all, remove_dir_all};

-    use super::*;
+    use super::{File, remove_file_handler};

     #[tokio::test]
     async fn should_not_panic_on_invalid_path() {
@@ -568,7 +568,7 @@ mod db {
     use sqlx::{Connection, Row, SqliteConnection};
     use std::error::Error;

-    use super::*;
+    use super::{Cache, ConnectOptions, DiskCache, FromStr, Ordering, PathBuf, StreamExt, handle_db_get, handle_db_put};

     #[tokio::test]
     #[cfg_attr(miri, ignore)]
@@ -639,7 +639,7 @@ mod db {

 #[cfg(test)]
 mod md5_hash {
-    use super::*;
+    use super::{Digest, GenericArray, Md5, Md5Hash, Path, PathBuf, TryFrom};

     #[test]
     fn to_cache_path() {

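Note: replacing use super::* with an explicit import list, as in the two hunks above and several below, is the pattern clippy's wildcard_imports lint pushes toward: the test module states exactly which parent items it touches. A minimal sketch of the shape (the module layout is invented for illustration; the item names simply echo the disk.rs hunk above):

    mod cache {
        pub struct File;

        pub fn remove_file_handler() -> bool {
            true
        }

        #[cfg(test)]
        mod tests {
            // Explicit imports document which parent items the tests rely on,
            // unlike `use super::*;`, which pulls in everything.
            use super::{remove_file_handler, File};

            #[test]
            fn handler_runs() {
                let _file = File;
                assert!(remove_file_handler());
            }
        }
    }

    fn main() {
        let _ = cache::File;
        assert!(cache::remove_file_handler());
    }
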
src/cache/fs.rs vendored (8 changes)

@@ -469,7 +469,7 @@ mod read_file_compat {
 mod metadata_future {
     use std::{collections::VecDeque, io::ErrorKind};

-    use super::*;
+    use super::{AsyncBufRead, AsyncRead, AsyncReadExt, BufReader, Context, Error, MetadataFuture, NewCipher, Pin, Poll, ReadBuf};

     use crate::cache::ImageContentType;
     use chrono::DateTime;
@@ -550,7 +550,7 @@ mod metadata_future {
         })?;

         assert_eq!(metadata.content_type, Some(ImageContentType::Png));
-        assert_eq!(metadata.content_length, Some(708370));
+        assert_eq!(metadata.content_length, Some(708_370));
         assert_eq!(
             metadata.last_modified,
             Some(DateTime::parse_from_rfc3339("2021-04-13T04:37:41+00:00")?)
@@ -579,7 +579,7 @@ mod metadata_future {
         })?;

         assert_eq!(metadata.content_type, Some(ImageContentType::Png));
-        assert_eq!(metadata.content_length, Some(708370));
+        assert_eq!(metadata.content_length, Some(708_370));
         assert_eq!(
             metadata.last_modified,
             Some(DateTime::parse_from_rfc3339("2021-04-13T04:37:41+00:00")?)
@@ -611,7 +611,7 @@ mod metadata_future {
         })?;

         assert_eq!(metadata.content_type, Some(ImageContentType::Png));
-        assert_eq!(metadata.content_length, Some(708370));
+        assert_eq!(metadata.content_length, Some(708_370));
         assert_eq!(
             metadata.last_modified,
             Some(DateTime::parse_from_rfc3339("2021-04-13T04:37:41+00:00")?)

src/cache/mem.rs vendored (14 changes)

@@ -367,7 +367,7 @@ mod test_util {
             metadata: ImageMetadata,
             on_complete: Sender<CacheEntry>,
         ) -> Result<(), CacheError> {
-            self.put(key.clone(), data.clone(), metadata.clone())
+            self.put(key.clone(), data.clone(), metadata)
                 .await?;
             let on_disk_size = data.len() as u64;
             let _ = on_complete
@@ -438,7 +438,7 @@ mod cache_ops {
             last_modified: None,
         };
         let bytes = Bytes::from_static(b"abcd");
-        let value = CacheValue::new(bytes.clone(), metadata.clone(), 34);
+        let value = CacheValue::new(bytes.clone(), metadata, 34);

         // Populate the cache, need to drop the lock else it's considered locked
         // when we actually call the cache
@@ -478,7 +478,7 @@ mod cache_ops {
         {
             let cache = &mut cache.inner;
             cache
-                .put(key.clone(), bytes.clone(), metadata.clone())
+                .put(key.clone(), bytes.clone(), metadata)
                 .await?;
         }

@@ -511,7 +511,7 @@ mod cache_ops {
         {
             let cache = &mut cache.inner;
             cache
-                .put(key.clone(), bytes.clone(), metadata.clone())
+                .put(key.clone(), bytes.clone(), metadata)
                 .await?;
         }

@@ -557,7 +557,7 @@ mod cache_ops {
         let bytes_len = bytes.len() as u64;

         cache
-            .put(key.clone(), bytes.clone(), metadata.clone())
+            .put(key.clone(), bytes.clone(), metadata)
             .await?;

         // Because the callback is supposed to let the memory cache insert the
@@ -667,7 +667,7 @@ mod db_listener {
         };
         let bytes = Bytes::from_static(b"abcde");

-        cache.put(key_0, bytes.clone(), metadata.clone()).await?;
+        cache.put(key_0, bytes.clone(), metadata).await?;
         cache.put(key_1, bytes.clone(), metadata).await?;

         // let the listener run first
@@ -717,6 +717,6 @@ mod mem_threshold {

     #[test]
     fn large_amount_cannot_overflow() {
-        assert_eq!(mem_threshold(&Bytes(usize::MAX)), 17524406870024074020);
+        assert_eq!(mem_threshold(&Bytes(usize::MAX)), 17_524_406_870_024_074_020);
     }
 }

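Note: the put(...) call sites above drop .clone() on the metadata argument, apparently because ImageMetadata can simply be copied (the db_listener hunk reuses the same value twice without cloning); clippy's clone_on_copy lint flags explicit clones of Copy types. A minimal sketch with an invented Copy struct standing in for the crate's ImageMetadata (the field and function names here are illustrative):

    #[derive(Clone, Copy, Debug, PartialEq)]
    struct Metadata {
        content_length: Option<u64>,
    }

    fn put(_key: &str, _data: &[u8], metadata: Metadata) -> Metadata {
        // Taking `metadata` by value copies it implicitly because the type is
        // Copy, so an explicit `.clone()` at the call site is redundant.
        metadata
    }

    fn main() {
        let metadata = Metadata {
            content_length: Some(708_370),
        };
        // No `.clone()` needed: the value is copied into each call and the
        // original binding stays usable afterwards.
        let first = put("key_0", b"abcd", metadata);
        let second = put("key_1", b"abcde", metadata);
        assert_eq!(first, second);
    }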

@@ -280,6 +280,7 @@ impl Display for InvalidCombination {
 impl Error for InvalidCombination {}

 #[cfg(not(tarpaulin_include))]
+#[allow(clippy::cognitive_complexity)]
 fn print_preamble_and_warnings(args: &Config) -> Result<(), Box<dyn Error>> {
     let build_string = option_env!("VERGEN_GIT_SHA_SHORT")
         .map(|git_sha| format!(" ({})", git_sha))

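Note: the one added line above scopes an allow attribute to a single function instead of the whole crate, so clippy's cognitive_complexity lint stays active everywhere else. A minimal sketch of that scoping (the function and its branches are invented for illustration):

    // The attribute silences the lint for this item only.
    #[allow(clippy::cognitive_complexity)]
    fn print_warnings(verbose: bool, cache_disabled: bool, offline: bool) {
        // A long run of branches like this is what drives the lint's score up.
        if verbose {
            println!("verbose output enabled");
        }
        if cache_disabled {
            println!("warning: cache is disabled");
        }
        if offline {
            println!("warning: running in offline mode");
        }
    }

    fn main() {
        print_warnings(true, false, true);
    }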

@@ -199,14 +199,13 @@ pub async fn update_server_state(
         }

         if let Some(key) = resp.token_key {
-            if let Some(key) = base64::decode(&key)
+            base64::decode(&key)
                 .ok()
                 .and_then(|k| PrecomputedKey::from_slice(&k))
-            {
-                write_guard.precomputed_key = key;
-            } else {
-                error!("Failed to parse token key: got {}", key);
-            }
+                .map_or_else(
+                    || error!("Failed to parse token key: got {}", key),
+                    |key| write_guard.precomputed_key = key,
+                );
         }

         if let Some(tls) = resp.tls {

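Note: the rewrite above folds an if let / else over an Option into Option::map_or_else, whose first closure handles the None case and whose second receives the Some value. A minimal sketch of the same shape (the parsing function and key type are stand-ins, not the crate's base64 / PrecomputedKey pipeline):

    fn parse_key(encoded: &str) -> Option<Vec<u8>> {
        // Stand-in for base64-decoding and validating a key.
        if encoded.is_empty() {
            None
        } else {
            Some(encoded.as_bytes().to_vec())
        }
    }

    fn main() {
        let mut precomputed_key: Vec<u8> = Vec::new();
        let encoded = "not-really-base64";

        // map_or_else: the first closure runs on None, the second on Some(key).
        parse_key(encoded).map_or_else(
            || eprintln!("Failed to parse token key: got {}", encoded),
            |key| precomputed_key = key,
        );

        assert!(!precomputed_key.is_empty());
    }

One side effect of the closure form worth noting: both arms become expressions, so the compiler checks that they agree on a return type, here ().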

@@ -299,7 +299,7 @@ pub fn construct_response(

 #[cfg(test)]
 mod token_validation {
-    use super::*;
+    use super::{BASE64_CONFIG, DecodeError, PrecomputedKey, TokenValidationError, Utc, validate_token};
     use sodiumoxide::crypto::box_::precompute;
     use sodiumoxide::crypto::box_::seal_precomputed;
     use sodiumoxide::crypto::box_::{gen_keypair, gen_nonce, PRECOMPUTEDKEYBYTES};
@@ -348,7 +348,7 @@ mod token_validation {

         // Seal with precomputed_2, open with precomputed_1
         let data = seal_precomputed(b"hello world", &nonce, &precomputed_2);
-        let data: Vec<u8> = nonce.as_ref().into_iter().copied().chain(data).collect();
+        let data: Vec<u8> = nonce.as_ref().iter().copied().chain(data).collect();
         let data = base64::encode_config(data, BASE64_CONFIG);

         let res = validate_token(&precomputed_1, data, "b");
@@ -364,7 +364,7 @@ mod token_validation {
         let nonce = gen_nonce();

         let data = seal_precomputed(b"hello world", &nonce, &precomputed);
-        let data: Vec<u8> = nonce.as_ref().into_iter().copied().chain(data).collect();
+        let data: Vec<u8> = nonce.as_ref().iter().copied().chain(data).collect();
         let data = base64::encode_config(data, BASE64_CONFIG);

         let res = validate_token(&precomputed, data, "b");
@@ -390,7 +390,7 @@ mod token_validation {
             &nonce,
             &precomputed,
         );
-        let data: Vec<u8> = nonce.as_ref().into_iter().copied().chain(data).collect();
+        let data: Vec<u8> = nonce.as_ref().iter().copied().chain(data).collect();
         let data = base64::encode_config(data, BASE64_CONFIG);

         let res = validate_token(&precomputed, data, "b");
@@ -416,7 +416,7 @@ mod token_validation {
             &nonce,
             &precomputed,
         );
-        let data: Vec<u8> = nonce.as_ref().into_iter().copied().chain(data).collect();
+        let data: Vec<u8> = nonce.as_ref().iter().copied().chain(data).collect();
         let data = base64::encode_config(data, BASE64_CONFIG);

         let res = validate_token(&precomputed, data, "");
@@ -442,7 +442,7 @@ mod token_validation {
             &nonce,
             &precomputed,
         );
-        let data: Vec<u8> = nonce.as_ref().into_iter().copied().chain(data).collect();
+        let data: Vec<u8> = nonce.as_ref().iter().copied().chain(data).collect();
         let data = base64::encode_config(data, BASE64_CONFIG);

         let res = validate_token(&precomputed, data, "b");

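Note: the repeated into_iter() to iter() change above reflects that calling into_iter() through a reference, such as the byte slice returned by nonce.as_ref(), already iterates by reference, so iter() states the same thing directly; clippy reports this as into_iter_on_ref. A minimal sketch with plain byte data standing in for the nonce and sealed ciphertext:

    fn main() {
        let nonce: [u8; 4] = [1, 2, 3, 4];
        let sealed: Vec<u8> = vec![9, 8, 7];

        // On a slice, `.into_iter()` called via a reference and `.iter()` both
        // yield `&u8`, so `.iter().copied()` is the clearer spelling.
        let data: Vec<u8> = nonce.as_ref().iter().copied().chain(sealed).collect();

        assert_eq!(data, vec![1, 2, 3, 4, 9, 8, 7]);
    }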

@@ -89,7 +89,6 @@ impl ServerState {
         if let Some(ref override_url) = config.override_upstream {
             resp.image_server = override_url.clone();
             warn!("Upstream URL overridden to: {}", resp.image_server);
-        } else {
         }

         info!("This client's URL has been set to {}", resp.url);

@@ -44,7 +44,7 @@ pub struct Mebibytes(usize);

 impl Mebibytes {
     #[cfg(test)]
     pub fn new(size: usize) -> Self {
-        Mebibytes(size)
+        Self(size)
     }
 }
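
Note: the final hunk swaps the concrete type name for Self inside the type's own impl block, the pattern clippy's use_self lint asks for. A minimal sketch mirroring the Mebibytes newtype from the diff, with the #[cfg(test)] gate dropped and a small accessor added here only so the example runs on its own:

    pub struct Mebibytes(usize);

    impl Mebibytes {
        pub fn new(size: usize) -> Self {
            // `Self` is `Mebibytes` here, so a later rename of the type only
            // touches the definition, not every constructor call.
            Self(size)
        }

        pub fn get(&self) -> usize {
            self.0
        }
    }

    fn main() {
        let size = Mebibytes::new(512);
        assert_eq!(size.get(), 512);
    }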