Clippy lints
parent 8f3430fb77
commit 69587b9ade
4 changed files with 99 additions and 79 deletions
9 src/cache/compat.rs (vendored)

@@ -1,10 +1,11 @@
+//! These structs have alternative deserialize and serializations
+//! implementations to assist reading from the official client file format.
+
 use std::str::FromStr;
 
 use chrono::{DateTime, FixedOffset};
-use serde::{
-    de::{Unexpected, Visitor},
-    Deserialize, Serialize,
-};
+use serde::de::{Unexpected, Visitor};
+use serde::{Deserialize, Serialize};
 
 use super::ImageContentType;
 
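A note on the import churn in this hunk, and on the larger reshuffle in the final hunk of this commit: nested use groups are flattened into one use statement per module path. Both spellings resolve to the same items, so the change is purely stylistic. rustfmt's unstable imports_granularity = "Module" setting produces the flat form mechanically, though the diff alone cannot show whether that option or a hand edit was used; the sketch below (with hypothetical module names, purely to place both forms side by side) is compile-equivalent either way.

    // Grouped form, as removed here: a single nested `use` tree.
    #[allow(unused_imports)]
    mod grouped_style {
        use serde::{
            de::{Unexpected, Visitor},
            Deserialize, Serialize,
        };
    }

    // Flat form, as added here: one `use` per module path.
    #[allow(unused_imports)]
    mod flat_style {
        use serde::de::{Unexpected, Visitor};
        use serde::{Deserialize, Serialize};
    }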
126 src/cache/disk.rs (vendored)

@@ -1,5 +1,7 @@
 //! Low memory caching stuff
 
+use std::convert::TryFrom;
+use std::hint::unreachable_unchecked;
 use std::os::unix::prelude::OsStrExt;
 use std::path::{Path, PathBuf};
 use std::str::FromStr;
@@ -12,7 +14,7 @@ use log::{debug, error, warn, LevelFilter};
 use md5::digest::generic_array::GenericArray;
 use md5::{Digest, Md5};
 use sqlx::sqlite::SqliteConnectOptions;
-use sqlx::{ConnectOptions, SqlitePool};
+use sqlx::{ConnectOptions, Sqlite, SqlitePool, Transaction};
 use tokio::fs::remove_file;
 use tokio::sync::mpsc::{channel, Receiver, Sender};
 use tokio_stream::wrappers::ReceiverStream;
@@ -112,7 +114,6 @@ async fn db_listener(
 ) {
     let mut recv_stream = ReceiverStream::new(db_rx).ready_chunks(128);
     while let Some(messages) = recv_stream.next().await {
-        let now = chrono::Utc::now();
         let mut transaction = match db_pool.begin().await {
             Ok(transaction) => transaction,
             Err(e) => {
@@ -120,45 +121,12 @@ async fn db_listener(
                 continue;
             }
         };
 
         for message in messages {
             match message {
-                DbMessage::Get(entry) => {
-                    let hash = Md5Hash::from(entry.as_path());
-                    let hash_str = hash.to_hex_string();
-                    let key = entry.as_os_str().to_str();
-                    // let legacy_key = key.map();
-                    let query = sqlx::query!(
-                        "update Images set accessed = ? where id = ? or id = ?",
-                        now,
-                        key,
-                        hash_str
-                    )
-                    .execute(&mut transaction)
-                    .await;
-                    if let Err(e) = query {
-                        warn!("Failed to update timestamp in db for {:?}: {}", key, e);
-                    }
-                }
+                DbMessage::Get(entry) => handle_db_get(entry, &mut transaction).await,
                 DbMessage::Put(entry, size) => {
-                    let key = entry.as_os_str().to_str();
-                    {
-                        // This is intentional.
-                        #[allow(clippy::cast_possible_wrap)]
-                        let size = size as i64;
-                        let query = sqlx::query!(
-                            "insert into Images (id, size, accessed) values (?, ?, ?) on conflict do nothing",
-                            key,
-                            size,
-                            now,
-                        )
-                        .execute(&mut transaction)
-                        .await;
-                        if let Err(e) = query {
-                            warn!("Failed to add {:?} to db: {}", key, e);
-                        }
-                    }
-                    cache.disk_cur_size.fetch_add(size, Ordering::Release);
+                    handle_db_put(entry, size, &cache, &mut transaction).await;
                 }
             }
         }
@@ -233,6 +201,60 @@ async fn db_listener(
     }
 }
 
+async fn handle_db_get(entry: Arc<PathBuf>, transaction: &mut Transaction<'_, Sqlite>) {
+    let hash = if let Ok(hash) = Md5Hash::try_from(entry.as_path()) {
+        hash
+    } else {
+        error!(
+            "Failed to derive hash from entry, dropping message: {}",
+            entry.to_string_lossy()
+        );
+        return;
+    };
+
+    let hash_str = hash.to_hex_string();
+    let key = entry.as_os_str().to_str();
+    let now = chrono::Utc::now();
+    let query = sqlx::query!(
+        "update Images set accessed = ? where id = ? or id = ?",
+        now,
+        key,
+        hash_str
+    )
+    .execute(transaction)
+    .await;
+    if let Err(e) = query {
+        warn!("Failed to update timestamp in db for {:?}: {}", key, e);
+    }
+}
+
+async fn handle_db_put(
+    entry: Arc<PathBuf>,
+    size: u64,
+    cache: &DiskCache,
+    transaction: &mut Transaction<'_, Sqlite>,
+) {
+    let key = entry.as_os_str().to_str();
+    let now = chrono::Utc::now();
+
+    // This is intentional.
+    #[allow(clippy::cast_possible_wrap)]
+    let casted_size = size as i64;
+    let query = sqlx::query!(
+        "insert into Images (id, size, accessed) values (?, ?, ?) on conflict do nothing",
+        key,
+        casted_size,
+        now,
+    )
+    .execute(transaction)
+    .await;
+    if let Err(e) = query {
+        warn!("Failed to add {:?} to db: {}", key, e);
+    }
+
+    cache.disk_cur_size.fetch_add(size, Ordering::Release);
+}
+
 /// Represents a Md5 hash that can be converted to and from a path. This is used
 /// for compatibility with the official client, where the image id and on-disk
 /// path is determined by file path.
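The #[allow(clippy::cast_possible_wrap)] that moves into handle_db_put above exists because SQLite stores integers as signed 64-bit values, so the u64 size has to become an i64 at the query boundary somewhere. Below is a sketch of the checked conversion the lint would otherwise steer toward, using only the standard library; it is not part of the diff, and the commit reasonably keeps the plain cast since realistic file sizes are nowhere near i64::MAX.

    use std::convert::TryFrom;

    // Hypothetical helper, not in the commit: refuse, rather than wrap, any
    // size that cannot be represented as a signed 64-bit SQLite integer.
    fn size_for_db(size: u64) -> Option<i64> {
        i64::try_from(size).ok()
    }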
@@ -245,12 +267,14 @@ impl Md5Hash {
     }
 }
 
-impl From<&Path> for Md5Hash {
-    fn from(path: &Path) -> Self {
+impl TryFrom<&Path> for Md5Hash {
+    type Error = ();
+
+    fn try_from(path: &Path) -> Result<Self, Self::Error> {
         let mut iter = path.iter();
-        let file_name = iter.next_back().unwrap();
-        let chapter_hash = iter.next_back().unwrap();
-        let is_data_saver = iter.next_back().unwrap() == "saver";
+        let file_name = iter.next_back().ok_or(())?;
+        let chapter_hash = iter.next_back().ok_or(())?;
+        let is_data_saver = iter.next_back().ok_or(())? == "saver";
         let mut hasher = Md5::new();
         if is_data_saver {
             hasher.update("saver");
@@ -258,23 +282,23 @@ impl From<&Path> for Md5Hash {
         hasher.update(chapter_hash.as_bytes());
         hasher.update(".");
         hasher.update(file_name.as_bytes());
-        Self(hasher.finalize())
+        Ok(Self(hasher.finalize()))
     }
 }
 
-// Lint is overly aggressive here, as Md5Hash guarantees there to be at least 3
-// bytes.
-#[allow(clippy::fallible_impl_from)]
 impl From<Md5Hash> for PathBuf {
     fn from(hash: Md5Hash) -> Self {
         let hex_value = hash.to_hex_string();
-        hex_value[0..3]
+        let path = hex_value[0..3]
             .chars()
             .rev()
             .map(|char| Self::from(char.to_string()))
-            .reduce(|first, second| first.join(second))
-            .unwrap() // literally not possible
-            .join(hex_value)
+            .reduce(|first, second| first.join(second));
+
+        match path {
+            Some(p) => p.join(hex_value),
+            None => unsafe { unreachable_unchecked() }, // literally not possible
+        }
     }
 }
 
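With From<&Path> replaced by TryFrom<&Path>, callers of Md5Hash now receive a Result instead of a panic when a path is too shallow to yield the data-saver marker, chapter hash, and file name; handle_db_get above handles the Err(()) case by logging and dropping the message. A minimal caller sketch under the same assumptions (the Md5Hash type and its to_hex_string method from this file):

    use std::convert::TryFrom;
    use std::path::Path;

    // Hypothetical helper, not in the commit: map a cache path to the hex id
    // used in the Images table, or None if the path has too few components.
    fn hex_id_for(path: &Path) -> Option<String> {
        Md5Hash::try_from(path).ok().map(|hash| hash.to_hex_string())
    }

The companion From<Md5Hash> for PathBuf impl stays infallible: a 32-character hex string always yields three leading characters for reduce to fold, so the None arm is genuinely unreachable, which is why the old #[allow(clippy::fallible_impl_from)] and its comment could be dropped once the unwrap was replaced.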
17 src/cache/mod.rs (vendored)

@@ -115,12 +115,11 @@ impl From<LegacyImageMetadata> for ImageMetadata {
     }
 }
 
-#[allow(clippy::enum_variant_names)]
 #[derive(Debug)]
 pub enum ImageRequestError {
-    InvalidContentType,
-    InvalidContentLength,
-    InvalidLastModified,
+    ContentType,
+    ContentLength,
+    LastModified,
 }
 
 impl ImageMetadata {
@@ -136,14 +135,14 @@ impl ImageMetadata {
                     Err(_) => Err(InvalidContentType),
                 })
                 .transpose()
-                .map_err(|_| ImageRequestError::InvalidContentType)?,
+                .map_err(|_| ImageRequestError::ContentType)?,
             content_length: content_length
                 .map(|header_val| {
                     header_val
                         .to_str()
-                        .map_err(|_| ImageRequestError::InvalidContentLength)?
+                        .map_err(|_| ImageRequestError::ContentLength)?
                         .parse()
-                        .map_err(|_| ImageRequestError::InvalidContentLength)
+                        .map_err(|_| ImageRequestError::ContentLength)
                 })
                 .transpose()?,
             last_modified: last_modified
@@ -151,9 +150,9 @@ impl ImageMetadata {
                     DateTime::parse_from_rfc2822(
                         header_val
                             .to_str()
-                            .map_err(|_| ImageRequestError::InvalidLastModified)?,
+                            .map_err(|_| ImageRequestError::LastModified)?,
                     )
-                    .map_err(|_| ImageRequestError::InvalidLastModified)
+                    .map_err(|_| ImageRequestError::LastModified)
                 })
                 .transpose()?,
         })
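The variant renames in src/cache/mod.rs are what let the #[allow(clippy::enum_variant_names)] attribute go away: that lint fires when every variant repeats the same prefix or suffix, and here each one restated Invalid even though ImageRequestError already says these are errors. A small sketch, not part of the diff, of how call sites read after the rename:

    // Hypothetical formatting helper showing the post-rename call sites: the
    // enum name supplies the "invalid request" context the prefix used to carry.
    fn describe(err: &ImageRequestError) -> &'static str {
        match err {
            ImageRequestError::ContentType => "invalid content type header",
            ImageRequestError::ContentLength => "invalid content length header",
            ImageRequestError::LastModified => "invalid last modified header",
        }
    }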
@@ -1,24 +1,20 @@
-use std::{collections::HashMap, sync::Arc, time::Duration};
+use std::collections::HashMap;
+use std::sync::Arc;
+use std::time::Duration;
 
-use actix_web::{
-    http::{HeaderMap, HeaderName, HeaderValue},
-    web::Data,
-};
+use actix_web::http::{HeaderMap, HeaderName, HeaderValue};
+use actix_web::web::Data;
 use bytes::Bytes;
 use log::{debug, error, warn};
 use once_cell::sync::Lazy;
 use parking_lot::RwLock;
-use reqwest::{
-    header::{
-        ACCESS_CONTROL_ALLOW_ORIGIN, ACCESS_CONTROL_EXPOSE_HEADERS, CACHE_CONTROL, CONTENT_LENGTH,
-        CONTENT_TYPE, LAST_MODIFIED, X_CONTENT_TYPE_OPTIONS,
-    },
-    Client, StatusCode,
-};
-use tokio::sync::{
-    watch::{channel, Receiver},
-    Notify,
-};
+use reqwest::header::{
+    ACCESS_CONTROL_ALLOW_ORIGIN, ACCESS_CONTROL_EXPOSE_HEADERS, CACHE_CONTROL, CONTENT_LENGTH,
+    CONTENT_TYPE, LAST_MODIFIED, X_CONTENT_TYPE_OPTIONS,
+};
+use reqwest::{Client, StatusCode};
+use tokio::sync::watch::{channel, Receiver};
+use tokio::sync::Notify;
 
 use crate::cache::{Cache, CacheKey, ImageMetadata};
 