Compare commits
2 commits
41d7feb4df...732e2bd2f3

| Author | SHA1 | Date |
| --- | --- | --- |
|  | 732e2bd2f3 |  |
|  | 4e2cfcfac6 |  |
6 changed files with 44 additions and 25 deletions
Cargo.lock (generated)
@@ -1030,6 +1030,7 @@ dependencies = [
  "gloo-console",
  "http",
  "js-sys",
+ "mime_guess",
  "omegaupload-common 0.1.0",
  "reqwasm",
  "serde",

@@ -162,14 +162,14 @@ fn handle_upload(
 
     url.set_fragment(Some(&fragment.build().expose_secret()));
 
-    println!("{}", url);
+    println!("{url}");
 
     Ok(())
 }
 
 fn handle_download(mut url: ParsedUrl) -> Result<()> {
     url.sanitized_url
-        .set_path(&format!("{}{}", API_ENDPOINT, url.sanitized_url.path()));
+        .set_path(&format!("{API_ENDPOINT}{}", url.sanitized_url.path()));
     let res = Client::new()
         .get(url.sanitized_url)
         .send()
@@ -212,7 +212,7 @@ fn handle_download(mut url: ParsedUrl) -> Result<()> {
         std::io::stdout().write_all(&data)?;
     }
 
-    eprintln!("{}", expiration_text);
+    eprintln!("{expiration_text}");
 
     Ok(())
 }

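Most of the changes in this compare swap positional format arguments for Rust's captured identifiers in format strings (stable since Rust 1.58). A minimal, standalone illustration of the before/after; the variable names here are invented for the example:

    fn main() {
        let url = "https://paste.example/abc";
        // Before: positional argument.
        println!("{}", url);
        // After: the identifier is captured directly inside the braces.
        println!("{url}");

        // The same capture works for Debug formatting, as in the
        // warn!/error!/trace! changes in the server hunks below.
        let err = std::io::Error::from(std::io::ErrorKind::NotFound);
        eprintln!("Failed to insert paste into db: {err:?}");
    }
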
@@ -97,7 +97,7 @@ async fn main() -> Result<()> {
         .route("/:code", get(|| async { INDEX_PAGE }))
         .nest("/static", root_service)
         .route(
-            &format!("{}{}", API_ENDPOINT, "/:code"),
+            &format!("{API_ENDPOINT}/:code"),
             get(paste::<SHORT_CODE_SIZE>).delete(delete::<SHORT_CODE_SIZE>),
         )
         .layer(AddExtensionLayer::new(db))
@@ -163,11 +163,11 @@ fn set_up_expirations<const N: usize>(db: &Arc<DB>) {
     if corrupted == 0 {
         info!("No corrupted pastes found.");
     } else {
-        warn!("Found {} corrupted pastes.", corrupted);
+        warn!("Found {corrupted} corrupted pastes.");
     }
 
-    info!("Found {} expired pastes.", expired);
-    info!("Found {} active pastes.", pending);
+    info!("Found {expired} expired pastes.");
+    info!("Found {pending} active pastes.");
     info!("Cleanup timers have been initialized.");
 }
 
@@ -196,7 +196,7 @@ async fn upload<const N: usize>(
     if let Some(header) = maybe_expires {
         if let Expiration::UnixTime(time) = header.0 {
             if (time - Utc::now()) > *MAX_PASTE_AGE {
-                warn!("{} exceeds allowed paste lifetime", time);
+                warn!("{time} exceeds allowed paste lifetime");
                 return Err(StatusCode::BAD_REQUEST);
             }
         }
@@ -223,7 +223,7 @@ async fn upload<const N: usize>(
             .await;
         if matches!(query, Ok(false)) {
             new_key = Some(key);
-            trace!("Found new key after {} attempts.", i);
+            trace!("Found new key after {i} attempts.");
             break;
         }
     }
@@ -271,7 +271,7 @@ async fn upload<const N: usize>(
             }
         }
         e => {
-            error!("Failed to insert paste into db: {:?}", e);
+            error!("Failed to insert paste into db: {e:?}");
            return Err(StatusCode::INTERNAL_SERVER_ERROR);
        }
    }
@@ -289,7 +289,7 @@ async fn paste<const N: usize>(
     let metadata: Expiration = {
         let meta_cf = db.cf_handle(META_CF_NAME).unwrap();
         let query_result = db.get_cf(meta_cf, key).map_err(|e| {
-            error!("Failed to fetch initial query: {}", e);
+            error!("Failed to fetch initial query: {e}");
             StatusCode::INTERNAL_SERVER_ERROR
         })?;
 
@@ -308,7 +308,7 @@ async fn paste<const N: usize>(
     if let Expiration::UnixTime(expires) = metadata {
         if expires < Utc::now() {
             delete_entry(db, url.as_bytes()).await.map_err(|e| {
-                error!("Failed to join handle: {}", e);
+                error!("Failed to join handle: {e}");
                 StatusCode::INTERNAL_SERVER_ERROR
             })??;
             return Err(StatusCode::NOT_FOUND);
@@ -319,7 +319,7 @@ async fn paste<const N: usize>(
     // not sure if perf of get_pinned is better than spawn_blocking
     let blob_cf = db.cf_handle(BLOB_CF_NAME).unwrap();
     let query_result = db.get_pinned_cf(blob_cf, key).map_err(|e| {
-        error!("Failed to fetch initial query: {}", e);
+        error!("Failed to fetch initial query: {e}");
         StatusCode::INTERNAL_SERVER_ERROR
     })?;
 
@@ -340,7 +340,7 @@ async fn paste<const N: usize>(
         Expiration::BurnAfterReading | Expiration::BurnAfterReadingWithDeadline(_)
     ) {
         delete_entry(db, key).await.map_err(|e| {
-            error!("Failed to join handle: {}", e);
+            error!("Failed to join handle: {e}");
             StatusCode::INTERNAL_SERVER_ERROR
         })??;
     }
@@ -367,11 +367,11 @@ fn delete_entry<const N: usize>(db: Arc<DB>, key: [u8; N]) -> JoinHandle<Result<
         let blob_cf = db.cf_handle(BLOB_CF_NAME).unwrap();
         let meta_cf = db.cf_handle(META_CF_NAME).unwrap();
         if let Err(e) = db.delete_cf(blob_cf, &key) {
-            warn!("{}", e);
+            warn!("{e}");
             return Err(StatusCode::INTERNAL_SERVER_ERROR);
         }
         if let Err(e) = db.delete_cf(meta_cf, &key) {
-            warn!("{}", e);
+            warn!("{e}");
             return Err(StatusCode::INTERNAL_SERVER_ERROR);
         }
         Ok(())

@@ -17,6 +17,7 @@ console_error_panic_hook = "0.1"
 gloo-console = "0.1"
 http = "0.2"
 js-sys = "0.3"
+mime_guess = "2"
 reqwasm = "0.2"
 tree_magic_mini = { version = "3", features = ["with-gpl-data"] }
 serde = { version = "1.0", features = ["derive"] }

@@ -55,10 +55,11 @@ pub fn decrypt(
     mut container: Vec<u8>,
     key: &Secret<Key>,
     maybe_password: Option<SecretVec<u8>>,
+    name_hint: Option<&str>,
 ) -> Result<(DecryptedData, MimeType), Error> {
     open_in_place(&mut container, key, maybe_password)?;
 
-    let mime_type = tree_magic_mini::from_u8(&container);
+    let mime_type = guess_mime_type(name_hint, &container);
     log!("[rs] Mime type:", mime_type);
 
     log!("[rs] Blob conversion started.");
@@ -112,7 +113,7 @@ fn handle_zip_archive(blob: Arc<Blob>, container: Vec<u8>) -> DecryptedData {
             log!("Unsupported: ", s.to_string());
         }
         _ => {
-            log!(format!("Error: {}", err));
+            log!(format!("Error: {err}"));
         }
     },
 }
@@ -148,6 +149,23 @@ fn handle_gzip(blob: Arc<Blob>, container: Vec<u8>) -> DecryptedData {
     }
 }
 
+fn guess_mime_type(name_hint: Option<&str>, data: &[u8]) -> &'static str {
+    if let Some(name) = name_hint {
+        let guesses = mime_guess::from_path(name);
+        if let Some(mime_type) = guesses.first_raw() {
+            // Found at least one, but generally speaking this crate only
+            // uses authoritative sources (RFCs), so generally speaking
+            // there's only one association, and multiple are due to legacy
+            // support. As a result, we can probably just get the first one.
+            log!("[rs] Mime type inferred from extension.");
+            return mime_type;
+        } else {
+            log!("[rs] No mime type found for extension, falling back to introspection.");
+        }
+    }
+    tree_magic_mini::from_u8(&data)
+}
+
 #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
 enum ContentType {
     Text,

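The hunks above add a name-hint path to mime detection: try mime_guess on the file extension first, then fall back to tree_magic_mini content sniffing. A minimal sketch of the same fallback outside the wasm/log! context, assuming mime_guess = "2" and tree_magic_mini = "3" (with its bundled magic data) as pinned in the Cargo.toml hunk; the file name is invented for the example:

    // Prefer the extension-based guess; fall back to byte-level introspection.
    fn sniff(name_hint: Option<&str>, data: &[u8]) -> &'static str {
        if let Some(name) = name_hint {
            if let Some(mime) = mime_guess::from_path(name).first_raw() {
                return mime;
            }
        }
        tree_magic_mini::from_u8(data)
    }

    fn main() {
        let png_magic = [0x89, b'P', b'N', b'G', 0x0D, 0x0A, 0x1A, 0x0A];
        // The extension wins when one is supplied and known...
        assert_eq!(sniff(Some("photo.png"), &png_magic), "image/png");
        // ...otherwise the bytes themselves are inspected.
        println!("{}", sniff(None, &png_magic));
    }
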
@@ -114,7 +114,7 @@ fn main() {
         Ok(partial_parsed_url) => partial_parsed_url,
         Err(e) => {
             error!("Failed to parse text fragment; bailing.");
-            render_message(format!("Invalid paste link: {}", e).into());
+            render_message(format!("Invalid paste link: {e}").into());
             return;
         }
     };
@@ -148,7 +148,7 @@ fn main() {
             }
             e => {
                 render_message("Internal error occurred.".into());
-                error!(format!("Error occurred at pw prompt: {:?}", e));
+                error!(format!("Error occurred at pw prompt: {e:?}"));
                 return;
             }
         }
@@ -191,8 +191,7 @@ async fn fetch_resources(
                 "Network failure: Failed to completely read encryption paste.".into(),
             );
             bail!(format!(
-                "JsFuture returned an error while fetching resp buffer: {:?}",
-                e
+                "JsFuture returned an error while fetching resp buffer: {e:?}",
             ));
         }
     };
@@ -204,14 +203,14 @@ async fn fetch_resources(
         return Ok(());
     }
 
-    let (decrypted, mimetype) = match decrypt(data, &key, password) {
+    let (decrypted, mimetype) = match decrypt(data, &key, password, name.as_deref()) {
         Ok(data) => data,
         Err(e) => {
             let msg = match e {
                 CryptoError::Password => "The provided password was incorrect.",
                 CryptoError::SecretKey => "The secret key in the URL was incorrect.",
                 ref e => {
-                    log!(format!("Bad kdf or corrupted blob: {}", e));
+                    log!(format!("Bad kdf or corrupted blob: {e}"));
                     "An internal error occurred."
                 }
             };
@@ -248,7 +247,7 @@ async fn fetch_resources(
             render_message(err.status_text().into());
         }
         Err(err) => {
-            render_message(format!("{}", err).into());
+            render_message(format!("{err}").into());
         }
     }
 