More work

Edward Shen, 2021-10-21 18:35:54 -07:00
commit 5d4adc91ed (parent 2c21698841)
Signed by: edward (GPG key ID: 19182661E818369F)
22 changed files with 340 additions and 202 deletions

.gitmodules (vendored, new file)

@@ -0,0 +1,12 @@
+[submodule "web/vendor/MPLUS_FONTS"]
+	path = web/vendor/MPLUS_FONTS
+	url = git@github.com:coz-m/MPLUS_FONTS.git
+[submodule "web/vendor/highlight.js"]
+	path = web/vendor/highlight.js
+	url = git@github.com:highlightjs/highlight.js.git
+[submodule "web/vendor/text-fragments-polyfill"]
+	path = web/vendor/text-fragments-polyfill
+	url = git@github.com:GoogleChromeLabs/text-fragments-polyfill.git
+[submodule "web/vendor/highlightjs-line-numbers.js"]
+	path = web/vendor/highlightjs-line-numbers.js
+	url = git@github.com:wcoder/highlightjs-line-numbers.js.git

Cargo.lock (generated)

@@ -912,8 +912,13 @@ version = "0.1.0"
 dependencies = [
  "anyhow",
  "base64",
+ "bytes",
  "chacha20poly1305",
+ "chrono",
+ "headers",
+ "lazy_static",
  "rand",
+ "serde",
  "sha2",
  "thiserror",
  "url",
@@ -929,7 +934,7 @@ dependencies = [
  "bytes",
  "chrono",
  "headers",
- "lazy_static",
+ "omegaupload-common",
  "rand",
  "rocksdb",
  "serde",


@@ -5,3 +5,7 @@ members = [
     "server",
     "web",
 ]
+
+[profile.release]
+lto = true
+codegen-units = 1


@@ -1,7 +1,7 @@
 [package]
 name = "omegaupload-cli"
 version = "0.1.0"
-edition = "2018"
+edition = "2021"

 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html


@@ -7,8 +7,9 @@ use anyhow::{anyhow, bail, Context, Result};
 use atty::Stream;
 use clap::Clap;
 use omegaupload_common::crypto::{gen_key_nonce, open, seal, Key};
-use omegaupload_common::{base64, hash, ParsedUrl, Url};
+use omegaupload_common::{base64, hash, Expiration, ParsedUrl, Url};
 use reqwest::blocking::Client;
+use reqwest::header::EXPIRES;
 use reqwest::StatusCode;
 use secrecy::{ExposeSecret, SecretString};
@@ -108,6 +109,13 @@ fn handle_download(url: ParsedUrl) -> Result<()> {
         bail!("Got bad response from server: {}", res.status());
     }

+    let expiration_text = dbg!(res.headers())
+        .get(EXPIRES)
+        .and_then(|v| Expiration::try_from(v).ok())
+        .as_ref()
+        .map(ToString::to_string)
+        .unwrap_or_else(|| "This paste will not expire.".to_string());
+
     let mut data = res.bytes()?.as_ref().to_vec();

     if url.needs_password {
@@ -140,5 +148,7 @@ fn handle_download(url: ParsedUrl) -> Result<()> {
         std::io::stdout().write_all(&data)?;
     }

+    eprintln!("{}", expiration_text);
+
     Ok(())
 }


@@ -1,15 +1,20 @@
 [package]
 name = "omegaupload-common"
 version = "0.1.0"
-edition = "2018"
+edition = "2021"

 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

 [dependencies]
 anyhow = "1"
 base64 = "0.13"
+bytes = { version = "*", features = ["serde"] }
 chacha20poly1305 = "0.9"
+chrono = { version = "0.4", features = ["serde"] }
+headers = "*"
+lazy_static = "1"
 rand = "0.8"
+serde = { version = "1", features = ["derive"] }
 sha2 = "0.9"
 thiserror = "1"
 url = "2"


@@ -1,10 +1,15 @@
 #![warn(clippy::nursery, clippy::pedantic)]
+#![deny(unsafe_code)]

 //! Contains common functions and structures used by multiple projects

+use std::fmt::Display;
 use std::str::FromStr;

+use bytes::Bytes;
+use chrono::{DateTime, Duration, Utc};
+use headers::{Header, HeaderName, HeaderValue};
+use lazy_static::lazy_static;
+use serde::{Deserialize, Serialize};
 use sha2::{Digest, Sha256};
 use thiserror::Error;
 pub use url::Url;
@@ -94,7 +99,9 @@ pub mod crypto {
     impl Nonce {
         #[must_use]
         pub fn increment(&self) -> Self {
-            todo!()
+            let mut inner = self.0;
+            inner.as_mut_slice()[0] += 1;
+            Self(inner)
         }

         #[must_use]
@@ -136,9 +143,7 @@ impl From<&str> for PartialParsedUrl {
         for (key, value) in args {
             match (key, value) {
                 ("key", Some(value)) => {
-                    decryption_key = base64::decode(value)
-                        .map(|k| Key::from_slice(&k).clone())
-                        .ok();
+                    decryption_key = base64::decode(value).map(|k| *Key::from_slice(&k)).ok();
                 }
                 ("pw", _) => {
                     needs_password = true;
@@ -203,3 +208,96 @@ impl FromStr for ParsedUrl {
         })
     }
 }
+
+#[derive(Serialize, Deserialize, Clone, Copy, Debug)]
+pub enum Expiration {
+    BurnAfterReading,
+    UnixTime(DateTime<Utc>),
+}
+
+impl Display for Expiration {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            Expiration::BurnAfterReading => {
+                write!(f, "This paste has been burned. You now have the only copy.")
+            }
+            Expiration::UnixTime(time) => write!(
+                f,
+                "{}",
+                time.format("This paste will expire on %A, %B %-d, %Y at %T %Z.")
+            ),
+        }
+    }
+}
+
+lazy_static! {
+    pub static ref EXPIRATION_HEADER_NAME: HeaderName = HeaderName::from_static("burn-after");
+}
+
+impl Header for Expiration {
+    fn name() -> &'static HeaderName {
+        &*EXPIRATION_HEADER_NAME
+    }
+
+    fn decode<'i, I>(values: &mut I) -> Result<Self, headers::Error>
+    where
+        Self: Sized,
+        I: Iterator<Item = &'i HeaderValue>,
+    {
+        match values
+            .next()
+            .ok_or_else(headers::Error::invalid)?
+            .as_bytes()
+        {
+            b"read" => Ok(Self::BurnAfterReading),
+            b"5m" => Ok(Self::UnixTime(Utc::now() + Duration::minutes(5))),
+            b"10m" => Ok(Self::UnixTime(Utc::now() + Duration::minutes(10))),
+            b"1h" => Ok(Self::UnixTime(Utc::now() + Duration::hours(1))),
+            b"1d" => Ok(Self::UnixTime(Utc::now() + Duration::days(1))),
+            // We disallow permanent pastes.
+            _ => Err(headers::Error::invalid()),
+        }
+    }
+
+    fn encode<E: Extend<HeaderValue>>(&self, container: &mut E) {
+        container.extend(std::iter::once(self.into()));
+    }
+}
+
+impl From<&Expiration> for HeaderValue {
+    fn from(expiration: &Expiration) -> Self {
+        unsafe {
+            Self::from_maybe_shared_unchecked(match expiration {
+                Expiration::BurnAfterReading => Bytes::from_static(b"0"),
+                Expiration::UnixTime(duration) => Bytes::from(duration.to_rfc3339()),
+            })
+        }
+    }
+}
+
+impl From<Expiration> for HeaderValue {
+    fn from(expiration: Expiration) -> Self {
+        (&expiration).into()
+    }
+}
+
+pub struct ParseHeaderValueError;
+
+impl TryFrom<&HeaderValue> for Expiration {
+    type Error = ParseHeaderValueError;
+
+    fn try_from(value: &HeaderValue) -> Result<Self, Self::Error> {
+        value
+            .to_str()
+            .map_err(|_| ParseHeaderValueError)?
+            .parse::<DateTime<Utc>>()
+            .map_err(|_| ParseHeaderValueError)
+            .map(Self::UnixTime)
+    }
+}
+
+impl Default for Expiration {
+    fn default() -> Self {
+        Self::UnixTime(Utc::now() + Duration::days(1))
+    }
+}
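
A quick illustration of how a client could use the new header (this snippet is not part of the commit; the endpoint URL and helper name are made up). The CLI already uses reqwest's blocking client, and "read", "5m", "10m", "1h", and "1d" are the only values Expiration::decode accepts:

use reqwest::blocking::Client;

// Hypothetical helper: upload a paste and ask the server to expire it
// after one day via the "burn-after" header defined above.
fn upload_with_expiry(body: Vec<u8>) -> reqwest::Result<reqwest::blocking::Response> {
    Client::new()
        .post("https://paste.example.com/api") // illustrative endpoint, not from this commit
        .header("burn-after", "1d")
        .body(body)
        .send()
}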


@@ -1,11 +1,12 @@
 [package]
 name = "omegaupload-server"
 version = "0.1.0"
-edition = "2018"
+edition = "2021"

 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

 [dependencies]
+omegaupload-common = { path = "../common" }
 anyhow = "1"
 axum = { version = "0.2", features = ["http2", "headers"] }
 bincode = "1"
@@ -15,7 +16,6 @@ bytes = { version = "*", features = ["serde"] }
 chrono = { version = "0.4", features = ["serde"] }
 # We just need to pull in whatever axum is pulling in
 headers = "*"
-lazy_static = "1"
 rand = "0.8"
 rocksdb = { version = "0.17", default_features = false, features = ["zstd"] }
 serde = { version = "1", features = ["derive"] }


@@ -1,6 +1,5 @@
 #![warn(clippy::nursery, clippy::pedantic)]

-use std::sync::atomic::{AtomicBool, Ordering};
 use std::sync::Arc;

 use anyhow::Result;
@@ -10,18 +9,18 @@ use axum::handler::{get, post};
 use axum::http::header::EXPIRES;
 use axum::http::StatusCode;
 use axum::{AddExtensionLayer, Router};
-use chrono::Duration;
+use chrono::Utc;
 use headers::HeaderMap;
+use omegaupload_common::Expiration;
 use rand::thread_rng;
 use rand::Rng;
 use rocksdb::IteratorMode;
-use rocksdb::WriteBatch;
 use rocksdb::{Options, DB};
 use tokio::task;
-use tracing::warn;
 use tracing::{error, instrument};
+use tracing::{info, warn};

-use crate::paste::{Expiration, Paste};
+use crate::paste::Paste;
 use crate::short_code::ShortCode;

 mod paste;
@@ -36,8 +35,7 @@ async fn main() -> Result<()> {
     let db = Arc::new(DB::open_default(DB_PATH)?);

-    let stop_signal = Arc::new(AtomicBool::new(false));
-    task::spawn(cleanup(Arc::clone(&stop_signal), Arc::clone(&db)));
+    set_up_expirations(Arc::clone(&db));

     axum::Server::bind(&"0.0.0.0:8081".parse()?)
         .serve(
@@ -52,12 +50,65 @@ async fn main() -> Result<()> {
         )
         .await?;

-    stop_signal.store(true, Ordering::Release);
-
     // Must be called for correct shutdown
     DB::destroy(&Options::default(), DB_PATH)?;
     Ok(())
 }

+fn set_up_expirations(db: Arc<DB>) {
+    let mut corrupted = 0;
+    let mut expired = 0;
+    let mut pending = 0;
+    let mut permanent = 0;
+
+    info!("Setting up cleanup timers, please wait...");
+
+    for (key, value) in db.iterator(IteratorMode::Start) {
+        let paste = if let Ok(value) = bincode::deserialize::<Paste>(&value) {
+            value
+        } else {
+            corrupted += 1;
+            if let Err(e) = db.delete(key) {
+                warn!("{}", e);
+            }
+            continue;
+        };
+
+        if let Some(Expiration::UnixTime(time)) = paste.expiration {
+            let now = Utc::now();
+
+            if time < now {
+                expired += 1;
+                if let Err(e) = db.delete(key) {
+                    warn!("{}", e);
+                }
+            } else {
+                let sleep_duration = (time - now).to_std().unwrap();
+                pending += 1;
+
+                let db_ref = Arc::clone(&db);
+                task::spawn_blocking(move || async move {
+                    tokio::time::sleep(sleep_duration).await;
+                    if let Err(e) = db_ref.delete(key) {
+                        warn!("{}", e);
+                    }
+                });
+            }
+        } else {
+            permanent += 1;
+        }
+    }
+
+    if corrupted == 0 {
+        info!("No corrupted pastes found.");
+    } else {
+        warn!("Found {} corrupted pastes.", corrupted);
+    }
+
+    info!("Found {} expired pastes.", expired);
+    info!("Found {} active pastes.", pending);
+    info!("Found {} permanent pastes.", permanent);
+    info!("Cleanup timers have been initialized.");
+}
+
 #[instrument(skip(db), err)]
 async fn upload<const N: usize>(
     Extension(db): Extension<Arc<DB>>,
@@ -102,8 +153,30 @@ async fn upload<const N: usize>(
         return Err(StatusCode::INTERNAL_SERVER_ERROR);
     };

-    match task::spawn_blocking(move || db.put(key, value)).await {
-        Ok(Ok(_)) => (),
+    let db_ref = Arc::clone(&db);
+    match task::spawn_blocking(move || db_ref.put(key, value)).await {
+        Ok(Ok(_)) => {
+            if let Some(expires) = maybe_expires {
+                if let Expiration::UnixTime(time) = expires.0 {
+                    let now = Utc::now();
+
+                    if time < now {
+                        if let Err(e) = db.delete(key) {
+                            warn!("{}", e);
+                        }
+                    } else {
+                        let sleep_duration = (time - now).to_std().unwrap();
+
+                        task::spawn_blocking(move || async move {
+                            tokio::time::sleep(sleep_duration).await;
+                            if let Err(e) = db.delete(key) {
+                                warn!("{}", e);
+                            }
+                        });
+                    }
+                }
+            }
+        }
         e => {
             error!("Failed to insert paste into db: {:?}", e);
             return Err(StatusCode::INTERNAL_SERVER_ERROR);
@@ -185,47 +258,3 @@ async fn delete<const N: usize>(
         _ => StatusCode::INTERNAL_SERVER_ERROR,
     }
 }
-
-/// Periodic clean-up task that deletes expired entries.
-async fn cleanup(stop_signal: Arc<AtomicBool>, db: Arc<DB>) {
-    while !stop_signal.load(Ordering::Acquire) {
-        tokio::time::sleep(Duration::minutes(5).to_std().expect("infallible")).await;
-        let mut batch = WriteBatch::default();
-        for (key, value) in db.snapshot().iterator(IteratorMode::Start) {
-            // TODO: only partially decode struct for max perf
-            let join_handle = task::spawn_blocking(move || {
-                bincode::deserialize::<Paste>(&value)
-                    .as_ref()
-                    .map(Paste::expired)
-                    .unwrap_or_default()
-            })
-            .await;
-
-            let should_delete = match join_handle {
-                Ok(should_delete) => should_delete,
-                Err(e) => {
-                    error!("Failed to join thread?! {}", e);
-                    false
-                }
-            };
-
-            if should_delete {
-                batch.delete(key);
-            }
-        }
-
-        let db = Arc::clone(&db);
-        let join_handle = task::spawn_blocking(move || db.write(batch)).await;
-
-        let db_op_res = match join_handle {
-            Ok(res) => res,
-            Err(e) => {
-                error!("Failed to join handle?! {}", e);
-                continue;
-            }
-        };
-
-        if let Err(e) = db_op_res {
-            warn!("Failed to cleanup db: {}", e);
-        }
-    }
-}
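
This hunk replaces the old five-minute polling loop with one delayed deletion per paste (see set_up_expirations above). Stripped of the RocksDB and axum details, the pattern is a task that sleeps until the expiry instant and then deletes the key. A minimal stand-alone sketch, where Store and schedule_expiry are hypothetical stand-ins, not code from this commit:

use std::sync::Arc;

use chrono::{DateTime, Duration, Utc};

// Hypothetical stand-in for the server's Arc<DB> RocksDB handle.
struct Store;

impl Store {
    fn delete(&self, _key: &[u8]) -> Result<(), String> {
        Ok(())
    }
}

// Sleep until `time`, then delete the paste; this mirrors the per-paste
// timers that replace the old cleanup loop.
fn schedule_expiry(db: Arc<Store>, key: Vec<u8>, time: DateTime<Utc>) {
    let delay = (time - Utc::now()).to_std().unwrap_or_default();
    tokio::spawn(async move {
        tokio::time::sleep(delay).await;
        if let Err(e) = db.delete(&key) {
            eprintln!("failed to delete expired paste: {}", e);
        }
    });
}

#[tokio::main]
async fn main() {
    let db = Arc::new(Store);
    schedule_expiry(db, b"abc123".to_vec(), Utc::now() + Duration::seconds(1));
    tokio::time::sleep(std::time::Duration::from_secs(2)).await;
}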


@@ -1,7 +1,6 @@
 use axum::body::Bytes;
-use chrono::{DateTime, Duration, Utc};
-use headers::{Header, HeaderName, HeaderValue};
-use lazy_static::lazy_static;
+use chrono::Utc;
+use omegaupload_common::Expiration;
 use serde::{Deserialize, Serialize};

 #[derive(Serialize, Deserialize)]
@@ -31,65 +30,3 @@ impl Paste {
         matches!(self.expiration, Some(Expiration::BurnAfterReading))
     }
 }
-
-#[derive(Serialize, Deserialize, Clone, Copy, Debug)]
-pub enum Expiration {
-    BurnAfterReading,
-    UnixTime(DateTime<Utc>),
-}
-
-lazy_static! {
-    pub static ref EXPIRATION_HEADER_NAME: HeaderName = HeaderName::from_static("burn-after");
-}
-
-impl Header for Expiration {
-    fn name() -> &'static HeaderName {
-        &*EXPIRATION_HEADER_NAME
-    }
-
-    fn decode<'i, I>(values: &mut I) -> Result<Self, headers::Error>
-    where
-        Self: Sized,
-        I: Iterator<Item = &'i HeaderValue>,
-    {
-        match values
-            .next()
-            .ok_or_else(headers::Error::invalid)?
-            .as_bytes()
-        {
-            b"read" => Ok(Self::BurnAfterReading),
-            b"5m" => Ok(Self::UnixTime(Utc::now() + Duration::minutes(5))),
-            b"10m" => Ok(Self::UnixTime(Utc::now() + Duration::minutes(10))),
-            b"1h" => Ok(Self::UnixTime(Utc::now() + Duration::hours(1))),
-            b"1d" => Ok(Self::UnixTime(Utc::now() + Duration::days(1))),
-            _ => Err(headers::Error::invalid()),
-        }
-    }
-
-    fn encode<E: Extend<HeaderValue>>(&self, container: &mut E) {
-        container.extend(std::iter::once(self.into()));
-    }
-}
-
-impl From<&Expiration> for HeaderValue {
-    fn from(expiration: &Expiration) -> Self {
-        unsafe {
-            HeaderValue::from_maybe_shared_unchecked(match expiration {
-                Expiration::BurnAfterReading => Bytes::from_static(b"0"),
-                Expiration::UnixTime(duration) => Bytes::from(duration.to_rfc3339()),
-            })
-        }
-    }
-}
-
-impl From<Expiration> for HeaderValue {
-    fn from(expiration: Expiration) -> Self {
-        (&expiration).into()
-    }
-}
-
-impl Default for Expiration {
-    fn default() -> Self {
-        Self::UnixTime(Utc::now() + Duration::days(1))
-    }
-}


@@ -1,6 +1,4 @@
-use std::convert::{TryFrom, TryInto};
 use std::fmt::Debug;
-use std::iter::FromIterator;

 use rand::prelude::Distribution;
 use serde::de::{Unexpected, Visitor};
@@ -125,6 +123,7 @@ impl<const N: usize> Distribution<ShortCode<N>> for Generator {
         for c in arr.iter_mut() {
             *c = self.sample(rng);
         }
+
         ShortCode(arr)
     }
 }


@@ -1,7 +1,7 @@
 [package]
 name = "omegaupload-web"
 version = "0.1.0"
-edition = "2018"
+edition = "2021"

 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@@ -16,7 +16,7 @@ downcast-rs = "1"
 gloo-console = "0.1"
 http = "0.2"
 reqwest = { version = "0.11", default_features = false, features = ["tokio-rustls"] }
-web-sys = { version = "0.3", features = ["Request", "Window"] }
+web-sys = { version = "0.3" }
 yew = { version = "0.18", features = ["wasm-bindgen-futures"] }
 yew-router = "0.15"
 yewtil = "0.4"


@@ -5,33 +5,28 @@
     <meta charset="utf-8" />
     <title>Omegaupload</title>

-    <link data-trunk rel="copy-file" href="src/Mplus2-Regular.ttf" dest="/" />
-    <link data-trunk rel="copy-file" href="src/MplusCodeLatin-varwidthweight.ttf" dest="/" />
-    <link data-trunk rel="copy-file" href="src/highlight.min.js" dest="/" />
-    <link data-trunk rel="css" href="src/github-dark.min.css" />
-    <link rel="preload" href="highlight.min.js" as="script" type="application/javascript">
-    <link rel="preload" href="Mplus2-Regular.ttf" as="font" type="font/ttf" crossorigin>
-    <link rel="preload" href="MplusCodeLatin-varwidthweight.ttf" as="font" type="font/ttf" crossorigin>
-    <script src="highlight.min.js"></script>
+    <link data-trunk rel="copy-file" href="vendor/MPLUS_FONTS/fonts/ttf/MplusCodeLatin[wdth,wght].ttf" dest="/" />
+    <link data-trunk rel="copy-file" href="vendor/highlight.min.js" dest="/" />
+    <link data-trunk rel="copy-file" href="vendor/highlightjs-line-numbers.js/dist/highlightjs-line-numbers.min.js"
+        dest="/" />
+    <link data-trunk rel="copy-file" href="src/reload_on_hash_change.js" dest="/" />
+    <link data-trunk rel="css" href="vendor/highlight.js/src/styles/github-dark.css" />
+
+    <script src="reload_on_hash_change.js" async></script>
+    <script src="highlight.min.js" defer></script>
+    <script src="highlightjs-line-numbers.min.js" defer></script>

     <style>
-        @font-face {
-            font-family: "Mplus2 Regular";
-            src: url("./Mplus2-Regular.ttf") format("truetype");
-        }
-
         @font-face {
             font-family: "Mplus Code";
-            src: url("./MplusCodeLatin-varwidthweight.ttf") format("truetype");
+            src: url("./MplusCodeLatin[wdth,wght].ttf") format("truetype");
         }

         body {
             background-color: #404040;
-        }
-
-        header.banner {
-            font-family: 'Mplus2 Regular', sans-serif;
+            font-family: 'Mplus Code', sans-serif;
+            margin: 0;
         }

         .paste {
@@ -46,6 +41,28 @@
         .hljs {
             font-family: 'Mplus Code', sans-serif;
         }
+
+        .hljs-ln td.hljs-ln-numbers {
+            text-align: right;
+            padding-right: 1em;
+        }
+
+        pre header {
+            user-select: none;
+            margin: 1em;
+        }
+
+        hr {
+            margin: 1em;
+        }
+
+        .error {
+            height: 100vh;
+            margin: 0;
+            display: flex;
+            align-items: center;
+            justify-content: center;
+        }
     </style>
 </head>

Binary file not shown.


@@ -1,10 +0,0 @@
-pre code.hljs{display:block;overflow-x:auto;padding:1em}code.hljs{padding:3px 5px}/*!
-  Theme: GitHub Dark
-  Description: Dark theme as seen on github.com
-  Author: github.com
-  Maintainer: @Hirse
-  Updated: 2021-05-15
-
-  Outdated base version: https://github.com/primer/github-syntax-dark
-  Current colors taken from GitHub's CSS
-*/.hljs{color:#c9d1d9;background:#0d1117}.hljs-doctag,.hljs-keyword,.hljs-meta .hljs-keyword,.hljs-template-tag,.hljs-template-variable,.hljs-type,.hljs-variable.language_{color:#ff7b72}.hljs-title,.hljs-title.class_,.hljs-title.class_.inherited__,.hljs-title.function_{color:#d2a8ff}.hljs-attr,.hljs-attribute,.hljs-literal,.hljs-meta,.hljs-number,.hljs-operator,.hljs-selector-attr,.hljs-selector-class,.hljs-selector-id,.hljs-variable{color:#79c0ff}.hljs-meta .hljs-string,.hljs-regexp,.hljs-string{color:#a5d6ff}.hljs-built_in,.hljs-symbol{color:#ffa657}.hljs-code,.hljs-comment,.hljs-formula{color:#8b949e}.hljs-name,.hljs-quote,.hljs-selector-pseudo,.hljs-selector-tag{color:#7ee787}.hljs-subst{color:#c9d1d9}.hljs-section{color:#1f6feb;font-weight:700}.hljs-bullet{color:#f2cc60}.hljs-emphasis{color:#c9d1d9;font-style:italic}.hljs-strong{color:#c9d1d9;font-weight:700}.hljs-addition{color:#aff5b4;background-color:#033a16}.hljs-deletion{color:#ffdcd7;background-color:#67060c}


@@ -1,4 +1,5 @@
-use std::convert::TryFrom;
+#![warn(clippy::nursery, clippy::pedantic)]
+
 use std::fmt::Debug;
 use std::str::FromStr;
@@ -54,6 +55,7 @@ enum Route {
     Path(String),
 }

+#[allow(clippy::needless_pass_by_value)]
 fn render_route(route: Route) -> Html {
     match route {
         Route::Index => html! {
@@ -82,9 +84,9 @@ impl Component for Paste {
         let url = String::from(window().location().to_string());
         let request_uri = {
             let mut uri_parts = url.parse::<Uri>().unwrap().into_parts();
-            uri_parts.path_and_query.as_mut().map(|parts| {
-                *parts = PathAndQuery::from_str(&format!("/api{}", parts.path())).unwrap()
-            });
+            if let Some(parts) = uri_parts.path_and_query.as_mut() {
+                *parts = PathAndQuery::from_str(&format!("/api{}", parts.path())).unwrap();
+            }
             Uri::from_parts(uri_parts).unwrap()
         };
@@ -100,13 +102,13 @@ impl Component for Paste {
                     Ok(bytes) => PastePartial::new(
                         bytes,
                         expires,
-                        url.split_once('#')
+                        &url.split_once('#')
                             .map(|(_, fragment)| PartialParsedUrl::from(fragment))
                             .unwrap_or_default(),
                         link_clone,
                     ),
                     Err(e) => {
-                        return Box::new(PasteError(anyhow!("Got resp error: {}", e)))
+                        return Box::new(PasteError(anyhow!("Got {}.", e)))
                             as Box<dyn PasteState>
                     }
                 };
@@ -117,11 +119,16 @@ impl Component for Paste {
                     Box::new(partial) as Box<dyn PasteState>
                 }
             }
-            Ok(err) => Box::new(PasteError(anyhow!("Got resp error: {}", err.status())))
-                as Box<dyn PasteState>,
-            Err(err) => {
-                Box::new(PasteError(anyhow!("Got resp error: {}", err))) as Box<dyn PasteState>
-            }
+            Ok(resp) if resp.status() == StatusCode::NOT_FOUND => {
+                Box::new(PasteNotFound) as Box<dyn PasteState>
+            }
+            Ok(resp) if resp.status() == StatusCode::BAD_REQUEST => {
+                Box::new(PasteBadRequest) as Box<dyn PasteState>
+            }
+            Ok(err) => {
+                Box::new(PasteError(anyhow!("Got {}.", err.status()))) as Box<dyn PasteState>
+            }
+            Err(err) => Box::new(PasteError(anyhow!("Got {}.", err))) as Box<dyn PasteState>,
         }
     });

        Self {
@@ -147,13 +154,23 @@ impl Component for Paste {
         if self.state.is::<PasteNotFound>() {
             return html! {
-                <p>{ "Either the paste has been burned or one never existed." }</p>
+                <section class={"hljs error"}>
+                    <p>{ "Either the paste has been burned or one never existed." }</p>
+                </section>
+            };
+        }
+
+        if self.state.is::<PasteBadRequest>() {
+            return html! {
+                <section class={"hljs error"}>
+                    <p>{ "Bad Request. Is this a valid paste URL?" }</p>
+                </section>
             };
         }

         if let Some(error) = self.state.downcast_ref::<PasteError>() {
             return html! {
-                <p>{ error.0.to_string() }</p>
+                <section class={"hljs error"}><p>{ error.0.to_string() }</p></section>
             };
         }
@@ -171,9 +188,6 @@ impl Component for Paste {
     }
 }

-struct PasteLoading;
-struct PasteNotFound;
-
 struct PasteError(anyhow::Error);

 #[derive(Properties, Clone, Debug)]
@@ -198,17 +212,29 @@ struct PasteComplete {
 trait PasteState: Downcast {}
 impl_downcast!(PasteState);

-impl PasteState for PasteLoading {}
-impl PasteState for PasteNotFound {}
 impl PasteState for PasteError {}
 impl PasteState for PastePartial {}
 impl PasteState for PasteComplete {}

+macro_rules! impl_paste_type_state {
+    (
+        $($state:ident),* $(,)?
+    ) => {
+        $(
+            struct $state;
+            impl PasteState for $state {}
+        )*
+    };
+}
+
+impl_paste_type_state!(PasteLoading, PasteNotFound, PasteBadRequest);
+
 impl PastePartial {
     fn new(
         data: Bytes,
         expires: Option<Expiration>,
-        partial_parsed_url: PartialParsedUrl,
+        partial_parsed_url: &PartialParsedUrl,
         parent: ComponentLink<Paste>,
     ) -> Self {
         Self {
@@ -251,7 +277,7 @@ impl Component for PastePartial {
                 || (!self.needs_pw && maybe_password.is_none()) =>
             {
                 let data = self.data.clone();
-                let expires = self.expires.clone();
+                let expires = self.expires;
                 self.parent.callback_once(move |Nothing| {
                     Box::new(PasteComplete::new(
                         data,
@@ -265,7 +291,8 @@ impl Component for PastePartial {
             _ => (),
         }

-        // parent should re-render so this element should be dropped.
+        // parent should re-render so this element should be dropped; no point
+        // in saying this needs to be re-rendered.
         false
     }
@@ -293,7 +320,7 @@ impl TryFrom<PastePartial> for PasteComplete {
                 password: Some(password),
                 needs_pw: true,
                 ..
-            } => Ok(PasteComplete {
+            } => Ok(Self {
                 data,
                 expires,
                 key,
@@ -307,7 +334,7 @@ impl TryFrom<PastePartial> for PasteComplete {
                 nonce: Some(nonce),
                 needs_pw: false,
                 ..
-            } => Ok(PasteComplete {
+            } => Ok(Self {
                 data,
                 key,
                 expires,
@@ -337,30 +364,31 @@ impl PasteComplete {
     }

     fn view(&self) -> Html {
-        let stage_one = if let Some(password) = self.password {
-            open(&self.data, &self.nonce.increment(), &password).unwrap()
-        } else {
-            self.data.to_vec()
-        };
+        let stage_one = self.password.map_or_else(
+            || self.data.to_vec(),
+            |password| open(&self.data, &self.nonce.increment(), &password).unwrap(),
+        );

         let decrypted = open(&stage_one, &self.nonce, &self.key).unwrap();

         if let Ok(str) = String::from_utf8(decrypted) {
             html! {
                 <>
-                <header class={"hljs paste banner"}>{
-                    if let Some(expires) = &self.expires {
-                        match expires {
-                            Expiration::BurnAfterReading => "This paste has been burned. You now have the only copy.".to_string(),
-                            Expiration::UnixTime(time) => time.format("This paste will expire on %A, %B %-d, %Y at %T %Z.").to_string(),
-                        }
-                    } else {
-                        "This paste will not expire.".to_string()
-                    }
-                }</header>
-                <pre class={"paste"}><code>{str}</code></pre>
+                <pre class={"paste"}>
+                    <header class={"hljs"}>
+                    {
+                        self.expires.as_ref().map(ToString::to_string).unwrap_or_else(||
+                            "This paste will not expire.".to_string()
+                        )
+                    }
+                    </header>
+                    <hr class={"hljs"} />
+                    <code>{str}</code>
+                </pre>

-                <script>{"hljs.highlightAll();"}</script>
+                <script>{"
+                    hljs.highlightAll();
+                    hljs.initLineNumbersOnLoad();
+                "}</script>
                 </>
             }
         } else {
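
For reference, the impl_paste_type_state! macro introduced above expands to one unit struct plus a PasteState impl per listed name, roughly as sketched below (the trait is simplified here; the real PasteState also requires Downcast):

// Rough expansion of impl_paste_type_state!(PasteLoading, PasteNotFound, PasteBadRequest).
trait PasteState {}

struct PasteLoading;
impl PasteState for PasteLoading {}

struct PasteNotFound;
impl PasteState for PasteNotFound {}

struct PasteBadRequest;
impl PasteState for PasteBadRequest {}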


@@ -0,0 +1 @@
+window.addEventListener("hashchange", () => location.reload());

web/vendor/MPLUS_FONTS (vendored submodule)

@@ -0,0 +1 @@
+Subproject commit 6ee9e7ca06f40f2303d839ccac8bfb8b56d2b3cd

web/vendor/highlight.js (vendored submodule)

@@ -0,0 +1 @@
+Subproject commit 257cfee803426333af25b68da17601aec2663172

@@ -0,0 +1 @@
+Subproject commit 8480334a29f01ad8b7fb0497c65285872781ee96