misc: try to be more strict in URL filter

meeg_leeto 2022-04-30 01:57:29 +01:00
parent df0cff37af
commit a9714fe51a
1 changed file with 22 additions and 2 deletions


@@ -223,7 +223,7 @@ mod service {
use validators::prelude::*;
#[derive(Validator)]
-#[validator(http_url(local(Allow)))]
+#[validator(http_url(local(NotAllow)))]
#[derive(Clone, Debug)]
/// A struct representing a URL.
pub struct HttpUrl {
@ -237,6 +237,24 @@ mod service {
}
}
+impl HttpUrl {
+    /// Transform this into an `Err(())` if the URL does not meet
+    /// stricter criteria.
+    pub fn strict(self) -> Result<Self, ()> {
+        // Don't even bother with URLs that don't have hosts.
+        if !self.url.has_host() {
+            return Err(());
+        }
+        // URLs that cannot be a base are weird (UNIX sockets, data: URLs).
+        if self.url.cannot_be_a_base() {
+            return Err(());
+        }
+        Ok(self)
+    }
+}
/// Database management, including messaging and work stealing.
pub mod db {
use super::{slug::Slug, HttpUrl};
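
For illustration, here is a minimal sketch (not part of the commit) of what the two new checks reject, exercised directly on `url::Url` values, which is the type assumed to back the `url` field used in `strict()`:

use url::Url;

fn main() {
    // An ordinary absolute URL has a host and can be a base, so strict() would keep it.
    let ok = Url::parse("https://example.com/some/page").unwrap();
    assert!(ok.has_host());
    assert!(!ok.cannot_be_a_base());

    // No host at all: the first check would return Err(()).
    let no_host = Url::parse("unix:/run/app.sock").unwrap();
    assert!(!no_host.has_host());

    // A data: URL cannot be a base, so the second check would return Err(()).
    let opaque = Url::parse("data:text/plain,hello").unwrap();
    assert!(opaque.cannot_be_a_base());
}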
@@ -679,7 +697,9 @@ async fn shorten(
.into(),
)
})?;
-HttpUrl::parse_str(url_str)
+HttpUrl::parse_string(url_str)
+    .map_err(|_| (warp::http::StatusCode::BAD_REQUEST, "Invalid URL.".into()))?
+    .strict()
+    .map_err(|_| (warp::http::StatusCode::BAD_REQUEST, "Invalid URL.".into()))?
};
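
The shorten handler now funnels both failure modes into the same 400 response. A condensed sketch of that call pattern, with the warp status/message tuple replaced by a plain string error and a hypothetical helper name, assuming `parse_string` is the constructor generated by the `Validator` derive above:

fn validate_submitted_url(url_str: &str) -> Result<HttpUrl, &'static str> {
    HttpUrl::parse_string(url_str)      // scheme / local-address validation from the derive
        .map_err(|_| "Invalid URL.")?
        .strict()                       // the extra host and cannot-be-a-base checks added here
        .map_err(|_| "Invalid URL.")
}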