Browse Source
More fixes - fixed docker builds - fixed mentions regex test - fixed DATABASE_URL stuff - change schema path in diesel.toml Address review comments - add jsonb column back into activity table - remove authors field from cargo.toml - adjust LEMMY_DATABASE_URL env var usage - rename all occurrences of LEMMY_DATABASE_URL to DATABASE_URL Decouple utils and db Split code into cargo workspaces Co-authored-by: Felix Ableitner <me@nutomic.com> Reviewed-on: https://yerbamate.dev/LemmyNet/lemmy/pulls/67 admin_sort_order

committed by
dessalines

63 changed files with 1071 additions and 921 deletions
@ -0,0 +1,15 @@ |
|||
[package] |
|||
name = "lemmy_db" |
|||
version = "0.1.0" |
|||
edition = "2018" |
|||
|
|||
[dependencies] |
|||
diesel = { version = "1.4.4", features = ["postgres","chrono","r2d2","64-column-tables","serde_json"] } |
|||
chrono = { version = "0.4.7", features = ["serde"] } |
|||
serde = { version = "1.0.105", features = ["derive"] } |
|||
serde_json = { version = "1.0.52", features = ["preserve_order"]} |
|||
strum = "0.18.0" |
|||
strum_macros = "0.18.0" |
|||
log = "0.4.0" |
|||
sha2 = "0.9" |
|||
bcrypt = "0.8.0" |
@ -1,5 +1,5 @@ |
|||
use super::community_view::community_fast_view::BoxedQuery; |
|||
use crate::db::{fuzzy_search, limit_and_offset, MaybeOptional, SortType}; |
|||
use crate::{fuzzy_search, limit_and_offset, MaybeOptional, SortType}; |
|||
use diesel::{pg::Pg, result::Error, *}; |
|||
use serde::{Deserialize, Serialize}; |
|||
|
@ -1,4 +1,4 @@ |
|||
use crate::db::limit_and_offset; |
|||
use crate::limit_and_offset; |
|||
use diesel::{result::Error, *}; |
|||
use serde::{Deserialize, Serialize}; |
|||
|
@ -1,4 +1,4 @@ |
|||
use crate::db::{limit_and_offset, MaybeOptional}; |
|||
use crate::{limit_and_offset, MaybeOptional}; |
|||
use diesel::{pg::Pg, result::Error, *}; |
|||
use serde::{Deserialize, Serialize}; |
|||
|
@ -1,4 +1,4 @@ |
|||
use crate::{db::Crud, schema::site}; |
|||
use crate::{schema::site, Crud}; |
|||
use diesel::{dsl::*, result::Error, *}; |
|||
use serde::{Deserialize, Serialize}; |
|||
|
@ -1,4 +1,4 @@ |
|||
use crate::db::{limit_and_offset, MaybeOptional, SortType}; |
|||
use crate::{limit_and_offset, MaybeOptional, SortType}; |
|||
use diesel::{dsl::*, pg::Pg, result::Error, *}; |
|||
use serde::{Deserialize, Serialize}; |
|||
|
@ -1,5 +1,5 @@ |
|||
use super::user_view::user_fast::BoxedQuery; |
|||
use crate::db::{fuzzy_search, limit_and_offset, MaybeOptional, SortType}; |
|||
use crate::{fuzzy_search, limit_and_offset, MaybeOptional, SortType}; |
|||
use diesel::{dsl::*, pg::Pg, result::Error, *}; |
|||
use serde::{Deserialize, Serialize}; |
|||
|
@ -0,0 +1,22 @@ |
|||
[package] |
|||
name = "lemmy_utils" |
|||
version = "0.1.0" |
|||
edition = "2018" |
|||
|
|||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html |
|||
|
|||
[dependencies] |
|||
regex = "1.3.5" |
|||
config = { version = "0.10.1", default-features = false, features = ["hjson"] } |
|||
chrono = { version = "0.4.7", features = ["serde"] } |
|||
lettre = "0.9.3" |
|||
lettre_email = "0.9.4" |
|||
log = "0.4.0" |
|||
itertools = "0.9.0" |
|||
rand = "0.7.3" |
|||
serde = { version = "1.0.105", features = ["derive"] } |
|||
serde_json = { version = "1.0.52", features = ["preserve_order"]} |
|||
comrak = "0.7" |
|||
lazy_static = "1.3.0" |
|||
openssl = "0.10" |
|||
url = { version = "2.1.1", features = ["serde"] } |
@ -0,0 +1,324 @@ |
|||
#[macro_use] |
|||
pub extern crate lazy_static; |
|||
pub extern crate comrak; |
|||
pub extern crate lettre; |
|||
pub extern crate lettre_email; |
|||
pub extern crate openssl; |
|||
pub extern crate rand; |
|||
pub extern crate regex; |
|||
pub extern crate serde_json; |
|||
pub extern crate url; |
|||
|
|||
pub mod settings; |
|||
|
|||
use crate::settings::Settings; |
|||
use chrono::{DateTime, FixedOffset, Local, NaiveDateTime, Utc}; |
|||
use itertools::Itertools; |
|||
use lettre::{ |
|||
smtp::{ |
|||
authentication::{Credentials, Mechanism}, |
|||
extension::ClientId, |
|||
ConnectionReuseParameters, |
|||
}, |
|||
ClientSecurity, |
|||
SmtpClient, |
|||
Transport, |
|||
}; |
|||
use lettre_email::Email; |
|||
use openssl::{pkey::PKey, rsa::Rsa}; |
|||
use rand::{distributions::Alphanumeric, thread_rng, Rng}; |
|||
use regex::{Regex, RegexBuilder}; |
|||
use std::io::{Error, ErrorKind}; |
|||
use url::Url; |
|||
|
|||
pub fn to_datetime_utc(ndt: NaiveDateTime) -> DateTime<Utc> { |
|||
DateTime::<Utc>::from_utc(ndt, Utc) |
|||
} |
|||
|
|||
pub fn naive_from_unix(time: i64) -> NaiveDateTime { |
|||
NaiveDateTime::from_timestamp(time, 0) |
|||
} |
|||
|
|||
pub fn convert_datetime(datetime: NaiveDateTime) -> DateTime<FixedOffset> { |
|||
let now = Local::now(); |
|||
DateTime::<FixedOffset>::from_utc(datetime, *now.offset()) |
|||
} |
|||
|
|||
pub fn is_email_regex(test: &str) -> bool { |
|||
EMAIL_REGEX.is_match(test) |
|||
} |
|||
|
|||
pub fn remove_slurs(test: &str) -> String { |
|||
SLUR_REGEX.replace_all(test, "*removed*").to_string() |
|||
} |
|||
|
|||
pub fn slur_check(test: &str) -> Result<(), Vec<&str>> { |
|||
let mut matches: Vec<&str> = SLUR_REGEX.find_iter(test).map(|mat| mat.as_str()).collect(); |
|||
|
|||
// Unique
|
|||
matches.sort_unstable(); |
|||
matches.dedup(); |
|||
|
|||
if matches.is_empty() { |
|||
Ok(()) |
|||
} else { |
|||
Err(matches) |
|||
} |
|||
} |
|||
|
|||
/// Renders a slur list as the user-facing message `"No slurs - a, b, c"`.
pub fn slurs_vec_to_str(slurs: Vec<&str>) -> String {
  format!("No slurs - {}", slurs.join(", "))
}
|||
|
|||
pub fn generate_random_string() -> String { |
|||
thread_rng().sample_iter(&Alphanumeric).take(30).collect() |
|||
} |
|||
|
|||
pub fn send_email( |
|||
subject: &str, |
|||
to_email: &str, |
|||
to_username: &str, |
|||
html: &str, |
|||
) -> Result<(), String> { |
|||
let email_config = Settings::get().email.ok_or("no_email_setup")?; |
|||
|
|||
let email = Email::builder() |
|||
.to((to_email, to_username)) |
|||
.from(email_config.smtp_from_address.to_owned()) |
|||
.subject(subject) |
|||
.html(html) |
|||
.build() |
|||
.unwrap(); |
|||
|
|||
let mailer = if email_config.use_tls { |
|||
SmtpClient::new_simple(&email_config.smtp_server).unwrap() |
|||
} else { |
|||
SmtpClient::new(&email_config.smtp_server, ClientSecurity::None).unwrap() |
|||
} |
|||
.hello_name(ClientId::Domain(Settings::get().hostname)) |
|||
.smtp_utf8(true) |
|||
.authentication_mechanism(Mechanism::Plain) |
|||
.connection_reuse(ConnectionReuseParameters::ReuseUnlimited); |
|||
let mailer = if let (Some(login), Some(password)) = |
|||
(&email_config.smtp_login, &email_config.smtp_password) |
|||
{ |
|||
mailer.credentials(Credentials::new(login.to_owned(), password.to_owned())) |
|||
} else { |
|||
mailer |
|||
}; |
|||
|
|||
let mut transport = mailer.transport(); |
|||
let result = transport.send(email.into()); |
|||
transport.close(); |
|||
|
|||
match result { |
|||
Ok(_) => Ok(()), |
|||
Err(e) => Err(e.to_string()), |
|||
} |
|||
} |
|||
|
|||
pub fn markdown_to_html(text: &str) -> String { |
|||
comrak::markdown_to_html(text, &comrak::ComrakOptions::default()) |
|||
} |
|||
|
|||
// TODO nothing is done with community / group webfingers yet, so just ignore those for now
|
|||
/// A user mention of the form `@name@domain`, scraped from post/comment text
/// (see `scrape_text_for_mentions`). `Eq`/`Hash` enable de-duplication.
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct MentionData {
  // Username part (between the two `@`s).
  pub name: String,
  // Host part; may include a port, e.g. `lemmy-alpha:8540`.
  pub domain: String,
}
|||
|
|||
impl MentionData { |
|||
pub fn is_local(&self) -> bool { |
|||
Settings::get().hostname.eq(&self.domain) |
|||
} |
|||
pub fn full_name(&self) -> String { |
|||
format!("@{}@{}", &self.name, &self.domain) |
|||
} |
|||
} |
|||
|
|||
pub fn scrape_text_for_mentions(text: &str) -> Vec<MentionData> { |
|||
let mut out: Vec<MentionData> = Vec::new(); |
|||
for caps in MENTIONS_REGEX.captures_iter(text) { |
|||
out.push(MentionData { |
|||
name: caps["name"].to_string(), |
|||
domain: caps["domain"].to_string(), |
|||
}); |
|||
} |
|||
out.into_iter().unique().collect() |
|||
} |
|||
|
|||
pub fn is_valid_username(name: &str) -> bool { |
|||
VALID_USERNAME_REGEX.is_match(name) |
|||
} |
|||
|
|||
pub fn is_valid_community_name(name: &str) -> bool { |
|||
VALID_COMMUNITY_NAME_REGEX.is_match(name) |
|||
} |
|||
|
|||
#[cfg(test)]
mod tests {
  use crate::{
    is_email_regex,
    is_valid_community_name,
    is_valid_username,
    remove_slurs,
    scrape_text_for_mentions,
    slur_check,
    slurs_vec_to_str,
  };

  // Mentions must be scraped with their full domain, including a
  // nonstandard port (the old regex broke on ports — see MENTIONS_REGEX).
  #[test]
  fn test_mentions_regex() {
    let text = "Just read a great blog post by [@tedu@honk.teduangst.com](/u/test). And another by !test_community@fish.teduangst.com . Another [@lemmy@lemmy-alpha:8540](/u/fish)";
    let mentions = scrape_text_for_mentions(text);

    assert_eq!(mentions[0].name, "tedu".to_string());
    assert_eq!(mentions[0].domain, "honk.teduangst.com".to_string());
    assert_eq!(mentions[1].domain, "lemmy-alpha:8540".to_string());
  }

  // Basic positive/negative email shape checks.
  #[test]
  fn test_email() {
    assert!(is_email_regex("gush@gmail.com"));
    assert!(!is_email_regex("nada_neutho"));
  }

  // Usernames: 3-20 chars, letters/digits/underscore only.
  #[test]
  fn test_valid_register_username() {
    assert!(is_valid_username("Hello_98"));
    assert!(is_valid_username("ten"));
    assert!(!is_valid_username("Hello-98"));
    assert!(!is_valid_username("a"));
    assert!(!is_valid_username(""));
  }

  // Community names: like usernames but lowercase only.
  #[test]
  fn test_valid_community_name() {
    assert!(is_valid_community_name("example"));
    assert!(is_valid_community_name("example_community"));
    assert!(!is_valid_community_name("Example"));
    assert!(!is_valid_community_name("Ex"));
    assert!(!is_valid_community_name(""));
  }

  // Exercises removal, detection (sorted + de-duplicated result),
  // and the user-facing error string. Matching is case-insensitive.
  #[test]
  fn test_slur_filter() {
    let test =
      "coons test dindu ladyboy tranny retardeds. Capitalized Niggerz. This is a bunch of other safe text.";
    let slur_free = "No slurs here";
    assert_eq!(
      remove_slurs(&test),
      "*removed* test *removed* *removed* *removed* *removed*. Capitalized *removed*. This is a bunch of other safe text."
        .to_string()
    );

    let has_slurs_vec = vec![
      "Niggerz",
      "coons",
      "dindu",
      "ladyboy",
      "retardeds",
      "tranny",
    ];
    let has_slurs_err_str = "No slurs - Niggerz, coons, dindu, ladyboy, retardeds, tranny";

    assert_eq!(slur_check(test), Err(has_slurs_vec));
    assert_eq!(slur_check(slur_free), Ok(()));
    if let Err(slur_vec) = slur_check(test) {
      assert_eq!(&slurs_vec_to_str(slur_vec), has_slurs_err_str);
    }
  }

  // These helped with testing
  // #[test]
  // fn test_send_email() {
  //   let result = send_email("not a subject", "test_email@gmail.com", "ur user", "<h1>HI there</h1>");
  //   assert!(result.is_ok());
  // }
}
|||
|
|||
lazy_static! { |
|||
static ref EMAIL_REGEX: Regex = Regex::new(r"^[a-zA-Z0-9.!#$%&โ*+/=?^_`{|}~-]+@[a-zA-Z0-9-]+(?:\.[a-zA-Z0-9-]+)*$").unwrap(); |
|||
static ref SLUR_REGEX: Regex = RegexBuilder::new(r"(fag(g|got|tard)?|maricos?|cock\s?sucker(s|ing)?|\bn(i|1)g(\b|g?(a|er)?(s|z)?)\b|dindu(s?)|mudslime?s?|kikes?|mongoloids?|towel\s*heads?|\bspi(c|k)s?\b|\bchinks?|niglets?|beaners?|\bnips?\b|\bcoons?\b|jungle\s*bunn(y|ies?)|jigg?aboo?s?|\bpakis?\b|rag\s*heads?|gooks?|cunts?|bitch(es|ing|y)?|puss(y|ies?)|twats?|feminazis?|whor(es?|ing)|\bslut(s|t?y)?|\btr(a|@)nn?(y|ies?)|ladyboy(s?)|\b(b|re|r)tard(ed)?s?)").case_insensitive(true).build().unwrap(); |
|||
static ref USERNAME_MATCHES_REGEX: Regex = Regex::new(r"/u/[a-zA-Z][0-9a-zA-Z_]*").unwrap(); |
|||
// TODO keep this old one, it didn't work with port well tho
|
|||
// static ref MENTIONS_REGEX: Regex = Regex::new(r"@(?P<name>[\w.]+)@(?P<domain>[a-zA-Z0-9._-]+\.[a-zA-Z0-9_-]+)").unwrap();
|
|||
static ref MENTIONS_REGEX: Regex = Regex::new(r"@(?P<name>[\w.]+)@(?P<domain>[a-zA-Z0-9._:-]+)").unwrap(); |
|||
static ref VALID_USERNAME_REGEX: Regex = Regex::new(r"^[a-zA-Z0-9_]{3,20}$").unwrap(); |
|||
static ref VALID_COMMUNITY_NAME_REGEX: Regex = Regex::new(r"^[a-z0-9_]{3,20}$").unwrap(); |
|||
pub static ref WEBFINGER_COMMUNITY_REGEX: Regex = Regex::new(&format!( |
|||
"^group:([a-z0-9_]{{3, 20}})@{}$", |
|||
Settings::get().hostname |
|||
)) |
|||
.unwrap(); |
|||
pub static ref WEBFINGER_USER_REGEX: Regex = Regex::new(&format!( |
|||
"^acct:([a-z0-9_]{{3, 20}})@{}$", |
|||
Settings::get().hostname |
|||
)) |
|||
.unwrap(); |
|||
pub static ref CACHE_CONTROL_REGEX: Regex = |
|||
Regex::new("^((text|image)/.+|application/javascript)$").unwrap(); |
|||
} |
|||
|
|||
/// A PEM-encoded RSA keypair (see `generate_actor_keypair`).
pub struct Keypair {
  // PKCS#8 PEM private key.
  pub private_key: String,
  // PEM public key.
  pub public_key: String,
}
|||
|
|||
/// Generate the asymmetric keypair for ActivityPub HTTP signatures.
|
|||
pub fn generate_actor_keypair() -> Result<Keypair, Error> { |
|||
let rsa = Rsa::generate(2048)?; |
|||
let pkey = PKey::from_rsa(rsa)?; |
|||
let public_key = pkey.public_key_to_pem()?; |
|||
let private_key = pkey.private_key_to_pem_pkcs8()?; |
|||
let key_to_string = |key| match String::from_utf8(key) { |
|||
Ok(s) => Ok(s), |
|||
Err(e) => Err(Error::new( |
|||
ErrorKind::Other, |
|||
format!("Failed converting key to string: {}", e), |
|||
)), |
|||
}; |
|||
Ok(Keypair { |
|||
private_key: key_to_string(private_key)?, |
|||
public_key: key_to_string(public_key)?, |
|||
}) |
|||
} |
|||
|
|||
/// The object kinds that get ActivityPub endpoint URLs (see `make_apub_endpoint`).
pub enum EndpointType {
  Community,
  User,
  Post,
  Comment,
  PrivateMessage,
}
|||
|
|||
pub fn get_apub_protocol_string() -> &'static str { |
|||
if Settings::get().federation.tls_enabled { |
|||
"https" |
|||
} else { |
|||
"http" |
|||
} |
|||
} |
|||
|
|||
/// Generates the ActivityPub ID for a given object type and ID.
|
|||
pub fn make_apub_endpoint(endpoint_type: EndpointType, name: &str) -> Url { |
|||
let point = match endpoint_type { |
|||
EndpointType::Community => "c", |
|||
EndpointType::User => "u", |
|||
EndpointType::Post => "post", |
|||
EndpointType::Comment => "comment", |
|||
EndpointType::PrivateMessage => "private_message", |
|||
}; |
|||
|
|||
Url::parse(&format!( |
|||
"{}://{}/{}/{}", |
|||
get_apub_protocol_string(), |
|||
Settings::get().hostname, |
|||
point, |
|||
name |
|||
)) |
|||
.unwrap() |
|||
} |
@ -0,0 +1,73 @@ |
|||
use diesel::{result::Error, PgConnection}; |
|||
use jsonwebtoken::{decode, encode, DecodingKey, EncodingKey, Header, TokenData, Validation}; |
|||
use lemmy_db::{user::User_, Crud}; |
|||
use lemmy_utils::{is_email_regex, settings::Settings}; |
|||
use serde::{Deserialize, Serialize}; |
|||
|
|||
type Jwt = String; |
|||
|
|||
/// JWT payload for a logged-in user; `jwt()` copies these fields from the
/// user row at token-creation time.
#[derive(Debug, Serialize, Deserialize)]
pub struct Claims {
  // Database id of the user.
  pub id: i32,
  pub username: String,
  // Issuer: the hostname that minted the token.
  pub iss: String,
  pub show_nsfw: bool,
  pub theme: String,
  pub default_sort_type: i16,
  pub default_listing_type: i16,
  pub lang: String,
  pub avatar: Option<String>,
  pub show_avatars: bool,
}
|||
|
|||
impl Claims { |
|||
pub fn decode(jwt: &str) -> Result<TokenData<Claims>, jsonwebtoken::errors::Error> { |
|||
let v = Validation { |
|||
validate_exp: false, |
|||
..Validation::default() |
|||
}; |
|||
decode::<Claims>( |
|||
&jwt, |
|||
&DecodingKey::from_secret(Settings::get().jwt_secret.as_ref()), |
|||
&v, |
|||
) |
|||
} |
|||
|
|||
pub fn jwt(user: User_, hostname: String) -> Jwt { |
|||
let my_claims = Claims { |
|||
id: user.id, |
|||
username: user.name.to_owned(), |
|||
iss: hostname, |
|||
show_nsfw: user.show_nsfw, |
|||
theme: user.theme.to_owned(), |
|||
default_sort_type: user.default_sort_type, |
|||
default_listing_type: user.default_listing_type, |
|||
lang: user.lang.to_owned(), |
|||
avatar: user.avatar.to_owned(), |
|||
show_avatars: user.show_avatars.to_owned(), |
|||
}; |
|||
encode( |
|||
&Header::default(), |
|||
&my_claims, |
|||
&EncodingKey::from_secret(Settings::get().jwt_secret.as_ref()), |
|||
) |
|||
.unwrap() |
|||
} |
|||
|
|||
// TODO: move these into user?
|
|||
pub fn find_by_email_or_username( |
|||
conn: &PgConnection, |
|||
username_or_email: &str, |
|||
) -> Result<User_, Error> { |
|||
if is_email_regex(username_or_email) { |
|||
User_::find_by_email(conn, username_or_email) |
|||
} else { |
|||
User_::find_by_username(conn, username_or_email) |
|||
} |
|||
} |
|||
|
|||
pub fn find_by_jwt(conn: &PgConnection, jwt: &str) -> Result<User_, Error> { |
|||
let claims: Claims = Claims::decode(&jwt).expect("Invalid token").claims; |
|||
User_::read(&conn, claims.id) |
|||
} |
|||
} |