diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 000000000..8ad74f78d --- /dev/null +++ b/.gitattributes @@ -0,0 +1,2 @@ +# Normalize EOL for all files that Git considers text files. +* text=auto eol=lf diff --git a/.woodpecker.yml b/.woodpecker.yml index c2f6505c9..ec3436def 100644 --- a/.woodpecker.yml +++ b/.woodpecker.yml @@ -60,6 +60,9 @@ pipeline: -D clippy::unused_self -A clippy::uninlined_format_args -D clippy::get_first + -D clippy::explicit_into_iter_loop + -D clippy::explicit_iter_loop + -D clippy::needless_collect - cargo clippy --workspace --features console -- -D clippy::unwrap_used -D clippy::indexing_slicing diff --git a/Cargo.lock b/Cargo.lock index 9590e8f13..1fb6746e0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -14,9 +14,9 @@ dependencies = [ [[package]] name = "activitypub_federation" -version = "0.4.4" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "27540f6c4b72c91176610ed5279061a021387f972c7c6f42c41032b78a808267" +checksum = "4ab3ac148d9c0b4163a6d41040c17de7558a42224b9ecbd4e8f033aef6c254d9" dependencies = [ "activitystreams-kinds", "actix-web", @@ -122,10 +122,12 @@ dependencies = [ "ahash 0.8.3", "base64 0.21.2", "bitflags 1.3.2", + "brotli", "bytes", "bytestring", "derive_more", "encoding_rs", + "flate2", "futures-core", "h2", "http", @@ -143,6 +145,7 @@ dependencies = [ "tokio", "tokio-util 0.7.4", "tracing", + "zstd", ] [[package]] @@ -215,7 +218,7 @@ dependencies = [ "futures-util", "mio", "num_cpus", - "socket2", + "socket2 0.4.9", "tokio", "tracing", ] @@ -245,7 +248,7 @@ dependencies = [ "http", "log", "pin-project-lite", - "tokio-rustls", + "tokio-rustls 0.23.4", "tokio-util 0.7.4", "webpki-roots", ] @@ -297,7 +300,7 @@ dependencies = [ "serde_json", "serde_urlencoded", "smallvec", - "socket2", + "socket2 0.4.9", "time 0.3.15", "url", ] @@ -361,6 +364,21 @@ dependencies = [ "memchr", ] +[[package]] +name = "alloc-no-stdlib" +version = "2.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc7bb162ec39d46ab1ca8c77bf72e890535becd1751bb45f64c597edb4c8c6b3" + +[[package]] +name = "alloc-stdlib" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94fb8275041c72129eb51b7d0322c29b8387a0386127718b096429201a5d6ece" +dependencies = [ + "alloc-no-stdlib", +] + [[package]] name = "android-tzdata" version = "0.1.1" @@ -496,7 +514,7 @@ dependencies = [ "percent-encoding", "pin-project-lite", "rand 0.8.5", - "rustls", + "rustls 0.20.7", "serde", "serde_json", "serde_urlencoded", @@ -714,6 +732,27 @@ dependencies = [ "cipher", ] +[[package]] +name = "brotli" +version = "3.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1a0b1dbcc8ae29329621f8d4f0d835787c1c38bb1401979b49d13b0b305ff68" +dependencies = [ + "alloc-no-stdlib", + "alloc-stdlib", + "brotli-decompressor", +] + +[[package]] +name = "brotli-decompressor" +version = "2.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b6561fd3f895a11e8f72af2cb7d22e08366bebc2b6b57f7744c4bda27034744" +dependencies = [ + "alloc-no-stdlib", + "alloc-stdlib", +] + [[package]] name = "bumpalo" version = "3.11.1" @@ -766,6 +805,9 @@ name = "cc" version = "1.0.73" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2fff2a6927b3bb87f9595d67196a70493f627687a71d87a0d692242c33f58c11" +dependencies = [ + "jobserver", +] [[package]] name = "cesu8" @@ -1328,6 +1370,7 @@ dependencies = [ "itoa", "pq-sys", 
"serde_json", + "uuid", ] [[package]] @@ -2262,7 +2305,7 @@ dependencies = [ "httpdate", "itoa", "pin-project-lite", - "socket2", + "socket2 0.4.9", "tokio", "tower-service", "tracing", @@ -2470,6 +2513,15 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130" +[[package]] +name = "jobserver" +version = "0.1.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "936cfd212a0155903bcbc060e316fb6cc7cbf2e1907329391ebadc1fe0ce77c2" +dependencies = [ + "libc", +] + [[package]] name = "js-sys" version = "0.3.60" @@ -2518,7 +2570,7 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" [[package]] name = "lemmy_api" -version = "0.18.0" +version = "0.18.1-rc.4" dependencies = [ "actix-web", "anyhow", @@ -2526,6 +2578,7 @@ dependencies = [ "base64 0.13.1", "bcrypt", "captcha", + "chrono", "lemmy_api_common", "lemmy_db_schema", "lemmy_db_views", @@ -2541,9 +2594,8 @@ dependencies = [ [[package]] name = "lemmy_api_common" -version = "0.18.0" +version = "0.18.1-rc.4" dependencies = [ - "actix-rt", "actix-web", "anyhow", "chrono", @@ -2561,6 +2613,7 @@ dependencies = [ "rosetta-i18n", "serde", "serde_with", + "tokio", "tracing", "ts-rs", "url", @@ -2570,12 +2623,13 @@ dependencies = [ [[package]] name = "lemmy_api_crud" -version = "0.18.0" +version = "0.18.1-rc.4" dependencies = [ "activitypub_federation", "actix-web", "async-trait", "bcrypt", + "chrono", "lemmy_api_common", "lemmy_db_schema", "lemmy_db_views", @@ -2584,15 +2638,15 @@ dependencies = [ "serde", "tracing", "url", + "uuid", "webmention", ] [[package]] name = "lemmy_apub" -version = "0.18.0" +version = "0.18.1-rc.4" dependencies = [ "activitypub_federation", - "actix-rt", "actix-web", "anyhow", "assert-json-diff", @@ -2620,6 +2674,7 @@ dependencies = [ "sha2", "strum_macros", "task-local-extensions", + "tokio", "tracing", "url", "uuid", @@ -2627,7 +2682,7 @@ dependencies = [ [[package]] name = "lemmy_db_schema" -version = "0.18.0" +version = "0.18.1-rc.4" dependencies = [ "activitypub_federation", "async-trait", @@ -2640,9 +2695,11 @@ dependencies = [ "diesel-derive-newtype", "diesel_ltree", "diesel_migrations", + "futures-util", "lemmy_utils", "once_cell", "regex", + "rustls 0.21.2", "serde", "serde_json", "serde_with", @@ -2651,15 +2708,18 @@ dependencies = [ "strum", "strum_macros", "tokio", + "tokio-postgres", + "tokio-postgres-rustls", "tracing", "ts-rs", "typed-builder", "url", + "uuid", ] [[package]] name = "lemmy_db_views" -version = "0.18.0" +version = "0.18.1-rc.4" dependencies = [ "diesel", "diesel-async", @@ -2676,7 +2736,7 @@ dependencies = [ [[package]] name = "lemmy_db_views_actor" -version = "0.18.0" +version = "0.18.1-rc.4" dependencies = [ "diesel", "diesel-async", @@ -2689,7 +2749,7 @@ dependencies = [ [[package]] name = "lemmy_db_views_moderator" -version = "0.18.0" +version = "0.18.1-rc.4" dependencies = [ "diesel", "diesel-async", @@ -2701,7 +2761,7 @@ dependencies = [ [[package]] name = "lemmy_routes" -version = "0.18.0" +version = "0.18.1-rc.4" dependencies = [ "activitypub_federation", "actix-web", @@ -2726,16 +2786,18 @@ dependencies = [ [[package]] name = "lemmy_server" -version = "0.18.0" +version = "0.18.1-rc.4" dependencies = [ "activitypub_federation", "actix-cors", "actix-web", + "chrono", "clokwerk", "console-subscriber", "diesel", "diesel-async", "doku", + "futures-util", "lemmy_api", "lemmy_api_common", "lemmy_api_crud", @@ -2749,9 +2811,12 @@ 
dependencies = [ "reqwest", "reqwest-middleware", "reqwest-tracing", + "rustls 0.21.2", "serde", "serde_json", "tokio", + "tokio-postgres", + "tokio-postgres-rustls", "tracing", "tracing-actix-web 0.6.2", "tracing-error", @@ -2763,7 +2828,7 @@ dependencies = [ [[package]] name = "lemmy_utils" -version = "0.18.0" +version = "0.18.1-rc.4" dependencies = [ "actix-web", "anyhow", @@ -2820,7 +2885,7 @@ dependencies = [ "nom 7.1.1", "once_cell", "quoted_printable", - "socket2", + "socket2 0.4.9", ] [[package]] @@ -3932,11 +3997,11 @@ dependencies = [ [[package]] name = "postgres-protocol" -version = "0.6.4" +version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "878c6cbf956e03af9aa8204b407b9cbf47c072164800aa918c516cd4b056c50c" +checksum = "78b7fa9f396f51dffd61546fd8573ee20592287996568e6175ceb0f8699ad75d" dependencies = [ - "base64 0.13.1", + "base64 0.21.2", "byteorder", "bytes", "fallible-iterator", @@ -4495,6 +4560,28 @@ dependencies = [ "webpki", ] +[[package]] +name = "rustls" +version = "0.21.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e32ca28af694bc1bbf399c33a516dbdf1c90090b8ab23c2bc24f834aa2247f5f" +dependencies = [ + "log", + "ring", + "rustls-webpki", + "sct", +] + +[[package]] +name = "rustls-webpki" +version = "0.100.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6207cd5ed3d8dca7816f8f3725513a34609c0c765bf652b8c3cb4cfd87db46b" +dependencies = [ + "ring", + "untrusted", +] + [[package]] name = "rustversion" version = "1.0.9" @@ -4859,6 +4946,16 @@ dependencies = [ "winapi", ] +[[package]] +name = "socket2" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2538b18701741680e0322a2302176d3253a35388e2e62f172f64f4f16605f877" +dependencies = [ + "libc", + "windows-sys 0.48.0", +] + [[package]] name = "spin" version = "0.5.2" @@ -5304,7 +5401,7 @@ dependencies = [ "parking_lot 0.12.1", "pin-project-lite", "signal-hook-registry", - "socket2", + "socket2 0.4.9", "tokio-macros", "tracing", "windows-sys 0.48.0", @@ -5343,9 +5440,9 @@ dependencies = [ [[package]] name = "tokio-postgres" -version = "0.7.7" +version = "0.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29a12c1b3e0704ae7dfc25562629798b29c72e6b1d0a681b6f29ab4ae5e7f7bf" +checksum = "6e89f6234aa8fd43779746012fcf53603cdb91fdd8399aa0de868c2d56b6dde1" dependencies = [ "async-trait", "byteorder", @@ -5360,22 +5457,46 @@ dependencies = [ "pin-project-lite", "postgres-protocol", "postgres-types", - "socket2", + "socket2 0.5.3", "tokio", "tokio-util 0.7.4", ] +[[package]] +name = "tokio-postgres-rustls" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd5831152cb0d3f79ef5523b357319ba154795d64c7078b2daa95a803b54057f" +dependencies = [ + "futures", + "ring", + "rustls 0.21.2", + "tokio", + "tokio-postgres", + "tokio-rustls 0.24.1", +] + [[package]] name = "tokio-rustls" version = "0.23.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59" dependencies = [ - "rustls", + "rustls 0.20.7", "tokio", "webpki", ] +[[package]] +name = "tokio-rustls" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" +dependencies = [ + "rustls 0.21.2", + "tokio", +] + [[package]] name = "tokio-stream" version = "0.1.11" @@ 
-6460,3 +6581,33 @@ name = "zeroize" version = "1.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c394b5bd0c6f669e7275d9c20aa90ae064cb22e75a1cad54e1b34088034b149f" + +[[package]] +name = "zstd" +version = "0.12.3+zstd.1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76eea132fb024e0e13fd9c2f5d5d595d8a967aa72382ac2f9d39fcc95afd0806" +dependencies = [ + "zstd-safe", +] + +[[package]] +name = "zstd-safe" +version = "6.0.5+zstd.1.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d56d9e60b4b1758206c238a10165fbcae3ca37b01744e394c463463f6529d23b" +dependencies = [ + "libc", + "zstd-sys", +] + +[[package]] +name = "zstd-sys" +version = "2.0.8+zstd.1.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5556e6ee25d32df2586c098bbfa278803692a20d0ab9565e049480d52707ec8c" +dependencies = [ + "cc", + "libc", + "pkg-config", +] diff --git a/Cargo.toml b/Cargo.toml index 430deb082..6fbd91d1b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,5 +1,5 @@ [workspace.package] -version = "0.18.0" +version = "0.18.1-rc.4" edition = "2021" description = "A link aggregator for the fediverse" license = "AGPL-3.0" @@ -49,23 +49,23 @@ members = [ ] [workspace.dependencies] -lemmy_api = { version = "=0.18.0", path = "./crates/api" } -lemmy_api_crud = { version = "=0.18.0", path = "./crates/api_crud" } -lemmy_apub = { version = "=0.18.0", path = "./crates/apub" } -lemmy_utils = { version = "=0.18.0", path = "./crates/utils" } -lemmy_db_schema = { version = "=0.18.0", path = "./crates/db_schema" } -lemmy_api_common = { version = "=0.18.0", path = "./crates/api_common" } -lemmy_routes = { version = "=0.18.0", path = "./crates/routes" } -lemmy_db_views = { version = "=0.18.0", path = "./crates/db_views" } -lemmy_db_views_actor = { version = "=0.18.0", path = "./crates/db_views_actor" } -lemmy_db_views_moderator = { version = "=0.18.0", path = "./crates/db_views_moderator" } +lemmy_api = { version = "=0.18.1-rc.4", path = "./crates/api" } +lemmy_api_crud = { version = "=0.18.1-rc.4", path = "./crates/api_crud" } +lemmy_apub = { version = "=0.18.1-rc.4", path = "./crates/apub" } +lemmy_utils = { version = "=0.18.1-rc.4", path = "./crates/utils" } +lemmy_db_schema = { version = "=0.18.1-rc.4", path = "./crates/db_schema" } +lemmy_api_common = { version = "=0.18.1-rc.4", path = "./crates/api_common" } +lemmy_routes = { version = "=0.18.1-rc.4", path = "./crates/routes" } +lemmy_db_views = { version = "=0.18.1-rc.4", path = "./crates/db_views" } +lemmy_db_views_actor = { version = "=0.18.1-rc.4", path = "./crates/db_views_actor" } +lemmy_db_views_moderator = { version = "=0.18.1-rc.4", path = "./crates/db_views_moderator" } activitypub_federation = { version = "0.4.4", default-features = false, features = ["actix-web"] } diesel = "2.1.0" diesel_migrations = "2.1.0" diesel-async = "0.3.1" serde = { version = "1.0.164", features = ["derive"] } serde_with = "1.14.0" -actix-web = { version = "4.3.1", default-features = false, features = ["macros", "rustls"] } +actix-web = { version = "4.3.1", default-features = false, features = ["macros", "rustls", "compress-brotli", "compress-gzip", "compress-zstd"] } tracing = "0.1.37" tracing-actix-web = { version = "0.6.2", default-features = false } tracing-error = "0.2.0" @@ -89,7 +89,7 @@ anyhow = "1.0.71" diesel_ltree = "0.3.0" typed-builder = "0.10.0" serial_test = "0.9.0" -tokio = "1.28.2" +tokio = { version = "1.28.2", features = ["full"] } sha2 = "0.10.6" 
regex = "1.8.4" once_cell = "1.18.0" @@ -100,13 +100,16 @@ strum_macros = "0.24.3" itertools = "0.10.5" futures = "0.3.28" http = "0.2.9" -actix-rt = { version = "2.8.0", default-features = false } percent-encoding = "2.3.0" rosetta-i18n = "0.1.2" rand = "0.8.5" opentelemetry = { version = "0.17.0", features = ["rt-tokio"] } tracing-opentelemetry = { version = "0.17.4" } ts-rs = { version = "6.2", features = ["serde-compat", "format", "chrono-impl"] } +rustls = { version ="0.21.2", features = ["dangerous_configuration"]} +futures-util = "0.3.28" +tokio-postgres = "0.7.8" +tokio-postgres-rustls = "0.10.0" [dependencies] lemmy_api = { workspace = true } @@ -140,3 +143,8 @@ opentelemetry-otlp = { version = "0.10.0", optional = true } pict-rs = { version = "0.4.0-rc.3", optional = true } tokio.workspace = true actix-cors = "0.6.4" +rustls = { workspace = true } +futures-util = { workspace = true } +tokio-postgres = { workspace = true } +tokio-postgres-rustls = { workspace = true } +chrono = { workspace = true } \ No newline at end of file diff --git a/README.md b/README.md index f759c1cde..240bde516 100644 --- a/README.md +++ b/README.md @@ -123,6 +123,7 @@ Each Lemmy server can set its own moderation policy; appointing site-wide admins - [Jerboa - A native Android app made by Lemmy's developers](https://github.com/dessalines/jerboa) - [Mlem - A Lemmy client for iOS](https://github.com/buresdv/Mlem) - [Lemoa - A Gtk client for Lemmy on Linux](https://github.com/lemmy-gtk/lemoa) +- [Liftoff - A Lemmy for Windows , Linux and Android ](https://github.com/liftoff-app/liftoff) ### Libraries diff --git a/config/defaults.hjson b/config/defaults.hjson index 4c38ddd45..6032f8fc9 100644 --- a/config/defaults.hjson +++ b/config/defaults.hjson @@ -76,4 +76,8 @@ port: 8536 # Whether the site is available over TLS. Needs to be true for federation to work. 
tls_enabled: true + # The number of activitypub federation workers that can be in-flight concurrently + worker_count: 0 + # The number of activitypub federation retry workers that can be in-flight concurrently + retry_count: 0 } diff --git a/crates/api/Cargo.toml b/crates/api/Cargo.toml index 2488f2c2c..ca792809b 100644 --- a/crates/api/Cargo.toml +++ b/crates/api/Cargo.toml @@ -29,6 +29,7 @@ async-trait = { workspace = true } captcha = { workspace = true } anyhow = { workspace = true } tracing = { workspace = true } +chrono = { workspace = true } [dev-dependencies] serial_test = { workspace = true } diff --git a/crates/api/src/community/ban.rs b/crates/api/src/community/ban.rs index d1666d48d..93a588641 100644 --- a/crates/api/src/community/ban.rs +++ b/crates/api/src/community/ban.rs @@ -47,7 +47,7 @@ impl Perform for BanFromCommunity { community_id, ) .await?; - is_valid_body_field(&data.reason)?; + is_valid_body_field(&data.reason, false)?; let community_user_ban_form = CommunityPersonBanForm { community_id: data.community_id, diff --git a/crates/api/src/lib.rs b/crates/api/src/lib.rs index bd78a46e1..632e2c3bd 100644 --- a/crates/api/src/lib.rs +++ b/crates/api/src/lib.rs @@ -1,4 +1,5 @@ use actix_web::web::Data; +use captcha::Captcha; use lemmy_api_common::{context::LemmyContext, utils::local_site_to_slur_regex}; use lemmy_db_schema::source::local_site::LocalSite; use lemmy_utils::{error::LemmyError, utils::slurs::check_slurs}; @@ -20,6 +21,21 @@ pub trait Perform { async fn perform(&self, context: &Data<LemmyContext>) -> Result<Self::Response, LemmyError>; } +/// Converts the captcha to a base64 encoded wav audio file +pub(crate) fn captcha_as_wav_base64(captcha: &Captcha) -> String { + let letters = captcha.as_wav(); + + let mut concat_letters: Vec<u8> = Vec::new(); + + for letter in letters { + let bytes = letter.unwrap_or_default(); + concat_letters.extend(bytes); + } + + // Convert to base64 + base64::encode(concat_letters) +} + /// Check size of report and remove whitespace pub(crate) fn check_report_reason(reason: &str, local_site: &LocalSite) -> Result<(), LemmyError> { let slur_regex = &local_site_to_slur_regex(local_site); diff --git a/crates/api/src/local_user/ban_person.rs b/crates/api/src/local_user/ban_person.rs index d68aeed23..50e3ca550 100644 --- a/crates/api/src/local_user/ban_person.rs +++ b/crates/api/src/local_user/ban_person.rs @@ -30,7 +30,7 @@ impl Perform for BanPerson { // Make sure user is an admin is_admin(&local_user_view)?; - is_valid_body_field(&data.reason)?; + is_valid_body_field(&data.reason, false)?; let ban = data.ban; let banned_person_id = data.person_id; diff --git a/crates/api/src/local_user/get_captcha.rs b/crates/api/src/local_user/get_captcha.rs new file mode 100644 index 000000000..133044248 --- /dev/null +++ b/crates/api/src/local_user/get_captcha.rs @@ -0,0 +1,50 @@ +use crate::{captcha_as_wav_base64, Perform}; +use actix_web::web::Data; +use captcha::{gen, Difficulty}; +use lemmy_api_common::{ + context::LemmyContext, + person::{CaptchaResponse, GetCaptcha, GetCaptchaResponse}, +}; +use lemmy_db_schema::source::{ + captcha_answer::{CaptchaAnswer, CaptchaAnswerForm}, + local_site::LocalSite, +}; +use lemmy_utils::error::LemmyError; + +#[async_trait::async_trait(?Send)] +impl Perform for GetCaptcha { + type Response = GetCaptchaResponse; + + #[tracing::instrument(skip(context))] + async fn perform(&self, context: &Data<LemmyContext>) -> Result<Self::Response, LemmyError> { + let local_site = LocalSite::read(context.pool()).await?; + + if !local_site.captcha_enabled { + return Ok(GetCaptchaResponse { ok: None }); + } + + let 
captcha = gen(match local_site.captcha_difficulty.as_str() { + "easy" => Difficulty::Easy, + "hard" => Difficulty::Hard, + _ => Difficulty::Medium, + }); + + let answer = captcha.chars_as_string(); + + let png = captcha.as_base64().expect("failed to generate captcha"); + + let wav = captcha_as_wav_base64(&captcha); + + let captcha_form: CaptchaAnswerForm = CaptchaAnswerForm { answer }; + // Stores the captcha item in the db + let captcha = CaptchaAnswer::insert(context.pool(), &captcha_form).await?; + + Ok(GetCaptchaResponse { + ok: Some(CaptchaResponse { + png, + wav, + uuid: captcha.uuid.to_string(), + }), + }) + } +} diff --git a/crates/api/src/local_user/mod.rs b/crates/api/src/local_user/mod.rs index 9244f825d..3a92beda5 100644 --- a/crates/api/src/local_user/mod.rs +++ b/crates/api/src/local_user/mod.rs @@ -3,6 +3,7 @@ mod ban_person; mod block; mod change_password; mod change_password_after_reset; +mod get_captcha; mod list_banned; mod login; mod notifications; diff --git a/crates/api/src/local_user/reset_password.rs b/crates/api/src/local_user/reset_password.rs index cdc1f3975..cf65f94eb 100644 --- a/crates/api/src/local_user/reset_password.rs +++ b/crates/api/src/local_user/reset_password.rs @@ -5,6 +5,7 @@ use lemmy_api_common::{ person::{PasswordReset, PasswordResetResponse}, utils::send_password_reset_email, }; +use lemmy_db_schema::source::password_reset_request::PasswordResetRequest; use lemmy_db_views::structs::LocalUserView; use lemmy_utils::error::LemmyError; @@ -25,6 +26,16 @@ impl Perform for PasswordReset { .await .map_err(|e| LemmyError::from_error_message(e, "couldnt_find_that_username_or_email"))?; + // Check for too many attempts (to limit potential abuse) + let recent_resets_count = PasswordResetRequest::get_recent_password_resets_count( + context.pool(), + local_user_view.local_user.id, + ) + .await?; + if recent_resets_count >= 3 { + return Err(LemmyError::from_message("password_reset_limit_reached")); + } + // Email the pure token to the user. 
send_password_reset_email( &local_user_view, diff --git a/crates/api/src/site/purge/comment.rs b/crates/api/src/site/purge/comment.rs index c5c3285a3..444f80f20 100644 --- a/crates/api/src/site/purge/comment.rs +++ b/crates/api/src/site/purge/comment.rs @@ -3,7 +3,7 @@ use actix_web::web::Data; use lemmy_api_common::{ context::LemmyContext, site::{PurgeComment, PurgeItemResponse}, - utils::{is_top_admin, local_user_view_from_jwt}, + utils::{is_admin, local_user_view_from_jwt}, }; use lemmy_db_schema::{ source::{ @@ -23,8 +23,8 @@ impl Perform for PurgeComment { let data: &Self = self; let local_user_view = local_user_view_from_jwt(&data.auth, context).await?; - // Only let the top admin purge an item - is_top_admin(&mut *context.conn().await?, local_user_view.person.id).await?; + // Only let admin purge an item + is_admin(&local_user_view)?; let comment_id = data.comment_id; diff --git a/crates/api/src/site/purge/community.rs b/crates/api/src/site/purge/community.rs index 0f05bd3c6..6e81b59e5 100644 --- a/crates/api/src/site/purge/community.rs +++ b/crates/api/src/site/purge/community.rs @@ -4,7 +4,7 @@ use lemmy_api_common::{ context::LemmyContext, request::purge_image_from_pictrs, site::{PurgeCommunity, PurgeItemResponse}, - utils::{is_top_admin, local_user_view_from_jwt, purge_image_posts_for_community}, + utils::{is_admin, local_user_view_from_jwt, purge_image_posts_for_community}, }; use lemmy_db_schema::{ source::{ @@ -24,8 +24,8 @@ impl Perform for PurgeCommunity { let data: &Self = self; let local_user_view = local_user_view_from_jwt(&data.auth, context).await?; - // Only let the top admin purge an item - is_top_admin(&mut *context.conn().await?, local_user_view.person.id).await?; + // Only let admin purge an item + is_admin(&local_user_view)?; let community_id = data.community_id; diff --git a/crates/api/src/site/purge/person.rs b/crates/api/src/site/purge/person.rs index 06921b070..923052a90 100644 --- a/crates/api/src/site/purge/person.rs +++ b/crates/api/src/site/purge/person.rs @@ -4,7 +4,7 @@ use lemmy_api_common::{ context::LemmyContext, request::purge_image_from_pictrs, site::{PurgeItemResponse, PurgePerson}, - utils::{is_top_admin, local_user_view_from_jwt, purge_image_posts_for_person}, + utils::{is_admin, local_user_view_from_jwt, purge_image_posts_for_person}, }; use lemmy_db_schema::{ source::{ @@ -24,8 +24,8 @@ impl Perform for PurgePerson { let data: &Self = self; let local_user_view = local_user_view_from_jwt(&data.auth, context).await?; - // Only let the top admin purge an item - is_top_admin(&mut *context.conn().await?, local_user_view.person.id).await?; + // Only let admin purge an item + is_admin(&local_user_view)?; // Read the person to get their images let person_id = data.person_id; diff --git a/crates/api/src/site/purge/post.rs b/crates/api/src/site/purge/post.rs index ee3036bae..7661a9c24 100644 --- a/crates/api/src/site/purge/post.rs +++ b/crates/api/src/site/purge/post.rs @@ -4,7 +4,7 @@ use lemmy_api_common::{ context::LemmyContext, request::purge_image_from_pictrs, site::{PurgeItemResponse, PurgePost}, - utils::{is_top_admin, local_user_view_from_jwt}, + utils::{is_admin, local_user_view_from_jwt}, }; use lemmy_db_schema::{ source::{ @@ -24,8 +24,8 @@ impl Perform for PurgePost { let data: &Self = self; let local_user_view = local_user_view_from_jwt(&data.auth, context).await?; - // Only let the top admin purge an item - is_top_admin(&mut *context.conn().await?, local_user_view.person.id).await?; + // Only let admin purge an item + 
is_admin(&local_user_view)?; let post_id = data.post_id; diff --git a/crates/api_common/Cargo.toml b/crates/api_common/Cargo.toml index 46045d805..a9b2bf19b 100644 --- a/crates/api_common/Cargo.toml +++ b/crates/api_common/Cargo.toml @@ -33,12 +33,12 @@ reqwest-middleware = { workspace = true, optional = true } regex = { workspace = true } rosetta-i18n = { workspace = true, optional = true } percent-encoding = { workspace = true, optional = true } -webpage = { version = "1.6.0", default-features = false, features = ["serde"], optional = true } +webpage = { version = "1.6", default-features = false, features = ["serde"], optional = true } encoding = { version = "0.2.33", optional = true } anyhow = { workspace = true } futures = { workspace = true } uuid = { workspace = true } -actix-rt = { workspace = true } +tokio = { workspace = true } reqwest = { workspace = true } ts-rs = { workspace = true, optional = true } actix-web = { workspace = true } diff --git a/crates/api_common/README.md b/crates/api_common/README.md index bf916bf0c..f6a16c53c 100644 --- a/crates/api_common/README.md +++ b/crates/api_common/README.md @@ -24,3 +24,10 @@ As you can see, each API endpoint needs a parameter type ( GetPosts), path (/pos For a real example of a Lemmy API client, look at [lemmyBB](https://github.com/LemmyNet/lemmyBB/tree/main/src/api). Lemmy also provides a websocket API. You can find the full websocket code in [this file](https://github.com/LemmyNet/lemmy/blob/main/src/api_routes_websocket.rs). + +## Generate TypeScript bindings + +TypeScript bindings (API types) can be generated by running `cargo test --features full`. +The ts files will be generated into a `bindings` folder. + +This crate uses the [`ts_rs`](https://docs.rs/ts-rs/6.2.1/ts_rs/#traits) macros `derive(TS)` and `ts(export)` to annotate the types used for binding generation.
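A minimal sketch of the `ts_rs` annotations the README section above refers to; the struct and field names here are illustrative only, not types defined in this crate:

```rust
use serde::{Deserialize, Serialize};
use ts_rs::TS;

// Deriving `TS` and marking the type with `#[ts(export)]` is what makes
// `cargo test --features full` emit a matching TypeScript interface
// (e.g. `bindings/ListExample.ts`) into the `bindings` folder.
#[derive(Serialize, Deserialize, TS)]
#[ts(export)]
pub struct ListExample {
  pub page: Option<i64>,
  pub limit: Option<i64>,
  pub auth: Option<String>,
}
```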
diff --git a/crates/api_common/src/build_response.rs b/crates/api_common/src/build_response.rs index 0eebe98cb..61294c53b 100644 --- a/crates/api_common/src/build_response.rs +++ b/crates/api_common/src/build_response.rs @@ -103,7 +103,6 @@ pub async fn send_local_notifs( for mention in mentions .iter() .filter(|m| m.is_local(&context.settings().hostname) && m.name.ne(&person.name)) - .collect::>() { let mention_name = mention.name.clone(); let user_view = LocalUserView::read_from_name(&mut *context.conn().await?, &mention_name).await; diff --git a/crates/api_common/src/community.rs b/crates/api_common/src/community.rs index cb92d4c2e..ff6ed1271 100644 --- a/crates/api_common/src/community.rs +++ b/crates/api_common/src/community.rs @@ -76,6 +76,7 @@ pub struct CommunityResponse { pub struct ListCommunities { pub type_: Option, pub sort: Option, + pub show_nsfw: Option, pub page: Option, pub limit: Option, pub auth: Option>, diff --git a/crates/api_common/src/request.rs b/crates/api_common/src/request.rs index c6f71b868..9f7f9db59 100644 --- a/crates/api_common/src/request.rs +++ b/crates/api_common/src/request.rs @@ -27,12 +27,12 @@ pub async fn fetch_site_metadata( // https://github.com/LemmyNet/lemmy/issues/1964 let html_bytes = response.bytes().await.map_err(LemmyError::from)?.to_vec(); - let tags = html_to_site_metadata(&html_bytes)?; + let tags = html_to_site_metadata(&html_bytes, url)?; Ok(tags) } -fn html_to_site_metadata(html_bytes: &[u8]) -> Result { +fn html_to_site_metadata(html_bytes: &[u8], url: &Url) -> Result { let html = String::from_utf8_lossy(html_bytes); // Make sure the first line is doctype html @@ -81,12 +81,14 @@ fn html_to_site_metadata(html_bytes: &[u8]) -> Result .opengraph .images .first() - .and_then(|ogo| Url::parse(&ogo.url).ok()); + // join also works if the target URL is absolute + .and_then(|ogo| url.join(&ogo.url).ok()); let og_embed_url = page .opengraph .videos .first() - .and_then(|v| Url::parse(&v.url).ok()); + // join also works if the target URL is absolute + .and_then(|v| url.join(&v.url).ok()); Ok(SiteMetadata { title: og_title.or(page_title), @@ -266,12 +268,17 @@ pub fn build_user_agent(settings: &Settings) -> String { #[cfg(test)] mod tests { - use crate::request::{build_user_agent, fetch_site_metadata, SiteMetadata}; + use crate::request::{ + build_user_agent, + fetch_site_metadata, + html_to_site_metadata, + SiteMetadata, + }; use lemmy_utils::settings::SETTINGS; use url::Url; // These helped with testing - #[actix_rt::test] + #[tokio::test] async fn test_site_metadata() { let settings = &SETTINGS.clone(); let client = reqwest::Client::builder() @@ -305,4 +312,46 @@ mod tests { // let res_other = fetch_pictshare("https://upload.wikimedia.org/wikipedia/en/2/27/The_Mandalorian_logo.jpgaoeu"); // assert!(res_other.is_err()); // } + + #[test] + fn test_resolve_image_url() { + // url that lists the opengraph fields + let url = Url::parse("https://example.com/one/two.html").unwrap(); + + // root relative url + let html_bytes = b""; + let metadata = html_to_site_metadata(html_bytes, &url).expect("Unable to parse metadata"); + assert_eq!( + metadata.image, + Some(Url::parse("https://example.com/image.jpg").unwrap().into()) + ); + + // base relative url + let html_bytes = b""; + let metadata = html_to_site_metadata(html_bytes, &url).expect("Unable to parse metadata"); + assert_eq!( + metadata.image, + Some( + Url::parse("https://example.com/one/image.jpg") + .unwrap() + .into() + ) + ); + + // absolute url + let html_bytes = b""; + let metadata = 
html_to_site_metadata(html_bytes, &url).expect("Unable to parse metadata"); + assert_eq!( + metadata.image, + Some(Url::parse("https://cdn.host.com/image.jpg").unwrap().into()) + ); + + // protocol relative url + let html_bytes = b""; + let metadata = html_to_site_metadata(html_bytes, &url).expect("Unable to parse metadata"); + assert_eq!( + metadata.image, + Some(Url::parse("https://example.com/image.jpg").unwrap().into()) + ); + } } diff --git a/crates/api_common/src/site.rs b/crates/api_common/src/site.rs index 4d488ec1b..865acc0dc 100644 --- a/crates/api_common/src/site.rs +++ b/crates/api_common/src/site.rs @@ -177,7 +177,6 @@ pub struct CreateSite { pub rate_limit_search_per_second: Option, pub federation_enabled: Option, pub federation_debug: Option, - pub federation_worker_count: Option, pub captcha_enabled: Option, pub captcha_difficulty: Option, pub allowed_instances: Option>, @@ -250,8 +249,6 @@ pub struct EditSite { pub federation_enabled: Option, /// Enables federation debugging. pub federation_debug: Option, - /// The number of federation workers. - pub federation_worker_count: Option, /// Whether to enable captchas for signups. pub captcha_enabled: Option, /// The captcha difficulty. Can be easy, medium, or hard diff --git a/crates/api_common/src/utils.rs b/crates/api_common/src/utils.rs index cba0fb297..f5ffe7d0c 100644 --- a/crates/api_common/src/utils.rs +++ b/crates/api_common/src/utils.rs @@ -32,7 +32,6 @@ use lemmy_db_views_actor::structs::{ CommunityModeratorView, CommunityPersonBanView, CommunityView, - PersonView, }; use lemmy_utils::{ claims::Claims, @@ -79,18 +78,6 @@ pub async fn is_mod_or_admin_opt( } } -pub async fn is_top_admin(conn: &mut DbConn, person_id: PersonId) -> Result<(), LemmyError> { - let admins = PersonView::admins(conn).await?; - let top_admin = admins - .first() - .ok_or_else(|| LemmyError::from_message("no admins"))?; - - if top_admin.person.id != person_id { - return Err(LemmyError::from_message("not_top_admin")); - } - Ok(()) -} - pub fn is_admin(local_user_view: &LocalUserView) -> Result<(), LemmyError> { if !local_user_view.person.admin { return Err(LemmyError::from_message("not_an_admin")); @@ -316,15 +303,6 @@ pub fn password_length_check(pass: &str) -> Result<(), LemmyError> { } } -/// Checks the site description length -pub fn site_description_length_check(description: &str) -> Result<(), LemmyError> { - if description.len() > 150 { - Err(LemmyError::from_message("site_description_length_overflow")) - } else { - Ok(()) - } -} - /// Checks for a honeypot. 
If this field is filled, fail the rest of the function pub fn honeypot_check(honeypot: &Option) -> Result<(), LemmyError> { if honeypot.is_some() && honeypot != &Some(String::new()) { diff --git a/crates/api_crud/Cargo.toml b/crates/api_crud/Cargo.toml index 1fb1e5a66..a4dfb4add 100644 --- a/crates/api_crud/Cargo.toml +++ b/crates/api_crud/Cargo.toml @@ -22,3 +22,5 @@ tracing = { workspace = true } url = { workspace = true } async-trait = { workspace = true } webmention = "0.4.0" +chrono = { workspace = true } +uuid = { workspace = true } \ No newline at end of file diff --git a/crates/api_crud/src/comment/create.rs b/crates/api_crud/src/comment/create.rs index 95c85a6da..79ed5be2b 100644 --- a/crates/api_crud/src/comment/create.rs +++ b/crates/api_crud/src/comment/create.rs @@ -49,7 +49,7 @@ impl PerformCrud for CreateComment { &data.content.clone(), &local_site_to_slur_regex(&local_site), ); - is_valid_body_field(&Some(content_slurs_removed.clone()))?; + is_valid_body_field(&Some(content_slurs_removed.clone()), false)?; // Check for a community ban let post_id = data.post_id; @@ -207,7 +207,7 @@ impl PerformCrud for CreateComment { pub fn check_comment_depth(comment: &Comment) -> Result<(), LemmyError> { let path = &comment.path.0; - let length = path.split('.').collect::>().len(); + let length = path.split('.').count(); if length > MAX_COMMENT_DEPTH_LIMIT { Err(LemmyError::from_message("max_comment_depth_reached")) } else { diff --git a/crates/api_crud/src/comment/update.rs b/crates/api_crud/src/comment/update.rs index e4cc5f4f1..d0baa8b36 100644 --- a/crates/api_crud/src/comment/update.rs +++ b/crates/api_crud/src/comment/update.rs @@ -64,7 +64,7 @@ impl PerformCrud for EditComment { .as_ref() .map(|c| remove_slurs(c, &local_site_to_slur_regex(&local_site))); - is_valid_body_field(&content_slurs_removed)?; + is_valid_body_field(&content_slurs_removed, false)?; let comment_id = data.comment_id; let form = CommentUpdateForm::builder() diff --git a/crates/api_crud/src/community/create.rs b/crates/api_crud/src/community/create.rs index c32cfd80c..ed441c8fe 100644 --- a/crates/api_crud/src/community/create.rs +++ b/crates/api_crud/src/community/create.rs @@ -67,7 +67,7 @@ impl PerformCrud for CreateCommunity { check_slurs_opt(&data.description, &slur_regex)?; is_valid_actor_name(&data.name, local_site.actor_name_max_length as usize)?; - is_valid_body_field(&data.description)?; + is_valid_body_field(&data.description, false)?; // Double check for duplicate community actor_ids let community_actor_id = generate_local_apub_endpoint( diff --git a/crates/api_crud/src/community/list.rs b/crates/api_crud/src/community/list.rs index b7ec5e4b3..6ab31fd0c 100644 --- a/crates/api_crud/src/community/list.rs +++ b/crates/api_crud/src/community/list.rs @@ -27,6 +27,7 @@ impl PerformCrud for ListCommunities { let sort = data.sort; let listing_type = data.type_; + let show_nsfw = data.show_nsfw; let page = data.page; let limit = data.limit; let local_user = local_user_view.map(|l| l.local_user); @@ -34,6 +35,7 @@ impl PerformCrud for ListCommunities { let communities = CommunityQuery::builder() .conn(&mut conn) .listing_type(listing_type) + .show_nsfw(show_nsfw) .sort(sort) .local_user(local_user.as_ref()) .page(page) diff --git a/crates/api_crud/src/community/update.rs b/crates/api_crud/src/community/update.rs index 06665ed63..9ffe082f1 100644 --- a/crates/api_crud/src/community/update.rs +++ b/crates/api_crud/src/community/update.rs @@ -39,7 +39,7 @@ impl PerformCrud for EditCommunity { let slur_regex = 
local_site_to_slur_regex(&local_site); check_slurs_opt(&data.title, &slur_regex)?; check_slurs_opt(&data.description, &slur_regex)?; - is_valid_body_field(&data.description)?; + is_valid_body_field(&data.description, false)?; // Verify its a mod (only mods can edit it) let community_id = data.community_id; diff --git a/crates/api_crud/src/post/create.rs b/crates/api_crud/src/post/create.rs index 28cad92be..b1eaf0925 100644 --- a/crates/api_crud/src/post/create.rs +++ b/crates/api_crud/src/post/create.rs @@ -57,7 +57,7 @@ impl PerformCrud for CreatePost { let url = data_url.map(clean_url_params).map(Into::into); // TODO no good way to handle a "clear" is_valid_post_title(&data.name)?; - is_valid_body_field(&data.body)?; + is_valid_body_field(&data.body, true)?; check_community_ban( local_user_view.person.id, diff --git a/crates/api_crud/src/post/update.rs b/crates/api_crud/src/post/update.rs index dae70f8b7..839235271 100644 --- a/crates/api_crud/src/post/update.rs +++ b/crates/api_crud/src/post/update.rs @@ -49,7 +49,7 @@ impl PerformCrud for EditPost { is_valid_post_title(name)?; } - is_valid_body_field(&data.body)?; + is_valid_body_field(&data.body, true)?; let post_id = data.post_id; let orig_post = Post::read(&mut *context.conn().await?, post_id).await?; diff --git a/crates/api_crud/src/private_message/create.rs b/crates/api_crud/src/private_message/create.rs index 77cc34a7a..a8580906a 100644 --- a/crates/api_crud/src/private_message/create.rs +++ b/crates/api_crud/src/private_message/create.rs @@ -43,7 +43,7 @@ impl PerformCrud for CreatePrivateMessage { &data.content.clone(), &local_site_to_slur_regex(&local_site), ); - is_valid_body_field(&Some(content_slurs_removed.clone()))?; + is_valid_body_field(&Some(content_slurs_removed.clone()), false)?; check_person_block( local_user_view.person.id, diff --git a/crates/api_crud/src/private_message/update.rs b/crates/api_crud/src/private_message/update.rs index 90ea5fb39..bc2e834c4 100644 --- a/crates/api_crud/src/private_message/update.rs +++ b/crates/api_crud/src/private_message/update.rs @@ -42,7 +42,7 @@ impl PerformCrud for EditPrivateMessage { // Doing the update let content_slurs_removed = remove_slurs(&data.content, &local_site_to_slur_regex(&local_site)); - is_valid_body_field(&Some(content_slurs_removed.clone()))?; + is_valid_body_field(&Some(content_slurs_removed.clone()), false)?; let private_message_id = data.private_message_id; PrivateMessage::update( diff --git a/crates/api_crud/src/site/create.rs b/crates/api_crud/src/site/create.rs index 91a531ea9..abd3af808 100644 --- a/crates/api_crud/src/site/create.rs +++ b/crates/api_crud/src/site/create.rs @@ -1,4 +1,7 @@ -use crate::{site::check_application_question, PerformCrud}; +use crate::{ + site::{application_question_check, site_default_post_listing_type_check}, + PerformCrud, +}; use activitypub_federation::http_signatures::generate_actor_keypair; use actix_web::web::Data; use lemmy_api_common::{ @@ -8,9 +11,7 @@ use lemmy_api_common::{ generate_site_inbox_url, is_admin, local_site_rate_limit_to_rate_limit_config, - local_site_to_slur_regex, local_user_view_from_jwt, - site_description_length_check, }, }; use lemmy_db_schema::{ @@ -26,10 +27,16 @@ use lemmy_db_schema::{ }; use lemmy_db_views::structs::SiteView; use lemmy_utils::{ - error::LemmyError, + error::{LemmyError, LemmyResult}, utils::{ slurs::{check_slurs, check_slurs_opt}, - validation::{check_site_visibility_valid, is_valid_body_field}, + validation::{ + build_and_check_regex, + check_site_visibility_valid, + 
is_valid_body_field, + site_description_length_check, + site_name_length_check, + }, }, }; use url::Url; @@ -42,56 +49,23 @@ impl PerformCrud for CreateSite { async fn perform(&self, context: &Data) -> Result { let data: &CreateSite = self; + let local_user_view = local_user_view_from_jwt(&data.auth, context).await?; let local_site = LocalSite::read(&mut *context.conn().await?).await?; - if local_site.site_setup { - return Err(LemmyError::from_message("site_already_exists")); - }; - - let local_user_view = local_user_view_from_jwt(&data.auth, context).await?; - - // Make sure user is an admin + // Make sure user is an admin; other types of users should not create site data... is_admin(&local_user_view)?; - check_site_visibility_valid( - local_site.private_instance, - local_site.federation_enabled, - &data.private_instance, - &data.federation_enabled, - )?; - - let sidebar = diesel_option_overwrite(&data.sidebar); - let description = diesel_option_overwrite(&data.description); - let icon = diesel_option_overwrite_to_url(&data.icon)?; - let banner = diesel_option_overwrite_to_url(&data.banner)?; - - let slur_regex = local_site_to_slur_regex(&local_site); - check_slurs(&data.name, &slur_regex)?; - check_slurs_opt(&data.description, &slur_regex)?; - - if let Some(Some(desc)) = &description { - site_description_length_check(desc)?; - } - - is_valid_body_field(&data.sidebar)?; - - let application_question = diesel_option_overwrite(&data.application_question); - check_application_question( - &application_question, - data - .registration_mode - .unwrap_or(local_site.registration_mode), - )?; + validate_create_payload(&local_site, data)?; let actor_id: DbUrl = Url::parse(&context.settings().get_protocol_and_hostname())?.into(); let inbox_url = Some(generate_site_inbox_url(&actor_id)?); let keypair = generate_actor_keypair()?; let site_form = SiteUpdateForm::builder() .name(Some(data.name.clone())) - .sidebar(sidebar) - .description(description) - .icon(icon) - .banner(banner) + .sidebar(diesel_option_overwrite(&data.sidebar)) + .description(diesel_option_overwrite(&data.description)) + .icon(diesel_option_overwrite_to_url(&data.icon)?) + .banner(diesel_option_overwrite_to_url(&data.banner)?) .actor_id(Some(actor_id)) .last_refreshed_at(Some(naive_now())) .inbox_url(inbox_url) @@ -111,7 +85,7 @@ impl PerformCrud for CreateSite { .enable_nsfw(data.enable_nsfw) .community_creation_admin_only(data.community_creation_admin_only) .require_email_verification(data.require_email_verification) - .application_question(application_question) + .application_question(diesel_option_overwrite(&data.application_question)) .private_instance(data.private_instance) .default_theme(data.default_theme.clone()) .default_post_listing_type(data.default_post_listing_type) @@ -122,7 +96,6 @@ impl PerformCrud for CreateSite { .slur_filter_regex(diesel_option_overwrite(&data.slur_filter_regex)) .actor_name_max_length(data.actor_name_max_length) .federation_enabled(data.federation_enabled) - .federation_worker_count(data.federation_worker_count) .captcha_enabled(data.captcha_enabled) .captcha_difficulty(data.captcha_difficulty.clone()) .build(); @@ -165,3 +138,449 @@ impl PerformCrud for CreateSite { }) } } + +fn validate_create_payload(local_site: &LocalSite, create_site: &CreateSite) -> LemmyResult<()> { + // Make sure the site hasn't already been set up... + if local_site.site_setup { + return Err(LemmyError::from_message("site_already_exists")); + }; + + // Check that the slur regex compiles, and returns the regex if valid... 
+ // Prioritize using new slur regex from the request; if not provided, use the existing regex. + let slur_regex = build_and_check_regex( + &create_site + .slur_filter_regex + .as_deref() + .or(local_site.slur_filter_regex.as_deref()), + )?; + + site_name_length_check(&create_site.name)?; + check_slurs(&create_site.name, &slur_regex)?; + + if let Some(desc) = &create_site.description { + site_description_length_check(desc)?; + check_slurs_opt(&create_site.description, &slur_regex)?; + } + + site_default_post_listing_type_check(&create_site.default_post_listing_type)?; + + check_site_visibility_valid( + local_site.private_instance, + local_site.federation_enabled, + &create_site.private_instance, + &create_site.federation_enabled, + )?; + + // Ensure that the sidebar has fewer than the max num characters... + is_valid_body_field(&create_site.sidebar, false)?; + + application_question_check( + &local_site.application_question, + &create_site.application_question, + create_site + .registration_mode + .unwrap_or(local_site.registration_mode), + ) +} + +#[cfg(test)] +mod tests { + use crate::site::create::validate_create_payload; + use lemmy_api_common::site::CreateSite; + use lemmy_db_schema::{source::local_site::LocalSite, ListingType, RegistrationMode}; + + #[test] + fn test_validate_invalid_create_payload() { + let invalid_payloads = [ + ( + "CreateSite attempted on set up LocalSite", + "site_already_exists", + &generate_local_site( + true, + None::, + true, + false, + None::, + RegistrationMode::Open, + ), + &generate_create_site( + String::from("site_name"), + None::, + None::, + None::, + None::, + None::, + None::, + None::, + None::, + ), + ), + ( + "CreateSite name matches LocalSite slur filter", + "slurs", + &generate_local_site( + false, + Some(String::from("(foo|bar)")), + true, + false, + None::, + RegistrationMode::Open, + ), + &generate_create_site( + String::from("foo site_name"), + None::, + None::, + None::, + None::, + None::, + None::, + None::, + None::, + ), + ), + ( + "CreateSite name matches new slur filter", + "slurs", + &generate_local_site( + false, + Some(String::from("(foo|bar)")), + true, + false, + None::, + RegistrationMode::Open, + ), + &generate_create_site( + String::from("zeta site_name"), + None::, + None::, + None::, + Some(String::from("(zeta|alpha)")), + None::, + None::, + None::, + None::, + ), + ), + ( + "CreateSite listing type is Subscribed, which is invalid", + "invalid_default_post_listing_type", + &generate_local_site( + false, + None::, + true, + false, + None::, + RegistrationMode::Open, + ), + &generate_create_site( + String::from("site_name"), + None::, + None::, + Some(ListingType::Subscribed), + None::, + None::, + None::, + None::, + None::, + ), + ), + ( + "CreateSite is both private and federated", + "cant_enable_private_instance_and_federation_together", + &generate_local_site( + false, + None::, + true, + false, + None::, + RegistrationMode::Open, + ), + &generate_create_site( + String::from("site_name"), + None::, + None::, + None::, + None::, + Some(true), + Some(true), + None::, + None::, + ), + ), + ( + "LocalSite is private, but CreateSite also makes it federated", + "cant_enable_private_instance_and_federation_together", + &generate_local_site( + false, + None::, + true, + false, + None::, + RegistrationMode::Open, + ), + &generate_create_site( + String::from("site_name"), + None::, + None::, + None::, + None::, + None::, + Some(true), + None::, + None::, + ), + ), + ( + "CreateSite requires application, but neither it nor 
LocalSite has an application question", + "application_question_required", + &generate_local_site( + false, + None::, + true, + false, + None::, + RegistrationMode::Open, + ), + &generate_create_site( + String::from("site_name"), + None::, + None::, + None::, + None::, + None::, + None::, + None::, + Some(RegistrationMode::RequireApplication), + ), + ), + ]; + + invalid_payloads.iter().enumerate().for_each( + |( + idx, + &(reason, expected_err, local_site, create_site), + )| { + match validate_create_payload( + local_site, + create_site, + ) { + Ok(_) => { + panic!( + "Got Ok, but validation should have failed with error: {} for reason: {}. invalid_payloads.nth({})", + expected_err, reason, idx + ) + } + Err(error) => { + assert!( + error.message.eq(&Some(String::from(expected_err))), + "Got Err {:?}, but should have failed with message: {} for reason: {}. invalid_payloads.nth({})", + error.message, + expected_err, + reason, + idx + ) + } + } + }, + ); + } + + #[test] + fn test_validate_valid_create_payload() { + let valid_payloads = [ + ( + "No changes between LocalSite and CreateSite", + &generate_local_site( + false, + None::, + true, + false, + None::, + RegistrationMode::Open, + ), + &generate_create_site( + String::from("site_name"), + None::, + None::, + None::, + None::, + None::, + None::, + None::, + None::, + ), + ), + ( + "CreateSite allows clearing and changing values", + &generate_local_site( + false, + None::, + true, + false, + None::, + RegistrationMode::Open, + ), + &generate_create_site( + String::from("site_name"), + Some(String::new()), + Some(String::new()), + Some(ListingType::All), + Some(String::new()), + Some(false), + Some(true), + Some(String::new()), + Some(RegistrationMode::Open), + ), + ), + ( + "CreateSite clears existing slur filter regex", + &generate_local_site( + false, + Some(String::from("(foo|bar)")), + true, + false, + None::, + RegistrationMode::Open, + ), + &generate_create_site( + String::from("foo site_name"), + None::, + None::, + None::, + Some(String::new()), + None::, + None::, + None::, + None::, + ), + ), + ( + "LocalSite has application question and CreateSite now requires applications,", + &generate_local_site( + false, + None::, + true, + false, + Some(String::from("question")), + RegistrationMode::Open, + ), + &generate_create_site( + String::from("site_name"), + None::, + None::, + None::, + None::, + None::, + None::, + None::, + Some(RegistrationMode::RequireApplication), + ), + ), + ]; + + valid_payloads + .iter() + .enumerate() + .for_each(|(idx, &(reason, local_site, edit_site))| { + assert!( + validate_create_payload(local_site, edit_site).is_ok(), + "Got Err, but should have got Ok for reason: {}. 
valid_payloads.nth({})", + reason, + idx + ); + }) + } + + fn generate_local_site( + site_setup: bool, + site_slur_filter_regex: Option, + site_is_private: bool, + site_is_federated: bool, + site_application_question: Option, + site_registration_mode: RegistrationMode, + ) -> LocalSite { + LocalSite { + id: Default::default(), + site_id: Default::default(), + site_setup, + enable_downvotes: false, + enable_nsfw: false, + community_creation_admin_only: false, + require_email_verification: false, + application_question: site_application_question, + private_instance: site_is_private, + default_theme: String::new(), + default_post_listing_type: ListingType::All, + legal_information: None, + hide_modlog_mod_names: false, + application_email_admins: false, + slur_filter_regex: site_slur_filter_regex, + actor_name_max_length: 0, + federation_enabled: site_is_federated, + captcha_enabled: false, + captcha_difficulty: String::new(), + published: Default::default(), + updated: None, + registration_mode: site_registration_mode, + reports_email_admins: false, + } + } + + // Allow the test helper function to have too many arguments. + // It's either this or generate the entire struct each time for testing. + #[allow(clippy::too_many_arguments)] + fn generate_create_site( + site_name: String, + site_description: Option, + site_sidebar: Option, + site_listing_type: Option, + site_slur_filter_regex: Option, + site_is_private: Option, + site_is_federated: Option, + site_application_question: Option, + site_registration_mode: Option, + ) -> CreateSite { + CreateSite { + name: site_name, + sidebar: site_sidebar, + description: site_description, + icon: None, + banner: None, + enable_downvotes: None, + enable_nsfw: None, + community_creation_admin_only: None, + require_email_verification: None, + application_question: site_application_question, + private_instance: site_is_private, + default_theme: None, + default_post_listing_type: site_listing_type, + legal_information: None, + application_email_admins: None, + hide_modlog_mod_names: None, + discussion_languages: None, + slur_filter_regex: site_slur_filter_regex, + actor_name_max_length: None, + rate_limit_message: None, + rate_limit_message_per_second: None, + rate_limit_post: None, + rate_limit_post_per_second: None, + rate_limit_register: None, + rate_limit_register_per_second: None, + rate_limit_image: None, + rate_limit_image_per_second: None, + rate_limit_comment: None, + rate_limit_comment_per_second: None, + rate_limit_search: None, + rate_limit_search_per_second: None, + federation_enabled: site_is_federated, + federation_debug: None, + captcha_enabled: None, + captcha_difficulty: None, + allowed_instances: None, + blocked_instances: None, + taglines: None, + registration_mode: site_registration_mode, + auth: Default::default(), + } + } +} diff --git a/crates/api_crud/src/site/mod.rs b/crates/api_crud/src/site/mod.rs index d0c09b935..a98f2057c 100644 --- a/crates/api_crud/src/site/mod.rs +++ b/crates/api_crud/src/site/mod.rs @@ -1,19 +1,95 @@ -use lemmy_db_schema::RegistrationMode; -use lemmy_utils::error::LemmyError; +use lemmy_db_schema::{ListingType, RegistrationMode}; +use lemmy_utils::error::{LemmyError, LemmyResult}; mod create; mod read; mod update; -pub fn check_application_question( - application_question: &Option>, +/// Checks whether the default post listing type is valid for a site. 
+pub fn site_default_post_listing_type_check( + default_post_listing_type: &Option, +) -> LemmyResult<()> { + if let Some(listing_type) = default_post_listing_type { + // Only allow all or local as default listing types... + if listing_type != &ListingType::All && listing_type != &ListingType::Local { + Err(LemmyError::from_message( + "invalid_default_post_listing_type", + )) + } else { + Ok(()) + } + } else { + Ok(()) + } +} + +/// Checks whether the application question and registration mode align. +pub fn application_question_check( + current_application_question: &Option, + new_application_question: &Option, registration_mode: RegistrationMode, -) -> Result<(), LemmyError> { +) -> LemmyResult<()> { + let has_no_question: bool = + current_application_question.is_none() && new_application_question.is_none(); + let is_nullifying_question: bool = new_application_question == &Some(String::new()); + if registration_mode == RegistrationMode::RequireApplication - && application_question.as_ref().unwrap_or(&None).is_none() + && (has_no_question || is_nullifying_question) { Err(LemmyError::from_message("application_question_required")) } else { Ok(()) } } + +#[cfg(test)] +mod tests { + use crate::site::{application_question_check, site_default_post_listing_type_check}; + use lemmy_db_schema::{ListingType, RegistrationMode}; + + #[test] + fn test_site_default_post_listing_type_check() { + assert!(site_default_post_listing_type_check(&None::).is_ok()); + assert!(site_default_post_listing_type_check(&Some(ListingType::All)).is_ok()); + assert!(site_default_post_listing_type_check(&Some(ListingType::Local)).is_ok()); + assert!(site_default_post_listing_type_check(&Some(ListingType::Subscribed)).is_err()); + } + + #[test] + fn test_application_question_check() { + assert!( + application_question_check(&Some(String::from("q")), &Some(String::new()), RegistrationMode::RequireApplication).is_err(), + "Expected application to be invalid because an application is required, current question: {:?}, new question: {:?}", + "q", + String::new(), + ); + assert!( + application_question_check(&None, &None, RegistrationMode::RequireApplication).is_err(), + "Expected application to be invalid because an application is required, current question: {:?}, new question: {:?}", + None::, + None:: + ); + + assert!( + application_question_check(&None, &None, RegistrationMode::Open).is_ok(), + "Expected application to be valid because no application required, current question: {:?}, new question: {:?}, mode: {:?}", + None::, + None::, + RegistrationMode::Open + ); + assert!( + application_question_check(&None, &Some(String::from("q")), RegistrationMode::RequireApplication).is_ok(), + "Expected application to be valid because new application provided, current question: {:?}, new question: {:?}, mode: {:?}", + None::, + Some(String::from("q")), + RegistrationMode::RequireApplication + ); + assert!( + application_question_check(&Some(String::from("q")), &None, RegistrationMode::RequireApplication).is_ok(), + "Expected application to be valid because application existed, current question: {:?}, new question: {:?}, mode: {:?}", + Some(String::from("q")), + None::, + RegistrationMode::RequireApplication + ); + } +} diff --git a/crates/api_crud/src/site/update.rs b/crates/api_crud/src/site/update.rs index a18641010..32115aea1 100644 --- a/crates/api_crud/src/site/update.rs +++ b/crates/api_crud/src/site/update.rs @@ -1,15 +1,12 @@ -use crate::{site::check_application_question, PerformCrud}; +use crate::{ + 
site::{application_question_check, site_default_post_listing_type_check}, + PerformCrud, +}; use actix_web::web::Data; use lemmy_api_common::{ context::LemmyContext, site::{EditSite, SiteResponse}, - utils::{ - is_admin, - local_site_rate_limit_to_rate_limit_config, - local_site_to_slur_regex, - local_user_view_from_jwt, - site_description_length_check, - }, + utils::{is_admin, local_site_rate_limit_to_rate_limit_config, local_user_view_from_jwt}, }; use lemmy_db_schema::{ source::{ @@ -24,15 +21,20 @@ use lemmy_db_schema::{ }, traits::Crud, utils::{diesel_option_overwrite, diesel_option_overwrite_to_url, naive_now}, - ListingType, RegistrationMode, }; use lemmy_db_views::structs::SiteView; use lemmy_utils::{ - error::LemmyError, + error::{LemmyError, LemmyResult}, utils::{ slurs::check_slurs_opt, - validation::{check_site_visibility_valid, is_valid_body_field}, + validation::{ + build_and_check_regex, + check_site_visibility_valid, + is_valid_body_field, + site_description_length_check, + site_name_length_check, + }, }, }; @@ -48,43 +50,10 @@ impl PerformCrud for EditSite { let local_site = site_view.local_site; let site = site_view.site; - // Make sure user is an admin + // Make sure user is an admin; other types of users should not update site data... is_admin(&local_user_view)?; - check_site_visibility_valid( - local_site.private_instance, - local_site.federation_enabled, - &data.private_instance, - &data.federation_enabled, - )?; - - let slur_regex = local_site_to_slur_regex(&local_site); - - check_slurs_opt(&data.name, &slur_regex)?; - check_slurs_opt(&data.description, &slur_regex)?; - - if let Some(desc) = &data.description { - site_description_length_check(desc)?; - } - - is_valid_body_field(&data.sidebar)?; - - let application_question = diesel_option_overwrite(&data.application_question); - check_application_question( - &application_question, - data - .registration_mode - .unwrap_or(local_site.registration_mode), - )?; - - if let Some(listing_type) = &data.default_post_listing_type { - // only allow all or local as default listing types - if listing_type != &ListingType::All && listing_type != &ListingType::Local { - return Err(LemmyError::from_message( - "invalid_default_post_listing_type", - )); - } - } + validate_update_payload(&local_site, data)?; if let Some(discussion_languages) = data.discussion_languages.clone() { SiteLanguage::update( @@ -95,9 +64,8 @@ impl PerformCrud for EditSite { .await?; } - let name = data.name.clone(); let site_form = SiteUpdateForm::builder() - .name(name) + .name(data.name.clone()) .sidebar(diesel_option_overwrite(&data.sidebar)) .description(diesel_option_overwrite(&data.description)) .icon(diesel_option_overwrite_to_url(&data.icon)?) 
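// --- Illustrative aside (not part of this patch) ---------------------------
// A minimal sketch of how the two helpers added in crates/api_crud/src/site/mod.rs
// compose from inside this crate. The signatures are taken from the hunks above;
// the wrapper function below and its name are hypothetical.
use crate::site::{application_question_check, site_default_post_listing_type_check};
use lemmy_db_schema::{ListingType, RegistrationMode};
use lemmy_utils::error::LemmyResult;

fn sketch_validate_site_payload(
  current_question: &Option<String>,
  new_question: &Option<String>,
  default_listing_type: &Option<ListingType>,
  mode: RegistrationMode,
) -> LemmyResult<()> {
  // Only ListingType::All and ListingType::Local are accepted as site-wide defaults.
  site_default_post_listing_type_check(default_listing_type)?;
  // Requiring applications with no question, or while clearing the question, is rejected.
  application_question_check(current_question, new_question, mode)
}
// ---------------------------------------------------------------------------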
@@ -117,7 +85,7 @@ impl PerformCrud for EditSite { .enable_nsfw(data.enable_nsfw) .community_creation_admin_only(data.community_creation_admin_only) .require_email_verification(data.require_email_verification) - .application_question(application_question) + .application_question(diesel_option_overwrite(&data.application_question)) .private_instance(data.private_instance) .default_theme(data.default_theme.clone()) .default_post_listing_type(data.default_post_listing_type) @@ -128,7 +96,6 @@ impl PerformCrud for EditSite { .slur_filter_regex(diesel_option_overwrite(&data.slur_filter_regex)) .actor_name_max_length(data.actor_name_max_length) .federation_enabled(data.federation_enabled) - .federation_worker_count(data.federation_worker_count) .captcha_enabled(data.captcha_enabled) .captcha_difficulty(data.captcha_difficulty.clone()) .reports_email_admins(data.reports_email_admins) @@ -211,3 +178,411 @@ impl PerformCrud for EditSite { Ok(res) } } + +fn validate_update_payload(local_site: &LocalSite, edit_site: &EditSite) -> LemmyResult<()> { + // Check that the slur regex compiles, and return the regex if valid... + // Prioritize using new slur regex from the request; if not provided, use the existing regex. + let slur_regex = build_and_check_regex( + &edit_site + .slur_filter_regex + .as_deref() + .or(local_site.slur_filter_regex.as_deref()), + )?; + + if let Some(name) = &edit_site.name { + // The name doesn't need to be updated, but if provided it cannot be blanked out... + site_name_length_check(name)?; + check_slurs_opt(&edit_site.name, &slur_regex)?; + } + + if let Some(desc) = &edit_site.description { + site_description_length_check(desc)?; + check_slurs_opt(&edit_site.description, &slur_regex)?; + } + + site_default_post_listing_type_check(&edit_site.default_post_listing_type)?; + + check_site_visibility_valid( + local_site.private_instance, + local_site.federation_enabled, + &edit_site.private_instance, + &edit_site.federation_enabled, + )?; + + // Ensure that the sidebar has fewer than the max num characters... 
+ is_valid_body_field(&edit_site.sidebar, false)?; + + application_question_check( + &local_site.application_question, + &edit_site.application_question, + edit_site + .registration_mode + .unwrap_or(local_site.registration_mode), + ) +} + +#[cfg(test)] +mod tests { + use crate::site::update::validate_update_payload; + use lemmy_api_common::site::EditSite; + use lemmy_db_schema::{source::local_site::LocalSite, ListingType, RegistrationMode}; + + #[test] + fn test_validate_invalid_update_payload() { + let invalid_payloads = [ + ( + "EditSite name matches LocalSite slur filter", + "slurs", + &generate_local_site( + Some(String::from("(foo|bar)")), + true, + false, + None::, + RegistrationMode::Open, + ), + &generate_edit_site( + Some(String::from("foo site_name")), + None::, + None::, + None::, + None::, + None::, + None::, + None::, + None::, + ), + ), + ( + "EditSite name matches new slur filter", + "slurs", + &generate_local_site( + Some(String::from("(foo|bar)")), + true, + false, + None::, + RegistrationMode::Open, + ), + &generate_edit_site( + Some(String::from("zeta site_name")), + None::, + None::, + None::, + Some(String::from("(zeta|alpha)")), + None::, + None::, + None::, + None::, + ), + ), + ( + "EditSite listing type is Subscribed, which is invalid", + "invalid_default_post_listing_type", + &generate_local_site( + None::, + true, + false, + None::, + RegistrationMode::Open, + ), + &generate_edit_site( + Some(String::from("site_name")), + None::, + None::, + Some(ListingType::Subscribed), + None::, + None::, + None::, + None::, + None::, + ), + ), + ( + "EditSite is both private and federated", + "cant_enable_private_instance_and_federation_together", + &generate_local_site( + None::, + true, + false, + None::, + RegistrationMode::Open, + ), + &generate_edit_site( + Some(String::from("site_name")), + None::, + None::, + None::, + None::, + Some(true), + Some(true), + None::, + None::, + ), + ), + ( + "LocalSite is private, but EditSite also makes it federated", + "cant_enable_private_instance_and_federation_together", + &generate_local_site( + None::, + true, + false, + None::, + RegistrationMode::Open, + ), + &generate_edit_site( + Some(String::from("site_name")), + None::, + None::, + None::, + None::, + None::, + Some(true), + None::, + None::, + ), + ), + ( + "EditSite requires application, but neither it nor LocalSite has an application question", + "application_question_required", + &generate_local_site( + None::, + true, + false, + None::, + RegistrationMode::Open, + ), + &generate_edit_site( + Some(String::from("site_name")), + None::, + None::, + None::, + None::, + None::, + None::, + None::, + Some(RegistrationMode::RequireApplication), + ), + ), + ]; + + invalid_payloads.iter().enumerate().for_each( + |( + idx, + &(reason, expected_err, local_site, edit_site), + )| { + match validate_update_payload(local_site, edit_site) { + Ok(_) => { + panic!( + "Got Ok, but validation should have failed with error: {} for reason: {}. invalid_payloads.nth({})", + expected_err, reason, idx + ) + } + Err(error) => { + assert!( + error.message.eq(&Some(String::from(expected_err))), + "Got Err {:?}, but should have failed with message: {} for reason: {}. 
invalid_payloads.nth({})", + error.message, + expected_err, + reason, + idx + ) + } + } + }, + ); + } + + #[test] + fn test_validate_valid_update_payload() { + let valid_payloads = [ + ( + "No changes between LocalSite and EditSite", + &generate_local_site( + None::, + true, + false, + None::, + RegistrationMode::Open, + ), + &generate_edit_site( + None::, + None::, + None::, + None::, + None::, + None::, + None::, + None::, + None::, + ), + ), + ( + "EditSite allows clearing and changing values", + &generate_local_site( + None::, + true, + false, + None::, + RegistrationMode::Open, + ), + &generate_edit_site( + Some(String::from("site_name")), + Some(String::new()), + Some(String::new()), + Some(ListingType::All), + Some(String::new()), + Some(false), + Some(true), + Some(String::new()), + Some(RegistrationMode::Open), + ), + ), + ( + "EditSite name passes slur filter regex", + &generate_local_site( + Some(String::from("(foo|bar)")), + true, + false, + None::, + RegistrationMode::Open, + ), + &generate_edit_site( + Some(String::from("foo site_name")), + None::, + None::, + None::, + Some(String::new()), + None::, + None::, + None::, + None::, + ), + ), + ( + "LocalSite has application question and EditSite now requires applications,", + &generate_local_site( + None::, + true, + false, + Some(String::from("question")), + RegistrationMode::Open, + ), + &generate_edit_site( + Some(String::from("site_name")), + None::, + None::, + None::, + None::, + None::, + None::, + None::, + Some(RegistrationMode::RequireApplication), + ), + ), + ]; + + valid_payloads + .iter() + .enumerate() + .for_each(|(idx, &(reason, local_site, edit_site))| { + assert!( + validate_update_payload(local_site, edit_site).is_ok(), + "Got Err, but should have got Ok for reason: {}. valid_payloads.nth({})", + reason, + idx + ); + }) + } + + fn generate_local_site( + site_slur_filter_regex: Option, + site_is_private: bool, + site_is_federated: bool, + site_application_question: Option, + site_registration_mode: RegistrationMode, + ) -> LocalSite { + LocalSite { + id: Default::default(), + site_id: Default::default(), + site_setup: true, + enable_downvotes: false, + enable_nsfw: false, + community_creation_admin_only: false, + require_email_verification: false, + application_question: site_application_question, + private_instance: site_is_private, + default_theme: String::new(), + default_post_listing_type: ListingType::All, + legal_information: None, + hide_modlog_mod_names: false, + application_email_admins: false, + slur_filter_regex: site_slur_filter_regex, + actor_name_max_length: 0, + federation_enabled: site_is_federated, + captcha_enabled: false, + captcha_difficulty: String::new(), + published: Default::default(), + updated: None, + registration_mode: site_registration_mode, + reports_email_admins: false, + } + } + + // Allow the test helper function to have too many arguments. + // It's either this or generate the entire struct each time for testing. 
+ #[allow(clippy::too_many_arguments)] + fn generate_edit_site( + site_name: Option, + site_description: Option, + site_sidebar: Option, + site_listing_type: Option, + site_slur_filter_regex: Option, + site_is_private: Option, + site_is_federated: Option, + site_application_question: Option, + site_registration_mode: Option, + ) -> EditSite { + EditSite { + name: site_name, + sidebar: site_sidebar, + description: site_description, + icon: None, + banner: None, + enable_downvotes: None, + enable_nsfw: None, + community_creation_admin_only: None, + require_email_verification: None, + application_question: site_application_question, + private_instance: site_is_private, + default_theme: None, + default_post_listing_type: site_listing_type, + legal_information: None, + application_email_admins: None, + hide_modlog_mod_names: None, + discussion_languages: None, + slur_filter_regex: site_slur_filter_regex, + actor_name_max_length: None, + rate_limit_message: None, + rate_limit_message_per_second: None, + rate_limit_post: None, + rate_limit_post_per_second: None, + rate_limit_register: None, + rate_limit_register_per_second: None, + rate_limit_image: None, + rate_limit_image_per_second: None, + rate_limit_comment: None, + rate_limit_comment_per_second: None, + rate_limit_search: None, + rate_limit_search_per_second: None, + federation_enabled: site_is_federated, + federation_debug: None, + captcha_enabled: None, + captcha_difficulty: None, + allowed_instances: None, + blocked_instances: None, + taglines: None, + registration_mode: site_registration_mode, + reports_email_admins: None, + auth: Default::default(), + } + } +} diff --git a/crates/api_crud/src/user/create.rs b/crates/api_crud/src/user/create.rs index 00b484ea8..9092376b6 100644 --- a/crates/api_crud/src/user/create.rs +++ b/crates/api_crud/src/user/create.rs @@ -19,6 +19,7 @@ use lemmy_api_common::{ use lemmy_db_schema::{ aggregates::structs::PersonAggregates, source::{ + captcha_answer::{CaptchaAnswer, CheckCaptchaAnswer}, local_user::{LocalUser, LocalUserInsertForm}, person::{Person, PersonInsertForm}, registration_application::{RegistrationApplication, RegistrationApplicationInsertForm}, @@ -71,6 +72,25 @@ impl PerformCrud for Register { return Err(LemmyError::from_message("passwords_dont_match")); } + if local_site.site_setup && local_site.captcha_enabled { + if let Some(captcha_uuid) = &data.captcha_uuid { + let uuid = uuid::Uuid::parse_str(captcha_uuid)?; + let check = CaptchaAnswer::check_captcha( + context.pool(), + CheckCaptchaAnswer { + uuid, + answer: data.captcha_answer.clone().unwrap_or_default(), + }, + ) + .await?; + if !check { + return Err(LemmyError::from_message("captcha_incorrect")); + } + } else { + return Err(LemmyError::from_message("captcha_incorrect")); + } + } + let slur_regex = local_site_to_slur_regex(&local_site); check_slurs(&data.username, &slur_regex)?; check_slurs_opt(&data.answer, &slur_regex)?; diff --git a/crates/apub/Cargo.toml b/crates/apub/Cargo.toml index 2007b541a..8570541f7 100644 --- a/crates/apub/Cargo.toml +++ b/crates/apub/Cargo.toml @@ -25,7 +25,7 @@ chrono = { workspace = true } serde_json = { workspace = true } serde = { workspace = true } actix-web = { workspace = true } -actix-rt = { workspace = true } +tokio = {workspace = true} tracing = { workspace = true } strum_macros = { workspace = true } url = { workspace = true } diff --git a/crates/apub/src/activities/community/mod.rs b/crates/apub/src/activities/community/mod.rs index d85df0c59..4e665e338 100644 --- 
a/crates/apub/src/activities/community/mod.rs +++ b/crates/apub/src/activities/community/mod.rs @@ -43,12 +43,11 @@ pub(crate) async fn send_activity_in_community( // send to user followers if !is_mod_action { - inboxes.append( + inboxes.extend( &mut PersonFollower::list_followers(&mut *context.conn().await?, actor.id) .await? .into_iter() - .map(|p| ApubPerson(p).shared_inbox_or_inbox()) - .collect(), + .map(|p| ApubPerson(p).shared_inbox_or_inbox()), ); } diff --git a/crates/apub/src/collections/community_moderators.rs b/crates/apub/src/collections/community_moderators.rs index 5fc503dd4..d178ffaf0 100644 --- a/crates/apub/src/collections/community_moderators.rs +++ b/crates/apub/src/collections/community_moderators.rs @@ -121,7 +121,7 @@ mod tests { }; use serial_test::serial; - #[actix_rt::test] + #[tokio::test] #[serial] async fn test_parse_lemmy_community_moderators() { let context = init_context().await; diff --git a/crates/apub/src/http/mod.rs b/crates/apub/src/http/mod.rs index 1f309bfcb..9000d38f2 100644 --- a/crates/apub/src/http/mod.rs +++ b/crates/apub/src/http/mod.rs @@ -48,7 +48,7 @@ where Ok( HttpResponse::Ok() .content_type(FEDERATION_CONTENT_TYPE) - .content_type("application/json") + .content_type("application/activity+json") .body(json), ) } @@ -61,7 +61,7 @@ fn create_apub_tombstone_response>(id: T) -> LemmyResult>(); + .filter(|m| !m.is_local(&context.settings().hostname)); - for mention in &mentions { + for mention in mentions { let identifier = format!("{}@{}", mention.name, mention.domain); let person = webfinger_resolve_actor::(&identifier, context).await; if let Ok(person) = person { diff --git a/crates/apub/src/objects/comment.rs b/crates/apub/src/objects/comment.rs index 684a3ae49..5c46a5892 100644 --- a/crates/apub/src/objects/comment.rs +++ b/crates/apub/src/objects/comment.rs @@ -239,7 +239,7 @@ pub(crate) mod tests { .unwrap(); } - #[actix_rt::test] + #[tokio::test] #[serial] pub(crate) async fn test_parse_lemmy_comment() { let context = init_context().await; @@ -267,7 +267,7 @@ pub(crate) mod tests { cleanup(data, &context).await; } - #[actix_rt::test] + #[tokio::test] #[serial] async fn test_parse_pleroma_comment() { let context = init_context().await; @@ -299,7 +299,7 @@ pub(crate) mod tests { cleanup(data, &context).await; } - #[actix_rt::test] + #[tokio::test] #[serial] async fn test_html_to_markdown_sanitize() { let parsed = parse_html("hello"); diff --git a/crates/apub/src/objects/community.rs b/crates/apub/src/objects/community.rs index 4ea7637c8..15d254ffc 100644 --- a/crates/apub/src/objects/community.rs +++ b/crates/apub/src/objects/community.rs @@ -141,19 +141,16 @@ impl Object for ApubCommunity { // Fetching mods and outbox is not necessary for Lemmy to work, so ignore errors. Besides, // we need to ignore these errors so that tests can work entirely offline. 
- group - .outbox - .dereference(&community, context) - .await - .map_err(|e| debug!("{}", e)) - .ok(); + let fetch_outbox = group.outbox.dereference(&community, context); if let Some(moderators) = group.attributed_to { - moderators - .dereference(&community, context) - .await - .map_err(|e| debug!("{}", e)) - .ok(); + let fetch_moderators = moderators.dereference(&community, context); + // Fetch mods and outbox in parallel + let res = tokio::join!(fetch_outbox, fetch_moderators); + res.0.map_err(|e| debug!("{}", e)).ok(); + res.1.map_err(|e| debug!("{}", e)).ok(); + } else { + fetch_outbox.await.map_err(|e| debug!("{}", e)).ok(); } Ok(community) @@ -243,7 +240,7 @@ pub(crate) mod tests { community } - #[actix_rt::test] + #[tokio::test] #[serial] async fn test_parse_lemmy_community() { let context = init_context().await; diff --git a/crates/apub/src/objects/instance.rs b/crates/apub/src/objects/instance.rs index 872cbaeb9..66332254a 100644 --- a/crates/apub/src/objects/instance.rs +++ b/crates/apub/src/objects/instance.rs @@ -222,7 +222,7 @@ pub(crate) mod tests { site } - #[actix_rt::test] + #[tokio::test] #[serial] async fn test_parse_lemmy_instance() { let context = init_context().await; diff --git a/crates/apub/src/objects/person.rs b/crates/apub/src/objects/person.rs index 5227b119c..e5cd9e8e6 100644 --- a/crates/apub/src/objects/person.rs +++ b/crates/apub/src/objects/person.rs @@ -223,7 +223,7 @@ pub(crate) mod tests { (person, site) } - #[actix_rt::test] + #[tokio::test] #[serial] async fn test_parse_lemmy_person() { let context = init_context().await; @@ -236,7 +236,7 @@ pub(crate) mod tests { cleanup((person, site), &context).await; } - #[actix_rt::test] + #[tokio::test] #[serial] async fn test_parse_pleroma_person() { let context = init_context().await; diff --git a/crates/apub/src/objects/post.rs b/crates/apub/src/objects/post.rs index 3961f0dfa..f45d0685c 100644 --- a/crates/apub/src/objects/post.rs +++ b/crates/apub/src/objects/post.rs @@ -282,7 +282,7 @@ mod tests { use lemmy_db_schema::source::site::Site; use serial_test::serial; - #[actix_rt::test] + #[tokio::test] #[serial] async fn test_parse_lemmy_post() { let context = init_context().await; diff --git a/crates/apub/src/objects/private_message.rs b/crates/apub/src/objects/private_message.rs index 75f18dfa0..edacf5f4f 100644 --- a/crates/apub/src/objects/private_message.rs +++ b/crates/apub/src/objects/private_message.rs @@ -193,7 +193,7 @@ mod tests { .unwrap(); } - #[actix_rt::test] + #[tokio::test] #[serial] async fn test_parse_lemmy_pm() { let context = init_context().await; @@ -221,7 +221,7 @@ mod tests { cleanup(data, &context).await; } - #[actix_rt::test] + #[tokio::test] #[serial] async fn test_parse_pleroma_pm() { let context = init_context().await; diff --git a/crates/db_schema/Cargo.toml b/crates/db_schema/Cargo.toml index aa26382c0..26f9b7901 100644 --- a/crates/db_schema/Cargo.toml +++ b/crates/db_schema/Cargo.toml @@ -29,7 +29,7 @@ serde_json = { workspace = true, optional = true } activitypub_federation = { workspace = true, optional = true } lemmy_utils = { workspace = true, optional = true } bcrypt = { workspace = true, optional = true } -diesel = { workspace = true, features = ["postgres","chrono", "serde_json"], optional = true } +diesel = { workspace = true, features = ["postgres","chrono", "serde_json", "uuid"], optional = true } diesel-derive-newtype = { workspace = true, optional = true } diesel-derive-enum = { workspace = true, optional = true } diesel_migrations = { workspace = true, optional = 
true } @@ -43,8 +43,12 @@ async-trait = { workspace = true } tokio = { workspace = true } tracing = { workspace = true } deadpool = { version = "0.9.5", features = ["rt_tokio_1"], optional = true } -ts-rs = { workspace = true, optional = true } +ts-rs = { workspace = true, optional = true } +rustls = { workspace = true } +futures-util = { workspace = true } +tokio-postgres = { workspace = true } +tokio-postgres-rustls = { workspace = true } +uuid = { workspace = true, features = ["v4"] } [dev-dependencies] serial_test = { workspace = true } - diff --git a/crates/db_schema/src/diesel_ltree.patch b/crates/db_schema/src/diesel_ltree.patch index d7d49f03e..ecbeb2193 100644 --- a/crates/db_schema/src/diesel_ltree.patch +++ b/crates/db_schema/src/diesel_ltree.patch @@ -19,8 +19,8 @@ index 255c6422..f2ccf5e2 100644 #[derive(diesel::sql_types::SqlType)] #[diesel(postgres_type(name = "sort_type_enum"))] -@@ -67,13 +63,13 @@ diesel::table! { - when_ -> Timestamp, +@@ -76,13 +76,13 @@ diesel::table! { + published -> Timestamp, } } diff --git a/crates/db_schema/src/impls/captcha_answer.rs b/crates/db_schema/src/impls/captcha_answer.rs new file mode 100644 index 000000000..de5fac65e --- /dev/null +++ b/crates/db_schema/src/impls/captcha_answer.rs @@ -0,0 +1,118 @@ +use crate::{ + schema::captcha_answer::dsl::{answer, captcha_answer, uuid}, + source::captcha_answer::{CaptchaAnswer, CaptchaAnswerForm, CheckCaptchaAnswer}, + utils::{functions::lower, get_conn, DbPool}, +}; +use diesel::{ + delete, + dsl::exists, + insert_into, + result::Error, + select, + ExpressionMethods, + QueryDsl, +}; +use diesel_async::RunQueryDsl; + +impl CaptchaAnswer { + pub async fn insert(pool: &DbPool, captcha: &CaptchaAnswerForm) -> Result<Self, Error> { + let conn = &mut get_conn(pool).await?; + + insert_into(captcha_answer) + .values(captcha) + .get_result::<Self>(conn) + .await + } + + pub async fn check_captcha(pool: &DbPool, to_check: CheckCaptchaAnswer) -> Result<bool, Error> { + let conn = &mut get_conn(pool).await?; + + // fetch requested captcha + let captcha_exists = select(exists( + captcha_answer + .filter((uuid).eq(to_check.uuid)) + .filter(lower(answer).eq(to_check.answer.to_lowercase().clone())), + )) + .get_result::<bool>(conn) + .await?; + + // delete checked captcha + delete(captcha_answer.filter(uuid.eq(to_check.uuid))) + .execute(conn) + .await?; + + Ok(captcha_exists) + } +} + +#[cfg(test)] +mod tests { + use crate::{ + source::captcha_answer::{CaptchaAnswer, CaptchaAnswerForm, CheckCaptchaAnswer}, + utils::build_db_pool_for_tests, + }; + use serial_test::serial; + + #[tokio::test] + #[serial] + async fn test_captcha_happy_path() { + let pool = &build_db_pool_for_tests().await; + + let inserted = CaptchaAnswer::insert( + pool, + &CaptchaAnswerForm { + answer: "XYZ".to_string(), + }, + ) + .await + .expect("should not fail to insert captcha"); + + let result = CaptchaAnswer::check_captcha( + pool, + CheckCaptchaAnswer { + uuid: inserted.uuid, + answer: "xyz".to_string(), + }, + ) + .await; + + assert!(result.is_ok()); + assert!(result.unwrap()); + } + + #[tokio::test] + #[serial] + async fn test_captcha_repeat_answer_fails() { + let pool = &build_db_pool_for_tests().await; + + let inserted = CaptchaAnswer::insert( + pool, + &CaptchaAnswerForm { + answer: "XYZ".to_string(), + }, + ) + .await + .expect("should not fail to insert captcha"); + + let _result = CaptchaAnswer::check_captcha( + pool, + CheckCaptchaAnswer { + uuid: inserted.uuid, + answer: "xyz".to_string(), + }, + ) + .await; + + let result_repeat = CaptchaAnswer::check_captcha( +
pool, + CheckCaptchaAnswer { + uuid: inserted.uuid, + answer: "xyz".to_string(), + }, + ) + .await; + + assert!(result_repeat.is_ok()); + assert!(!result_repeat.unwrap()); + } +} diff --git a/crates/db_schema/src/impls/comment.rs b/crates/db_schema/src/impls/comment.rs index cb21dbf4f..39ef2986a 100644 --- a/crates/db_schema/src/impls/comment.rs +++ b/crates/db_schema/src/impls/comment.rs @@ -11,7 +11,7 @@ use crate::{ CommentUpdateForm, }, traits::{Crud, Likeable, Saveable}, - utils::{naive_now, DbConn}, + utils::{get_conn, naive_now, DbConn, DELETED_REPLACEMENT_TEXT}, }; use diesel::{ dsl::{insert_into, sql_query}, @@ -30,7 +30,7 @@ impl Comment { ) -> Result, Error> { diesel::update(comment.filter(creator_id.eq(for_creator_id))) .set(( - content.eq("*Permananently Deleted*"), + content.eq(DELETED_REPLACEMENT_TEXT), deleted.eq(true), updated.eq(naive_now()), )) @@ -94,8 +94,7 @@ impl Comment { // left join comment c2 on c2.path <@ c.path and c2.path != c.path // group by c.id - let path_split = parent_path.0.split('.').collect::>(); - let parent_id = path_split.get(1); + let parent_id = parent_path.0.split('.').nth(1); if let Some(parent_id) = parent_id { let top_parent = format!("0.{}", parent_id); diff --git a/crates/db_schema/src/impls/mod.rs b/crates/db_schema/src/impls/mod.rs index 915d1c8e2..f13004d01 100644 --- a/crates/db_schema/src/impls/mod.rs +++ b/crates/db_schema/src/impls/mod.rs @@ -1,5 +1,6 @@ pub mod activity; pub mod actor_language; +pub mod captcha_answer; pub mod comment; pub mod comment_reply; pub mod comment_report; diff --git a/crates/db_schema/src/impls/password_reset_request.rs b/crates/db_schema/src/impls/password_reset_request.rs index 45a371ce2..d5e050c60 100644 --- a/crates/db_schema/src/impls/password_reset_request.rs +++ b/crates/db_schema/src/impls/password_reset_request.rs @@ -1,6 +1,11 @@ use crate::{ newtypes::LocalUserId, - schema::password_reset_request::dsl::{password_reset_request, published, token_encrypted}, + schema::password_reset_request::dsl::{ + local_user_id, + password_reset_request, + published, + token_encrypted, + }, source::password_reset_request::{PasswordResetRequest, PasswordResetRequestForm}, traits::Crud, utils::DbConn, @@ -73,6 +78,19 @@ impl PasswordResetRequest { .first::(conn) .await } + + pub async fn get_recent_password_resets_count( + pool: &DbPool, + user_id: LocalUserId, + ) -> Result { + let conn = &mut get_conn(pool).await?; + password_reset_request + .filter(local_user_id.eq(user_id)) + .filter(published.gt(now - 1.days())) + .count() + .get_result(conn) + .await + } } fn bytes_to_hex(bytes: Vec) -> String { diff --git a/crates/db_schema/src/impls/post.rs b/crates/db_schema/src/impls/post.rs index 2dfe9e297..99b49b190 100644 --- a/crates/db_schema/src/impls/post.rs +++ b/crates/db_schema/src/impls/post.rs @@ -27,7 +27,7 @@ use crate::{ PostUpdateForm, }, traits::{Crud, Likeable, Readable, Saveable}, - utils::{naive_now, DbConn, FETCH_LIMIT_MAX}, + utils::{get_conn, naive_now, DbConn, DbPool, DELETED_REPLACEMENT_TEXT, FETCH_LIMIT_MAX}, }; use ::url::Url; use diesel::{dsl::insert_into, result::Error, ExpressionMethods, QueryDsl, TextExpressionMethods}; @@ -108,9 +108,9 @@ impl Post { diesel::update(post.filter(creator_id.eq(for_creator_id))) .set(( - name.eq(perma_deleted), - url.eq(perma_deleted_url), - body.eq(perma_deleted), + name.eq(DELETED_REPLACEMENT_TEXT), + url.eq(Option::<&str>::None), + body.eq(DELETED_REPLACEMENT_TEXT), deleted.eq(true), updated.eq(naive_now()), )) diff --git a/crates/db_schema/src/lib.rs 
b/crates/db_schema/src/lib.rs index 4ab26981b..acb069ca7 100644 --- a/crates/db_schema/src/lib.rs +++ b/crates/db_schema/src/lib.rs @@ -26,6 +26,7 @@ pub mod impls; pub mod newtypes; #[cfg(feature = "full")] #[rustfmt::skip] +#[allow(clippy::wildcard_imports)] pub mod schema; pub mod source; #[cfg(feature = "full")] @@ -62,6 +63,9 @@ pub enum SortType { TopHour, TopSixHour, TopTwelveHour, + TopThreeMonths, + TopSixMonths, + TopNineMonths, } #[derive(EnumString, Display, Debug, Serialize, Deserialize, Clone, Copy)] diff --git a/crates/db_schema/src/schema.rs b/crates/db_schema/src/schema.rs index ac4ddc47a..42946d699 100644 --- a/crates/db_schema/src/schema.rs +++ b/crates/db_schema/src/schema.rs @@ -65,7 +65,16 @@ diesel::table! { } diesel::table! { - use diesel::sql_types::{Bool, Int4, Nullable, Text, Timestamp, Varchar}; + captcha_answer (id) { + id -> Int4, + uuid -> Uuid, + answer -> Text, + published -> Timestamp, + } +} + +diesel::table! { + use diesel::sql_types::*; use diesel_ltree::sql_types::Ltree; comment (id) { @@ -317,7 +326,7 @@ diesel::table! { } diesel::table! { - use diesel::sql_types::{Bool, Int4, Nullable, Text, Timestamp, Varchar}; + use diesel::sql_types::*; use super::sql_types::ListingTypeEnum; use super::sql_types::RegistrationModeEnum; @@ -339,7 +348,6 @@ diesel::table! { slur_filter_regex -> Nullable, actor_name_max_length -> Int4, federation_enabled -> Bool, - federation_worker_count -> Int4, captcha_enabled -> Bool, #[max_length = 255] captcha_difficulty -> Varchar, @@ -372,7 +380,7 @@ diesel::table! { } diesel::table! { - use diesel::sql_types::{Bool, Int4, Nullable, Text, Timestamp, Varchar}; + use diesel::sql_types::*; use super::sql_types::SortTypeEnum; use super::sql_types::ListingTypeEnum; @@ -382,8 +390,7 @@ diesel::table! 
{ password_encrypted -> Text, email -> Nullable, show_nsfw -> Bool, - #[max_length = 20] - theme -> Varchar, + theme -> Text, default_sort_type -> SortTypeEnum, default_listing_type -> ListingTypeEnum, #[max_length = 20] @@ -916,6 +923,7 @@ diesel::allow_tables_to_appear_in_same_query!( admin_purge_community, admin_purge_person, admin_purge_post, + captcha_answer, comment, comment_aggregates, comment_like, diff --git a/crates/db_schema/src/source/captcha_answer.rs b/crates/db_schema/src/source/captcha_answer.rs new file mode 100644 index 000000000..e3e64c4eb --- /dev/null +++ b/crates/db_schema/src/source/captcha_answer.rs @@ -0,0 +1,33 @@ +#[cfg(feature = "full")] +use crate::schema::captcha_answer; +use serde::{Deserialize, Serialize}; +use serde_with::skip_serializing_none; +use uuid::Uuid; + +#[skip_serializing_none] +#[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] +#[cfg_attr(feature = "full", derive(Queryable))] +#[cfg_attr(feature = "full", diesel(table_name = captcha_answer))] +pub struct CaptchaAnswer { + pub id: i32, + pub uuid: Uuid, + pub answer: String, + pub published: chrono::NaiveDateTime, +} + +#[skip_serializing_none] +#[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] +#[cfg_attr(feature = "full", derive(Queryable))] +#[cfg_attr(feature = "full", diesel(table_name = captcha_answer))] +pub struct CheckCaptchaAnswer { + pub uuid: Uuid, + pub answer: String, +} + +#[skip_serializing_none] +#[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] +#[cfg_attr(feature = "full", derive(Insertable, AsChangeset))] +#[cfg_attr(feature = "full", diesel(table_name = captcha_answer))] +pub struct CaptchaAnswerForm { + pub answer: String, +} diff --git a/crates/db_schema/src/source/local_site.rs b/crates/db_schema/src/source/local_site.rs index e65a61535..be93717a9 100644 --- a/crates/db_schema/src/source/local_site.rs +++ b/crates/db_schema/src/source/local_site.rs @@ -50,8 +50,6 @@ pub struct LocalSite { pub actor_name_max_length: i32, /// Whether federation is enabled. pub federation_enabled: bool, - /// The number of concurrent federation http workers. - pub federation_worker_count: i32, /// Whether captcha is enabled. pub captcha_enabled: bool, /// The captcha difficulty. 
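// --- Illustrative aside (not part of this patch) ---------------------------
// How the captcha_answer pieces above fit together: CaptchaAnswer::insert stores
// the generated answer (GetCaptcha), and check_captcha later verifies it
// case-insensitively and deletes the row, so a replayed uuid fails. The wrapper
// function below is hypothetical; the types and calls come from this patch.
use lemmy_db_schema::{
  source::captcha_answer::{CaptchaAnswer, CaptchaAnswerForm, CheckCaptchaAnswer},
  utils::DbPool,
};

async fn captcha_round_trip(pool: &DbPool) -> Result<bool, diesel::result::Error> {
  // Stored when the captcha image is generated; uuid and published are DB defaults.
  let stored = CaptchaAnswer::insert(
    pool,
    &CaptchaAnswerForm {
      answer: "XYZ".to_string(),
    },
  )
  .await?;

  // Checked once at registration time; the row is removed whether or not it matched.
  CaptchaAnswer::check_captcha(
    pool,
    CheckCaptchaAnswer {
      uuid: stored.uuid,
      answer: "xyz".to_string(),
    },
  )
  .await
}
// ---------------------------------------------------------------------------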
@@ -85,7 +83,6 @@ pub struct LocalSiteInsertForm { pub slur_filter_regex: Option, pub actor_name_max_length: Option, pub federation_enabled: Option, - pub federation_worker_count: Option, pub captcha_enabled: Option, pub captcha_difficulty: Option, pub registration_mode: Option, @@ -112,7 +109,6 @@ pub struct LocalSiteUpdateForm { pub slur_filter_regex: Option>, pub actor_name_max_length: Option, pub federation_enabled: Option, - pub federation_worker_count: Option, pub captcha_enabled: Option, pub captcha_difficulty: Option, pub registration_mode: Option, diff --git a/crates/db_schema/src/source/mod.rs b/crates/db_schema/src/source/mod.rs index 9aab4b90b..926e23e73 100644 --- a/crates/db_schema/src/source/mod.rs +++ b/crates/db_schema/src/source/mod.rs @@ -1,6 +1,7 @@ #[cfg(feature = "full")] pub mod activity; pub mod actor_language; +pub mod captcha_answer; pub mod comment; pub mod comment_reply; pub mod comment_report; diff --git a/crates/db_schema/src/utils.rs b/crates/db_schema/src/utils.rs index a9c3cf35f..f344df6a8 100644 --- a/crates/db_schema/src/utils.rs +++ b/crates/db_schema/src/utils.rs @@ -12,7 +12,7 @@ use diesel::{ backend::Backend, deserialize::FromSql, pg::Pg, - result::{Error as DieselError, Error::QueryBuilderError}, + result::{ConnectionError, ConnectionResult, Error as DieselError, Error::QueryBuilderError}, serialize::{Output, ToSql}, sql_types::Text, PgConnection, @@ -25,11 +25,21 @@ use diesel_async::{ }, }; use diesel_migrations::EmbeddedMigrations; +use futures_util::{future::BoxFuture, FutureExt}; use lemmy_utils::{error::LemmyError, settings::structs::Settings}; use once_cell::sync::Lazy; use regex::Regex; -use std::{env, env::VarError, time::Duration}; -use tracing::info; +use rustls::{ + client::{ServerCertVerified, ServerCertVerifier}, + ServerName, +}; +use std::{ + env, + env::VarError, + sync::Arc, + time::{Duration, SystemTime}, +}; +use tracing::{error, info}; use url::Url; const FETCH_LIMIT_DEFAULT: i64 = 10; @@ -138,7 +148,15 @@ pub fn diesel_option_overwrite_to_url_create( async fn build_db_pool_settings_opt(settings: Option<&Settings>) -> Result { let db_url = get_database_url(settings); let pool_size = settings.map(|s| s.database.pool_size).unwrap_or(5); - let manager = AsyncDieselConnectionManager::::new(&db_url); + // We only support TLS with sslmode=require currently + let tls_enabled = db_url.contains("sslmode=require"); + let manager = if tls_enabled { + // diesel-async does not support any TLS connections out of the box, so we need to manually + // provide a setup function which handles creating the connection + AsyncDieselConnectionManager::::new_with_setup(&db_url, establish_connection) + } else { + AsyncDieselConnectionManager::::new(&db_url) + }; let pool = Pool::builder(manager) .max_size(pool_size) .wait_timeout(POOL_TIMEOUT) @@ -155,6 +173,44 @@ async fn build_db_pool_settings_opt(settings: Option<&Settings>) -> Result BoxFuture> { + let fut = async { + let rustls_config = rustls::ClientConfig::builder() + .with_safe_defaults() + .with_custom_certificate_verifier(Arc::new(NoCertVerifier {})) + .with_no_client_auth(); + + let tls = tokio_postgres_rustls::MakeRustlsConnect::new(rustls_config); + let (client, conn) = tokio_postgres::connect(config, tls) + .await + .map_err(|e| ConnectionError::BadConnection(e.to_string()))?; + tokio::spawn(async move { + if let Err(e) = conn.await { + error!("Database connection failed: {e}"); + } + }); + AsyncPgConnection::try_from(client).await + }; + fut.boxed() +} + +struct NoCertVerifier {} + +impl 
ServerCertVerifier for NoCertVerifier { + fn verify_server_cert( + &self, + _end_entity: &rustls::Certificate, + _intermediates: &[rustls::Certificate], + _server_name: &ServerName, + _scts: &mut dyn Iterator, + _ocsp_response: &[u8], + _now: SystemTime, + ) -> Result { + // Will verify all (even invalid) certs without any checks (sslmode=require) + Ok(ServerCertVerified::assertion()) + } +} + pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!(); pub fn run_migrations(db_url: &str) { @@ -211,7 +267,10 @@ pub fn post_to_comment_sort_type(sort: SortType) -> CommentSortType { | SortType::TopAll | SortType::TopWeek | SortType::TopYear - | SortType::TopMonth => CommentSortType::Top, + | SortType::TopMonth + | SortType::TopThreeMonths + | SortType::TopSixMonths + | SortType::TopNineMonths => CommentSortType::Top, } } @@ -230,6 +289,8 @@ pub mod functions { sql_function!(fn lower(x: Text) -> Text); } +pub const DELETED_REPLACEMENT_TEXT: &str = "*Permanently Deleted*"; + impl ToSql for DbUrl { fn to_sql(&self, out: &mut Output) -> diesel::serialize::Result { >::to_sql(&self.0.to_string(), &mut out.reborrow()) diff --git a/crates/db_views/src/post_view.rs b/crates/db_views/src/post_view.rs index 6c6d48c75..c4e21c60c 100644 --- a/crates/db_views/src/post_view.rs +++ b/crates/db_views/src/post_view.rs @@ -428,6 +428,18 @@ impl<'a> PostQuery<'a> { .filter(post_aggregates::published.gt(now - 12.hours())) .then_order_by(post_aggregates::score.desc()) .then_order_by(post_aggregates::published.desc()), + SortType::TopThreeMonths => query + .filter(post_aggregates::published.gt(now - 3.months())) + .then_order_by(post_aggregates::score.desc()) + .then_order_by(post_aggregates::published.desc()), + SortType::TopSixMonths => query + .filter(post_aggregates::published.gt(now - 6.months())) + .then_order_by(post_aggregates::score.desc()) + .then_order_by(post_aggregates::published.desc()), + SortType::TopNineMonths => query + .filter(post_aggregates::published.gt(now - 9.months())) + .then_order_by(post_aggregates::score.desc()) + .then_order_by(post_aggregates::published.desc()), }; let (limit, offset) = limit_and_offset(self.page, self.limit)?; diff --git a/crates/db_views_actor/src/community_view.rs b/crates/db_views_actor/src/community_view.rs index c6921ddc6..7f83c6639 100644 --- a/crates/db_views_actor/src/community_view.rs +++ b/crates/db_views_actor/src/community_view.rs @@ -125,6 +125,7 @@ pub struct CommunityQuery<'a> { local_user: Option<&'a LocalUser>, search_term: Option, is_mod_or_admin: Option, + show_nsfw: Option, page: Option, limit: Option, } @@ -202,8 +203,8 @@ impl<'a> CommunityQuery<'a> { query = query.filter(community_block::person_id.is_null()); query = query.filter(community::nsfw.eq(false).or(local_user::show_nsfw.eq(true))); } else { - // No person in request, only show nsfw communities if show_nsfw passed into request - if !self.local_user.map(|l| l.show_nsfw).unwrap_or(false) { + // No person in request, only show nsfw communities if show_nsfw is passed into request + if !self.show_nsfw.unwrap_or(false) { query = query.filter(community::nsfw.eq(false)); } } diff --git a/crates/db_views_actor/src/person_view.rs b/crates/db_views_actor/src/person_view.rs index 33e9f56f0..9db18e371 100644 --- a/crates/db_views_actor/src/person_view.rs +++ b/crates/db_views_actor/src/person_view.rs @@ -119,6 +119,15 @@ impl<'a> PersonQuery<'a> { SortType::TopTwelveHour => query .filter(person::published.gt(now - 12.hours())) .order_by(person_aggregates::comment_score.desc()), + 
SortType::TopThreeMonths => query + .filter(person::published.gt(now - 3.months())) + .order_by(person_aggregates::comment_score.desc()), + SortType::TopSixMonths => query + .filter(person::published.gt(now - 6.months())) + .order_by(person_aggregates::comment_score.desc()), + SortType::TopNineMonths => query + .filter(person::published.gt(now - 9.months())) + .order_by(person_aggregates::comment_score.desc()), }; let (limit, offset) = limit_and_offset(self.page, self.limit)?; diff --git a/crates/routes/src/feeds.rs b/crates/routes/src/feeds.rs index 143d9bb9c..a45f82135 100644 --- a/crates/routes/src/feeds.rs +++ b/crates/routes/src/feeds.rs @@ -482,7 +482,6 @@ fn create_post_items( i.pub_date(dt.to_rfc2822()); let post_url = format!("{}/post/{}", protocol_and_hostname, p.post.id); - i.link(post_url.clone()); i.comments(post_url.clone()); let guid = GuidBuilder::default() .permalink(true) @@ -506,6 +505,9 @@ fn create_post_items( if let Some(url) = p.post.url { let link_html = format!("
{url}"); description.push_str(&link_html); + i.link(url.to_string()); + } else { + i.link(post_url.clone()); } if let Some(body) = p.post.body { diff --git a/crates/utils/Cargo.toml b/crates/utils/Cargo.toml index 1ec8d4ba2..47b55f735 100644 --- a/crates/utils/Cargo.toml +++ b/crates/utils/Cargo.toml @@ -49,7 +49,6 @@ enum-map = "2.5" [dev-dependencies] reqwest = { workspace = true } -tokio = { workspace = true, features = ["macros"] } [build-dependencies] rosetta-build = "0.1.2" diff --git a/crates/utils/src/main.rs b/crates/utils/src/main.rs new file mode 100644 index 000000000..c2365f233 --- /dev/null +++ b/crates/utils/src/main.rs @@ -0,0 +1,16 @@ +use doku::json::{AutoComments, CommentsStyle, Formatting, ObjectsStyle}; +use lemmy_utils::settings::structs::Settings; +fn main() { + let fmt = Formatting { + auto_comments: AutoComments::none(), + comments_style: CommentsStyle { + separator: "#".to_owned(), + }, + objects_style: ObjectsStyle { + surround_keys_with_quotes: false, + use_comma_as_separator: false, + }, + ..Default::default() + }; + println!("{}", doku::to_json_fmt_val(&fmt, &Settings::default())); +} diff --git a/crates/utils/src/settings/structs.rs b/crates/utils/src/settings/structs.rs index 6e200b224..5d0e642f6 100644 --- a/crates/utils/src/settings/structs.rs +++ b/crates/utils/src/settings/structs.rs @@ -39,6 +39,12 @@ pub struct Settings { #[default(None)] #[doku(skip)] pub opentelemetry_url: Option, + /// The number of activitypub federation workers that can be in-flight concurrently + #[default(0)] + pub worker_count: usize, + /// The number of activitypub federation retry workers that can be in-flight concurrently + #[default(0)] + pub retry_count: usize, } #[derive(Debug, Deserialize, Serialize, Clone, SmartDefault, Document)] diff --git a/crates/utils/src/utils/validation.rs b/crates/utils/src/utils/validation.rs index 41103332c..7e09c5af8 100644 --- a/crates/utils/src/utils/validation.rs +++ b/crates/utils/src/utils/validation.rs @@ -1,7 +1,7 @@ use crate::error::{LemmyError, LemmyResult}; use itertools::Itertools; use once_cell::sync::Lazy; -use regex::Regex; +use regex::{Regex, RegexBuilder}; use totp_rs::{Secret, TOTP}; use url::Url; @@ -17,8 +17,13 @@ static CLEAN_URL_PARAMS_REGEX: Lazy = Lazy::new(|| { Regex::new(r"^utm_source|utm_medium|utm_campaign|utm_term|utm_content|gclid|gclsrc|dclid|fbclid$") .expect("compile regex") }); + const BODY_MAX_LENGTH: usize = 10000; +const POST_BODY_MAX_LENGTH: usize = 50000; const BIO_MAX_LENGTH: usize = 300; +const SITE_NAME_MAX_LENGTH: usize = 20; +const SITE_NAME_MIN_LENGTH: usize = 1; +const SITE_DESCRIPTION_MAX_LENGTH: usize = 150; fn has_newline(name: &str) -> bool { name.contains('\n') @@ -68,9 +73,14 @@ pub fn is_valid_post_title(title: &str) -> LemmyResult<()> { } /// This could be post bodies, comments, or any description field -pub fn is_valid_body_field(body: &Option) -> LemmyResult<()> { +pub fn is_valid_body_field(body: &Option, post: bool) -> LemmyResult<()> { if let Some(body) = body { - let check = body.chars().count() <= BODY_MAX_LENGTH; + let check = if post { + body.chars().count() <= POST_BODY_MAX_LENGTH + } else { + body.chars().count() <= BODY_MAX_LENGTH + }; + if !check { Err(LemmyError::from_message("invalid_body_field")) } else { @@ -82,14 +92,83 @@ pub fn is_valid_body_field(body: &Option) -> LemmyResult<()> { } pub fn is_valid_bio_field(bio: &str) -> LemmyResult<()> { - let check = bio.chars().count() <= BIO_MAX_LENGTH; - if !check { - Err(LemmyError::from_message("bio_length_overflow")) + 
max_length_check(bio, BIO_MAX_LENGTH, String::from("bio_length_overflow")) +} + +/// Checks the site name length, the limit as defined in the DB. +pub fn site_name_length_check(name: &str) -> LemmyResult<()> { + min_max_length_check( + name, + SITE_NAME_MIN_LENGTH, + SITE_NAME_MAX_LENGTH, + String::from("site_name_required"), + String::from("site_name_length_overflow"), + ) +} + +/// Checks the site description length, the limit as defined in the DB. +pub fn site_description_length_check(description: &str) -> LemmyResult<()> { + max_length_check( + description, + SITE_DESCRIPTION_MAX_LENGTH, + String::from("site_description_length_overflow"), + ) +} + +fn max_length_check(item: &str, max_length: usize, msg: String) -> LemmyResult<()> { + if item.len() > max_length { + Err(LemmyError::from_message(&msg)) } else { Ok(()) } } +fn min_max_length_check( + item: &str, + min_length: usize, + max_length: usize, + min_msg: String, + max_msg: String, +) -> LemmyResult<()> { + if item.len() > max_length { + Err(LemmyError::from_message(&max_msg)) + } else if item.len() < min_length { + Err(LemmyError::from_message(&min_msg)) + } else { + Ok(()) + } +} + +/// Attempts to build a regex and check it for common errors before inserting into the DB. +pub fn build_and_check_regex(regex_str_opt: &Option<&str>) -> LemmyResult<Option<Regex>> { + regex_str_opt.map_or_else( + || Ok(None::<Regex>), + |regex_str| { + if regex_str.is_empty() { + // If the proposed regex is empty, return as having no regex at all; this is the same + // behavior that happens downstream before the write to the database. + return Ok(None::<Regex>); + } + + RegexBuilder::new(regex_str) + .case_insensitive(true) + .build() + .map_err(|e| LemmyError::from_error_message(e, "invalid_regex")) + .and_then(|regex| { + // NOTE: It is difficult to know, in the universe of user-crafted regex, which ones + // may match against any string text. To keep it simple, we'll match the regex + // against an innocuous string - a single number - which should help catch a regex + // that accidentally matches against all strings.
+ if regex.is_match("1") { + return Err(LemmyError::from_message("permissive_regex")); + } + + Ok(Some(regex)) + }) + }, + ) +} + pub fn clean_url_params(url: &Url) -> Url { let mut url_out = url.clone(); if url.query().is_some() { @@ -171,13 +250,20 @@ pub fn check_site_visibility_valid( mod tests { use super::build_totp_2fa; use crate::utils::validation::{ + build_and_check_regex, check_site_visibility_valid, clean_url_params, generate_totp_2fa_secret, is_valid_actor_name, + is_valid_bio_field, is_valid_display_name, is_valid_matrix_id, is_valid_post_title, + site_description_length_check, + site_name_length_check, + BIO_MAX_LENGTH, + SITE_DESCRIPTION_MAX_LENGTH, + SITE_NAME_MAX_LENGTH, }; use url::Url; @@ -246,6 +332,126 @@ mod tests { assert!(totp.is_ok()); } + #[test] + fn test_valid_site_name() { + let valid_names = [ + (0..SITE_NAME_MAX_LENGTH).map(|_| 'A').collect::(), + String::from("A"), + ]; + let invalid_names = [ + ( + &(0..SITE_NAME_MAX_LENGTH + 1) + .map(|_| 'A') + .collect::(), + "site_name_length_overflow", + ), + (&String::new(), "site_name_required"), + ]; + + valid_names.iter().for_each(|valid_name| { + assert!( + site_name_length_check(valid_name).is_ok(), + "Expected {} of length {} to be Ok.", + valid_name, + valid_name.len() + ) + }); + + invalid_names + .iter() + .for_each(|&(invalid_name, expected_err)| { + let result = site_name_length_check(invalid_name); + + assert!(result.is_err()); + assert!( + result + .unwrap_err() + .message + .eq(&Some(String::from(expected_err))), + "Testing {}, expected error {}", + invalid_name, + expected_err + ); + }); + } + + #[test] + fn test_valid_bio() { + assert!(is_valid_bio_field(&(0..BIO_MAX_LENGTH).map(|_| 'A').collect::()).is_ok()); + + let invalid_result = + is_valid_bio_field(&(0..BIO_MAX_LENGTH + 1).map(|_| 'A').collect::()); + + assert!( + invalid_result.is_err() + && invalid_result + .unwrap_err() + .message + .eq(&Some(String::from("bio_length_overflow"))) + ); + } + + #[test] + fn test_valid_site_description() { + assert!(site_description_length_check( + &(0..SITE_DESCRIPTION_MAX_LENGTH) + .map(|_| 'A') + .collect::() + ) + .is_ok()); + + let invalid_result = site_description_length_check( + &(0..SITE_DESCRIPTION_MAX_LENGTH + 1) + .map(|_| 'A') + .collect::(), + ); + + assert!( + invalid_result.is_err() + && invalid_result + .unwrap_err() + .message + .eq(&Some(String::from("site_description_length_overflow"))) + ); + } + + #[test] + fn test_valid_slur_regex() { + let valid_regexes = [&None, &Some(""), &Some("(foo|bar)")]; + + valid_regexes.iter().for_each(|regex| { + let result = build_and_check_regex(regex); + + assert!(result.is_ok(), "Testing regex: {:?}", regex); + }); + } + + #[test] + fn test_too_permissive_slur_regex() { + let match_everything_regexes = [ + (&Some("["), "invalid_regex"), + (&Some("(foo|bar|)"), "permissive_regex"), + (&Some(".*"), "permissive_regex"), + ]; + + match_everything_regexes + .iter() + .for_each(|&(regex_str, expected_err)| { + let result = build_and_check_regex(regex_str); + + assert!(result.is_err()); + assert!( + result + .unwrap_err() + .message + .eq(&Some(String::from(expected_err))), + "Testing regex {:?}, expected error {}", + regex_str, + expected_err + ); + }); + } + #[test] fn test_check_site_visibility_valid() { assert!(check_site_visibility_valid(true, true, &None, &None).is_err()); diff --git a/crates/utils/translations b/crates/utils/translations index 7fc71d086..5a9d44656 160000 --- a/crates/utils/translations +++ b/crates/utils/translations @@ -1 +1 @@ -Subproject 
commit 7fc71d0860bbe5c6d620ec27112350ffe5b9229c +Subproject commit 5a9d44656e2658ab7cb2dbec3fd1bfaf57654533 diff --git a/docker/Dockerfile b/docker/Dockerfile index 5943e1710..010527515 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -29,7 +29,7 @@ RUN \ FROM alpine:3 as lemmy # Install libpq for postgres -RUN apk add libpq +RUN apk add --no-cache libpq # Copy resources COPY --from=builder /app/lemmy_server /app/lemmy diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index 2033ee8c7..985eae5e6 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -6,20 +6,9 @@ x-logging: &default-logging max-size: "50m" max-file: 4 -networks: - # communication to web and clients - lemmyexternalproxy: - # communication between lemmy services - lemmyinternal: - driver: bridge - internal: true - services: proxy: image: nginx:1-alpine - networks: - - lemmyinternal - - lemmyexternalproxy ports: # actual and only port facing any connection from outside # Note, change the left number if port 1236 is already in use on your system @@ -45,9 +34,6 @@ services: # RUST_RELEASE_MODE: release # this hostname is used in nginx reverse proxy and also for lemmy ui to connect to the backend, do not change hostname: lemmy - networks: - - lemmyinternal - - lemmyexternalproxy restart: always environment: - RUST_LOG="warn,lemmy_server=debug,lemmy_api=debug,lemmy_api_common=debug,lemmy_api_crud=debug,lemmy_apub=debug,lemmy_db_schema=debug,lemmy_db_views=debug,lemmy_db_views_actor=debug,lemmy_db_views_moderator=debug,lemmy_routes=debug,lemmy_utils=debug,lemmy_websocket=debug" @@ -67,14 +53,12 @@ services: # build: # context: ../../lemmy-ui # dockerfile: dev.dockerfile - networks: - - lemmyinternal environment: # this needs to match the hostname defined in the lemmy service - LEMMY_UI_LEMMY_INTERNAL_HOST=lemmy:8536 # set the outside hostname here - LEMMY_UI_LEMMY_EXTERNAL_HOST=localhost:1236 - - LEMMY_HTTPS=false + - LEMMY_UI_HTTPS=false - LEMMY_UI_DEBUG=true depends_on: - lemmy @@ -88,8 +72,6 @@ services: hostname: pictrs # we can set options to pictrs like this, here we set max. 
image size and forced format for conversion # entrypoint: /sbin/tini -- /usr/local/bin/pict-rs -p /mnt -m 4 --image-format webp - networks: - - lemmyinternal environment: - PICTRS_OPENTELEMETRY_URL=http://otel:4137 - PICTRS__API_KEY=API_KEY @@ -126,10 +108,6 @@ services: "-c", "track_activity_query_size=1048576", ] - networks: - - lemmyinternal - # adding the external facing network to allow direct db access for devs - - lemmyexternalproxy ports: # use a different port so it doesnt conflict with potential postgres db running on the host - "5433:5432" diff --git a/migrations/2023-06-19-055530_add_retry_worker_setting/down.sql b/migrations/2023-06-19-055530_add_retry_worker_setting/down.sql new file mode 100644 index 000000000..e3c200a15 --- /dev/null +++ b/migrations/2023-06-19-055530_add_retry_worker_setting/down.sql @@ -0,0 +1 @@ +alter table local_site add column federation_worker_count int default 64 not null; \ No newline at end of file diff --git a/migrations/2023-06-19-055530_add_retry_worker_setting/up.sql b/migrations/2023-06-19-055530_add_retry_worker_setting/up.sql new file mode 100644 index 000000000..2aac86f85 --- /dev/null +++ b/migrations/2023-06-19-055530_add_retry_worker_setting/up.sql @@ -0,0 +1 @@ +alter table local_site drop column federation_worker_count; \ No newline at end of file diff --git a/migrations/2023-06-20-191145_add_listingtype_sorttype_3_6_9_months_enums/down.sql b/migrations/2023-06-20-191145_add_listingtype_sorttype_3_6_9_months_enums/down.sql new file mode 100644 index 000000000..5b135223e --- /dev/null +++ b/migrations/2023-06-20-191145_add_listingtype_sorttype_3_6_9_months_enums/down.sql @@ -0,0 +1,14 @@ +-- update the default sort type +update local_user set default_sort_type = 'TopDay' where default_sort_type in ('TopThreeMonths', 'TopSixMonths', 'TopNineMonths'); + +-- rename the old enum +alter type sort_type_enum rename to sort_type_enum__; +-- create the new enum +CREATE TYPE sort_type_enum AS ENUM ('Active', 'Hot', 'New', 'Old', 'TopDay', 'TopWeek', 'TopMonth', 'TopYear', 'TopAll', 'MostComments', 'NewComments', 'TopHour', 'TopSixHour', 'TopTwelveHour'); + +-- alter all you enum columns +alter table local_user + alter column default_sort_type type sort_type_enum using default_sort_type::text::sort_type_enum; + +-- drop the old enum +drop type sort_type_enum__; diff --git a/migrations/2023-06-20-191145_add_listingtype_sorttype_3_6_9_months_enums/up.sql b/migrations/2023-06-20-191145_add_listingtype_sorttype_3_6_9_months_enums/up.sql new file mode 100644 index 000000000..85bcfad7c --- /dev/null +++ b/migrations/2023-06-20-191145_add_listingtype_sorttype_3_6_9_months_enums/up.sql @@ -0,0 +1,4 @@ +-- Update the enums +ALTER TYPE sort_type_enum ADD VALUE 'TopThreeMonths'; +ALTER TYPE sort_type_enum ADD VALUE 'TopSixMonths'; +ALTER TYPE sort_type_enum ADD VALUE 'TopNineMonths'; diff --git a/migrations/2023-06-21-153242_add_captcha/down.sql b/migrations/2023-06-21-153242_add_captcha/down.sql new file mode 100644 index 000000000..4e5b83042 --- /dev/null +++ b/migrations/2023-06-21-153242_add_captcha/down.sql @@ -0,0 +1 @@ +drop table captcha_answer; \ No newline at end of file diff --git a/migrations/2023-06-21-153242_add_captcha/up.sql b/migrations/2023-06-21-153242_add_captcha/up.sql new file mode 100644 index 000000000..5c566bc92 --- /dev/null +++ b/migrations/2023-06-21-153242_add_captcha/up.sql @@ -0,0 +1,6 @@ +create table captcha_answer ( + id serial primary key, + uuid uuid not null unique default gen_random_uuid(), + answer text not null, + 
published timestamp not null default now() +); diff --git a/migrations/2023-06-22-101245_increase_user_theme_column_size/down.sql b/migrations/2023-06-22-101245_increase_user_theme_column_size/down.sql new file mode 100644 index 000000000..0731e0682 --- /dev/null +++ b/migrations/2023-06-22-101245_increase_user_theme_column_size/down.sql @@ -0,0 +1,2 @@ +alter table only local_user alter column theme TYPE character varying(20); +alter table only local_user alter column theme set default 'browser'::character varying; \ No newline at end of file diff --git a/migrations/2023-06-22-101245_increase_user_theme_column_size/up.sql b/migrations/2023-06-22-101245_increase_user_theme_column_size/up.sql new file mode 100644 index 000000000..cbab25663 --- /dev/null +++ b/migrations/2023-06-22-101245_increase_user_theme_column_size/up.sql @@ -0,0 +1,2 @@ +alter table only local_user alter column theme type text; +alter table only local_user alter column theme set default 'browser'::text; diff --git a/migrations/2023-06-24-185942_aggegates_published_indexes/down.sql b/migrations/2023-06-24-185942_aggegates_published_indexes/down.sql new file mode 100644 index 000000000..fa7f7d48f --- /dev/null +++ b/migrations/2023-06-24-185942_aggegates_published_indexes/down.sql @@ -0,0 +1,2 @@ +drop index idx_comment_aggregates_published; +drop index idx_community_aggregates_published; diff --git a/migrations/2023-06-24-185942_aggegates_published_indexes/up.sql b/migrations/2023-06-24-185942_aggegates_published_indexes/up.sql new file mode 100644 index 000000000..42230af10 --- /dev/null +++ b/migrations/2023-06-24-185942_aggegates_published_indexes/up.sql @@ -0,0 +1,4 @@ +-- Add indexes on published column (needed for hot_rank updates) + +create index idx_community_aggregates_published on community_aggregates (published desc); +create index idx_comment_aggregates_published on comment_aggregates (published desc); \ No newline at end of file diff --git a/scripts/fix-clippy.sh b/scripts/fix-clippy.sh index 25b4b22ca..759de5773 100755 --- a/scripts/fix-clippy.sh +++ b/scripts/fix-clippy.sh @@ -14,7 +14,10 @@ cargo clippy --workspace --fix --allow-staged --allow-dirty --tests --all-target -D clippy::manual_string_new -D clippy::redundant_closure_for_method_calls \ -D clippy::unused_self \ -A clippy::uninlined_format_args \ - -D clippy::get_first + -D clippy::get_first \ + -D clippy::explicit_into_iter_loop \ + -D clippy::explicit_iter_loop \ + -D clippy::needless_collect cargo clippy --workspace --features console -- \ -D clippy::unwrap_used \ diff --git a/scripts/release.sh b/scripts/release.sh index 4b9fdd8e1..76cb2dbfb 100755 --- a/scripts/release.sh +++ b/scripts/release.sh @@ -6,19 +6,23 @@ set -e new_tag="$1" third_semver=$(echo $new_tag | cut -d "." -f 3) +# Goto the upper route +CWD="$(cd -P -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd -P)" +cd $CWD/../ + # The ansible and docker installs should only update for non release-candidates # IE, when the third semver is a number, not '2-rc' if [ ! 
-z "${third_semver##*[!0-9]*}" ]; then - pushd ../docker - sed -i "s/dessalines\/lemmy:.*/dessalines\/lemmy:$new_tag/" ../docker-compose.yml - sed -i "s/dessalines\/lemmy-ui:.*/dessalines\/lemmy-ui:$new_tag/" ../docker-compose.yml - sed -i "s/dessalines\/lemmy-ui:.*/dessalines\/lemmy-ui:$new_tag/" ../federation/docker-compose.yml - git add ../docker-compose.yml - git add ../federation/docker-compose.yml + pushd docker + sed -i "s/dessalines\/lemmy:.*/dessalines\/lemmy:$new_tag/" docker-compose.yml + sed -i "s/dessalines\/lemmy-ui:.*/dessalines\/lemmy-ui:$new_tag/" docker-compose.yml + sed -i "s/dessalines\/lemmy-ui:.*/dessalines\/lemmy-ui:$new_tag/" federation/docker-compose.yml + git add docker-compose.yml + git add federation/docker-compose.yml popd # Setting the version for Ansible - pushd ../../../lemmy-ansible + pushd ../lemmy-ansible echo $new_tag > "VERSION" git add "VERSION" git commit -m"Updating VERSION" @@ -29,14 +33,16 @@ if [ ! -z "${third_semver##*[!0-9]*}" ]; then fi # Update crate versions -pushd .. old_tag=$(grep version Cargo.toml | head -1 | cut -d'"' -f 2) sed -i "s/{ version = \"=$old_tag\", path/{ version = \"=$new_tag\", path/g" Cargo.toml sed -i "s/version = \"$old_tag\"/version = \"$new_tag\"/g" Cargo.toml git add Cargo.toml cargo check git add Cargo.lock -popd + +# Update the submodules +git submodule update --remote +git add crates/utils/translations # The commit git commit -m"Version $new_tag" diff --git a/scripts/update_config_defaults.sh b/scripts/update_config_defaults.sh index 024b8ca11..0984c247c 100755 --- a/scripts/update_config_defaults.sh +++ b/scripts/update_config_defaults.sh @@ -3,4 +3,4 @@ set -e dest=${1-config/defaults.hjson} -cargo run -- --print-config-docs > "$dest" +cargo run --manifest-path crates/utils/Cargo.toml > "$dest" diff --git a/src/api_routes_http.rs b/src/api_routes_http.rs index a2abfa690..375630a92 100644 --- a/src/api_routes_http.rs +++ b/src/api_routes_http.rs @@ -38,6 +38,7 @@ use lemmy_api_common::{ ChangePassword, DeleteAccount, GetBannedPersons, + GetCaptcha, GetPersonDetails, GetPersonMentions, GetReplies, @@ -272,6 +273,12 @@ pub fn config(cfg: &mut web::ServiceConfig, rate_limit: &RateLimitCell) { .wrap(rate_limit.register()) .route(web::post().to(route_post_crud::)), ) + .service( + // Handle captcha separately + web::resource("/user/get_captcha") + .wrap(rate_limit.post()) + .route(web::get().to(route_get::)), + ) // User actions .service( web::scope("/user") diff --git a/src/lib.rs b/src/lib.rs index 698bd4626..ac02d9c26 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -9,7 +9,6 @@ use crate::{code_migrations::run_advanced_migrations, root_span_builder::Quieter use activitypub_federation::config::{FederationConfig, FederationMiddleware}; use actix_cors::Cors; use actix_web::{middleware, web::Data, App, HttpServer, Result}; -use doku::json::{AutoComments, CommentsStyle, Formatting, ObjectsStyle}; use lemmy_api_common::{ context::LemmyContext, lemmy_db_views::structs::SiteView, @@ -25,11 +24,7 @@ use lemmy_db_schema::{ utils::{build_db_pool, get_conn, get_database_url, run_migrations}, }; use lemmy_routes::{feeds, images, nodeinfo, webfinger}; -use lemmy_utils::{ - error::LemmyError, - rate_limit::RateLimitCell, - settings::{structs::Settings, SETTINGS}, -}; +use lemmy_utils::{error::LemmyError, rate_limit::RateLimitCell, settings::SETTINGS}; use reqwest::Client; use reqwest_middleware::ClientBuilder; use reqwest_tracing::TracingMiddleware; @@ -47,21 +42,6 @@ pub(crate) const REQWEST_TIMEOUT: Duration = 
Duration::from_secs(10); /// Placing the main function in lib.rs allows other crates to import it and embed Lemmy pub async fn start_lemmy_server() -> Result<(), LemmyError> { let args: Vec = env::args().collect(); - if args.get(1) == Some(&"--print-config-docs".to_string()) { - let fmt = Formatting { - auto_comments: AutoComments::none(), - comments_style: CommentsStyle { - separator: "#".to_owned(), - }, - objects_style: ObjectsStyle { - surround_keys_with_quotes: false, - use_comma_as_separator: false, - }, - ..Default::default() - }; - println!("{}", doku::to_json_fmt_val(&fmt, &Settings::default())); - return Ok(()); - } let scheduled_tasks_enabled = args.get(1) != Some(&"--disable-scheduled-tasks".to_string()); @@ -140,24 +120,23 @@ pub async fn start_lemmy_server() -> Result<(), LemmyError> { }); } + let settings_bind = settings.clone(); + let federation_config = FederationConfig::builder() .domain(settings.hostname.clone()) .app_data(context.clone()) .client(client.clone()) .http_fetch_limit(FEDERATION_HTTP_FETCH_LIMIT) - .worker_count(local_site.federation_worker_count as usize) + .worker_count(settings.worker_count) + .retry_count(settings.retry_count) .debug(cfg!(debug_assertions)) .http_signature_compat(true) .url_verifier(Box::new(VerifyUrlData(context.pool().clone()))) .build() - .await - .expect("configure federation"); + .await?; // Create Http server with websocket support - let settings_bind = settings.clone(); HttpServer::new(move || { - let context = context.clone(); - let cors_config = if cfg!(debug_assertions) { Cors::permissive() } else { @@ -172,9 +151,10 @@ pub async fn start_lemmy_server() -> Result<(), LemmyError> { // This is the default log format save for the usage of %{r}a over %a to guarantee to record the client's (forwarded) IP and not the last peer address, since the latter is frequently just a reverse proxy "%{r}a '%r' %s %b '%{Referer}i' '%{User-Agent}i' %T", )) + .wrap(middleware::Compress::default()) .wrap(cors_config) .wrap(TracingLogger::::new()) - .app_data(Data::new(context)) + .app_data(Data::new(context.clone())) .app_data(Data::new(rate_limit_cell.clone())) .wrap(FederationMiddleware::new(federation_config.clone())) // The routes diff --git a/src/main.rs b/src/main.rs index 315fe84be..5fc03ed02 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,7 +1,7 @@ use lemmy_server::{init_logging, start_lemmy_server}; use lemmy_utils::{error::LemmyError, settings::SETTINGS}; -#[actix_web::main] +#[tokio::main] pub async fn main() -> Result<(), LemmyError> { init_logging(&SETTINGS.opentelemetry_url)?; #[cfg(not(feature = "embed-pictrs"))] diff --git a/src/scheduled_tasks.rs b/src/scheduled_tasks.rs index 9fb1ba702..4d3c936e8 100644 --- a/src/scheduled_tasks.rs +++ b/src/scheduled_tasks.rs @@ -1,25 +1,21 @@ +use chrono::NaiveDateTime; use clokwerk::{Scheduler, TimeUnits as CTimeUnits}; use diesel::{ dsl::{now, IntervalDsl}, + sql_types::{Integer, Timestamp}, Connection, ExpressionMethods, + NullableExpressionMethods, QueryDsl, + QueryableByName, }; // Import week days and WeekDay use diesel::{sql_query, PgConnection, RunQueryDsl}; use lemmy_api_common::context::LemmyContext; use lemmy_db_schema::{ - schema::{ - activity, - comment_aggregates, - community_aggregates, - community_person_ban, - instance, - person, - post_aggregates, - }, + schema::{activity, captcha_answer, comment, community_person_ban, instance, person, post}, source::instance::{Instance, InstanceForm}, - utils::{functions::hot_rank, naive_now}, + utils::{naive_now, DELETED_REPLACEMENT_TEXT}, 
}; use lemmy_routes::nodeinfo::NodeInfo; use lemmy_utils::{error::LemmyError, REQWEST_TIMEOUT}; @@ -46,13 +42,20 @@ pub fn setup( update_banned_when_expired(&mut conn); }); - // Update hot ranks every 5 minutes + // Update hot ranks every 15 minutes let url = db_url.clone(); - scheduler.every(CTimeUnits::minutes(5)).run(move || { + scheduler.every(CTimeUnits::minutes(15)).run(move || { let mut conn = PgConnection::establish(&url).expect("could not establish connection"); update_hot_ranks(&mut conn, true); }); + // Delete any captcha answers older than ten minutes, every ten minutes + let url = db_url.clone(); + scheduler.every(CTimeUnits::minutes(10)).run(move || { + let mut conn = PgConnection::establish(&url).expect("could not establish connection"); + delete_expired_captcha_answers(&mut conn); + }); + // Clear old activities every week let url = db_url.clone(); scheduler.every(CTimeUnits::weeks(1)).run(move || { @@ -66,6 +69,13 @@ pub fn setup( context_1.settings_updated_channel().remove_older_than(hour); }); + // Overwrite deleted & removed posts and comments every day + let url = db_url.clone(); + scheduler.every(CTimeUnits::days(1)).run(move || { + let mut conn = PgConnection::establish(&url).expect("could not establish connection"); + overwrite_deleted_posts_and_comments(&mut conn); + }); + // Update the Instance Software scheduler.every(CTimeUnits::days(1)).run(move || { let mut conn = PgConnection::establish(&db_url).expect("could not establish connection"); @@ -86,67 +96,109 @@ fn startup_jobs(db_url: &str) { update_hot_ranks(&mut conn, false); update_banned_when_expired(&mut conn); clear_old_activities(&mut conn); + overwrite_deleted_posts_and_comments(&mut conn); } /// Update the hot_rank columns for the aggregates tables +/// Runs in batches until all necessary rows are updated once fn update_hot_ranks(conn: &mut PgConnection, last_week_only: bool) { - let mut post_update = diesel::update(post_aggregates::table).into_boxed(); - let mut comment_update = diesel::update(comment_aggregates::table).into_boxed(); - let mut community_update = diesel::update(community_aggregates::table).into_boxed(); - - // Only update for the last week of content - if last_week_only { + let process_start_time = if last_week_only { info!("Updating hot ranks for last week..."); - let last_week = now - diesel::dsl::IntervalDsl::weeks(1); - - post_update = post_update.filter(post_aggregates::published.gt(last_week)); - comment_update = comment_update.filter(comment_aggregates::published.gt(last_week)); - community_update = community_update.filter(community_aggregates::published.gt(last_week)); + naive_now() - chrono::Duration::days(7) } else { info!("Updating hot ranks for all history..."); - } + NaiveDateTime::from_timestamp_opt(0, 0).expect("0 timestamp creation") + }; - match post_update - .set(( - post_aggregates::hot_rank.eq(hot_rank(post_aggregates::score, post_aggregates::published)), - post_aggregates::hot_rank_active.eq(hot_rank( - post_aggregates::score, - post_aggregates::newest_comment_time_necro, - )), + process_hot_ranks_in_batches( + conn, + "post_aggregates", + "SET hot_rank = hot_rank(a.score, a.published), + hot_rank_active = hot_rank(a.score, a.newest_comment_time_necro)", + process_start_time, + ); + + process_hot_ranks_in_batches( + conn, + "comment_aggregates", + "SET hot_rank = hot_rank(a.score, a.published)", + process_start_time, + ); + + process_hot_ranks_in_batches( + conn, + "community_aggregates", + "SET hot_rank = hot_rank(a.subscribers, a.published)", + process_start_time, 
+ ); + + info!("Finished hot ranks update!"); +} + +#[derive(QueryableByName)] +struct HotRanksUpdateResult { + #[diesel(sql_type = Timestamp)] + published: NaiveDateTime, +} + +/// Runs the hot rank update query in batches until all rows after `process_start_time` have been +/// processed. +/// In `set_clause`, "a" will refer to the current aggregates table. +/// Locked rows are skipped in order to prevent deadlocks (they will likely get updated on the next +/// run) +fn process_hot_ranks_in_batches( + conn: &mut PgConnection, + table_name: &str, + set_clause: &str, + process_start_time: NaiveDateTime, +) { + let update_batch_size = 1000; // Bigger batches than this tend to cause seq scans + let mut previous_batch_result = Some(process_start_time); + while let Some(previous_batch_last_published) = previous_batch_result { + // Raw `sql_query` is used as a performance optimization - Diesel does not support doing this + // in a single query (neither as a CTE, nor using a subquery) + let result = sql_query(format!( + r#"WITH batch AS (SELECT a.id + FROM {aggregates_table} a + WHERE a.published > $1 + ORDER BY a.published + LIMIT $2 + FOR UPDATE SKIP LOCKED) + UPDATE {aggregates_table} a {set_clause} + FROM batch WHERE a.id = batch.id RETURNING a.published; + "#, + aggregates_table = table_name, + set_clause = set_clause )) - .execute(conn) - { - Ok(_) => {} - Err(e) => { - error!("Failed to update post_aggregates hot_ranks: {}", e) + .bind::(previous_batch_last_published) + .bind::(update_batch_size) + .get_results::(conn); + + match result { + Ok(updated_rows) => previous_batch_result = updated_rows.last().map(|row| row.published), + Err(e) => { + error!("Failed to update {} hot_ranks: {}", table_name, e); + break; + } } } + info!( + "Finished process_hot_ranks_in_batches execution for {}", + table_name + ); +} - match comment_update - .set(comment_aggregates::hot_rank.eq(hot_rank( - comment_aggregates::score, - comment_aggregates::published, - ))) - .execute(conn) - { - Ok(_) => {} - Err(e) => { - error!("Failed to update comment_aggregates hot_ranks: {}", e) - } - } - - match community_update - .set(community_aggregates::hot_rank.eq(hot_rank( - community_aggregates::subscribers, - community_aggregates::published, - ))) - .execute(conn) +fn delete_expired_captcha_answers(conn: &mut PgConnection) { + match diesel::delete( + captcha_answer::table.filter(captcha_answer::published.lt(now - IntervalDsl::minutes(10))), + ) + .execute(conn) { Ok(_) => { info!("Done."); } Err(e) => { - error!("Failed to update community_aggregates hot_ranks: {}", e) + error!("Failed to clear old captcha answers: {}", e) } } } @@ -166,6 +218,48 @@ fn clear_old_activities(conn: &mut PgConnection) { } } +/// overwrite posts and comments 30d after deletion +fn overwrite_deleted_posts_and_comments(conn: &mut PgConnection) { + info!("Overwriting deleted posts..."); + match diesel::update( + post::table + .filter(post::deleted.eq(true)) + .filter(post::updated.lt(now.nullable() - 1.months())) + .filter(post::body.ne(DELETED_REPLACEMENT_TEXT)), + ) + .set(( + post::body.eq(DELETED_REPLACEMENT_TEXT), + post::name.eq(DELETED_REPLACEMENT_TEXT), + )) + .execute(conn) + { + Ok(_) => { + info!("Done."); + } + Err(e) => { + error!("Failed to overwrite deleted posts: {}", e) + } + } + + info!("Overwriting deleted comments..."); + match diesel::update( + comment::table + .filter(comment::deleted.eq(true)) + .filter(comment::updated.lt(now.nullable() - 1.months())) + .filter(comment::content.ne(DELETED_REPLACEMENT_TEXT)), + ) + 
.set(comment::content.eq(DELETED_REPLACEMENT_TEXT)) + .execute(conn) + { + Ok(_) => { + info!("Done."); + } + Err(e) => { + error!("Failed to overwrite deleted comments: {}", e) + } + } +} + /// Re-calculate the site and community active counts every 12 hours fn active_counts(conn: &mut PgConnection) { info!("Updating active site and community aggregates ...");
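The batched hot_rank update above is the heart of this patch, so a compact, self-contained restatement of the pattern may be useful: select a LIMIT-ed batch of ids ordered by published, lock them with FOR UPDATE SKIP LOCKED so a concurrent run cannot deadlock, update only that batch, and feed the largest published value the UPDATE returned back in as the lower bound for the next batch. The sketch below hard-codes the post_aggregates case, assumes the hot_rank() SQL function from Lemmy's schema, and reduces error handling to `?`; it is not the exact code merged above.

use chrono::NaiveDateTime;
use diesel::{
    sql_query,
    sql_types::{Integer, Timestamp},
    PgConnection, QueryableByName, RunQueryDsl,
};

#[derive(QueryableByName)]
struct BatchRow {
    #[diesel(sql_type = Timestamp)]
    published: NaiveDateTime,
}

/// Recompute hot ranks for every post_aggregates row published after `start`,
/// 1000 rows at a time, skipping rows that are currently locked.
fn update_post_hot_ranks(conn: &mut PgConnection, start: NaiveDateTime) -> diesel::QueryResult<()> {
    let batch_size = 1000;
    let mut cursor = Some(start);
    while let Some(lower_bound) = cursor {
        let rows: Vec<BatchRow> = sql_query(
            r#"WITH batch AS (
                   SELECT a.id FROM post_aggregates a
                   WHERE a.published > $1
                   ORDER BY a.published
                   LIMIT $2
                   FOR UPDATE SKIP LOCKED)
               UPDATE post_aggregates a
               SET hot_rank = hot_rank(a.score, a.published),
                   hot_rank_active = hot_rank(a.score, a.newest_comment_time_necro)
               FROM batch WHERE a.id = batch.id
               RETURNING a.published"#,
        )
        .bind::<Timestamp, _>(lower_bound)
        .bind::<Integer, _>(batch_size)
        .get_results(conn)?;
        // An empty batch means everything published after `start` has been processed.
        cursor = rows.last().map(|row| row.published);
    }
    Ok(())
}

The RETURNING clause is what turns this into keyset pagination: the last published value of each batch becomes the WHERE bound of the next one, so no OFFSET scan is needed regardless of table size.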
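overwrite_deleted_posts_and_comments above leans on diesel's interval DSL: `post::updated.lt(now.nullable() - 1.months())` selects rows whose last update (the deletion) is more than a month old. Below is a self-contained distillation of the post half under simplified assumptions: the local table! definition is not Lemmy's full schema, and the replacement string is a placeholder standing in for lemmy_utils' DELETED_REPLACEMENT_TEXT.

use diesel::{
    dsl::{now, IntervalDsl},
    ExpressionMethods, NullableExpressionMethods, PgConnection, QueryDsl, RunQueryDsl,
};

diesel::table! {
    post (id) {
        id -> Int4,
        name -> Text,
        body -> Text,
        deleted -> Bool,
        updated -> Nullable<Timestamp>,
    }
}

/// Blank out the title and body of posts deleted more than a month ago.
/// Returns how many rows were rewritten.
fn scrub_old_deleted_posts(conn: &mut PgConnection) -> diesel::QueryResult<usize> {
    // Illustrative stand-in for the DELETED_REPLACEMENT_TEXT constant used above.
    const REPLACEMENT: &str = "[deleted]";
    diesel::update(
        post::table
            .filter(post::deleted.eq(true))
            .filter(post::updated.lt(now.nullable() - 1.months())),
    )
    .set((post::name.eq(REPLACEMENT), post::body.eq(REPLACEMENT)))
    .execute(conn)
}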
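All of the periodic work above (hot ranks every 15 minutes, expired captcha answers every 10 minutes, the daily overwrite of deleted content, weekly activity pruning) is registered on a synchronous clokwerk Scheduler, and each closure opens its own PgConnection when it fires. A stripped-down sketch of that wiring follows; cleanup_job stands in for any of the maintenance functions, and the interval and tick length are chosen arbitrarily rather than copied from setup().

use clokwerk::{Scheduler, TimeUnits};
use diesel::{Connection, PgConnection};
use std::{thread, time::Duration};

fn cleanup_job(_conn: &mut PgConnection) {
    // e.g. delete expired captcha answers or overwrite old deleted content,
    // as in the functions above.
}

fn run_scheduler(db_url: String) {
    let mut scheduler = Scheduler::new();

    let url = db_url.clone();
    scheduler.every(10.minutes()).run(move || {
        // Open a fresh connection on every run, mirroring the pattern in setup() above.
        let mut conn = PgConnection::establish(&url).expect("could not establish connection");
        cleanup_job(&mut conn);
    });

    // Something has to keep polling the scheduler; a simple sleep loop is enough.
    loop {
        scheduler.run_pending();
        thread::sleep(Duration::from_secs(1));
    }
}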
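Two smaller runtime changes sit in main.rs and lib.rs: the entry point switches from #[actix_web::main] to #[tokio::main], and the actix-web App gains middleware::Compress::default(), which negotiates the response encoding from the client's Accept-Encoding header (the encodings actually offered depend on which actix-web compress-* features are compiled in). A minimal standalone combination of the two is sketched below; the handler, port, and response body are illustrative only.

use actix_web::{middleware, web, App, HttpServer, Responder};

async fn index() -> impl Responder {
    "hello from a compressed endpoint"
}

// Assumes tokio with the `macros` and `rt-multi-thread` features;
// actix-web 4 can run directly on this runtime, as the main.rs change above relies on.
#[tokio::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        App::new()
            // Compress responses when the client advertises support for it.
            .wrap(middleware::Compress::default())
            .route("/", web::get().to(index))
    })
    .bind(("127.0.0.1", 8536))?
    .run()
    .await
}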