diff --git a/.rustfmt.toml b/.rustfmt.toml index c539ff0b4..80c01a69b 100644 --- a/.rustfmt.toml +++ b/.rustfmt.toml @@ -1,5 +1,5 @@ tab_spaces = 2 -edition="2021" -imports_layout="HorizontalVertical" -imports_granularity="Crate" -group_imports="One" \ No newline at end of file +edition = "2021" +imports_layout = "HorizontalVertical" +imports_granularity = "Crate" +group_imports = "One" diff --git a/.woodpecker.yml b/.woodpecker.yml index 073bc0a0b..58ab2f0f9 100644 --- a/.woodpecker.yml +++ b/.woodpecker.yml @@ -18,7 +18,6 @@ pipeline: image: alpine:3 commands: - apk add git - #- git fetch --tags - git submodule init - git submodule update @@ -27,7 +26,34 @@ pipeline: commands: - prettier -c . '!**/volumes' '!**/dist' '!target' '!**/translations' - # use minimum supported rust version for most steps + restore-cache: + image: meltwater/drone-cache:v1 + pull: true + settings: + restore: true + endpoint: + from_secret: MINIO_ENDPOINT + access-key: + from_secret: MINIO_WRITE_USER + secret-key: + from_secret: MINIO_WRITE_PASSWORD + bucket: + from_secret: MINIO_BUCKET + region: us-east-1 + cache_key: "rust-cache" + path-style: true + mount: + - ".cargo" + - "target" + - "api_tests/node_modules" + secrets: + [MINIO_ENDPOINT, MINIO_WRITE_USER, MINIO_WRITE_PASSWORD, MINIO_BUCKET] + + toml_fmt: + image: tamasfe/taplo:0.8.1 + commands: + - taplo format --check + cargo_fmt: image: *muslrust_image environment: @@ -35,42 +61,15 @@ pipeline: CARGO_HOME: .cargo commands: # need make existing toolchain available - - cp ~/.cargo . -r - - rustup toolchain install nightly - - rustup component add rustfmt --toolchain nightly - - cargo +nightly fmt -- --check - # when: - # platform: linux/amd64 - - cargo_clippy: - image: *muslrust_image - environment: - CARGO_HOME: .cargo - commands: - # latest rust for clippy to get extra checks - # when adding new clippy lints, make sure to also add them in scripts/fix-clippy.sh - - rustup component add clippy - - cargo clippy --workspace --tests --all-targets --features console -- - -D warnings -D deprecated -D clippy::perf -D clippy::complexity - -D clippy::style -D clippy::correctness -D clippy::suspicious - -D clippy::dbg_macro -D clippy::inefficient_to_string - -D clippy::items-after-statements -D clippy::implicit_clone - -D clippy::cast_lossless -D clippy::manual_string_new - -D clippy::redundant_closure_for_method_calls - -D clippy::unused_self - -A clippy::uninlined_format_args - -D clippy::get_first - -D clippy::explicit_into_iter_loop - -D clippy::explicit_iter_loop - -D clippy::needless_collect - - cargo clippy --workspace --features console -- - -D clippy::unwrap_used - -D clippy::indexing_slicing + - cp -n ~/.cargo . 
-r + - rustup toolchain install nightly-2023-07-10 + - rustup component add rustfmt --toolchain nightly-2023-07-10 + - cargo +nightly-2023-07-10 fmt -- --check # when: # platform: linux/amd64 # make sure api builds with default features (used by other crates relying on lemmy api) - cargo_check: + check_api_common_default_features: image: *muslrust_image environment: CARGO_HOME: .cargo @@ -88,6 +87,14 @@ pipeline: # when: # platform: linux/amd64 + lemmy_api_common_works_with_wasm: + image: *muslrust_image + environment: + CARGO_HOME: .cargo + commands: + - "rustup target add wasm32-unknown-unknown" + - "cargo check --target wasm32-unknown-unknown -p lemmy_api_common" + check_defaults_hjson_updated: image: *muslrust_image environment: @@ -109,12 +116,45 @@ pipeline: - diesel print-schema --config-file=diesel.toml > tmp.schema - diff tmp.schema crates/db_schema/src/schema.rs + check_diesel_migration_revertable: + image: willsquire/diesel-cli + environment: + CARGO_HOME: .cargo + DATABASE_URL: postgres://lemmy:password@database:5432/lemmy + commands: + - diesel migration run + - diesel migration redo + + cargo_clippy: + image: *muslrust_image + environment: + CARGO_HOME: .cargo + commands: + # when adding new clippy lints, make sure to also add them in scripts/fix-clippy.sh + - rustup component add clippy + - cargo clippy --workspace --tests --all-targets --features console -- + -D warnings -D deprecated -D clippy::perf -D clippy::complexity + -D clippy::style -D clippy::correctness -D clippy::suspicious + -D clippy::dbg_macro -D clippy::inefficient_to_string + -D clippy::items-after-statements -D clippy::implicit_clone + -D clippy::cast_lossless -D clippy::manual_string_new + -D clippy::redundant_closure_for_method_calls + -D clippy::unused_self + -A clippy::uninlined_format_args + -D clippy::get_first + -D clippy::explicit_into_iter_loop + -D clippy::explicit_iter_loop + -D clippy::needless_collect + -D clippy::unwrap_used + -D clippy::indexing_slicing + # when: + # platform: linux/amd64 + cargo_test: image: *muslrust_image environment: LEMMY_DATABASE_URL: postgres://lemmy:password@database:5432/lemmy RUST_BACKTRACE: "1" - RUST_TEST_THREADS: "1" CARGO_HOME: .cargo commands: - export LEMMY_CONFIG_LOCATION=../../config/config.hjson @@ -146,6 +186,29 @@ pipeline: # when: # platform: linux/amd64 + rebuild-cache: + image: meltwater/drone-cache:v1 + pull: true + settings: + rebuild: true + endpoint: + from_secret: MINIO_ENDPOINT + access-key: + from_secret: MINIO_WRITE_USER + secret-key: + from_secret: MINIO_WRITE_PASSWORD + bucket: + from_secret: MINIO_BUCKET + cache_key: "rust-cache" + region: us-east-1 + path-style: true + mount: + - ".cargo" + - "target" + - "api_tests/node_modules" + secrets: + [MINIO_ENDPOINT, MINIO_WRITE_USER, MINIO_WRITE_PASSWORD, MINIO_BUCKET] + publish_release_docker: image: woodpeckerci/plugin-docker-buildx secrets: [docker_username, docker_password] @@ -172,20 +235,6 @@ pipeline: when: event: cron - # using https://github.com/pksunkara/cargo-workspaces - publish_to_crates_io: - image: *muslrust_image - commands: - - 'echo "pub const VERSION: &str = \"$(git describe --tag)\";" > "crates/utils/src/version.rs"' - - cargo install cargo-workspaces - - cp -r migrations crates/db_schema/ - - cargo login "$CARGO_API_TOKEN" - - cargo workspaces publish --from-git --allow-dirty --no-verify --allow-branch "${CI_COMMIT_TAG}" --yes custom "${CI_COMMIT_TAG}" - secrets: [cargo_api_token] - when: - event: tag - #platform: linux/amd64 - notify_on_failure: image: alpine:3 commands: diff 
--git a/Cargo.lock b/Cargo.lock index b2d3a7bf4..0e8f5fc6b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10,9 +10,9 @@ checksum = "fe438c63458706e03479442743baae6c88256498e6431708f6dfc520a26515d3" [[package]] name = "activitypub_federation" -version = "0.4.5" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ab3ac148d9c0b4163a6d41040c17de7558a42224b9ecbd4e8f033aef6c254d9" +checksum = "4e6e7fefba6602240fcf612931b70640ad1e249dff833551ebc218f1c96a4193" dependencies = [ "activitystreams-kinds", "actix-web", @@ -22,7 +22,6 @@ dependencies = [ "bytes", "chrono", "derive_builder", - "displaydoc", "dyn-clone", "enum_delegate", "futures-core", @@ -346,7 +345,7 @@ version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" dependencies = [ - "getrandom 0.2.8", + "getrandom 0.2.10", "once_cell", "version_check", ] @@ -358,7 +357,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f" dependencies = [ "cfg-if", - "getrandom 0.2.8", + "getrandom 0.2.10", "once_cell", "version_check", ] @@ -678,7 +677,7 @@ checksum = "28d1c9c15093eb224f0baa400f38fcd713fc1391a6f1c389d886beef146d60a3" dependencies = [ "base64 0.21.2", "blowfish", - "getrandom 0.2.8", + "getrandom 0.2.10", "subtle", "zeroize", ] @@ -1548,17 +1547,6 @@ dependencies = [ "chrono", ] -[[package]] -name = "displaydoc" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "487585f4d0c6655fe74905e2504d8ad6908e4db67f744eb140876906c2f3175d" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.25", -] - [[package]] name = "dlv-list" version = "0.3.0" @@ -2045,9 +2033,9 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.8" +version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c05aeb6a22b8f62540c194aac980f2115af067bfe15a0734d7277a768d396b31" +checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" dependencies = [ "cfg-if", "js-sys", @@ -2397,17 +2385,6 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" -[[package]] -name = "idna" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "418a0a6fab821475f634efe3ccc45c013f742efe03d853e8d3355d5cb850ecf8" -dependencies = [ - "matches", - "unicode-bidi", - "unicode-normalization", -] - [[package]] name = "idna" version = "0.3.0" @@ -2644,16 +2621,19 @@ dependencies = [ name = "lemmy_api_common" version = "0.18.1" dependencies = [ + "activitypub_federation", "actix-web", "anyhow", "chrono", "encoding", "futures", + "getrandom 0.2.10", "lemmy_db_schema", "lemmy_db_views", "lemmy_db_views_actor", "lemmy_db_views_moderator", "lemmy_utils", + "once_cell", "percent-encoding", "regex", "reqwest", @@ -2779,7 +2759,6 @@ dependencies = [ "tokio", "tracing", "ts-rs", - "typed-builder", ] [[package]] @@ -2792,7 +2771,6 @@ dependencies = [ "serde", "serde_with", "ts-rs", - "typed-builder", ] [[package]] @@ -3162,12 +3140,6 @@ dependencies = [ "regex-automata 0.1.10", ] -[[package]] -name = "matches" -version = "0.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3e378b66a060d48947b590737b30a1be76706c8dd7b8ba0f2fe3989c68a853f" - [[package]] name = "matchit" version = 
"0.5.0" @@ -3195,7 +3167,7 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5736ba45bbac8f7ccc99a897f88ce85e508a18baec973a040f2514e6cdbff0d2" dependencies = [ - "idna 0.2.3", + "idna 0.3.0", "once_cell", "regex", ] @@ -4271,7 +4243,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.8", + "getrandom 0.2.10", ] [[package]] @@ -4437,7 +4409,7 @@ checksum = "1b97ad83c2fc18113346b7158d79732242002427c30f620fa817c1f32901e0a8" dependencies = [ "anyhow", "async-trait", - "getrandom 0.2.8", + "getrandom 0.2.10", "matchit 0.7.0", "opentelemetry 0.16.0", "reqwest", @@ -5915,7 +5887,7 @@ version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d023da39d1fde5a8a3fe1f3e01ca9632ada0a63e9797de55a879d6e2236277be" dependencies = [ - "getrandom 0.2.8", + "getrandom 0.2.10", "serde", ] diff --git a/Cargo.toml b/Cargo.toml index 9df383df4..f5268be24 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -24,25 +24,36 @@ doctest = false debug = 0 lto = "thin" +# This profile significantly speeds up build time. If debug info is needed you can comment the line +# out temporarily, but make sure to leave this in the main branch. +[profile.dev] +debug = 0 + [features] embed-pictrs = ["pict-rs"] -console = ["console-subscriber", "opentelemetry", "opentelemetry-otlp", "tracing-opentelemetry", "reqwest-tracing/opentelemetry_0_16"] +console = [ + "console-subscriber", + "opentelemetry", + "opentelemetry-otlp", + "tracing-opentelemetry", + "reqwest-tracing/opentelemetry_0_16", +] json-log = ["tracing-subscriber/json"] prometheus-metrics = ["prometheus", "actix-web-prom"] default = [] [workspace] members = [ - "crates/api", - "crates/api_crud", - "crates/api_common", - "crates/apub", - "crates/utils", - "crates/db_schema", - "crates/db_views", - "crates/db_views_actor", - "crates/db_views_actor", - "crates/routes" + "crates/api", + "crates/api_crud", + "crates/api_common", + "crates/apub", + "crates/utils", + "crates/db_schema", + "crates/db_views", + "crates/db_views_actor", + "crates/db_views_actor", + "crates/routes", ] [workspace.dependencies] @@ -56,13 +67,21 @@ lemmy_routes = { version = "=0.18.1", path = "./crates/routes" } lemmy_db_views = { version = "=0.18.1", path = "./crates/db_views" } lemmy_db_views_actor = { version = "=0.18.1", path = "./crates/db_views_actor" } lemmy_db_views_moderator = { version = "=0.18.1", path = "./crates/db_views_moderator" } -activitypub_federation = { version = "0.4.5", default-features = false, features = ["actix-web"] } +activitypub_federation = { version = "0.4.6", default-features = false, features = [ + "actix-web", +] } diesel = "2.1.0" diesel_migrations = "2.1.0" diesel-async = "0.3.1" serde = { version = "1.0.167", features = ["derive"] } serde_with = "3.0.0" -actix-web = { version = "4.3.1", default-features = false, features = ["macros", "rustls", "compress-brotli", "compress-gzip", "compress-zstd"] } +actix-web = { version = "4.3.1", default-features = false, features = [ + "macros", + "rustls", + "compress-brotli", + "compress-gzip", + "compress-zstd", +] } tracing = "0.1.37" tracing-actix-web = { version = "0.7.5", default-features = false } tracing-error = "0.2.0" @@ -82,7 +101,9 @@ base64 = "0.21.2" uuid = { version = "1.4.0", features = ["serde", "v4"] } async-trait = "0.1.71" captcha = "0.0.9" -anyhow = { version = "1.0.71", features = 
["backtrace"] } # backtrace is on by default on nightly, but not stable rust +anyhow = { version = "1.0.71", features = [ + "backtrace", +] } # backtrace is on by default on nightly, but not stable rust diesel_ltree = "0.3.0" typed-builder = "0.15.0" serial_test = "2.0.0" @@ -91,7 +112,7 @@ sha2 = "0.10.7" regex = "1.9.0" once_cell = "1.18.0" diesel-derive-newtype = "2.1.0" -diesel-derive-enum = {version = "2.1.0", features = ["postgres"] } +diesel-derive-enum = { version = "2.1.0", features = ["postgres"] } strum = "0.25.0" strum_macros = "0.25.1" itertools = "0.11.0" @@ -103,7 +124,7 @@ rand = "0.8.5" opentelemetry = { version = "0.19.0", features = ["rt-tokio"] } tracing-opentelemetry = { version = "0.19.0" } ts-rs = { version = "6.2", features = ["serde-compat", "chrono-impl"] } -rustls = { version ="0.21.3", features = ["dangerous_configuration"]} +rustls = { version = "0.21.3", features = ["dangerous_configuration"] } futures-util = "0.3.28" tokio-postgres = "0.7.8" tokio-postgres-rustls = "0.10.0" diff --git a/README.md b/README.md index 513ad8c87..f27d8441e 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,8 @@ Español | Русский | 汉语 | - 漢語 + 漢語 | + 日本語

diff --git a/RELEASES.md b/RELEASES.md index 1dd500f40..bca2f02f1 100644 --- a/RELEASES.md +++ b/RELEASES.md @@ -470,7 +470,7 @@ The installation instructions have been slightly updated. However there are no b Follow the upgrade instructions for [ansible](https://github.com/LemmyNet/lemmy-ansible#upgrading) or [docker](https://join-lemmy.org/docs/en/administration/install_docker.html#updating). -If you need help with the upgrade, you can ask in our [support forum](https://lemmy.ml/c/lemmy_support) or on the [Matrix Chat](https://matrix.to/#/!OwmdVYiZSXrXbtCNLw:matrix.org). +If you need help with the upgrade, you can ask in our [support forum](https://lemmy.ml/c/lemmy_support) or on the [Matrix Chat](https://matrix.to/#/#lemmy-admin-support-topics:discuss.online). ## Support development @@ -1016,8 +1016,8 @@ Next, **manually edit** your [lemmy.hjson](https://github.com/LemmyNet/lemmy/blo - `pictrs_url` is removed, and the pictrs config is now a block. If using docker, it should look like: ``` pictrs: { - url: "http://pictrs:8080/" - # api_key: "API_KEY" + url: "http://pictrs:8080/" + api_key: "{{ postgres_password }}" } ``` - The `rate_limit`, `federation`, `captcha`, and `slur_filter` blocks should be removed, as they are now in the database, and can be updated through the UI. @@ -1048,7 +1048,7 @@ _Note_: On production databases with thousands of comments, this upgrade **takes _Note_: If you have any issues upgrading, you can restore your old database using the [backup and restore instructions here](https://join-lemmy.org/docs/en/administration/backup_and_restore.html). -If you need help with the upgrade, you can ask in our [support forum](https://lemmy.ml/c/lemmy_support) or on the [Matrix Chat](https://matrix.to/#/!BZVTUuEiNmRcbFeLeI:matrix.org). +If you need help with the upgrade, you can ask in our [support forum](https://lemmy.ml/c/lemmy_support) or on the [Matrix Chat](https://matrix.to/#/#lemmy-admin-support-topics:discuss.online). ## Support development diff --git a/api_tests/package.json b/api_tests/package.json index d81ef235d..ec692e1b5 100644 --- a/api_tests/package.json +++ b/api_tests/package.json @@ -19,7 +19,7 @@ "eslint": "^8.40.0", "eslint-plugin-prettier": "^4.0.0", "jest": "^29.5.0", - "lemmy-js-client": "0.17.2-rc.13", + "lemmy-js-client": "0.18.3-rc.3", "prettier": "^3.0.0", "ts-jest": "^29.1.0", "typescript": "^5.0.4" diff --git a/api_tests/prepare-drone-federation-test.sh b/api_tests/prepare-drone-federation-test.sh index 813b3b15c..7eceeeb77 100755 --- a/api_tests/prepare-drone-federation-test.sh +++ b/api_tests/prepare-drone-federation-test.sh @@ -1,11 +1,15 @@ #!/usr/bin/env bash +# IMPORTANT NOTE: this script does not use the normal LEMMY_DATABASE_URL format +# it is expected that this script is called by run-federation-test.sh script. 
set -e export RUST_BACKTRACE=1 export RUST_LOG="warn,lemmy_server=debug,lemmy_api=debug,lemmy_api_common=debug,lemmy_api_crud=debug,lemmy_apub=debug,lemmy_db_schema=debug,lemmy_db_views=debug,lemmy_db_views_actor=debug,lemmy_db_views_moderator=debug,lemmy_routes=debug,lemmy_utils=debug,lemmy_websocket=debug" for INSTANCE in lemmy_alpha lemmy_beta lemmy_gamma lemmy_delta lemmy_epsilon; do + echo "DB URL: ${LEMMY_DATABASE_URL} INSTANCE: $INSTANCE" psql "${LEMMY_DATABASE_URL}/lemmy" -c "DROP DATABASE IF EXISTS $INSTANCE" + echo "create database" psql "${LEMMY_DATABASE_URL}/lemmy" -c "CREATE DATABASE $INSTANCE" done @@ -26,6 +30,7 @@ else done fi +echo "killall existing lemmy_server processes" killall lemmy_server || true echo "$PWD" @@ -59,7 +64,12 @@ target/lemmy_server >/tmp/lemmy_epsilon.out 2>&1 & echo "wait for all instances to start" while [[ "$(curl -s -o /dev/null -w '%{http_code}' 'lemmy-alpha:8541/api/v3/site')" != "200" ]]; do sleep 1; done +echo "alpha started" while [[ "$(curl -s -o /dev/null -w '%{http_code}' 'lemmy-beta:8551/api/v3/site')" != "200" ]]; do sleep 1; done +echo "beta started" while [[ "$(curl -s -o /dev/null -w '%{http_code}' 'lemmy-gamma:8561/api/v3/site')" != "200" ]]; do sleep 1; done +echo "gamma started" while [[ "$(curl -s -o /dev/null -w '%{http_code}' 'lemmy-delta:8571/api/v3/site')" != "200" ]]; do sleep 1; done +echo "delta started" while [[ "$(curl -s -o /dev/null -w '%{http_code}' 'lemmy-epsilon:8581/api/v3/site')" != "200" ]]; do sleep 1; done +echo "epsilon started. All started" diff --git a/api_tests/src/comment.spec.ts b/api_tests/src/comment.spec.ts index 80cb868f2..932c7ffeb 100644 --- a/api_tests/src/comment.spec.ts +++ b/api_tests/src/comment.spec.ts @@ -29,6 +29,7 @@ import { getComments, getCommentParentId, resolveCommunity, + getPersonDetails, } from "./shared"; import { CommentView } from "lemmy-js-client/dist/types/CommentView"; @@ -82,8 +83,7 @@ test("Create a comment", async () => { }); test("Create a comment in a non-existent post", async () => { - let commentRes = (await createComment(alpha, -1)) as any; - expect(commentRes.error).toBe("couldnt_find_post"); + await expect(createComment(alpha, -1)).rejects.toBe("couldnt_find_post"); }); test("Update a comment", async () => { @@ -122,11 +122,9 @@ test("Delete a comment", async () => { expect(deleteCommentRes.comment_view.comment.deleted).toBe(true); // Make sure that comment is undefined on beta - let betaCommentRes = (await resolveComment( - beta, - commentRes.comment_view.comment, - )) as any; - expect(betaCommentRes.error).toBe("couldnt_find_object"); + await expect( + resolveComment(beta, commentRes.comment_view.comment), + ).rejects.toBe("couldnt_find_object"); let undeleteCommentRes = await deleteComment( alpha, @@ -160,9 +158,9 @@ test("Remove a comment from admin and community on the same instance", async () expect(removeCommentRes.comment_view.comment.removed).toBe(true); // Make sure that comment is removed on alpha (it gets pushed since an admin from beta removed it) - let refetchedPostComments = await getComments( + let refetchedPostComments = await getPersonDetails( alpha, - postRes.post_view.post.id, + commentRes.comment_view.comment.creator_id, ); expect(refetchedPostComments.comments[0].comment.removed).toBe(true); diff --git a/api_tests/src/community.spec.ts b/api_tests/src/community.spec.ts index d9a76cf87..a5a202ace 100644 --- a/api_tests/src/community.spec.ts +++ b/api_tests/src/community.spec.ts @@ -52,8 +52,9 @@ test("Create community", async () => { // A dupe check 
let prevName = communityRes.community_view.community.name; - let communityRes2: any = await createCommunity(alpha, prevName); - expect(communityRes2["error"]).toBe("community_already_exists"); + await expect(createCommunity(alpha, prevName)).rejects.toBe( + "community_already_exists", + ); // Cache the community on beta, make sure it has the other fields let searchShort = `!${prevName}@lemmy-alpha:8541`; diff --git a/api_tests/src/post.spec.ts b/api_tests/src/post.spec.ts index cabbcfd8a..8ea3ea912 100644 --- a/api_tests/src/post.spec.ts +++ b/api_tests/src/post.spec.ts @@ -88,17 +88,18 @@ test("Create a post", async () => { assertPostFederation(betaPost, postRes.post_view); // Delta only follows beta, so it should not see an alpha ap_id - let deltaPost = (await resolvePost(delta, postRes.post_view.post)).post; - expect(deltaPost).toBeUndefined(); + await expect(resolvePost(delta, postRes.post_view.post)).rejects.toBe( + "couldnt_find_object", + ); // Epsilon has alpha blocked, it should not see the alpha post - let epsilonPost = (await resolvePost(epsilon, postRes.post_view.post)).post; - expect(epsilonPost).toBeUndefined(); + await expect(resolvePost(epsilon, postRes.post_view.post)).rejects.toBe( + "couldnt_find_object", + ); }); test("Create a post in a non-existent community", async () => { - let postRes = (await createPost(alpha, -2)) as any; - expect(postRes.error).toBe("couldnt_find_community"); + await expect(createPost(alpha, -2)).rejects.toBe("couldnt_find_community"); }); test("Unlike a post", async () => { @@ -145,8 +146,9 @@ test("Update a post", async () => { assertPostFederation(betaPost, updatedPost.post_view); // Make sure lemmy beta cannot update the post - let updatedPostBeta = (await editPost(beta, betaPost.post)) as any; - expect(updatedPostBeta.error).toBe("no_post_edit_allowed"); + await expect(editPost(beta, betaPost.post)).rejects.toBe( + "no_post_edit_allowed", + ); }); test("Sticky a post", async () => { @@ -210,8 +212,7 @@ test("Lock a post", async () => { expect(alphaPost1.post.locked).toBe(true); // Try to make a new comment there, on alpha - let comment: any = await createComment(alpha, alphaPost1.post.id); - expect(comment["error"]).toBe("locked"); + await expect(createComment(alpha, alphaPost1.post.id)).rejects.toBe("locked"); // Unlock a post let unlockedPost = await lockPost(beta, false, betaPost1.post); @@ -242,9 +243,10 @@ test("Delete a post", async () => { expect(deletedPost.post_view.post.name).toBe(postRes.post_view.post.name); // Make sure lemmy beta sees post is deleted - let betaPost = (await resolvePost(beta, postRes.post_view.post)).post; // This will be undefined because of the tombstone - expect(betaPost).toBeUndefined(); + await expect(resolvePost(beta, postRes.post_view.post)).rejects.toBe( + "couldnt_find_object", + ); // Undelete let undeletedPost = await deletePost(alpha, false, postRes.post_view.post); @@ -259,8 +261,9 @@ test("Delete a post", async () => { assertPostFederation(betaPost2, undeletedPost.post_view); // Make sure lemmy beta cannot delete the post - let deletedPostBeta = (await deletePost(beta, true, betaPost2.post)) as any; - expect(deletedPostBeta.error).toStrictEqual("no_post_edit_allowed"); + await expect(deletePost(beta, true, betaPost2.post)).rejects.toBe( + "no_post_edit_allowed", + ); }); test("Remove a post from admin and community on different instance", async () => { @@ -388,8 +391,8 @@ test("Enforce site ban for federated user", async () => { expect(alphaUserOnBeta1.person?.person.banned).toBe(true); // 
existing alpha post should be removed on beta - let searchBeta2 = await searchPostLocal(beta, postRes1.post_view.post); - expect(searchBeta2.posts[0].post.removed).toBe(true); + let searchBeta2 = await getPost(beta, searchBeta1.posts[0].post.id); + expect(searchBeta2.post_view.post.removed).toBe(true); // Unban alpha let unBanAlpha = await banPersonFromSite( @@ -436,12 +439,14 @@ test("Enforce community ban for federated user", async () => { expect(banAlpha.banned).toBe(true); // ensure that the post by alpha got removed - let searchAlpha1 = await searchPostLocal(alpha, postRes1.post_view.post); - expect(searchAlpha1.posts[0].post.removed).toBe(true); + await expect(getPost(alpha, searchBeta1.posts[0].post.id)).rejects.toBe( + "unknown", + ); // Alpha tries to make post on beta, but it fails because of ban - let postRes2 = await createPost(alpha, betaCommunity.community.id); - expect(postRes2.post_view).toBeUndefined(); + await expect(createPost(alpha, betaCommunity.community.id)).rejects.toBe( + "banned_from_community", + ); // Unban alpha let unBanAlpha = await banPersonFromCommunity( diff --git a/api_tests/src/shared.ts b/api_tests/src/shared.ts index 0523712e0..bbd4eaaeb 100644 --- a/api_tests/src/shared.ts +++ b/api_tests/src/shared.ts @@ -58,6 +58,8 @@ import { CommentReportResponse } from "lemmy-js-client/dist/types/CommentReportR import { CreateCommentReport } from "lemmy-js-client/dist/types/CreateCommentReport"; import { ListCommentReportsResponse } from "lemmy-js-client/dist/types/ListCommentReportsResponse"; import { ListCommentReports } from "lemmy-js-client/dist/types/ListCommentReports"; +import { GetPersonDetailsResponse } from "lemmy-js-client/dist/types/GetPersonDetailsResponse"; +import { GetPersonDetails } from "lemmy-js-client/dist/types/GetPersonDetails"; export interface API { client: LemmyHttp; @@ -186,8 +188,11 @@ export async function setupLogins() { await epsilon.client.editSite(editSiteForm); // Create the main alpha/beta communities - await createCommunity(alpha, "main"); - await createCommunity(beta, "main"); + // Ignore thrown errors of duplicates + try { + await createCommunity(alpha, "main"); + await createCommunity(beta, "main"); + } catch (_) {} } export async function createPost( @@ -646,6 +651,16 @@ export async function saveUserSettings( ): Promise { return api.client.saveUserSettings(form); } +export async function getPersonDetails( + api: API, + person_id: number, +): Promise { + let form: GetPersonDetails = { + auth: api.auth, + person_id: person_id, + }; + return api.client.getPersonDetails(form); +} export async function deleteUser(api: API): Promise { let form: DeleteAccount = { diff --git a/api_tests/src/user.spec.ts b/api_tests/src/user.spec.ts index afe21d1a0..f488ebe1e 100644 --- a/api_tests/src/user.spec.ts +++ b/api_tests/src/user.spec.ts @@ -92,10 +92,18 @@ test("Delete user", async () => { await deleteUser(user); - expect((await resolvePost(alpha, localPost)).post).toBeUndefined(); - expect((await resolveComment(alpha, localComment)).comment).toBeUndefined(); - expect((await resolvePost(alpha, remotePost)).post).toBeUndefined(); - expect((await resolveComment(alpha, remoteComment)).comment).toBeUndefined(); + await expect(resolvePost(alpha, localPost)).rejects.toBe( + "couldnt_find_object", + ); + await expect(resolveComment(alpha, localComment)).rejects.toBe( + "couldnt_find_object", + ); + await expect(resolvePost(alpha, remotePost)).rejects.toBe( + "couldnt_find_object", + ); + await expect(resolveComment(alpha, 
remoteComment)).rejects.toBe( + "couldnt_find_object", + ); }); test("Requests with invalid auth should be treated as unauthenticated", async () => { diff --git a/api_tests/yarn.lock b/api_tests/yarn.lock index a404dc52d..30f13014b 100644 --- a/api_tests/yarn.lock +++ b/api_tests/yarn.lock @@ -2157,10 +2157,10 @@ kleur@^3.0.3: resolved "https://registry.yarnpkg.com/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e" integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w== -lemmy-js-client@0.17.2-rc.13: - version "0.17.2-rc.13" - resolved "https://registry.yarnpkg.com/lemmy-js-client/-/lemmy-js-client-0.17.2-rc.13.tgz#f2a61050c1308e85cb39c0e1f561e392e84e3921" - integrity sha512-4IyR1pisCumJ9L8fEPISC+Su1kVTI4pL/gWLsuOXxZC/lK36mG2+NfaNPiUmIklpCF5TUN+1F7E9bEvtTGogww== +lemmy-js-client@0.18.3-rc.3: + version "0.18.3-rc.3" + resolved "https://registry.yarnpkg.com/lemmy-js-client/-/lemmy-js-client-0.18.3-rc.3.tgz#fc6489eb141bd09558bca38d9e46b40771a29f37" + integrity sha512-njixgXk4uMU4gGifnljwhSe9Kf445C4wAXcXhtpTtwPPLXpHQgxA1RASMb9Uq4zblfE6nC2JbrAka8y8N2N/Bw== dependencies: cross-fetch "^3.1.5" form-data "^4.0.0" diff --git a/crates/api/src/comment_report/list.rs b/crates/api/src/comment_report/list.rs index baa1bf45f..b67ec333c 100644 --- a/crates/api/src/comment_report/list.rs +++ b/crates/api/src/comment_report/list.rs @@ -22,24 +22,19 @@ impl Perform for ListCommentReports { let data: &ListCommentReports = self; let local_user_view = local_user_view_from_jwt(&data.auth, context).await?; - let person_id = local_user_view.person.id; - let admin = local_user_view.person.admin; let community_id = data.community_id; let unresolved_only = data.unresolved_only; let page = data.page; let limit = data.limit; - let comment_reports = CommentReportQuery::builder() - .pool(&mut context.pool()) - .my_person_id(person_id) - .admin(admin) - .community_id(community_id) - .unresolved_only(unresolved_only) - .page(page) - .limit(limit) - .build() - .list() - .await?; + let comment_reports = CommentReportQuery { + community_id, + unresolved_only, + page, + limit, + } + .list(&mut context.pool(), &local_user_view.person) + .await?; Ok(ListCommentReportsResponse { comment_reports }) } diff --git a/crates/api/src/lib.rs b/crates/api/src/lib.rs index 988dac27a..9d3cf211c 100644 --- a/crates/api/src/lib.rs +++ b/crates/api/src/lib.rs @@ -76,6 +76,9 @@ pub(crate) fn check_report_reason(reason: &str, local_site: &LocalSite) -> Resul #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use lemmy_api_common::utils::check_validator_time; use lemmy_db_schema::{ source::{ diff --git a/crates/api/src/local_user/notifications/list_mentions.rs b/crates/api/src/local_user/notifications/list_mentions.rs index 1b2d3c7b6..10d24ff8f 100644 --- a/crates/api/src/local_user/notifications/list_mentions.rs +++ b/crates/api/src/local_user/notifications/list_mentions.rs @@ -27,18 +27,17 @@ impl Perform for GetPersonMentions { let person_id = Some(local_user_view.person.id); let show_bot_accounts = Some(local_user_view.local_user.show_bot_accounts); - let mentions = PersonMentionQuery::builder() - .pool(&mut context.pool()) - .recipient_id(person_id) - .my_person_id(person_id) - .sort(sort) - .unread_only(unread_only) - .show_bot_accounts(show_bot_accounts) - .page(page) - .limit(limit) - .build() - .list() - .await?; + let mentions = PersonMentionQuery { + recipient_id: person_id, + my_person_id: person_id, + sort, + unread_only, + 
show_bot_accounts, + page, + limit, + } + .list(&mut context.pool()) + .await?; Ok(GetPersonMentionsResponse { mentions }) } diff --git a/crates/api/src/local_user/notifications/list_replies.rs b/crates/api/src/local_user/notifications/list_replies.rs index 79b0fe223..8c7f3059b 100644 --- a/crates/api/src/local_user/notifications/list_replies.rs +++ b/crates/api/src/local_user/notifications/list_replies.rs @@ -24,18 +24,17 @@ impl Perform for GetReplies { let person_id = Some(local_user_view.person.id); let show_bot_accounts = Some(local_user_view.local_user.show_bot_accounts); - let replies = CommentReplyQuery::builder() - .pool(&mut context.pool()) - .recipient_id(person_id) - .my_person_id(person_id) - .sort(sort) - .unread_only(unread_only) - .show_bot_accounts(show_bot_accounts) - .page(page) - .limit(limit) - .build() - .list() - .await?; + let replies = CommentReplyQuery { + recipient_id: person_id, + my_person_id: person_id, + sort, + unread_only, + show_bot_accounts, + page, + limit, + } + .list(&mut context.pool()) + .await?; Ok(GetRepliesResponse { replies }) } diff --git a/crates/api/src/local_user/save_settings.rs b/crates/api/src/local_user/save_settings.rs index 822f08d28..4176a3f4c 100644 --- a/crates/api/src/local_user/save_settings.rs +++ b/crates/api/src/local_user/save_settings.rs @@ -133,6 +133,7 @@ impl Perform for SaveUserSettings { .totp_2fa_secret(totp_2fa_secret) .totp_2fa_url(totp_2fa_url) .open_links_in_new_tab(data.open_links_in_new_tab) + .infinite_scroll_enabled(data.infinite_scroll_enabled) .build(); let local_user_res = diff --git a/crates/api/src/post_report/list.rs b/crates/api/src/post_report/list.rs index f7496e1a3..a0d909681 100644 --- a/crates/api/src/post_report/list.rs +++ b/crates/api/src/post_report/list.rs @@ -22,24 +22,19 @@ impl Perform for ListPostReports { let data: &ListPostReports = self; let local_user_view = local_user_view_from_jwt(&data.auth, context).await?; - let person_id = local_user_view.person.id; - let admin = local_user_view.person.admin; let community_id = data.community_id; let unresolved_only = data.unresolved_only; let page = data.page; let limit = data.limit; - let post_reports = PostReportQuery::builder() - .pool(&mut context.pool()) - .my_person_id(person_id) - .admin(admin) - .community_id(community_id) - .unresolved_only(unresolved_only) - .page(page) - .limit(limit) - .build() - .list() - .await?; + let post_reports = PostReportQuery { + community_id, + unresolved_only, + page, + limit, + } + .list(&mut context.pool(), &local_user_view.person) + .await?; Ok(ListPostReportsResponse { post_reports }) } diff --git a/crates/api/src/private_message_report/list.rs b/crates/api/src/private_message_report/list.rs index 72d182d45..8b4e50767 100644 --- a/crates/api/src/private_message_report/list.rs +++ b/crates/api/src/private_message_report/list.rs @@ -21,14 +21,13 @@ impl Perform for ListPrivateMessageReports { let unresolved_only = self.unresolved_only; let page = self.page; let limit = self.limit; - let private_message_reports = PrivateMessageReportQuery::builder() - .pool(&mut context.pool()) - .unresolved_only(unresolved_only) - .page(page) - .limit(limit) - .build() - .list() - .await?; + let private_message_reports = PrivateMessageReportQuery { + unresolved_only, + page, + limit, + } + .list(&mut context.pool()) + .await?; Ok(ListPrivateMessageReportsResponse { private_message_reports, diff --git a/crates/api/src/site/registration_applications/list.rs b/crates/api/src/site/registration_applications/list.rs index 
2389ad403..433cee425 100644 --- a/crates/api/src/site/registration_applications/list.rs +++ b/crates/api/src/site/registration_applications/list.rs @@ -23,19 +23,18 @@ impl Perform for ListRegistrationApplications { is_admin(&local_user_view)?; let unread_only = data.unread_only; - let verified_email_only = local_site.require_email_verification; + let verified_email_only = Some(local_site.require_email_verification); let page = data.page; let limit = data.limit; - let registration_applications = RegistrationApplicationQuery::builder() - .pool(&mut context.pool()) - .unread_only(unread_only) - .verified_email_only(Some(verified_email_only)) - .page(page) - .limit(limit) - .build() - .list() - .await?; + let registration_applications = RegistrationApplicationQuery { + unread_only, + verified_email_only, + page, + limit, + } + .list(&mut context.pool()) + .await?; Ok(Self::Response { registration_applications, diff --git a/crates/api_common/Cargo.toml b/crates/api_common/Cargo.toml index a9b2bf19b..8a23a4cb2 100644 --- a/crates/api_common/Cargo.toml +++ b/crates/api_common/Cargo.toml @@ -14,9 +14,27 @@ path = "src/lib.rs" doctest = false [features] -full = ["tracing", "rosetta-i18n", "chrono", "lemmy_utils", - "lemmy_db_views/full", "lemmy_db_views_actor/full", "lemmy_db_views_moderator/full", - "percent-encoding", "encoding", "reqwest-middleware", "webpage", "ts-rs"] +full = [ + "tracing", + "rosetta-i18n", + "chrono", + "lemmy_utils", + "lemmy_db_views/full", + "lemmy_db_views_actor/full", + "lemmy_db_views_moderator/full", + "activitypub_federation", + "percent-encoding", + "encoding", + "reqwest-middleware", + "webpage", + "ts-rs", + "tokio", + "uuid", + "reqwest", + "actix-web", + "futures", + "once_cell", +] [dependencies] lemmy_db_views = { workspace = true } @@ -24,6 +42,7 @@ lemmy_db_views_moderator = { workspace = true } lemmy_db_views_actor = { workspace = true } lemmy_db_schema = { workspace = true } lemmy_utils = { workspace = true, optional = true } +activitypub_federation = { workspace = true, optional = true } serde = { workspace = true } serde_with = { workspace = true } url = { workspace = true } @@ -33,12 +52,17 @@ reqwest-middleware = { workspace = true, optional = true } regex = { workspace = true } rosetta-i18n = { workspace = true, optional = true } percent-encoding = { workspace = true, optional = true } -webpage = { version = "1.6", default-features = false, features = ["serde"], optional = true } +webpage = { version = "1.6", default-features = false, features = [ + "serde", +], optional = true } encoding = { version = "0.2.33", optional = true } anyhow = { workspace = true } -futures = { workspace = true } -uuid = { workspace = true } -tokio = { workspace = true } -reqwest = { workspace = true } +futures = { workspace = true, optional = true } +uuid = { workspace = true, optional = true } +tokio = { workspace = true, optional = true } +reqwest = { workspace = true, optional = true } ts-rs = { workspace = true, optional = true } +once_cell = { workspace = true, optional = true } +actix-web = { workspace = true, optional = true } +# necessary for wasm compilation +getrandom = { version = "0.2.10", features = ["js"] } diff --git a/crates/api_common/src/build_response.rs b/crates/api_common/src/build_response.rs index 8b96206cb..8a63f7ad4 100644 --- a/crates/api_common/src/build_response.rs +++ b/crates/api_common/src/build_response.rs @@ -64,7 +64,7 @@ pub async fn build_community_response( } pub async fn build_post_response( -
context: &Data<LemmyContext>, + context: &LemmyContext, community_id: CommunityId, person_id: PersonId, post_id: PostId, diff --git a/crates/api_common/src/lib.rs b/crates/api_common/src/lib.rs index 224e114a5..652cbaf43 100644 --- a/crates/api_common/src/lib.rs +++ b/crates/api_common/src/lib.rs @@ -10,6 +10,8 @@ pub mod post; pub mod private_message; #[cfg(feature = "full")] pub mod request; +#[cfg(feature = "full")] +pub mod send_activity; pub mod sensitive; pub mod site; #[cfg(feature = "full")] diff --git a/crates/api_common/src/person.rs b/crates/api_common/src/person.rs index 824d132a5..031bc6c7e 100644 --- a/crates/api_common/src/person.rs +++ b/crates/api_common/src/person.rs @@ -133,6 +133,8 @@ pub struct SaveUserSettings { pub auth: Sensitive<String>, /// Open links in a new tab pub open_links_in_new_tab: Option<bool>, + /// Enable infinite scroll + pub infinite_scroll_enabled: Option<bool>, } #[derive(Debug, Serialize, Deserialize, Clone, Default)] diff --git a/crates/api_common/src/request.rs b/crates/api_common/src/request.rs index dc09ecaa7..82126887a 100644 --- a/crates/api_common/src/request.rs +++ b/crates/api_common/src/request.rs @@ -270,6 +270,9 @@ pub fn build_user_agent(settings: &Settings) -> String { #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use crate::request::{ build_user_agent, fetch_site_metadata, diff --git a/crates/api_common/src/send_activity.rs b/crates/api_common/src/send_activity.rs new file mode 100644 index 000000000..6c91258ec --- /dev/null +++ b/crates/api_common/src/send_activity.rs @@ -0,0 +1,74 @@ +use crate::context::LemmyContext; +use activitypub_federation::config::Data; +use futures::future::BoxFuture; +use lemmy_db_schema::source::post::Post; +use lemmy_utils::{error::LemmyResult, SYNCHRONOUS_FEDERATION}; +use once_cell::sync::{Lazy, OnceCell}; +use tokio::{ + sync::{ + mpsc, + mpsc::{UnboundedReceiver, UnboundedSender, WeakUnboundedSender}, + Mutex, + }, + task::JoinHandle, +}; + +type MatchOutgoingActivitiesBoxed = + Box<for<'a> fn(SendActivityData, &'a Data<LemmyContext>) -> BoxFuture<'a, LemmyResult<()>>>; + +/// This static is necessary so that activities can be sent out synchronously for tests. +pub static MATCH_OUTGOING_ACTIVITIES: OnceCell<MatchOutgoingActivitiesBoxed> = OnceCell::new(); + +#[derive(Debug)] +pub enum SendActivityData { + CreatePost(Post), +} + +// TODO: instead of static, move this into LemmyContext. make sure that stopping the process with +// ctrl+c still works.
+static ACTIVITY_CHANNEL: Lazy<ActivityChannel> = Lazy::new(|| { + let (sender, receiver) = mpsc::unbounded_channel(); + let weak_sender = sender.downgrade(); + ActivityChannel { + weak_sender, + receiver: Mutex::new(receiver), + keepalive_sender: Mutex::new(Some(sender)), + } +}); + +pub struct ActivityChannel { + weak_sender: WeakUnboundedSender<SendActivityData>, + receiver: Mutex<UnboundedReceiver<SendActivityData>>, + keepalive_sender: Mutex<Option<UnboundedSender<SendActivityData>>>, +} + +impl ActivityChannel { + pub async fn retrieve_activity() -> Option<SendActivityData> { + let mut lock = ACTIVITY_CHANNEL.receiver.lock().await; + lock.recv().await + } + + pub async fn submit_activity( + data: SendActivityData, + context: &Data<LemmyContext>, + ) -> LemmyResult<()> { + if *SYNCHRONOUS_FEDERATION { + MATCH_OUTGOING_ACTIVITIES + .get() + .expect("retrieve function pointer")(data, context) + .await?; + } + // could do `ACTIVITY_CHANNEL.keepalive_sender.lock()` instead and get rid of weak_sender, + // not sure which way is more efficient + else if let Some(sender) = ACTIVITY_CHANNEL.weak_sender.upgrade() { + sender.send(data)?; + } + Ok(()) + } + + pub async fn close(outgoing_activities_task: JoinHandle<LemmyResult<()>>) -> LemmyResult<()> { + ACTIVITY_CHANNEL.keepalive_sender.lock().await.take(); + outgoing_activities_task.await??; + Ok(()) + } +} diff --git a/crates/api_common/src/utils.rs b/crates/api_common/src/utils.rs index bd194c0f9..d259b9e4c 100644 --- a/crates/api_common/src/utils.rs +++ b/crates/api_common/src/utils.rs @@ -667,13 +667,13 @@ pub async fn remove_user_data_in_community( // Comments // TODO Diesel doesn't allow updates with joins, so this has to be a loop - let comments = CommentQuery::builder() - .pool(pool) - .creator_id(Some(banned_person_id)) - .community_id(Some(community_id)) - .build() - .list() - .await?; + let comments = CommentQuery { + creator_id: Some(banned_person_id), + community_id: Some(community_id), + ..Default::default() + } + .list(pool) + .await?; for comment_view in &comments { let comment_id = comment_view.comment.id; @@ -731,6 +731,9 @@ pub async fn delete_user_account( #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use crate::utils::{honeypot_check, password_length_check}; #[test] diff --git a/crates/api_crud/Cargo.toml b/crates/api_crud/Cargo.toml index a4dfb4add..1da8335ea 100644 --- a/crates/api_crud/Cargo.toml +++ b/crates/api_crud/Cargo.toml @@ -23,4 +23,4 @@ url = { workspace = true } async-trait = { workspace = true } webmention = "0.4.0" chrono = { workspace = true } -uuid = { workspace = true } \ No newline at end of file +uuid = { workspace = true } diff --git a/crates/api_crud/src/comment/create.rs b/crates/api_crud/src/comment/create.rs index 92775772b..098d1a664 100644 --- a/crates/api_crud/src/comment/create.rs +++ b/crates/api_crud/src/comment/create.rs @@ -16,6 +16,7 @@ use lemmy_api_common::{ }, }; use lemmy_db_schema::{ + impls::actor_language::default_post_language, source::{ actor_language::CommunityLanguage, comment::{Comment, CommentInsertForm, CommentLike, CommentLikeForm, CommentUpdateForm}, @@ -82,25 +83,31 @@ impl PerformCrud for CreateComment { check_comment_depth(parent)?; } - // if no language is set, copy language from parent post/comment - let parent_language = parent_opt - .as_ref() - .map(|p| p.language_id) - .unwrap_or(post.language_id); - let language_id = data.language_id.unwrap_or(parent_language); - CommunityLanguage::is_allowed_community_language( &mut context.pool(), - Some(language_id), + data.language_id, community_id, ) .await?; + // attempt to set default language if none was provided + let
language_id = match data.language_id { + Some(lid) => Some(lid), + None => { + default_post_language( + &mut context.pool(), + community_id, + local_user_view.local_user.id, + ) + .await? + } + }; + let comment_form = CommentInsertForm::builder() .content(content_slurs_removed.clone()) .post_id(data.post_id) .creator_id(local_user_view.person.id) - .language_id(Some(language_id)) + .language_id(language_id) .build(); // Create the comment diff --git a/crates/api_crud/src/community/list.rs b/crates/api_crud/src/community/list.rs index 80ad6129c..bd8189951 100644 --- a/crates/api_crud/src/community/list.rs +++ b/crates/api_crud/src/community/list.rs @@ -31,18 +31,18 @@ impl PerformCrud for ListCommunities { let page = data.page; let limit = data.limit; let local_user = local_user_view.map(|l| l.local_user); - let communities = CommunityQuery::builder() - .pool(&mut context.pool()) - .listing_type(listing_type) - .show_nsfw(show_nsfw) - .sort(sort) - .local_user(local_user.as_ref()) - .page(page) - .limit(limit) - .is_mod_or_admin(is_admin) - .build() - .list() - .await?; + let communities = CommunityQuery { + listing_type, + show_nsfw, + sort, + local_user: local_user.as_ref(), + page, + limit, + is_mod_or_admin: is_admin, + ..Default::default() + } + .list(&mut context.pool()) + .await?; // Return the jwt Ok(ListCommunitiesResponse { communities }) diff --git a/crates/api_crud/src/lib.rs b/crates/api_crud/src/lib.rs index b9449ca69..e79342865 100644 --- a/crates/api_crud/src/lib.rs +++ b/crates/api_crud/src/lib.rs @@ -5,7 +5,7 @@ use lemmy_utils::error::LemmyError; mod comment; mod community; mod custom_emoji; -mod post; +pub mod post; mod private_message; mod site; mod user; diff --git a/crates/api_crud/src/post/create.rs b/crates/api_crud/src/post/create.rs index a7aafe812..458fdb248 100644 --- a/crates/api_crud/src/post/create.rs +++ b/crates/api_crud/src/post/create.rs @@ -1,10 +1,11 @@ -use crate::PerformCrud; -use actix_web::web::Data; +use activitypub_federation::config::Data; +use actix_web::web::Json; use lemmy_api_common::{ build_response::build_post_response, context::LemmyContext, post::{CreatePost, PostResponse}, request::fetch_site_data, + send_activity::{ActivityChannel, SendActivityData}, utils::{ check_community_ban, check_community_deleted_or_removed, @@ -40,147 +41,153 @@ use tracing::Instrument; use url::Url; use webmention::{Webmention, WebmentionError}; -#[async_trait::async_trait(?Send)] -impl PerformCrud for CreatePost { - type Response = PostResponse; +#[tracing::instrument(skip(context))] +pub async fn create_post( + data: Json, + context: Data, +) -> Result, LemmyError> { + let local_user_view = local_user_view_from_jwt(&data.auth, &context).await?; + let local_site = LocalSite::read(&mut context.pool()).await?; - #[tracing::instrument(skip(context))] - async fn perform(&self, context: &Data) -> Result { - let data: &CreatePost = self; - let local_user_view = local_user_view_from_jwt(&data.auth, context).await?; - let local_site = LocalSite::read(&mut context.pool()).await?; + let slur_regex = local_site_to_slur_regex(&local_site); + check_slurs(&data.name, &slur_regex)?; + check_slurs_opt(&data.body, &slur_regex)?; + honeypot_check(&data.honeypot)?; - let slur_regex = local_site_to_slur_regex(&local_site); - check_slurs(&data.name, &slur_regex)?; - check_slurs_opt(&data.body, &slur_regex)?; - honeypot_check(&data.honeypot)?; + let data_url = data.url.as_ref(); + let url = data_url.map(clean_url_params).map(Into::into); // TODO no good way to handle a "clear" - 
let data_url = data.url.as_ref(); - let url = data_url.map(clean_url_params).map(Into::into); // TODO no good way to handle a "clear" + is_valid_post_title(&data.name)?; + is_valid_body_field(&data.body, true)?; + check_url_scheme(&data.url)?; - is_valid_post_title(&data.name)?; - is_valid_body_field(&data.body, true)?; - check_url_scheme(&data.url)?; - - check_community_ban( - local_user_view.person.id, - data.community_id, - &mut context.pool(), - ) - .await?; - check_community_deleted_or_removed(data.community_id, &mut context.pool()).await?; + check_community_ban( + local_user_view.person.id, + data.community_id, + &mut context.pool(), + ) + .await?; + check_community_deleted_or_removed(data.community_id, &mut context.pool()).await?; + let community_id = data.community_id; + let community = Community::read(&mut context.pool(), community_id).await?; + if community.posting_restricted_to_mods { let community_id = data.community_id; - let community = Community::read(&mut context.pool(), community_id).await?; - if community.posting_restricted_to_mods { - let community_id = data.community_id; - let is_mod = CommunityView::is_mod_or_admin( - &mut context.pool(), - local_user_view.local_user.person_id, - community_id, - ) - .await?; - if !is_mod { - return Err(LemmyErrorType::OnlyModsCanPostInCommunity)?; - } - } - - // Fetch post links and pictrs cached image - let (metadata_res, thumbnail_url) = - fetch_site_data(context.client(), context.settings(), data_url, true).await; - let (embed_title, embed_description, embed_video_url) = metadata_res - .map(|u| (u.title, u.description, u.embed_video_url)) - .unwrap_or_default(); - - let language_id = match data.language_id { - Some(lid) => Some(lid), - None => { - default_post_language( - &mut context.pool(), - community_id, - local_user_view.local_user.id, - ) - .await? - } - }; - CommunityLanguage::is_allowed_community_language( + let is_mod = CommunityView::is_mod_or_admin( &mut context.pool(), - language_id, + local_user_view.local_user.person_id, community_id, ) .await?; + if !is_mod { + return Err(LemmyErrorType::OnlyModsCanPostInCommunity)?; + } + } - let post_form = PostInsertForm::builder() - .name(data.name.trim().to_owned()) - .url(url) - .body(data.body.clone()) - .community_id(data.community_id) - .creator_id(local_user_view.person.id) - .nsfw(data.nsfw) - .embed_title(embed_title) - .embed_description(embed_description) - .embed_video_url(embed_video_url) - .language_id(language_id) - .thumbnail_url(thumbnail_url) - .build(); + // Fetch post links and pictrs cached image + let (metadata_res, thumbnail_url) = + fetch_site_data(context.client(), context.settings(), data_url, true).await; + let (embed_title, embed_description, embed_video_url) = metadata_res + .map(|u| (u.title, u.description, u.embed_video_url)) + .unwrap_or_default(); - let inserted_post = Post::create(&mut context.pool(), &post_form) - .await - .with_lemmy_type(LemmyErrorType::CouldntCreatePost)?; + // Only need to check if language is allowed in case user set it explicitly. When using default + // language, it already only returns allowed languages. 
+ CommunityLanguage::is_allowed_community_language( + &mut context.pool(), + data.language_id, + community_id, + ) + .await?; - let inserted_post_id = inserted_post.id; - let protocol_and_hostname = context.settings().get_protocol_and_hostname(); - let apub_id = generate_local_apub_endpoint( - EndpointType::Post, - &inserted_post_id.to_string(), - &protocol_and_hostname, - )?; - let updated_post = Post::update( - &mut context.pool(), - inserted_post_id, - &PostUpdateForm::builder().ap_id(Some(apub_id)).build(), - ) + // attempt to set default language if none was provided + let language_id = match data.language_id { + Some(lid) => Some(lid), + None => { + default_post_language( + &mut context.pool(), + community_id, + local_user_view.local_user.id, + ) + .await? + } + }; + + let post_form = PostInsertForm::builder() + .name(data.name.trim().to_owned()) + .url(url) + .body(data.body.clone()) + .community_id(data.community_id) + .creator_id(local_user_view.person.id) + .nsfw(data.nsfw) + .embed_title(embed_title) + .embed_description(embed_description) + .embed_video_url(embed_video_url) + .language_id(language_id) + .thumbnail_url(thumbnail_url) + .build(); + + let inserted_post = Post::create(&mut context.pool(), &post_form) .await .with_lemmy_type(LemmyErrorType::CouldntCreatePost)?; - // They like their own post by default - let person_id = local_user_view.person.id; - let post_id = inserted_post.id; - let like_form = PostLikeForm { - post_id, - person_id, - score: 1, - }; + let inserted_post_id = inserted_post.id; + let protocol_and_hostname = context.settings().get_protocol_and_hostname(); + let apub_id = generate_local_apub_endpoint( + EndpointType::Post, + &inserted_post_id.to_string(), + &protocol_and_hostname, + )?; + let updated_post = Post::update( + &mut context.pool(), + inserted_post_id, + &PostUpdateForm::builder().ap_id(Some(apub_id)).build(), + ) + .await + .with_lemmy_type(LemmyErrorType::CouldntCreatePost)?; - PostLike::like(&mut context.pool(), &like_form) - .await - .with_lemmy_type(LemmyErrorType::CouldntLikePost)?; + // They like their own post by default + let person_id = local_user_view.person.id; + let post_id = inserted_post.id; + let like_form = PostLikeForm { + post_id, + person_id, + score: 1, + }; - // Mark the post as read - mark_post_as_read(person_id, post_id, &mut context.pool()).await?; + PostLike::like(&mut context.pool(), &like_form) + .await + .with_lemmy_type(LemmyErrorType::CouldntLikePost)?; - if let Some(url) = updated_post.url.clone() { - let task = async move { - let mut webmention = - Webmention::new::(updated_post.ap_id.clone().into(), url.clone().into())?; - webmention.set_checked(true); - match webmention - .send() - .instrument(tracing::info_span!("Sending webmention")) - .await - { - Err(WebmentionError::NoEndpointDiscovered(_)) => Ok(()), - Ok(_) => Ok(()), - Err(e) => Err(e).with_lemmy_type(LemmyErrorType::CouldntSendWebmention), - } - }; - if *SYNCHRONOUS_FEDERATION { - task.await?; - } else { - spawn_try_task(task); + ActivityChannel::submit_activity(SendActivityData::CreatePost(updated_post.clone()), &context) + .await?; + + // Mark the post as read + mark_post_as_read(person_id, post_id, &mut context.pool()).await?; + + if let Some(url) = updated_post.url.clone() { + let task = async move { + let mut webmention = + Webmention::new::(updated_post.ap_id.clone().into(), url.clone().into())?; + webmention.set_checked(true); + match webmention + .send() + .instrument(tracing::info_span!("Sending webmention")) + .await + { + 
Err(WebmentionError::NoEndpointDiscovered(_)) => Ok(()), + Ok(_) => Ok(()), + Err(e) => Err(e).with_lemmy_type(LemmyErrorType::CouldntSendWebmention), } }; + if *SYNCHRONOUS_FEDERATION { + task.await?; + } else { + spawn_try_task(task); + } + }; - build_post_response(context, community_id, person_id, post_id).await - } + Ok(Json( + build_post_response(&context, community_id, person_id, post_id).await?, + )) } diff --git a/crates/api_crud/src/post/mod.rs b/crates/api_crud/src/post/mod.rs index d3d789a02..437955561 100644 --- a/crates/api_crud/src/post/mod.rs +++ b/crates/api_crud/src/post/mod.rs @@ -1,4 +1,4 @@ -mod create; +pub mod create; mod delete; mod read; mod remove; diff --git a/crates/api_crud/src/post/read.rs b/crates/api_crud/src/post/read.rs index af19402ee..e668517d3 100644 --- a/crates/api_crud/src/post/read.rs +++ b/crates/api_crud/src/post/read.rs @@ -100,12 +100,12 @@ impl PerformCrud for GetPost { // Fetch the cross_posts let cross_posts = if let Some(url) = &post_view.post.url { - let mut x_posts = PostQuery::builder() - .pool(&mut context.pool()) - .url_search(Some(url.inner().as_str().into())) - .build() - .list() - .await?; + let mut x_posts = PostQuery { + url_search: Some(url.inner().as_str().into()), + ..Default::default() + } + .list(&mut context.pool()) + .await?; // Don't return this post as one of the cross_posts x_posts.retain(|x| x.post.id != post_id); diff --git a/crates/api_crud/src/private_message/read.rs b/crates/api_crud/src/private_message/read.rs index 73ce034e9..87d8ee66e 100644 --- a/crates/api_crud/src/private_message/read.rs +++ b/crates/api_crud/src/private_message/read.rs @@ -24,15 +24,13 @@ impl PerformCrud for GetPrivateMessages { let page = data.page; let limit = data.limit; let unread_only = data.unread_only; - let mut messages = PrivateMessageQuery::builder() - .pool(&mut context.pool()) - .recipient_id(person_id) - .page(page) - .limit(limit) - .unread_only(unread_only) - .build() - .list() - .await?; + let mut messages = PrivateMessageQuery { + page, + limit, + unread_only, + } + .list(&mut context.pool(), person_id) + .await?; // Messages sent by ourselves should be marked as read. The `read` column in database is only // for the recipient, and shouldnt be exposed to sender. 
diff --git a/crates/api_crud/src/site/create.rs b/crates/api_crud/src/site/create.rs index dab504c8d..540b3c6c1 100644 --- a/crates/api_crud/src/site/create.rs +++ b/crates/api_crud/src/site/create.rs @@ -183,6 +183,9 @@ fn validate_create_payload(local_site: &LocalSite, create_site: &CreateSite) -> #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use crate::site::create::validate_create_payload; use lemmy_api_common::site::CreateSite; use lemmy_db_schema::{source::local_site::LocalSite, ListingType, RegistrationMode}; diff --git a/crates/api_crud/src/site/mod.rs b/crates/api_crud/src/site/mod.rs index d7ae94aca..652b9e656 100644 --- a/crates/api_crud/src/site/mod.rs +++ b/crates/api_crud/src/site/mod.rs @@ -42,6 +42,9 @@ pub fn application_question_check( #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use crate::site::{application_question_check, site_default_post_listing_type_check}; use lemmy_db_schema::{ListingType, RegistrationMode}; diff --git a/crates/api_crud/src/site/update.rs b/crates/api_crud/src/site/update.rs index e5c0bc5e2..ea3c53aa7 100644 --- a/crates/api_crud/src/site/update.rs +++ b/crates/api_crud/src/site/update.rs @@ -217,6 +217,9 @@ fn validate_update_payload(local_site: &LocalSite, edit_site: &EditSite) -> Lemm #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use crate::site::update::validate_update_payload; use lemmy_api_common::site::EditSite; use lemmy_db_schema::{source::local_site::LocalSite, ListingType, RegistrationMode}; diff --git a/crates/api_crud/src/user/create.rs b/crates/api_crud/src/user/create.rs index c2f42bba5..caba9bd8a 100644 --- a/crates/api_crud/src/user/create.rs +++ b/crates/api_crud/src/user/create.rs @@ -138,6 +138,7 @@ impl PerformCrud for Register { .password_encrypted(data.password.to_string()) .show_nsfw(Some(data.show_nsfw)) .accepted_application(accepted_application) + .default_listing_type(Some(local_site.default_post_listing_type)) .build(); let inserted_local_user = LocalUser::create(&mut context.pool(), &local_user_form).await?; diff --git a/crates/apub/Cargo.toml b/crates/apub/Cargo.toml index 0bf02e57c..cdddea4fc 100644 --- a/crates/apub/Cargo.toml +++ b/crates/apub/Cargo.toml @@ -25,7 +25,7 @@ chrono = { workspace = true } serde_json = { workspace = true } serde = { workspace = true } actix-web = { workspace = true } -tokio = {workspace = true} +tokio = { workspace = true } tracing = { workspace = true } strum_macros = { workspace = true } url = { workspace = true } diff --git a/crates/apub/src/activities/block/block_user.rs b/crates/apub/src/activities/block/block_user.rs index f8a1e4b8d..55642f862 100644 --- a/crates/apub/src/activities/block/block_user.rs +++ b/crates/apub/src/activities/block/block_user.rs @@ -9,7 +9,7 @@ use crate::{ verify_person_in_community, }, activity_lists::AnnouncableActivities, - insert_activity, + insert_received_activity, objects::{instance::remote_instance_inboxes, person::ApubPerson}, protocol::activities::block::block_user::BlockUser, }; @@ -124,6 +124,7 @@ impl ActivityHandler for BlockUser { #[tracing::instrument(skip_all)] async fn verify(&self, context: &Data) -> Result<(), LemmyError> { + insert_received_activity(&self.id, context).await?; verify_is_public(&self.to, &self.cc)?; match self.target.dereference(context).await? 
{ SiteOrCommunity::Site(site) => { @@ -147,7 +148,6 @@ impl ActivityHandler for BlockUser { #[tracing::instrument(skip_all)] async fn receive(self, context: &Data) -> Result<(), LemmyError> { - insert_activity(&self.id, &self, false, false, context).await?; let expires = self.expires.map(|u| u.naive_local()); let mod_person = self.actor.dereference(context).await?; let blocked_person = self.object.dereference(context).await?; diff --git a/crates/apub/src/activities/block/undo_block_user.rs b/crates/apub/src/activities/block/undo_block_user.rs index b31f8b4b2..f68349794 100644 --- a/crates/apub/src/activities/block/undo_block_user.rs +++ b/crates/apub/src/activities/block/undo_block_user.rs @@ -7,7 +7,7 @@ use crate::{ verify_is_public, }, activity_lists::AnnouncableActivities, - insert_activity, + insert_received_activity, objects::{instance::remote_instance_inboxes, person::ApubPerson}, protocol::activities::block::{block_user::BlockUser, undo_block_user::UndoBlockUser}, }; @@ -88,6 +88,7 @@ impl ActivityHandler for UndoBlockUser { #[tracing::instrument(skip_all)] async fn verify(&self, context: &Data) -> Result<(), LemmyError> { + insert_received_activity(&self.id, context).await?; verify_is_public(&self.to, &self.cc)?; verify_domains_match(self.actor.inner(), self.object.actor.inner())?; self.object.verify(context).await?; @@ -96,7 +97,6 @@ impl ActivityHandler for UndoBlockUser { #[tracing::instrument(skip_all)] async fn receive(self, context: &Data) -> Result<(), LemmyError> { - insert_activity(&self.id, &self, false, false, context).await?; let expires = self.object.expires.map(|u| u.naive_local()); let mod_person = self.actor.dereference(context).await?; let blocked_person = self.object.object.dereference(context).await?; diff --git a/crates/apub/src/activities/community/announce.rs b/crates/apub/src/activities/community/announce.rs index e33e9fbf4..ed489158e 100644 --- a/crates/apub/src/activities/community/announce.rs +++ b/crates/apub/src/activities/community/announce.rs @@ -6,7 +6,7 @@ use crate::{ verify_person_in_community, }, activity_lists::AnnouncableActivities, - insert_activity, + insert_received_activity, objects::community::ApubCommunity, protocol::{ activities::community::announce::{AnnounceActivity, RawAnnouncableActivities}, @@ -133,14 +133,14 @@ impl ActivityHandler for AnnounceActivity { } #[tracing::instrument(skip_all)] - async fn verify(&self, _context: &Data) -> Result<(), LemmyError> { + async fn verify(&self, context: &Data) -> Result<(), LemmyError> { + insert_received_activity(&self.id, context).await?; verify_is_public(&self.to, &self.cc)?; Ok(()) } #[tracing::instrument(skip_all)] async fn receive(self, context: &Data) -> Result<(), LemmyError> { - insert_activity(&self.id, &self, false, false, context).await?; let object: AnnouncableActivities = self.object.object(context).await?.try_into()?; // This is only for sending, not receiving so we reject it. 
if let AnnouncableActivities::Page(_) = object { diff --git a/crates/apub/src/activities/community/collection_add.rs b/crates/apub/src/activities/community/collection_add.rs index d08b0cb48..c36a8f0da 100644 --- a/crates/apub/src/activities/community/collection_add.rs +++ b/crates/apub/src/activities/community/collection_add.rs @@ -7,7 +7,7 @@ use crate::{ verify_person_in_community, }, activity_lists::AnnouncableActivities, - insert_activity, + insert_received_activity, objects::{community::ApubCommunity, person::ApubPerson, post::ApubPost}, protocol::{ activities::community::{collection_add::CollectionAdd, collection_remove::CollectionRemove}, @@ -108,6 +108,7 @@ impl ActivityHandler for CollectionAdd { #[tracing::instrument(skip_all)] async fn verify(&self, context: &Data) -> Result<(), LemmyError> { + insert_received_activity(&self.id, context).await?; verify_is_public(&self.to, &self.cc)?; let community = self.community(context).await?; verify_person_in_community(&self.actor, &community, context).await?; @@ -117,7 +118,6 @@ impl ActivityHandler for CollectionAdd { #[tracing::instrument(skip_all)] async fn receive(self, context: &Data) -> Result<(), LemmyError> { - insert_activity(&self.id, &self, false, false, context).await?; let (community, collection_type) = Community::get_by_collection_url(&mut context.pool(), &self.target.into()).await?; match collection_type { diff --git a/crates/apub/src/activities/community/collection_remove.rs b/crates/apub/src/activities/community/collection_remove.rs index a1c443ea8..28214284b 100644 --- a/crates/apub/src/activities/community/collection_remove.rs +++ b/crates/apub/src/activities/community/collection_remove.rs @@ -7,7 +7,7 @@ use crate::{ verify_person_in_community, }, activity_lists::AnnouncableActivities, - insert_activity, + insert_received_activity, objects::{community::ApubCommunity, person::ApubPerson, post::ApubPost}, protocol::{activities::community::collection_remove::CollectionRemove, InCommunity}, }; @@ -101,6 +101,7 @@ impl ActivityHandler for CollectionRemove { #[tracing::instrument(skip_all)] async fn verify(&self, context: &Data) -> Result<(), LemmyError> { + insert_received_activity(&self.id, context).await?; verify_is_public(&self.to, &self.cc)?; let community = self.community(context).await?; verify_person_in_community(&self.actor, &community, context).await?; @@ -110,7 +111,6 @@ impl ActivityHandler for CollectionRemove { #[tracing::instrument(skip_all)] async fn receive(self, context: &Data) -> Result<(), LemmyError> { - insert_activity(&self.id, &self, false, false, context).await?; let (community, collection_type) = Community::get_by_collection_url(&mut context.pool(), &self.target.into()).await?; match collection_type { diff --git a/crates/apub/src/activities/community/lock_page.rs b/crates/apub/src/activities/community/lock_page.rs index 0416b972a..94135ede9 100644 --- a/crates/apub/src/activities/community/lock_page.rs +++ b/crates/apub/src/activities/community/lock_page.rs @@ -8,7 +8,7 @@ use crate::{ verify_person_in_community, }, activity_lists::AnnouncableActivities, - insert_activity, + insert_received_activity, protocol::{ activities::community::lock_page::{LockPage, LockType, UndoLockPage}, InCommunity, @@ -79,6 +79,7 @@ impl ActivityHandler for UndoLockPage { } async fn verify(&self, context: &Data) -> Result<(), Self::Error> { + insert_received_activity(&self.id, context).await?; verify_is_public(&self.to, &self.cc)?; let community = self.community(context).await?; verify_person_in_community(&self.actor, 
&community, context).await?; @@ -94,7 +95,6 @@ impl ActivityHandler for UndoLockPage { } async fn receive(self, context: &Data) -> Result<(), Self::Error> { - insert_activity(&self.id, &self, false, false, context).await?; let form = PostUpdateForm::builder().locked(Some(false)).build(); let post = self.object.object.dereference(context).await?; Post::update(&mut context.pool(), post.id, &form).await?; diff --git a/crates/apub/src/activities/community/report.rs b/crates/apub/src/activities/community/report.rs index 1dffacc39..67b84644e 100644 --- a/crates/apub/src/activities/community/report.rs +++ b/crates/apub/src/activities/community/report.rs @@ -1,6 +1,6 @@ use crate::{ activities::{generate_activity_id, send_lemmy_activity, verify_person_in_community}, - insert_activity, + insert_received_activity, objects::{community::ApubCommunity, person::ApubPerson}, protocol::{activities::community::report::Report, InCommunity}, PostOrComment, @@ -115,6 +115,7 @@ impl ActivityHandler for Report { #[tracing::instrument(skip_all)] async fn verify(&self, context: &Data) -> Result<(), LemmyError> { + insert_received_activity(&self.id, context).await?; let community = self.community(context).await?; verify_person_in_community(&self.actor, &community, context).await?; Ok(()) @@ -122,7 +123,6 @@ impl ActivityHandler for Report { #[tracing::instrument(skip_all)] async fn receive(self, context: &Data) -> Result<(), LemmyError> { - insert_activity(&self.id, &self, false, true, context).await?; let actor = self.actor.dereference(context).await?; match self.object.dereference(context).await? { PostOrComment::Post(post) => { diff --git a/crates/apub/src/activities/community/update.rs b/crates/apub/src/activities/community/update.rs index 3e697fddc..fe2477d6e 100644 --- a/crates/apub/src/activities/community/update.rs +++ b/crates/apub/src/activities/community/update.rs @@ -7,7 +7,7 @@ use crate::{ verify_person_in_community, }, activity_lists::AnnouncableActivities, - insert_activity, + insert_received_activity, objects::{community::ApubCommunity, person::ApubPerson}, protocol::{activities::community::update::UpdateCommunity, InCommunity}, SendActivity, @@ -82,6 +82,7 @@ impl ActivityHandler for UpdateCommunity { #[tracing::instrument(skip_all)] async fn verify(&self, context: &Data) -> Result<(), LemmyError> { + insert_received_activity(&self.id, context).await?; verify_is_public(&self.to, &self.cc)?; let community = self.community(context).await?; verify_person_in_community(&self.actor, &community, context).await?; @@ -92,7 +93,6 @@ impl ActivityHandler for UpdateCommunity { #[tracing::instrument(skip_all)] async fn receive(self, context: &Data) -> Result<(), LemmyError> { - insert_activity(&self.id, &self, false, false, context).await?; let community = self.community(context).await?; let community_update_form = self.object.into_update_form(); diff --git a/crates/apub/src/activities/create_or_update/comment.rs b/crates/apub/src/activities/create_or_update/comment.rs index 804f1827b..51b87ed27 100644 --- a/crates/apub/src/activities/create_or_update/comment.rs +++ b/crates/apub/src/activities/create_or_update/comment.rs @@ -7,7 +7,7 @@ use crate::{ verify_person_in_community, }, activity_lists::AnnouncableActivities, - insert_activity, + insert_received_activity, mentions::MentionOrValue, objects::{comment::ApubComment, community::ApubCommunity, person::ApubPerson}, protocol::{ @@ -154,6 +154,7 @@ impl ActivityHandler for CreateOrUpdateNote { #[tracing::instrument(skip_all)] async fn verify(&self, context: 
&Data) -> Result<(), LemmyError> { + insert_received_activity(&self.id, context).await?; verify_is_public(&self.to, &self.cc)?; let post = self.object.get_parents(context).await?.0; let community = self.community(context).await?; @@ -169,7 +170,6 @@ impl ActivityHandler for CreateOrUpdateNote { #[tracing::instrument(skip_all)] async fn receive(self, context: &Data) -> Result<(), LemmyError> { - insert_activity(&self.id, &self, false, false, context).await?; // Need to do this check here instead of Note::from_json because we need the person who // send the activity, not the comment author. let existing_comment = self.object.id.dereference_local(context).await.ok(); diff --git a/crates/apub/src/activities/create_or_update/post.rs b/crates/apub/src/activities/create_or_update/post.rs index e0ce0fec4..4767114f9 100644 --- a/crates/apub/src/activities/create_or_update/post.rs +++ b/crates/apub/src/activities/create_or_update/post.rs @@ -8,7 +8,7 @@ use crate::{ verify_person_in_community, }, activity_lists::AnnouncableActivities, - insert_activity, + insert_received_activity, objects::{community::ApubCommunity, person::ApubPerson, post::ApubPost}, protocol::{ activities::{create_or_update::page::CreateOrUpdatePage, CreateOrUpdateType}, @@ -24,7 +24,7 @@ use activitypub_federation::{ }; use lemmy_api_common::{ context::LemmyContext, - post::{CreatePost, EditPost, PostResponse}, + post::{EditPost, PostResponse}, }; use lemmy_db_schema::{ aggregates::structs::PostAggregates, @@ -39,25 +39,6 @@ use lemmy_db_schema::{ use lemmy_utils::error::{LemmyError, LemmyErrorType}; use url::Url; -#[async_trait::async_trait] -impl SendActivity for CreatePost { - type Response = PostResponse; - - async fn send_activity( - _request: &Self, - response: &Self::Response, - context: &Data, - ) -> Result<(), LemmyError> { - CreateOrUpdatePage::send( - &response.post_view.post, - response.post_view.creator.id, - CreateOrUpdateType::Create, - context, - ) - .await - } -} - #[async_trait::async_trait] impl SendActivity for EditPost { type Response = PostResponse; @@ -68,10 +49,10 @@ impl SendActivity for EditPost { context: &Data, ) -> Result<(), LemmyError> { CreateOrUpdatePage::send( - &response.post_view.post, + response.post_view.post.clone(), response.post_view.creator.id, CreateOrUpdateType::Update, - context, + context.reset_request_count(), ) .await } @@ -102,12 +83,12 @@ impl CreateOrUpdatePage { #[tracing::instrument(skip_all)] pub(crate) async fn send( - post: &Post, + post: Post, person_id: PersonId, kind: CreateOrUpdateType, - context: &Data, + context: Data, ) -> Result<(), LemmyError> { - let post = ApubPost(post.clone()); + let post = ApubPost(post); let community_id = post.community_id; let person: ApubPerson = Person::read(&mut context.pool(), person_id).await?.into(); let community: ApubCommunity = Community::read(&mut context.pool(), community_id) @@ -115,8 +96,8 @@ impl CreateOrUpdatePage { .into(); let create_or_update = - CreateOrUpdatePage::new(post, &person, &community, kind, context).await?; - let is_mod_action = create_or_update.object.is_mod_action(context).await?; + CreateOrUpdatePage::new(post, &person, &community, kind, &context).await?; + let is_mod_action = create_or_update.object.is_mod_action(&context).await?; let activity = AnnouncableActivities::CreateOrUpdatePost(create_or_update); send_activity_in_community( activity, @@ -124,7 +105,7 @@ impl CreateOrUpdatePage { &community, vec![], is_mod_action, - context, + &context, ) .await?; Ok(()) @@ -146,6 +127,7 @@ impl ActivityHandler for 
CreateOrUpdatePage { #[tracing::instrument(skip_all)] async fn verify(&self, context: &Data) -> Result<(), LemmyError> { + insert_received_activity(&self.id, context).await?; verify_is_public(&self.to, &self.cc)?; let community = self.community(context).await?; verify_person_in_community(&self.actor, &community, context).await?; @@ -180,7 +162,6 @@ impl ActivityHandler for CreateOrUpdatePage { #[tracing::instrument(skip_all)] async fn receive(self, context: &Data) -> Result<(), LemmyError> { - insert_activity(&self.id, &self, false, false, context).await?; let post = ApubPost::from_json(self.object, context).await?; // author likes their own post by default diff --git a/crates/apub/src/activities/create_or_update/private_message.rs b/crates/apub/src/activities/create_or_update/private_message.rs index 36c9785da..3eaad2f71 100644 --- a/crates/apub/src/activities/create_or_update/private_message.rs +++ b/crates/apub/src/activities/create_or_update/private_message.rs @@ -1,6 +1,6 @@ use crate::{ activities::{generate_activity_id, send_lemmy_activity, verify_person}, - insert_activity, + insert_received_activity, objects::{person::ApubPerson, private_message::ApubPrivateMessage}, protocol::activities::{ create_or_update::chat_message::CreateOrUpdateChatMessage, @@ -109,6 +109,7 @@ impl ActivityHandler for CreateOrUpdateChatMessage { #[tracing::instrument(skip_all)] async fn verify(&self, context: &Data) -> Result<(), LemmyError> { + insert_received_activity(&self.id, context).await?; verify_person(&self.actor, context).await?; verify_domains_match(self.actor.inner(), self.object.id.inner())?; verify_domains_match(self.to[0].inner(), self.object.to[0].inner())?; @@ -118,7 +119,6 @@ impl ActivityHandler for CreateOrUpdateChatMessage { #[tracing::instrument(skip_all)] async fn receive(self, context: &Data) -> Result<(), LemmyError> { - insert_activity(&self.id, &self, false, true, context).await?; ApubPrivateMessage::from_json(self.object, context).await?; Ok(()) } diff --git a/crates/apub/src/activities/deletion/delete.rs b/crates/apub/src/activities/deletion/delete.rs index 8ad104173..fcdede8d7 100644 --- a/crates/apub/src/activities/deletion/delete.rs +++ b/crates/apub/src/activities/deletion/delete.rs @@ -3,7 +3,7 @@ use crate::{ deletion::{receive_delete_action, verify_delete_activity, DeletableObjects}, generate_activity_id, }, - insert_activity, + insert_received_activity, objects::person::ApubPerson, protocol::{activities::deletion::delete::Delete, IdOrNestedObject}, }; @@ -43,13 +43,13 @@ impl ActivityHandler for Delete { #[tracing::instrument(skip_all)] async fn verify(&self, context: &Data) -> Result<(), LemmyError> { + insert_received_activity(&self.id, context).await?; verify_delete_activity(self, self.summary.is_some(), context).await?; Ok(()) } #[tracing::instrument(skip_all)] async fn receive(self, context: &Data) -> Result<(), LemmyError> { - insert_activity(&self.id, &self, false, false, context).await?; if let Some(reason) = self.summary { // We set reason to empty string if it doesn't exist, to distinguish between delete and // remove. Here we change it back to option, so we don't write it to db. 
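Across the activity handlers in this diff the pattern is the same: the old insert_activity call at the top of receive() is replaced by insert_received_activity at the top of verify(), so a duplicate delivery is rejected before any dereferencing or database writes happen. The sketch below illustrates that dedup idea only; it is an in-memory stand-in under assumed names (the real change records the id in the received_activity table via ReceivedActivity::create, presumably backed by a uniqueness check on ap_id).

use std::collections::HashSet;
use std::sync::Mutex;

// Hypothetical in-memory stand-in for the `received_activity` table.
static SEEN: Mutex<Option<HashSet<String>>> = Mutex::new(None);

// Returns an error if this activity id was already recorded, so a duplicate
// delivery is rejected before any expensive verification or side effects run.
fn insert_received_activity(ap_id: &str) -> Result<(), String> {
  let mut guard = SEEN.lock().map_err(|_| "poisoned lock".to_string())?;
  let seen = guard.get_or_insert_with(HashSet::new);
  if seen.insert(ap_id.to_string()) {
    Ok(())
  } else {
    Err(format!("duplicate activity: {ap_id}"))
  }
}

fn main() {
  assert!(insert_received_activity("https://example.com/activities/1").is_ok());
  // A second delivery of the same id is rejected.
  assert!(insert_received_activity("https://example.com/activities/1").is_err());
}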
diff --git a/crates/apub/src/activities/deletion/delete_user.rs b/crates/apub/src/activities/deletion/delete_user.rs index d74a3c8aa..b388ed9e1 100644 --- a/crates/apub/src/activities/deletion/delete_user.rs +++ b/crates/apub/src/activities/deletion/delete_user.rs @@ -1,6 +1,6 @@ use crate::{ activities::{generate_activity_id, send_lemmy_activity, verify_is_public, verify_person}, - insert_activity, + insert_received_activity, objects::{instance::remote_instance_inboxes, person::ApubPerson}, protocol::activities::deletion::delete_user::DeleteUser, SendActivity, @@ -73,6 +73,7 @@ impl ActivityHandler for DeleteUser { } async fn verify(&self, context: &Data) -> Result<(), LemmyError> { + insert_received_activity(&self.id, context).await?; verify_is_public(&self.to, &[])?; verify_person(&self.actor, context).await?; verify_urls_match(self.actor.inner(), self.object.inner())?; @@ -80,7 +81,6 @@ impl ActivityHandler for DeleteUser { } async fn receive(self, context: &Data) -> Result<(), LemmyError> { - insert_activity(&self.id, &self, false, false, context).await?; let actor = self.actor.dereference(context).await?; delete_user_account( actor.id, diff --git a/crates/apub/src/activities/deletion/undo_delete.rs b/crates/apub/src/activities/deletion/undo_delete.rs index e10bd0660..541a7455f 100644 --- a/crates/apub/src/activities/deletion/undo_delete.rs +++ b/crates/apub/src/activities/deletion/undo_delete.rs @@ -3,7 +3,7 @@ use crate::{ deletion::{receive_delete_action, verify_delete_activity, DeletableObjects}, generate_activity_id, }, - insert_activity, + insert_received_activity, objects::person::ApubPerson, protocol::activities::deletion::{delete::Delete, undo_delete::UndoDelete}, }; @@ -42,6 +42,7 @@ impl ActivityHandler for UndoDelete { } async fn verify(&self, data: &Data) -> Result<(), Self::Error> { + insert_received_activity(&self.id, data).await?; self.object.verify(data).await?; verify_delete_activity(&self.object, self.object.summary.is_some(), data).await?; Ok(()) @@ -49,7 +50,6 @@ impl ActivityHandler for UndoDelete { #[tracing::instrument(skip_all)] async fn receive(self, context: &Data) -> Result<(), LemmyError> { - insert_activity(&self.id, &self, false, false, context).await?; if self.object.summary.is_some() { UndoDelete::receive_undo_remove_action( &self.actor.dereference(context).await?, diff --git a/crates/apub/src/activities/following/accept.rs b/crates/apub/src/activities/following/accept.rs index af7d63725..adaad51d1 100644 --- a/crates/apub/src/activities/following/accept.rs +++ b/crates/apub/src/activities/following/accept.rs @@ -1,6 +1,6 @@ use crate::{ activities::{generate_activity_id, send_lemmy_activity}, - insert_activity, + insert_received_activity, protocol::activities::following::{accept::AcceptFollow, follow::Follow}, }; use activitypub_federation::{ @@ -50,6 +50,7 @@ impl ActivityHandler for AcceptFollow { #[tracing::instrument(skip_all)] async fn verify(&self, context: &Data) -> Result<(), LemmyError> { + insert_received_activity(&self.id, context).await?; verify_urls_match(self.actor.inner(), self.object.object.inner())?; self.object.verify(context).await?; if let Some(to) = &self.to { @@ -60,7 +61,6 @@ impl ActivityHandler for AcceptFollow { #[tracing::instrument(skip_all)] async fn receive(self, context: &Data) -> Result<(), LemmyError> { - insert_activity(&self.id, &self, false, true, context).await?; let community = self.actor.dereference(context).await?; let person = self.object.actor.dereference(context).await?; // This will throw an error if no 
follow was requested diff --git a/crates/apub/src/activities/following/follow.rs b/crates/apub/src/activities/following/follow.rs index 073784da1..2f0f5037a 100644 --- a/crates/apub/src/activities/following/follow.rs +++ b/crates/apub/src/activities/following/follow.rs @@ -6,7 +6,7 @@ use crate::{ verify_person_in_community, }, fetcher::user_or_community::UserOrCommunity, - insert_activity, + insert_received_activity, objects::{community::ApubCommunity, person::ApubPerson}, protocol::activities::following::{ accept::AcceptFollow, @@ -90,6 +90,7 @@ impl ActivityHandler for Follow { #[tracing::instrument(skip_all)] async fn verify(&self, context: &Data) -> Result<(), LemmyError> { + insert_received_activity(&self.id, context).await?; verify_person(&self.actor, context).await?; let object = self.object.dereference(context).await?; if let UserOrCommunity::Community(c) = object { @@ -103,7 +104,6 @@ impl ActivityHandler for Follow { #[tracing::instrument(skip_all)] async fn receive(self, context: &Data) -> Result<(), LemmyError> { - insert_activity(&self.id, &self, false, true, context).await?; let actor = self.actor.dereference(context).await?; let object = self.object.dereference(context).await?; match object { diff --git a/crates/apub/src/activities/following/undo_follow.rs b/crates/apub/src/activities/following/undo_follow.rs index 9f18ccfbc..c36b36df8 100644 --- a/crates/apub/src/activities/following/undo_follow.rs +++ b/crates/apub/src/activities/following/undo_follow.rs @@ -1,7 +1,7 @@ use crate::{ activities::{generate_activity_id, send_lemmy_activity, verify_person}, fetcher::user_or_community::UserOrCommunity, - insert_activity, + insert_received_activity, objects::{community::ApubCommunity, person::ApubPerson}, protocol::activities::following::{follow::Follow, undo_follow::UndoFollow}, }; @@ -60,6 +60,7 @@ impl ActivityHandler for UndoFollow { #[tracing::instrument(skip_all)] async fn verify(&self, context: &Data) -> Result<(), LemmyError> { + insert_received_activity(&self.id, context).await?; verify_urls_match(self.actor.inner(), self.object.actor.inner())?; verify_person(&self.actor, context).await?; self.object.verify(context).await?; @@ -71,7 +72,6 @@ impl ActivityHandler for UndoFollow { #[tracing::instrument(skip_all)] async fn receive(self, context: &Data) -> Result<(), LemmyError> { - insert_activity(&self.id, &self, false, true, context).await?; let person = self.actor.dereference(context).await?; let object = self.object.object.dereference(context).await?; diff --git a/crates/apub/src/activities/mod.rs b/crates/apub/src/activities/mod.rs index 41a66c7dc..02ad0b6b1 100644 --- a/crates/apub/src/activities/mod.rs +++ b/crates/apub/src/activities/mod.rs @@ -1,6 +1,6 @@ use crate::{ - insert_activity, objects::{community::ApubCommunity, person::ApubPerson}, + protocol::activities::{create_or_update::page::CreateOrUpdatePage, CreateOrUpdateType}, CONTEXT, }; use activitypub_federation::{ @@ -12,12 +12,28 @@ use activitypub_federation::{ traits::{ActivityHandler, Actor}, }; use anyhow::anyhow; -use lemmy_api_common::context::LemmyContext; -use lemmy_db_schema::{newtypes::CommunityId, source::community::Community}; +use lemmy_api_common::{ + context::LemmyContext, + send_activity::{ActivityChannel, SendActivityData}, +}; +use lemmy_db_schema::{ + newtypes::CommunityId, + source::{ + activity::{SentActivity, SentActivityForm}, + community::Community, + instance::Instance, + }, +}; use lemmy_db_views_actor::structs::{CommunityPersonBanView, CommunityView}; -use 
lemmy_utils::error::{LemmyError, LemmyErrorExt, LemmyErrorType}; +use lemmy_utils::{ + error::{LemmyError, LemmyErrorExt, LemmyErrorType, LemmyResult}, + spawn_try_task, + SYNCHRONOUS_FEDERATION, +}; +use moka::future::Cache; +use once_cell::sync::Lazy; use serde::Serialize; -use std::ops::Deref; +use std::{ops::Deref, sync::Arc, time::Duration}; use tracing::info; use url::{ParseError, Url}; use uuid::Uuid; @@ -30,6 +46,10 @@ pub mod following; pub mod unfederated; pub mod voting; +/// Amount of time that the list of dead instances is cached. This is only updated once a day, +/// so there is no harm in caching it for a longer time. +pub static DEAD_INSTANCE_LIST_CACHE_DURATION: Duration = Duration::from_secs(30 * 60); + /// Checks that the specified Url actually identifies a Person (by fetching it), and that the person /// doesn't have a site ban. #[tracing::instrument(skip_all)] @@ -148,7 +168,7 @@ async fn send_lemmy_activity( data: &Data, activity: Activity, actor: &ActorT, - inbox: Vec, + mut inbox: Vec, sensitive: bool, ) -> Result<(), LemmyError> where @@ -156,11 +176,62 @@ where ActorT: Actor, Activity: ActivityHandler, { + static CACHE: Lazy>>> = Lazy::new(|| { + Cache::builder() + .max_capacity(1) + .time_to_live(DEAD_INSTANCE_LIST_CACHE_DURATION) + .build() + }); + let dead_instances = CACHE + .try_get_with((), async { + Ok::<_, diesel::result::Error>(Arc::new(Instance::dead_instances(&mut data.pool()).await?)) + }) + .await?; + + inbox.retain(|i| { + let domain = i.domain().expect("has domain").to_string(); + !dead_instances.contains(&domain) + }); info!("Sending activity {}", activity.id().to_string()); let activity = WithContext::new(activity, CONTEXT.deref().clone()); - insert_activity(activity.id(), &activity, true, sensitive, data).await?; + let form = SentActivityForm { + ap_id: activity.id().clone().into(), + data: serde_json::to_value(activity.clone())?, + sensitive, + }; + SentActivity::create(&mut data.pool(), form).await?; send_activity(activity, actor, inbox, data).await?; Ok(()) } + +pub async fn handle_outgoing_activities(context: Data) -> LemmyResult<()> { + while let Some(data) = ActivityChannel::retrieve_activity().await { + match_outgoing_activities(data, &context.reset_request_count()).await? 
+ } + Ok(()) +} + +pub async fn match_outgoing_activities( + data: SendActivityData, + context: &Data, +) -> LemmyResult<()> { + let fed_task = match data { + SendActivityData::CreatePost(post) => { + let creator_id = post.creator_id; + CreateOrUpdatePage::send( + post, + creator_id, + CreateOrUpdateType::Create, + context.reset_request_count(), + ) + } + }; + if *SYNCHRONOUS_FEDERATION { + fed_task.await?; + } else { + spawn_try_task(fed_task); + } + Ok(()) +} diff --git a/crates/apub/src/activities/voting/undo_vote.rs b/crates/apub/src/activities/voting/undo_vote.rs index bcb8ee406..9616c651f 100644 --- a/crates/apub/src/activities/voting/undo_vote.rs +++ b/crates/apub/src/activities/voting/undo_vote.rs @@ -4,7 +4,7 @@ use crate::{ verify_person_in_community, voting::{undo_vote_comment, undo_vote_post}, }, - insert_activity, + insert_received_activity, objects::{community::ApubCommunity, person::ApubPerson}, protocol::{ activities::voting::{undo_vote::UndoVote, vote::Vote}, @@ -57,6 +57,7 @@ impl ActivityHandler for UndoVote { #[tracing::instrument(skip_all)] async fn verify(&self, context: &Data) -> Result<(), LemmyError> { + insert_received_activity(&self.id, context).await?; let community = self.community(context).await?; verify_person_in_community(&self.actor, &community, context).await?; verify_urls_match(self.actor.inner(), self.object.actor.inner())?; @@ -66,7 +67,6 @@ impl ActivityHandler for UndoVote { #[tracing::instrument(skip_all)] async fn receive(self, context: &Data) -> Result<(), LemmyError> { - insert_activity(&self.id, &self, false, true, context).await?; let actor = self.actor.dereference(context).await?; let object = self.object.object.dereference(context).await?; match object { diff --git a/crates/apub/src/activities/voting/vote.rs b/crates/apub/src/activities/voting/vote.rs index 4de9a8c17..ef4572986 100644 --- a/crates/apub/src/activities/voting/vote.rs +++ b/crates/apub/src/activities/voting/vote.rs @@ -4,7 +4,7 @@ use crate::{ verify_person_in_community, voting::{vote_comment, vote_post}, }, - insert_activity, + insert_received_activity, objects::{community::ApubCommunity, person::ApubPerson}, protocol::{ activities::voting::vote::{Vote, VoteType}, @@ -56,6 +56,7 @@ impl ActivityHandler for Vote { #[tracing::instrument(skip_all)] async fn verify(&self, context: &Data) -> Result<(), LemmyError> { + insert_received_activity(&self.id, context).await?; let community = self.community(context).await?; verify_person_in_community(&self.actor, &community, context).await?; let enable_downvotes = LocalSite::read(&mut context.pool()) @@ -70,7 +71,6 @@ impl ActivityHandler for Vote { #[tracing::instrument(skip_all)] async fn receive(self, context: &Data) -> Result<(), LemmyError> { - insert_activity(&self.id, &self, false, true, context).await?; let actor = self.actor.dereference(context).await?; let object = self.object.dereference(context).await?; match object { diff --git a/crates/apub/src/activity_lists.rs b/crates/apub/src/activity_lists.rs index 705849552..4cce3372f 100644 --- a/crates/apub/src/activity_lists.rs +++ b/crates/apub/src/activity_lists.rs @@ -134,6 +134,9 @@ impl InCommunity for AnnouncableActivities { #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use crate::{ activity_lists::{ GroupInboxActivities, diff --git a/crates/apub/src/api/list_comments.rs b/crates/apub/src/api/list_comments.rs index e3b7d0659..f07ce3dad 100644 --- a/crates/apub/src/api/list_comments.rs +++ b/crates/apub/src/api/list_comments.rs 
@@ -39,7 +39,11 @@ pub async fn list_comments( let limit = data.limit; let parent_id = data.parent_id; - let listing_type = listing_type_with_default(data.type_, &local_site, community_id)?; + let listing_type = Some(listing_type_with_default( + data.type_, + &local_site, + community_id, + )?); // If a parent_id is given, fetch the comment to get the path let parent_path = if let Some(parent_id) = parent_id { @@ -50,23 +54,22 @@ pub async fn list_comments( let parent_path_cloned = parent_path.clone(); let post_id = data.post_id; - let local_user = local_user_view.map(|l| l.local_user); - let comments = CommentQuery::builder() - .pool(&mut context.pool()) - .listing_type(Some(listing_type)) - .sort(sort) - .max_depth(max_depth) - .saved_only(saved_only) - .community_id(community_id) - .parent_path(parent_path_cloned) - .post_id(post_id) - .local_user(local_user.as_ref()) - .page(page) - .limit(limit) - .build() - .list() - .await - .with_lemmy_type(LemmyErrorType::CouldntGetComments)?; + let comments = CommentQuery { + listing_type, + sort, + max_depth, + saved_only, + community_id, + parent_path: parent_path_cloned, + post_id, + local_user: local_user_view.as_ref(), + page, + limit, + ..Default::default() + } + .list(&mut context.pool()) + .await + .with_lemmy_type(LemmyErrorType::CouldntGetComments)?; Ok(Json(GetCommentsResponse { comments })) } diff --git a/crates/apub/src/api/list_posts.rs b/crates/apub/src/api/list_posts.rs index 63d6800e8..2ebd6b766 100644 --- a/crates/apub/src/api/list_posts.rs +++ b/crates/apub/src/api/list_posts.rs @@ -8,7 +8,7 @@ use actix_web::web::{Json, Query}; use lemmy_api_common::{ context::LemmyContext, post::{GetPosts, GetPostsResponse}, - utils::{check_private_instance, is_mod_or_admin_opt, local_user_view_from_jwt_opt}, + utils::{check_private_instance, local_user_view_from_jwt_opt}, }; use lemmy_db_schema::source::{community::Community, local_site::LocalSite}; use lemmy_db_views::post_view::PostQuery; @@ -36,27 +36,25 @@ pub async fn list_posts( }; let saved_only = data.saved_only; - let listing_type = listing_type_with_default(data.type_, &local_site, community_id)?; + let listing_type = Some(listing_type_with_default( + data.type_, + &local_site, + community_id, + )?); - let is_mod_or_admin = - is_mod_or_admin_opt(&mut context.pool(), local_user_view.as_ref(), community_id) - .await - .is_ok(); - - let posts = PostQuery::builder() - .pool(&mut context.pool()) - .local_user(local_user_view.map(|l| l.local_user).as_ref()) - .listing_type(Some(listing_type)) - .sort(sort) - .community_id(community_id) - .saved_only(saved_only) - .page(page) - .limit(limit) - .is_mod_or_admin(Some(is_mod_or_admin)) - .build() - .list() - .await - .with_lemmy_type(LemmyErrorType::CouldntGetPosts)?; + let posts = PostQuery { + local_user: local_user_view.as_ref(), + listing_type, + sort, + community_id, + saved_only, + page, + limit, + ..Default::default() + } + .list(&mut context.pool()) + .await + .with_lemmy_type(LemmyErrorType::CouldntGetPosts)?; Ok(Json(GetPostsResponse { posts })) } diff --git a/crates/apub/src/api/read_person.rs b/crates/apub/src/api/read_person.rs index 71f4abd51..5d3c73c30 100644 --- a/crates/apub/src/api/read_person.rs +++ b/crates/apub/src/api/read_person.rs @@ -4,7 +4,7 @@ use actix_web::web::{Json, Query}; use lemmy_api_common::{ context::LemmyContext, person::{GetPersonDetails, GetPersonDetailsResponse}, - utils::{check_private_instance, is_admin, local_user_view_from_jwt_opt}, + utils::{check_private_instance, local_user_view_from_jwt_opt}, 
}; use lemmy_db_schema::{ source::{local_site::LocalSite, person::Person}, @@ -26,7 +26,6 @@ pub async fn read_person( let local_user_view = local_user_view_from_jwt_opt(data.auth.as_ref(), &context).await; let local_site = LocalSite::read(&mut context.pool()).await?; - let is_admin = local_user_view.as_ref().map(|luv| is_admin(luv).is_ok()); check_private_instance(&local_user_view, &local_site)?; @@ -53,52 +52,42 @@ pub async fn read_person( let limit = data.limit; let saved_only = data.saved_only; let community_id = data.community_id; - let local_user = local_user_view.map(|l| l.local_user); - let local_user_clone = local_user.clone(); + // If its saved only, you don't care what creator it was + // Or, if its not saved, then you only want it for that specific creator + let creator_id = if !saved_only.unwrap_or(false) { + Some(person_details_id) + } else { + None + }; - let posts = PostQuery::builder() - .pool(&mut context.pool()) - .sort(sort) - .saved_only(saved_only) - .local_user(local_user.as_ref()) - .community_id(community_id) - .is_mod_or_admin(is_admin) - .page(page) - .limit(limit) - .creator_id( - // If its saved only, you don't care what creator it was - // Or, if its not saved, then you only want it for that specific creator - if !saved_only.unwrap_or(false) { - Some(person_details_id) - } else { - None - }, - ) - .build() - .list() - .await?; + let posts = PostQuery { + sort, + saved_only, + local_user: local_user_view.as_ref(), + community_id, + is_profile_view: Some(true), + page, + limit, + creator_id, + ..Default::default() + } + .list(&mut context.pool()) + .await?; - let comments = CommentQuery::builder() - .pool(&mut context.pool()) - .local_user(local_user_clone.as_ref()) - .sort(sort.map(post_to_comment_sort_type)) - .saved_only(saved_only) - .show_deleted_and_removed(Some(false)) - .community_id(community_id) - .page(page) - .limit(limit) - .creator_id( - // If its saved only, you don't care what creator it was - // Or, if its not saved, then you only want it for that specific creator - if !saved_only.unwrap_or(false) { - Some(person_details_id) - } else { - None - }, - ) - .build() - .list() - .await?; + let comments = CommentQuery { + local_user: (local_user_view.as_ref()), + sort: (sort.map(post_to_comment_sort_type)), + saved_only: (saved_only), + show_deleted_and_removed: (Some(false)), + community_id: (community_id), + is_profile_view: Some(true), + page: (page), + limit: (limit), + creator_id, + ..Default::default() + } + .list(&mut context.pool()) + .await?; let moderates = CommunityModeratorView::for_person(&mut context.pool(), person_details_id).await?; diff --git a/crates/apub/src/api/search.rs b/crates/apub/src/api/search.rs index b7723a6f4..ca84606ff 100644 --- a/crates/apub/src/api/search.rs +++ b/crates/apub/src/api/search.rs @@ -50,119 +50,116 @@ pub async fn search( data.community_id }; let creator_id = data.creator_id; - let local_user = local_user_view.map(|l| l.local_user); + let local_user = local_user_view.as_ref().map(|l| l.local_user.clone()); match search_type { SearchType::Posts => { - posts = PostQuery::builder() - .pool(&mut context.pool()) - .sort(sort) - .listing_type(listing_type) - .community_id(community_id) - .creator_id(creator_id) - .local_user(local_user.as_ref()) - .search_term(Some(q)) - .is_mod_or_admin(is_admin) - .page(page) - .limit(limit) - .build() - .list() - .await?; + posts = PostQuery { + sort: (sort), + listing_type: (listing_type), + community_id: (community_id), + creator_id: (creator_id), + local_user: 
(local_user_view.as_ref()), + search_term: (Some(q)), + page: (page), + limit: (limit), + ..Default::default() + } + .list(&mut context.pool()) + .await?; } SearchType::Comments => { - comments = CommentQuery::builder() - .pool(&mut context.pool()) - .sort(sort.map(post_to_comment_sort_type)) - .listing_type(listing_type) - .search_term(Some(q)) - .community_id(community_id) - .creator_id(creator_id) - .local_user(local_user.as_ref()) - .page(page) - .limit(limit) - .build() - .list() - .await?; + comments = CommentQuery { + sort: (sort.map(post_to_comment_sort_type)), + listing_type: (listing_type), + search_term: (Some(q)), + community_id: (community_id), + creator_id: (creator_id), + local_user: (local_user_view.as_ref()), + page: (page), + limit: (limit), + ..Default::default() + } + .list(&mut context.pool()) + .await?; } SearchType::Communities => { - communities = CommunityQuery::builder() - .pool(&mut context.pool()) - .sort(sort) - .listing_type(listing_type) - .search_term(Some(q)) - .local_user(local_user.as_ref()) - .is_mod_or_admin(is_admin) - .page(page) - .limit(limit) - .build() - .list() - .await?; + communities = CommunityQuery { + sort: (sort), + listing_type: (listing_type), + search_term: (Some(q)), + local_user: (local_user.as_ref()), + is_mod_or_admin: (is_admin), + page: (page), + limit: (limit), + ..Default::default() + } + .list(&mut context.pool()) + .await?; } SearchType::Users => { - users = PersonQuery::builder() - .pool(&mut context.pool()) - .sort(sort) - .search_term(Some(q)) - .page(page) - .limit(limit) - .build() - .list() - .await?; + users = PersonQuery { + sort: (sort), + search_term: (Some(q)), + page: (page), + limit: (limit), + } + .list(&mut context.pool()) + .await?; } SearchType::All => { // If the community or creator is included, dont search communities or users let community_or_creator_included = data.community_id.is_some() || data.community_name.is_some() || data.creator_id.is_some(); - let local_user_ = local_user.clone(); - posts = PostQuery::builder() - .pool(&mut context.pool()) - .sort(sort) - .listing_type(listing_type) - .community_id(community_id) - .creator_id(creator_id) - .local_user(local_user_.as_ref()) - .search_term(Some(q)) - .is_mod_or_admin(is_admin) - .page(page) - .limit(limit) - .build() - .list() - .await?; + let q = data.q.clone(); + + posts = PostQuery { + sort: (sort), + listing_type: (listing_type), + community_id: (community_id), + creator_id: (creator_id), + local_user: (local_user_view.as_ref()), + search_term: (Some(q)), + page: (page), + limit: (limit), + ..Default::default() + } + .list(&mut context.pool()) + .await?; let q = data.q.clone(); - let local_user_ = local_user.clone(); - comments = CommentQuery::builder() - .pool(&mut context.pool()) - .sort(sort.map(post_to_comment_sort_type)) - .listing_type(listing_type) - .search_term(Some(q)) - .community_id(community_id) - .creator_id(creator_id) - .local_user(local_user_.as_ref()) - .page(page) - .limit(limit) - .build() - .list() - .await?; + comments = CommentQuery { + sort: (sort.map(post_to_comment_sort_type)), + listing_type: (listing_type), + search_term: (Some(q)), + community_id: (community_id), + creator_id: (creator_id), + local_user: (local_user_view.as_ref()), + page: (page), + limit: (limit), + ..Default::default() + } + .list(&mut context.pool()) + .await?; let q = data.q.clone(); communities = if community_or_creator_included { vec![] } else { - CommunityQuery::builder() - .pool(&mut context.pool()) - .sort(sort) - .listing_type(listing_type) 
- .search_term(Some(q)) - .local_user(local_user.as_ref()) - .is_mod_or_admin(is_admin) - .page(page) - .limit(limit) - .build() - .list() - .await? + CommunityQuery { + sort: (sort), + listing_type: (listing_type), + search_term: (Some(q)), + local_user: (local_user.as_ref()), + is_mod_or_admin: (is_admin), + page: (page), + limit: (limit), + ..Default::default() + } + .list(&mut context.pool()) + .await? }; let q = data.q.clone(); @@ -170,31 +167,29 @@ pub async fn search( users = if community_or_creator_included { vec![] } else { - PersonQuery::builder() - .pool(&mut context.pool()) - .sort(sort) - .search_term(Some(q)) - .page(page) - .limit(limit) - .build() - .list() - .await? + PersonQuery { + sort: (sort), + search_term: (Some(q)), + page: (page), + limit: (limit), + } + .list(&mut context.pool()) + .await? }; } SearchType::Url => { - posts = PostQuery::builder() - .pool(&mut context.pool()) - .sort(sort) - .listing_type(listing_type) - .community_id(community_id) - .creator_id(creator_id) - .url_search(Some(q)) - .is_mod_or_admin(is_admin) - .page(page) - .limit(limit) - .build() - .list() - .await?; + posts = PostQuery { + sort: (sort), + listing_type: (listing_type), + community_id: (community_id), + creator_id: (creator_id), + url_search: (Some(q)), + page: (page), + limit: (limit), + ..Default::default() + } + .list(&mut context.pool()) + .await?; } }; diff --git a/crates/apub/src/collections/community_moderators.rs b/crates/apub/src/collections/community_moderators.rs index 336bbfb12..cdaf985ea 100644 --- a/crates/apub/src/collections/community_moderators.rs +++ b/crates/apub/src/collections/community_moderators.rs @@ -78,18 +78,20 @@ impl Collection for ApubCommunityModerators { // Add new mods to database which have been added to moderators collection for mod_id in apub.ordered_items { - let mod_user: ApubPerson = mod_id.dereference(data).await?; - - if !current_moderators - .iter() - .map(|c| c.moderator.actor_id.clone()) - .any(|x| x == mod_user.actor_id) - { - let community_moderator_form = CommunityModeratorForm { - community_id: owner.id, - person_id: mod_user.id, - }; - CommunityModerator::join(&mut data.pool(), &community_moderator_form).await?; + // Ignore errors as mod accounts might be deleted or instances unavailable. + let mod_user: Option = mod_id.dereference(data).await.ok(); + if let Some(mod_user) = mod_user { + if !current_moderators + .iter() + .map(|c| c.moderator.actor_id.clone()) + .any(|x| x == mod_user.actor_id) + { + let community_moderator_form = CommunityModeratorForm { + community_id: owner.id, + person_id: mod_user.id, + }; + CommunityModerator::join(&mut data.pool(), &community_moderator_form).await?; + } } } @@ -100,6 +102,9 @@ impl Collection for ApubCommunityModerators { #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use super::*; use crate::{ objects::{ diff --git a/crates/apub/src/http/mod.rs b/crates/apub/src/http/mod.rs index 52a014434..c261d9e49 100644 --- a/crates/apub/src/http/mod.rs +++ b/crates/apub/src/http/mod.rs @@ -13,7 +13,7 @@ use activitypub_federation::{ use actix_web::{web, web::Bytes, HttpRequest, HttpResponse}; use http::StatusCode; use lemmy_api_common::context::LemmyContext; -use lemmy_db_schema::source::activity::Activity; +use lemmy_db_schema::source::activity::SentActivity; use lemmy_utils::error::{LemmyError, LemmyErrorType, LemmyResult}; use serde::{Deserialize, Serialize}; use std::ops::Deref; @@ -88,12 +88,10 @@ pub(crate) async fn get_activity( info.id ))? 
.into(); - let activity = Activity::read_from_apub_id(&mut context.pool(), &activity_id).await?; + let activity = SentActivity::read_from_apub_id(&mut context.pool(), &activity_id).await?; let sensitive = activity.sensitive; - if !activity.local { - Err(err_object_not_local()) - } else if sensitive { + if sensitive { Ok(HttpResponse::Forbidden().finish()) } else { create_apub_response(&activity.data) diff --git a/crates/apub/src/lib.rs b/crates/apub/src/lib.rs index 8d8186022..9a45284f2 100644 --- a/crates/apub/src/lib.rs +++ b/crates/apub/src/lib.rs @@ -3,18 +3,12 @@ use activitypub_federation::config::{Data, UrlVerifier}; use async_trait::async_trait; use lemmy_api_common::context::LemmyContext; use lemmy_db_schema::{ - source::{ - activity::{Activity, ActivityInsertForm}, - instance::Instance, - local_site::LocalSite, - }, - traits::Crud, + source::{activity::ReceivedActivity, instance::Instance, local_site::LocalSite}, utils::{ActualDbPool, DbPool}, }; use lemmy_utils::error::{LemmyError, LemmyErrorType, LemmyResult}; use moka::future::Cache; use once_cell::sync::Lazy; -use serde::Serialize; use std::{sync::Arc, time::Duration}; use url::Url; @@ -178,30 +172,16 @@ pub(crate) async fn check_apub_id_valid_with_strictness( Ok(()) } -/// Store a sent or received activity in the database. +/// Store received activities in the database. /// -/// Stored activities are served over the HTTP endpoint `GET /activities/{type_}/{id}`. This also -/// ensures that the same activity cannot be received more than once. -#[tracing::instrument(skip(data, activity))] -async fn insert_activity( +/// This ensures that the same activity doesnt get received and processed more than once, which +/// would be a waste of resources. +#[tracing::instrument(skip(data))] +async fn insert_received_activity( ap_id: &Url, - activity: &T, - local: bool, - sensitive: bool, data: &Data, -) -> Result<(), LemmyError> -where - T: Serialize, -{ - let ap_id = ap_id.clone().into(); - let form = ActivityInsertForm { - ap_id, - data: serde_json::to_value(activity)?, - local: Some(local), - sensitive: Some(sensitive), - updated: None, - }; - Activity::create(&mut data.pool(), &form).await?; +) -> Result<(), LemmyError> { + ReceivedActivity::create(&mut data.pool(), &ap_id.clone().into()).await?; Ok(()) } diff --git a/crates/apub/src/objects/comment.rs b/crates/apub/src/objects/comment.rs index 0a3185910..2954de096 100644 --- a/crates/apub/src/objects/comment.rs +++ b/crates/apub/src/objects/comment.rs @@ -179,6 +179,9 @@ impl Object for ApubComment { #[cfg(test)] pub(crate) mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use super::*; use crate::{ objects::{ diff --git a/crates/apub/src/objects/community.rs b/crates/apub/src/objects/community.rs index b25a60d73..75eb941b1 100644 --- a/crates/apub/src/objects/community.rs +++ b/crates/apub/src/objects/community.rs @@ -204,6 +204,9 @@ impl ApubCommunity { #[cfg(test)] pub(crate) mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use super::*; use crate::{ objects::{instance::tests::parse_lemmy_instance, tests::init_context}, diff --git a/crates/apub/src/objects/instance.rs b/crates/apub/src/objects/instance.rs index 026b37278..7933d4705 100644 --- a/crates/apub/src/objects/instance.rs +++ b/crates/apub/src/objects/instance.rs @@ -206,6 +206,9 @@ pub(crate) async fn remote_instance_inboxes(pool: &mut DbPool<'_>) -> Result Result<( #[cfg(test)] pub(crate) mod tests { + #![allow(clippy::unwrap_used)] + 
#![allow(clippy::indexing_slicing)] + use activitypub_federation::config::{Data, FederationConfig}; use anyhow::anyhow; use lemmy_api_common::{context::LemmyContext, request::build_user_agent}; diff --git a/crates/apub/src/objects/person.rs b/crates/apub/src/objects/person.rs index 3c2b238b1..d28f8c7cf 100644 --- a/crates/apub/src/objects/person.rs +++ b/crates/apub/src/objects/person.rs @@ -195,6 +195,9 @@ impl Actor for ApubPerson { #[cfg(test)] pub(crate) mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use super::*; use crate::{ objects::{ diff --git a/crates/apub/src/objects/post.rs b/crates/apub/src/objects/post.rs index d252a8ea7..48b573d30 100644 --- a/crates/apub/src/objects/post.rs +++ b/crates/apub/src/objects/post.rs @@ -280,6 +280,9 @@ impl Object for ApubPost { #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use super::*; use crate::{ objects::{ diff --git a/crates/apub/src/objects/private_message.rs b/crates/apub/src/objects/private_message.rs index c986e576e..69a2638ad 100644 --- a/crates/apub/src/objects/private_message.rs +++ b/crates/apub/src/objects/private_message.rs @@ -136,6 +136,9 @@ impl Object for ApubPrivateMessage { #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use super::*; use crate::{ objects::{ diff --git a/crates/apub/src/protocol/activities/block/mod.rs b/crates/apub/src/protocol/activities/block/mod.rs index eaf05b9ac..1b00245f8 100644 --- a/crates/apub/src/protocol/activities/block/mod.rs +++ b/crates/apub/src/protocol/activities/block/mod.rs @@ -3,6 +3,9 @@ pub mod undo_block_user; #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use crate::protocol::{ activities::block::{block_user::BlockUser, undo_block_user::UndoBlockUser}, tests::test_parse_lemmy_item, diff --git a/crates/apub/src/protocol/activities/community/mod.rs b/crates/apub/src/protocol/activities/community/mod.rs index d43e111e0..99a6d0b9c 100644 --- a/crates/apub/src/protocol/activities/community/mod.rs +++ b/crates/apub/src/protocol/activities/community/mod.rs @@ -7,6 +7,9 @@ pub mod update; #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use crate::protocol::{ activities::community::{ announce::AnnounceActivity, diff --git a/crates/apub/src/protocol/activities/create_or_update/mod.rs b/crates/apub/src/protocol/activities/create_or_update/mod.rs index 9e41d57fb..a516bf9b4 100644 --- a/crates/apub/src/protocol/activities/create_or_update/mod.rs +++ b/crates/apub/src/protocol/activities/create_or_update/mod.rs @@ -4,6 +4,9 @@ pub mod page; #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use crate::protocol::{ activities::create_or_update::{ chat_message::CreateOrUpdateChatMessage, diff --git a/crates/apub/src/protocol/activities/deletion/mod.rs b/crates/apub/src/protocol/activities/deletion/mod.rs index fe22c0010..226cd2cac 100644 --- a/crates/apub/src/protocol/activities/deletion/mod.rs +++ b/crates/apub/src/protocol/activities/deletion/mod.rs @@ -4,6 +4,9 @@ pub mod undo_delete; #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use crate::protocol::{ activities::deletion::{delete::Delete, delete_user::DeleteUser, undo_delete::UndoDelete}, tests::test_parse_lemmy_item, diff --git a/crates/apub/src/protocol/activities/following/mod.rs 
b/crates/apub/src/protocol/activities/following/mod.rs index e1b366530..329807343 100644 --- a/crates/apub/src/protocol/activities/following/mod.rs +++ b/crates/apub/src/protocol/activities/following/mod.rs @@ -4,6 +4,9 @@ pub mod undo_follow; #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use crate::protocol::{ activities::following::{accept::AcceptFollow, follow::Follow, undo_follow::UndoFollow}, tests::test_parse_lemmy_item, diff --git a/crates/apub/src/protocol/activities/mod.rs b/crates/apub/src/protocol/activities/mod.rs index a7ce37015..24095faba 100644 --- a/crates/apub/src/protocol/activities/mod.rs +++ b/crates/apub/src/protocol/activities/mod.rs @@ -16,6 +16,9 @@ pub enum CreateOrUpdateType { #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use crate::protocol::{ activities::{ community::announce::AnnounceActivity, diff --git a/crates/apub/src/protocol/activities/voting/mod.rs b/crates/apub/src/protocol/activities/voting/mod.rs index 94f759ed8..26a9d01a2 100644 --- a/crates/apub/src/protocol/activities/voting/mod.rs +++ b/crates/apub/src/protocol/activities/voting/mod.rs @@ -3,6 +3,9 @@ pub mod vote; #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use crate::protocol::{ activities::voting::{undo_vote::UndoVote, vote::Vote}, tests::test_parse_lemmy_item, diff --git a/crates/apub/src/protocol/collections/mod.rs b/crates/apub/src/protocol/collections/mod.rs index 41b4a9f58..2362a998a 100644 --- a/crates/apub/src/protocol/collections/mod.rs +++ b/crates/apub/src/protocol/collections/mod.rs @@ -6,6 +6,9 @@ pub(crate) mod group_outbox; #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use crate::protocol::{ collections::{ empty_outbox::EmptyOutbox, diff --git a/crates/apub/src/protocol/mod.rs b/crates/apub/src/protocol/mod.rs index bfc9df772..dba21f99d 100644 --- a/crates/apub/src/protocol/mod.rs +++ b/crates/apub/src/protocol/mod.rs @@ -89,6 +89,9 @@ pub trait InCommunity { #[cfg(test)] pub(crate) mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use activitypub_federation::protocol::context::WithContext; use assert_json_diff::assert_json_include; use lemmy_utils::error::LemmyError; diff --git a/crates/apub/src/protocol/objects/mod.rs b/crates/apub/src/protocol/objects/mod.rs index f93308bb3..9a3dab185 100644 --- a/crates/apub/src/protocol/objects/mod.rs +++ b/crates/apub/src/protocol/objects/mod.rs @@ -95,6 +95,9 @@ impl LanguageTag { #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use crate::protocol::{ objects::{ chat_message::ChatMessage, diff --git a/crates/apub/src/protocol/objects/page.rs b/crates/apub/src/protocol/objects/page.rs index c48cabfa7..f3308b075 100644 --- a/crates/apub/src/protocol/objects/page.rs +++ b/crates/apub/src/protocol/objects/page.rs @@ -242,6 +242,9 @@ where #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use crate::protocol::{objects::page::Page, tests::test_parse_lemmy_item}; #[test] diff --git a/crates/db_schema/Cargo.toml b/crates/db_schema/Cargo.toml index 26f9b7901..a5d3e21f5 100644 --- a/crates/db_schema/Cargo.toml +++ b/crates/db_schema/Cargo.toml @@ -14,9 +14,27 @@ path = "src/lib.rs" doctest = false [features] -full = ["diesel", "diesel-derive-newtype", "diesel-derive-enum", "diesel_migrations", "bcrypt", 
"lemmy_utils", - "activitypub_federation", "sha2", "regex", "once_cell", "serde_json", "diesel_ltree", - "diesel-async", "deadpool", "ts-rs"] +full = [ + "diesel", + "diesel-derive-newtype", + "diesel-derive-enum", + "diesel_migrations", + "bcrypt", + "lemmy_utils", + "activitypub_federation", + "sha2", + "regex", + "once_cell", + "serde_json", + "diesel_ltree", + "diesel-async", + "deadpool", + "ts-rs", + "tokio", + "tokio-postgres", + "tokio-postgres-rustls", + "rustls", +] [dependencies] chrono = { workspace = true } @@ -29,25 +47,33 @@ serde_json = { workspace = true, optional = true } activitypub_federation = { workspace = true, optional = true } lemmy_utils = { workspace = true, optional = true } bcrypt = { workspace = true, optional = true } -diesel = { workspace = true, features = ["postgres","chrono", "serde_json", "uuid"], optional = true } +diesel = { workspace = true, features = [ + "postgres", + "chrono", + "serde_json", + "uuid", +], optional = true } diesel-derive-newtype = { workspace = true, optional = true } diesel-derive-enum = { workspace = true, optional = true } diesel_migrations = { workspace = true, optional = true } -diesel-async = { workspace = true, features = ["postgres", "deadpool"], optional = true } +diesel-async = { workspace = true, features = [ + "postgres", + "deadpool", +], optional = true } sha2 = { workspace = true, optional = true } regex = { workspace = true, optional = true } once_cell = { workspace = true, optional = true } diesel_ltree = { workspace = true, optional = true } typed-builder = { workspace = true } async-trait = { workspace = true } -tokio = { workspace = true } tracing = { workspace = true } deadpool = { version = "0.9.5", features = ["rt_tokio_1"], optional = true } ts-rs = { workspace = true, optional = true } -rustls = { workspace = true } futures-util = { workspace = true } -tokio-postgres = { workspace = true } -tokio-postgres-rustls = { workspace = true } +tokio = { workspace = true, optional = true } +tokio-postgres = { workspace = true, optional = true } +tokio-postgres-rustls = { workspace = true, optional = true } +rustls = { workspace = true, optional = true } uuid = { workspace = true, features = ["v4"] } [dev-dependencies] diff --git a/crates/db_schema/src/aggregates/comment_aggregates.rs b/crates/db_schema/src/aggregates/comment_aggregates.rs index 12b572228..e081d1a1e 100644 --- a/crates/db_schema/src/aggregates/comment_aggregates.rs +++ b/crates/db_schema/src/aggregates/comment_aggregates.rs @@ -35,6 +35,9 @@ impl CommentAggregates { #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use crate::{ aggregates::comment_aggregates::CommentAggregates, source::{ diff --git a/crates/db_schema/src/aggregates/community_aggregates.rs b/crates/db_schema/src/aggregates/community_aggregates.rs index 61abd193c..1cd23e03f 100644 --- a/crates/db_schema/src/aggregates/community_aggregates.rs +++ b/crates/db_schema/src/aggregates/community_aggregates.rs @@ -19,6 +19,9 @@ impl CommunityAggregates { #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use crate::{ aggregates::community_aggregates::CommunityAggregates, source::{ diff --git a/crates/db_schema/src/aggregates/person_aggregates.rs b/crates/db_schema/src/aggregates/person_aggregates.rs index e03497da0..43feadd45 100644 --- a/crates/db_schema/src/aggregates/person_aggregates.rs +++ b/crates/db_schema/src/aggregates/person_aggregates.rs @@ -19,6 +19,9 @@ impl PersonAggregates { 
#[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use crate::{ aggregates::person_aggregates::PersonAggregates, source::{ diff --git a/crates/db_schema/src/aggregates/post_aggregates.rs b/crates/db_schema/src/aggregates/post_aggregates.rs index 8ce2d38fe..020370825 100644 --- a/crates/db_schema/src/aggregates/post_aggregates.rs +++ b/crates/db_schema/src/aggregates/post_aggregates.rs @@ -35,6 +35,9 @@ impl PostAggregates { #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use crate::{ aggregates::post_aggregates::PostAggregates, source::{ diff --git a/crates/db_schema/src/aggregates/site_aggregates.rs b/crates/db_schema/src/aggregates/site_aggregates.rs index 1fe90e0fe..ea3da85ae 100644 --- a/crates/db_schema/src/aggregates/site_aggregates.rs +++ b/crates/db_schema/src/aggregates/site_aggregates.rs @@ -15,6 +15,9 @@ impl SiteAggregates { #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use crate::{ aggregates::site_aggregates::SiteAggregates, source::{ diff --git a/crates/db_schema/src/aggregates/structs.rs b/crates/db_schema/src/aggregates/structs.rs index 592c63abb..1af94a800 100644 --- a/crates/db_schema/src/aggregates/structs.rs +++ b/crates/db_schema/src/aggregates/structs.rs @@ -96,6 +96,8 @@ pub struct PostAggregates { pub featured_local: bool, pub hot_rank: i32, pub hot_rank_active: i32, + pub community_id: CommunityId, + pub creator_id: PersonId, } #[derive(PartialEq, Eq, Debug, Serialize, Deserialize, Clone)] diff --git a/crates/db_schema/src/impls/activity.rs b/crates/db_schema/src/impls/activity.rs index 0fff4d5b2..5bb39183a 100644 --- a/crates/db_schema/src/impls/activity.rs +++ b/crates/db_schema/src/impls/activity.rs @@ -1,139 +1,115 @@ use crate::{ + diesel::OptionalExtension, newtypes::DbUrl, - schema::activity::dsl::{activity, ap_id}, - source::activity::{Activity, ActivityInsertForm, ActivityUpdateForm}, - traits::Crud, + source::activity::{ReceivedActivity, SentActivity, SentActivityForm}, utils::{get_conn, DbPool}, }; -use diesel::{dsl::insert_into, result::Error, ExpressionMethods, QueryDsl}; +use diesel::{ + dsl::insert_into, + result::{DatabaseErrorKind, Error, Error::DatabaseError}, + ExpressionMethods, + QueryDsl, +}; use diesel_async::RunQueryDsl; #[async_trait] impl Crud for Activity { - type InsertForm = ActivityInsertForm; - type UpdateForm = ActivityUpdateForm; - type IdType = i32; - - async fn create(pool: &mut DbPool<'_>, new_activity: &Self::InsertForm) -> Result { + pub async fn create(pool: &mut DbPool<'_>, form: SentActivityForm) -> Result { + use crate::schema::sent_activity::dsl::sent_activity; let conn = &mut get_conn(pool).await?; - insert_into(activity) - .values(new_activity) + insert_into(sent_activity) + .values(form) .get_result::(conn) .await } - async fn update( - pool: &mut DbPool<'_>, - activity_id: i32, - new_activity: &Self::UpdateForm, - ) -> Result { + pub async fn read_from_apub_id(pool: &mut DbPool<'_>, object_id: &DbUrl) -> Result { + use crate::schema::sent_activity::dsl::{ap_id, sent_activity}; let conn = &mut get_conn(pool).await?; - diesel::update(activity.find(activity_id)) - .set(new_activity) - .get_result::(conn) - .await - } - async fn delete(pool: &mut DbPool<'_>, activity_id: i32) -> Result { - let conn = &mut get_conn(pool).await?; - diesel::delete(activity.find(activity_id)) - .execute(conn) - .await - } -} - -impl Activity { - pub async fn read_from_apub_id( - pool: &mut 
DbPool<'_>, - object_id: &DbUrl, - ) -> Result { - let conn = &mut get_conn(pool).await?; - activity + sent_activity .filter(ap_id.eq(object_id)) .first::(conn) .await } } +impl ReceivedActivity { + pub async fn create(pool: &mut DbPool<'_>, ap_id_: &DbUrl) -> Result<(), Error> { + use crate::schema::received_activity::dsl::{ap_id, id, received_activity}; + let conn = &mut get_conn(pool).await?; + let res = insert_into(received_activity) + .values(ap_id.eq(ap_id_)) + .on_conflict_do_nothing() + .returning(id) + .get_result::(conn) + .await + .optional()?; + if res.is_some() { + // new activity inserted successfully + Ok(()) + } else { + // duplicate activity + Err(DatabaseError( + DatabaseErrorKind::UniqueViolation, + Box::::default(), + )) + } + } +} + #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use super::*; - use crate::{ - newtypes::DbUrl, - source::{ - activity::{Activity, ActivityInsertForm}, - instance::Instance, - person::{Person, PersonInsertForm}, - }, - utils::build_db_pool_for_tests, - }; - use serde_json::Value; + use crate::utils::build_db_pool_for_tests; + use serde_json::json; use serial_test::serial; use url::Url; #[tokio::test] #[serial] - async fn test_crud() { + async fn receive_activity_duplicate() { let pool = &build_db_pool_for_tests().await; let pool = &mut pool.into(); + let ap_id: DbUrl = Url::parse("http://example.com/activity/531") + .unwrap() + .into(); - let inserted_instance = Instance::read_or_create(pool, "my_domain.tld".to_string()) - .await - .unwrap(); + // inserting activity for first time + let res = ReceivedActivity::create(pool, &ap_id).await; + assert!(res.is_ok()); - let creator_form = PersonInsertForm::builder() - .name("activity_creator_ pm".into()) - .public_key("pubkey".to_string()) - .instance_id(inserted_instance.id) - .build(); + let res = ReceivedActivity::create(pool, &ap_id).await; + assert!(res.is_err()); + } - let inserted_creator = Person::create(pool, &creator_form).await.unwrap(); + #[tokio::test] + #[serial] + async fn sent_activity_write_read() { + let pool = &build_db_pool_for_tests().await; + let pool = &mut pool.into(); + let ap_id: DbUrl = Url::parse("http://example.com/activity/412") + .unwrap() + .into(); + let data = json!({ + "key1": "0xF9BA143B95FF6D82", + "key2": "42", + }); + let sensitive = false; - let ap_id_: DbUrl = Url::parse( - "https://enterprise.lemmy.ml/activities/delete/f1b5d57c-80f8-4e03-a615-688d552e946c", - ) - .unwrap() - .into(); - let test_json: Value = serde_json::from_str( - r#"{ - "@context": "https://www.w3.org/ns/activitystreams", - "id": "https://enterprise.lemmy.ml/activities/delete/f1b5d57c-80f8-4e03-a615-688d552e946c", - "type": "Delete", - "actor": "https://enterprise.lemmy.ml/u/riker", - "to": "https://www.w3.org/ns/activitystreams#Public", - "cc": [ - "https://enterprise.lemmy.ml/c/main/" - ], - "object": "https://enterprise.lemmy.ml/post/32" - }"#, - ) - .unwrap(); - let activity_form = ActivityInsertForm { - ap_id: ap_id_.clone(), - data: test_json.clone(), - local: Some(true), - sensitive: Some(false), - updated: None, + let form = SentActivityForm { + ap_id: ap_id.clone(), + data: data.clone(), + sensitive, }; - let inserted_activity = Activity::create(pool, &activity_form).await.unwrap(); + SentActivity::create(pool, form).await.unwrap(); - let expected_activity = Activity { - ap_id: ap_id_.clone(), - id: inserted_activity.id, - data: test_json, - local: true, - sensitive: false, - published: inserted_activity.published, - updated: 
None, - }; - - let read_activity = Activity::read(pool, inserted_activity.id).await.unwrap(); - let read_activity_by_apub_id = Activity::read_from_apub_id(pool, &ap_id_).await.unwrap(); - Person::delete(pool, inserted_creator.id).await.unwrap(); - Activity::delete(pool, inserted_activity.id).await.unwrap(); - - assert_eq!(expected_activity, read_activity); - assert_eq!(expected_activity, read_activity_by_apub_id); - assert_eq!(expected_activity, inserted_activity); + let res = SentActivity::read_from_apub_id(pool, &ap_id).await.unwrap(); + assert_eq!(res.ap_id, ap_id); + assert_eq!(res.data, data); + assert_eq!(res.sensitive, sensitive); } } diff --git a/crates/db_schema/src/impls/actor_language.rs b/crates/db_schema/src/impls/actor_language.rs index 592d62a30..313762a72 100644 --- a/crates/db_schema/src/impls/actor_language.rs +++ b/crates/db_schema/src/impls/actor_language.rs @@ -275,25 +275,37 @@ impl CommunityLanguage { return Ok(()); } + let form = lang_ids + .into_iter() + .map(|language_id| CommunityLanguageForm { + community_id: for_community_id, + language_id, + }) + .collect::>(); + conn .build_transaction() .run(|conn| { Box::pin(async move { use crate::schema::community_language::dsl::{community_id, community_language}; + use diesel::result::DatabaseErrorKind::UniqueViolation; // Clear the current languages delete(community_language.filter(community_id.eq(for_community_id))) .execute(conn) .await?; - for l in lang_ids { - let form = CommunityLanguageForm { - community_id: for_community_id, - language_id: l, - }; - insert_into(community_language) - .values(form) - .get_result::(conn) - .await?; + let insert_res = insert_into(community_language) + .values(form) + .get_result::(conn) + .await; + + if let Err(Error::DatabaseError(UniqueViolation, _info)) = insert_res { + // race condition: this function was probably called simultaneously from another caller. 
ignore error + // tracing::warn!("unique error: {_info:#?}"); + // _info.constraint_name() should be = "community_language_community_id_language_id_key" + return Ok(()); + } else { + insert_res?; } Ok(()) }) as _ @@ -372,6 +384,9 @@ async fn convert_read_languages( #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use super::*; use crate::{ impls::actor_language::{ diff --git a/crates/db_schema/src/impls/captcha_answer.rs b/crates/db_schema/src/impls/captcha_answer.rs index fe85b78b2..0404ce005 100644 --- a/crates/db_schema/src/impls/captcha_answer.rs +++ b/crates/db_schema/src/impls/captcha_answer.rs @@ -50,6 +50,9 @@ impl CaptchaAnswer { #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use crate::{ source::captcha_answer::{CaptchaAnswer, CaptchaAnswerForm, CheckCaptchaAnswer}, utils::build_db_pool_for_tests, diff --git a/crates/db_schema/src/impls/comment.rs b/crates/db_schema/src/impls/comment.rs index 50ce75f88..d6d2ab949 100644 --- a/crates/db_schema/src/impls/comment.rs +++ b/crates/db_schema/src/impls/comment.rs @@ -247,6 +247,9 @@ impl Saveable for CommentSaved { #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use crate::{ newtypes::LanguageId, source::{ diff --git a/crates/db_schema/src/impls/comment_reply.rs b/crates/db_schema/src/impls/comment_reply.rs index 83be5ce40..c5b5a3c6a 100644 --- a/crates/db_schema/src/impls/comment_reply.rs +++ b/crates/db_schema/src/impls/comment_reply.rs @@ -74,6 +74,9 @@ impl CommentReply { #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use crate::{ source::{ comment::{Comment, CommentInsertForm}, diff --git a/crates/db_schema/src/impls/community.rs b/crates/db_schema/src/impls/community.rs index 9610f0237..d76770203 100644 --- a/crates/db_schema/src/impls/community.rs +++ b/crates/db_schema/src/impls/community.rs @@ -324,6 +324,9 @@ impl ApubActor for Community { #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use crate::{ source::{ community::{ diff --git a/crates/db_schema/src/impls/federation_allowlist.rs b/crates/db_schema/src/impls/federation_allowlist.rs index d4aed4846..eb67acce8 100644 --- a/crates/db_schema/src/impls/federation_allowlist.rs +++ b/crates/db_schema/src/impls/federation_allowlist.rs @@ -49,6 +49,9 @@ impl FederationAllowList { } #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use crate::{ source::{federation_allowlist::FederationAllowList, instance::Instance}, utils::build_db_pool_for_tests, diff --git a/crates/db_schema/src/impls/instance.rs b/crates/db_schema/src/impls/instance.rs index 068e317fc..d6a23a712 100644 --- a/crates/db_schema/src/impls/instance.rs +++ b/crates/db_schema/src/impls/instance.rs @@ -1,10 +1,17 @@ use crate::{ + diesel::dsl::IntervalDsl, newtypes::InstanceId, schema::{federation_allowlist, federation_blocklist, instance}, source::instance::{Instance, InstanceForm}, utils::{get_conn, naive_now, DbPool}, }; -use diesel::{dsl::insert_into, result::Error, ExpressionMethods, QueryDsl}; +use diesel::{ + dsl::{insert_into, now}, + result::Error, + sql_types::{Nullable, Timestamp}, + ExpressionMethods, + QueryDsl, +}; use diesel_async::RunQueryDsl; impl Instance { @@ -46,6 +53,24 @@ impl Instance { .execute(conn) .await } + + pub async fn read_all(pool: &mut DbPool<'_>) -> Result, Error> { + let conn = &mut 
get_conn(pool).await?; + instance::table + .select(instance::all_columns) + .get_results(conn) + .await + } + + pub async fn dead_instances(pool: &mut DbPool<'_>) -> Result, Error> { + let conn = &mut get_conn(pool).await?; + instance::table + .select(instance::domain) + .filter(coalesce(instance::updated, instance::published).lt(now - 3.days())) + .get_results(conn) + .await + } + #[cfg(test)] pub async fn delete_all(pool: &mut DbPool<'_>) -> Result { let conn = &mut get_conn(pool).await?; @@ -79,3 +104,5 @@ impl Instance { .await } } + +sql_function! { fn coalesce(x: Nullable, y: Timestamp) -> Timestamp; } diff --git a/crates/db_schema/src/impls/language.rs b/crates/db_schema/src/impls/language.rs index e459d9899..53aadbac8 100644 --- a/crates/db_schema/src/impls/language.rs +++ b/crates/db_schema/src/impls/language.rs @@ -42,6 +42,9 @@ impl Language { #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use crate::{source::language::Language, utils::build_db_pool_for_tests}; use serial_test::serial; diff --git a/crates/db_schema/src/impls/moderator.rs b/crates/db_schema/src/impls/moderator.rs index 635c97cae..a4c300b2a 100644 --- a/crates/db_schema/src/impls/moderator.rs +++ b/crates/db_schema/src/impls/moderator.rs @@ -466,6 +466,9 @@ impl Crud for AdminPurgeComment { #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use crate::{ source::{ comment::{Comment, CommentInsertForm}, diff --git a/crates/db_schema/src/impls/password_reset_request.rs b/crates/db_schema/src/impls/password_reset_request.rs index 141a5bf92..9daaa1664 100644 --- a/crates/db_schema/src/impls/password_reset_request.rs +++ b/crates/db_schema/src/impls/password_reset_request.rs @@ -101,6 +101,9 @@ fn bytes_to_hex(bytes: Vec) -> String { #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use crate::{ source::{ instance::Instance, diff --git a/crates/db_schema/src/impls/person.rs b/crates/db_schema/src/impls/person.rs index c2ac43a15..5c808a9c0 100644 --- a/crates/db_schema/src/impls/person.rs +++ b/crates/db_schema/src/impls/person.rs @@ -193,6 +193,9 @@ impl PersonFollower { #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use crate::{ source::{ instance::Instance, diff --git a/crates/db_schema/src/impls/person_mention.rs b/crates/db_schema/src/impls/person_mention.rs index 51e1224c2..f2441f00c 100644 --- a/crates/db_schema/src/impls/person_mention.rs +++ b/crates/db_schema/src/impls/person_mention.rs @@ -75,6 +75,9 @@ impl PersonMention { #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use crate::{ source::{ comment::{Comment, CommentInsertForm}, diff --git a/crates/db_schema/src/impls/post.rs b/crates/db_schema/src/impls/post.rs index 8d6d3029d..9e05786b2 100644 --- a/crates/db_schema/src/impls/post.rs +++ b/crates/db_schema/src/impls/post.rs @@ -325,6 +325,9 @@ impl Readable for PostRead { #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use crate::{ source::{ community::{Community, CommunityInsertForm}, diff --git a/crates/db_schema/src/impls/private_message.rs b/crates/db_schema/src/impls/private_message.rs index 2f46412b1..7c6ec4673 100644 --- a/crates/db_schema/src/impls/private_message.rs +++ b/crates/db_schema/src/impls/private_message.rs @@ -81,6 +81,9 @@ impl PrivateMessage { #[cfg(test)] mod tests { + 
#![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use crate::{ source::{ instance::Instance, diff --git a/crates/db_schema/src/impls/site.rs b/crates/db_schema/src/impls/site.rs index 7faefa51f..ca6f00497 100644 --- a/crates/db_schema/src/impls/site.rs +++ b/crates/db_schema/src/impls/site.rs @@ -95,7 +95,6 @@ impl Site { ) } - // TODO this needs fixed pub async fn read_remote_sites(pool: &mut DbPool<'_>) -> Result, Error> { let conn = &mut get_conn(pool).await?; site.order_by(id).offset(1).get_results::(conn).await diff --git a/crates/db_schema/src/schema.rs b/crates/db_schema/src/schema.rs index 01aafa1d2..17a0f99f8 100644 --- a/crates/db_schema/src/schema.rs +++ b/crates/db_schema/src/schema.rs @@ -14,18 +14,6 @@ pub mod sql_types { pub struct SortTypeEnum; } -diesel::table! { - activity (id) { - id -> Int4, - data -> Jsonb, - local -> Bool, - published -> Timestamp, - updated -> Nullable, - ap_id -> Text, - sensitive -> Bool, - } -} - diesel::table! { admin_purge_comment (id) { id -> Int4, @@ -407,6 +395,7 @@ diesel::table! { totp_2fa_secret -> Nullable, totp_2fa_url -> Nullable, open_links_in_new_tab -> Bool, + infinite_scroll_enabled -> Bool, } } @@ -683,6 +672,8 @@ diesel::table! { featured_local -> Bool, hot_rank -> Int4, hot_rank_active -> Int4, + community_id -> Int4, + creator_id -> Int4, } } @@ -761,6 +752,14 @@ diesel::table! { } } +diesel::table! { + received_activity (id) { + id -> Int8, + ap_id -> Text, + published -> Timestamp, + } +} + diesel::table! { registration_application (id) { id -> Int4, @@ -779,6 +778,16 @@ diesel::table! { } } +diesel::table! { + sent_activity (id) { + id -> Int8, + ap_id -> Text, + data -> Json, + sensitive -> Bool, + published -> Timestamp, + } +} + diesel::table! { site (id) { id -> Int4, @@ -901,6 +910,8 @@ diesel::joinable!(person_post_aggregates -> post (post_id)); diesel::joinable!(post -> community (community_id)); diesel::joinable!(post -> language (language_id)); diesel::joinable!(post -> person (creator_id)); +diesel::joinable!(post_aggregates -> community (community_id)); +diesel::joinable!(post_aggregates -> person (creator_id)); diesel::joinable!(post_aggregates -> post (post_id)); diesel::joinable!(post_like -> person (person_id)); diesel::joinable!(post_like -> post (post_id)); @@ -919,7 +930,6 @@ diesel::joinable!(site_language -> site (site_id)); diesel::joinable!(tagline -> local_site (local_site_id)); diesel::allow_tables_to_appear_in_same_query!( - activity, admin_purge_comment, admin_purge_community, admin_purge_person, @@ -976,8 +986,10 @@ diesel::allow_tables_to_appear_in_same_query!( post_saved, private_message, private_message_report, + received_activity, registration_application, secret, + sent_activity, site, site_aggregates, site_language, diff --git a/crates/db_schema/src/source/activity.rs b/crates/db_schema/src/source/activity.rs index c5c8dd359..85b193f51 100644 --- a/crates/db_schema/src/source/activity.rs +++ b/crates/db_schema/src/source/activity.rs @@ -1,34 +1,28 @@ -use crate::{newtypes::DbUrl, schema::activity}; +use crate::{newtypes::DbUrl, schema::sent_activity}; use serde_json::Value; use std::fmt::Debug; -#[derive(PartialEq, Eq, Debug, Queryable, Identifiable)] -#[diesel(table_name = activity)] -pub struct Activity { - pub id: i32, - pub data: Value, - pub local: bool, - pub published: chrono::NaiveDateTime, - pub updated: Option, +#[derive(PartialEq, Eq, Debug, Queryable)] +#[diesel(table_name = sent_activity)] +pub struct SentActivity { + pub id: i64, pub ap_id: DbUrl, + pub data: 
Value, + pub sensitive: bool, + pub published: chrono::NaiveDateTime, +} +#[derive(Insertable)] +#[diesel(table_name = sent_activity)] +pub struct SentActivityForm { + pub ap_id: DbUrl, + pub data: Value, pub sensitive: bool, } -#[derive(Insertable)] -#[diesel(table_name = activity)] -pub struct ActivityInsertForm { - pub data: Value, - pub local: Option, - pub updated: Option, +#[derive(PartialEq, Eq, Debug, Queryable)] +#[diesel(table_name = received_activity)] +pub struct ReceivedActivity { + pub id: i64, pub ap_id: DbUrl, - pub sensitive: Option, -} - -#[derive(AsChangeset)] -#[diesel(table_name = activity)] -pub struct ActivityUpdateForm { - pub data: Option, - pub local: Option, - pub updated: Option>, - pub sensitive: Option, + pub published: chrono::NaiveDateTime, } diff --git a/crates/db_schema/src/source/local_user.rs b/crates/db_schema/src/source/local_user.rs index d6c999713..d9e1bde75 100644 --- a/crates/db_schema/src/source/local_user.rs +++ b/crates/db_schema/src/source/local_user.rs @@ -53,6 +53,8 @@ pub struct LocalUser { pub totp_2fa_url: Option, /// Open links in a new tab. pub open_links_in_new_tab: bool, + /// Whether infinite scroll is enabled. + pub infinite_scroll_enabled: bool, } #[derive(Clone, TypedBuilder)] @@ -81,6 +83,7 @@ pub struct LocalUserInsertForm { pub totp_2fa_secret: Option>, pub totp_2fa_url: Option>, pub open_links_in_new_tab: Option, + pub infinite_scroll_enabled: Option, } #[derive(Clone, TypedBuilder)] @@ -106,4 +109,5 @@ pub struct LocalUserUpdateForm { pub totp_2fa_secret: Option>, pub totp_2fa_url: Option>, pub open_links_in_new_tab: Option, + pub infinite_scroll_enabled: Option, } diff --git a/crates/db_schema/src/source/site.rs b/crates/db_schema/src/source/site.rs index f6a19b217..12b30c584 100644 --- a/crates/db_schema/src/source/site.rs +++ b/crates/db_schema/src/source/site.rs @@ -1,6 +1,7 @@ use crate::newtypes::{DbUrl, InstanceId, SiteId}; #[cfg(feature = "full")] use crate::schema::site; +use chrono::NaiveDateTime; use serde::{Deserialize, Serialize}; use serde_with::skip_serializing_none; #[cfg(feature = "full")] @@ -18,8 +19,8 @@ pub struct Site { pub name: String, /// A sidebar for the site in markdown. pub sidebar: Option, - pub published: chrono::NaiveDateTime, - pub updated: Option, + pub published: NaiveDateTime, + pub updated: Option, /// An icon URL. pub icon: Option, /// A banner url. @@ -29,7 +30,7 @@ pub struct Site { /// The federated actor_id. pub actor_id: DbUrl, /// The time the site was last refreshed. - pub last_refreshed_at: chrono::NaiveDateTime, + pub last_refreshed_at: NaiveDateTime, /// The site inbox pub inbox_url: DbUrl, pub private_key: Option, @@ -45,12 +46,12 @@ pub struct SiteInsertForm { #[builder(!default)] pub name: String, pub sidebar: Option, - pub updated: Option, + pub updated: Option, pub icon: Option, pub banner: Option, pub description: Option, pub actor_id: Option, - pub last_refreshed_at: Option, + pub last_refreshed_at: Option, pub inbox_url: Option, pub private_key: Option, pub public_key: Option, @@ -65,13 +66,13 @@ pub struct SiteInsertForm { pub struct SiteUpdateForm { pub name: Option, pub sidebar: Option>, - pub updated: Option>, + pub updated: Option>, // when you want to null out a column, you have to send Some(None)), since sending None means you just don't want to update that column. 
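
The `Some(None)` convention in that comment is easy to misread, so here is a self-contained sketch of the nested-`Option` semantics used by the nullable fields of forms like `SiteUpdateForm` (the struct and helper below are illustrative only, not part of the codebase):

```rust
// Illustrative only: mirrors the Option<Option<T>> convention the comment above
// describes for nullable columns in update forms such as SiteUpdateForm.
struct ExampleUpdateForm {
  // None       -> leave the column untouched
  // Some(None) -> write NULL (here: set the value to None)
  // Some(Some) -> write the new value
  icon: Option<Option<String>>,
}

fn apply_icon(current: Option<String>, form: &ExampleUpdateForm) -> Option<String> {
  match &form.icon {
    None => current,                      // column not part of this update
    Some(new_value) => new_value.clone(), // overwrite, possibly with NULL
  }
}

fn main() {
  let untouched = ExampleUpdateForm { icon: None };
  let cleared = ExampleUpdateForm { icon: Some(None) };

  assert_eq!(
    apply_icon(Some("a.png".to_string()), &untouched),
    Some("a.png".to_string())
  );
  assert_eq!(apply_icon(Some("a.png".to_string()), &cleared), None);
}
```

Diesel changesets skip fields whose outer `Option` is `None`, which is what lets the form express "no change" and "set NULL" as distinct states.
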
pub icon: Option>, pub banner: Option>, pub description: Option>, pub actor_id: Option, - pub last_refreshed_at: Option, + pub last_refreshed_at: Option, pub inbox_url: Option, pub private_key: Option>, pub public_key: Option, diff --git a/crates/db_schema/src/utils.rs b/crates/db_schema/src/utils.rs index 5d5bfafa2..94c867d6b 100644 --- a/crates/db_schema/src/utils.rs +++ b/crates/db_schema/src/utils.rs @@ -406,6 +406,9 @@ where #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use super::{fuzzy_search, *}; use crate::utils::is_email_regex; diff --git a/crates/db_views/Cargo.toml b/crates/db_views/Cargo.toml index 8a1b90b5c..54a3aab6c 100644 --- a/crates/db_views/Cargo.toml +++ b/crates/db_views/Cargo.toml @@ -12,18 +12,24 @@ repository.workspace = true doctest = false [features] -full = ["lemmy_db_schema/full", "diesel", "diesel-async", "diesel_ltree", "tracing", "ts-rs"] +full = [ + "lemmy_db_schema/full", + "diesel", + "diesel-async", + "diesel_ltree", + "tracing", + "ts-rs", +] [dependencies] lemmy_db_schema = { workspace = true } diesel = { workspace = true, optional = true } -diesel-async = { workspace = true, optional = true} -diesel_ltree = { workspace = true, optional = true} +diesel-async = { workspace = true, optional = true } +diesel_ltree = { workspace = true, optional = true } serde = { workspace = true } serde_with = { workspace = true } tracing = { workspace = true, optional = true } -typed-builder = { workspace = true } -ts-rs = { workspace = true, optional = true } +ts-rs = { workspace = true, optional = true } [dev-dependencies] serial_test = { workspace = true } diff --git a/crates/db_views/src/comment_report_view.rs b/crates/db_views/src/comment_report_view.rs index 9c50eb67f..a09971dbe 100644 --- a/crates/db_views/src/comment_report_view.rs +++ b/crates/db_views/src/comment_report_view.rs @@ -33,7 +33,6 @@ use lemmy_db_schema::{ traits::JoinView, utils::{get_conn, limit_and_offset, DbPool}, }; -use typed_builder::TypedBuilder; impl CommentReportView { /// returns the CommentReportView for the provided report_id @@ -137,24 +136,21 @@ impl CommentReportView { } } -#[derive(TypedBuilder)] -#[builder(field_defaults(default))] -pub struct CommentReportQuery<'a, 'b: 'a> { - #[builder(!default)] - pool: &'a mut DbPool<'b>, - #[builder(!default)] - my_person_id: PersonId, - #[builder(!default)] - admin: bool, - community_id: Option, - page: Option, - limit: Option, - unresolved_only: Option, +#[derive(Default)] +pub struct CommentReportQuery { + pub community_id: Option, + pub page: Option, + pub limit: Option, + pub unresolved_only: Option, } -impl<'a, 'b: 'a> CommentReportQuery<'a, 'b> { - pub async fn list(self) -> Result, Error> { - let conn = &mut get_conn(self.pool).await?; +impl CommentReportQuery { + pub async fn list( + self, + pool: &mut DbPool<'_>, + my_person: &Person, + ) -> Result, Error> { + let conn = &mut get_conn(pool).await?; let (person_alias_1, person_alias_2) = diesel::alias!(person as person1, person as person2); @@ -183,7 +179,7 @@ impl<'a, 'b: 'a> CommentReportQuery<'a, 'b> { comment_like::table.on( comment::id .eq(comment_like::comment_id) - .and(comment_like::person_id.eq(self.my_person_id)), + .and(comment_like::person_id.eq(my_person.id)), ), ) .left_join( @@ -220,13 +216,13 @@ impl<'a, 'b: 'a> CommentReportQuery<'a, 'b> { .offset(offset); // If its not an admin, get only the ones you mod - let res = if !self.admin { + let res = if !my_person.admin { query .inner_join( community_moderator::table.on( 
community_moderator::community_id .eq(post::community_id) - .and(community_moderator::person_id.eq(self.my_person_id)), + .and(community_moderator::person_id.eq(my_person.id)), ), ) .load::<::JoinTuple>(conn) @@ -273,6 +269,9 @@ impl JoinView for CommentReportView { #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use crate::comment_report_view::{CommentReportQuery, CommentReportView}; use lemmy_db_schema::{ aggregates::structs::CommentAggregates, @@ -514,12 +513,8 @@ mod tests { }; // Do a batch read of timmys reports - let reports = CommentReportQuery::builder() - .pool(pool) - .my_person_id(inserted_timmy.id) - .admin(false) - .build() - .list() + let reports = CommentReportQuery::default() + .list(pool, &inserted_timmy) .await .unwrap(); @@ -590,15 +585,13 @@ mod tests { // Do a batch read of timmys reports // It should only show saras, which is unresolved - let reports_after_resolve = CommentReportQuery::builder() - .pool(pool) - .my_person_id(inserted_timmy.id) - .admin(false) - .unresolved_only(Some(true)) - .build() - .list() - .await - .unwrap(); + let reports_after_resolve = CommentReportQuery { + unresolved_only: (Some(true)), + ..Default::default() + } + .list(pool, &inserted_timmy) + .await + .unwrap(); assert_eq!(reports_after_resolve[0], expected_sara_report_view); assert_eq!(reports_after_resolve.len(), 1); diff --git a/crates/db_views/src/comment_view.rs b/crates/db_views/src/comment_view.rs index 3556f8501..1b77168d8 100644 --- a/crates/db_views/src/comment_view.rs +++ b/crates/db_views/src/comment_view.rs @@ -1,4 +1,4 @@ -use crate::structs::CommentView; +use crate::structs::{CommentView, LocalUserView}; use diesel::{ result::Error, BoolExpressionMethods, @@ -30,7 +30,6 @@ use lemmy_db_schema::{ source::{ comment::{Comment, CommentSaved}, community::{Community, CommunityFollower, CommunityPersonBan}, - local_user::LocalUser, person::Person, person_block::PersonBlock, post::Post, @@ -40,7 +39,6 @@ use lemmy_db_schema::{ CommentSortType, ListingType, }; -use typed_builder::TypedBuilder; type CommentViewTuple = ( Comment, @@ -156,33 +154,34 @@ impl CommentView { } } -#[derive(TypedBuilder)] -#[builder(field_defaults(default))] -pub struct CommentQuery<'a, 'b: 'a> { - #[builder(!default)] - pool: &'a mut DbPool<'b>, - listing_type: Option, - sort: Option, - community_id: Option, - post_id: Option, - parent_path: Option, - creator_id: Option, - local_user: Option<&'a LocalUser>, - search_term: Option, - saved_only: Option, - show_deleted_and_removed: Option, - page: Option, - limit: Option, - max_depth: Option, +#[derive(Default)] +pub struct CommentQuery<'a> { + pub listing_type: Option, + pub sort: Option, + pub community_id: Option, + pub post_id: Option, + pub parent_path: Option, + pub creator_id: Option, + pub local_user: Option<&'a LocalUserView>, + pub search_term: Option, + pub saved_only: Option, + pub is_profile_view: Option, + pub show_deleted_and_removed: Option, + pub page: Option, + pub limit: Option, + pub max_depth: Option, } -impl<'a, 'b: 'a> CommentQuery<'a, 'b> { - pub async fn list(self) -> Result, Error> { - let conn = &mut get_conn(self.pool).await?; +impl<'a> CommentQuery<'a> { + pub async fn list(self, pool: &mut DbPool<'_>) -> Result, Error> { + let conn = &mut get_conn(pool).await?; // The left join below will return None in this case - let person_id_join = self.local_user.map(|l| l.person_id).unwrap_or(PersonId(-1)); - let local_user_id_join = self.local_user.map(|l| l.id).unwrap_or(LocalUserId(-1)); 
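
The `CommentReportQuery` and `CommentQuery` changes in this area replace the `TypedBuilder` structs with plain `Default` structs whose `list` method takes the pool (and, for the report queries, the acting person) directly. A hedged sketch of the resulting call shape, assuming the `full` feature of `lemmy_db_views` (the wrapper function and its name are illustrative):

```rust
use diesel::result::Error;
use lemmy_db_schema::{utils::DbPool, CommentSortType};
use lemmy_db_views::{
  comment_view::CommentQuery,
  structs::{CommentView, LocalUserView},
};

// Hypothetical helper: list the 20 hottest comments visible to `viewer`.
async fn hot_comments_for(
  pool: &mut DbPool<'_>,
  viewer: &LocalUserView,
) -> Result<Vec<CommentView>, Error> {
  CommentQuery {
    sort: Some(CommentSortType::Hot),
    local_user: Some(viewer),
    limit: Some(20),
    ..Default::default()
  }
  .list(pool)
  .await
}
```

Callers set only the fields they care about and fall back to `..Default::default()` for the rest, which is the same shape the updated tests later in this diff use.
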
+ let person_id_join = self.local_user.map(|l| l.person.id).unwrap_or(PersonId(-1)); + let local_user_id_join = self + .local_user + .map(|l| l.local_user.id) + .unwrap_or(LocalUserId(-1)); let mut query = comment::table .inner_join(person::table) @@ -298,12 +297,24 @@ impl<'a, 'b: 'a> CommentQuery<'a, 'b> { query = query.filter(comment_saved::comment_id.is_not_null()); } - if !self.show_deleted_and_removed.unwrap_or(true) { + let is_profile_view = self.is_profile_view.unwrap_or(false); + let is_creator = self.creator_id == self.local_user.map(|l| l.person.id); + // only show deleted comments to creator + if !is_creator { query = query.filter(comment::deleted.eq(false)); + } + + let is_admin = self.local_user.map(|l| l.person.admin).unwrap_or(false); + // only show removed comments to admin when viewing user profile + if !(is_profile_view && is_admin) { query = query.filter(comment::removed.eq(false)); } - if !self.local_user.map(|l| l.show_bot_accounts).unwrap_or(true) { + if !self + .local_user + .map(|l| l.local_user.show_bot_accounts) + .unwrap_or(true) + { query = query.filter(person::bot_account.eq(false)); }; @@ -349,7 +360,9 @@ impl<'a, 'b: 'a> CommentQuery<'a, 'b> { }; query = match self.sort.unwrap_or(CommentSortType::Hot) { - CommentSortType::Hot => query.then_order_by(comment_aggregates::hot_rank.desc()), + CommentSortType::Hot => query + .then_order_by(comment_aggregates::hot_rank.desc()) + .then_order_by(comment_aggregates::score.desc()), CommentSortType::New => query.then_order_by(comment::published.desc()), CommentSortType::Old => query.then_order_by(comment::published.asc()), CommentSortType::Top => query.order_by(comment_aggregates::score.desc()), @@ -386,17 +399,22 @@ impl JoinView for CommentView { #[cfg(test)] mod tests { - use crate::comment_view::{ - Comment, - CommentQuery, - CommentSortType, - CommentView, - Community, - DbPool, - LocalUser, - Person, - PersonBlock, - Post, + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + + use crate::{ + comment_view::{ + Comment, + CommentQuery, + CommentSortType, + CommentView, + Community, + DbPool, + Person, + PersonBlock, + Post, + }, + structs::LocalUserView, }; use lemmy_db_schema::{ aggregates::structs::CommentAggregates, @@ -408,7 +426,7 @@ mod tests { community::CommunityInsertForm, instance::Instance, language::Language, - local_user::LocalUserInsertForm, + local_user::{LocalUser, LocalUserInsertForm}, person::PersonInsertForm, person_block::PersonBlockForm, post::PostInsertForm, @@ -425,8 +443,7 @@ mod tests { inserted_comment_1: Comment, inserted_comment_2: Comment, inserted_post: Post, - inserted_person: Person, - inserted_local_user: LocalUser, + local_user_view: LocalUserView, inserted_person_2: Person, inserted_community: Community, } @@ -577,14 +594,18 @@ mod tests { let _inserted_comment_like = CommentLike::like(pool, &comment_like_form).await.unwrap(); + let local_user_view = LocalUserView { + local_user: inserted_local_user.clone(), + person: inserted_person.clone(), + counts: Default::default(), + }; Data { inserted_instance, inserted_comment_0, inserted_comment_1, inserted_comment_2, inserted_post, - inserted_person, - inserted_local_user, + local_user_view, inserted_person_2, inserted_community, } @@ -602,29 +623,29 @@ mod tests { let mut expected_comment_view_with_person = expected_comment_view_no_person.clone(); expected_comment_view_with_person.my_vote = Some(1); - let read_comment_views_no_person = CommentQuery::builder() - .pool(pool) - .sort(Some(CommentSortType::Old)) - 
.post_id(Some(data.inserted_post.id)) - .build() - .list() - .await - .unwrap(); + let read_comment_views_no_person = CommentQuery { + sort: (Some(CommentSortType::Old)), + post_id: (Some(data.inserted_post.id)), + ..Default::default() + } + .list(pool) + .await + .unwrap(); assert_eq!( expected_comment_view_no_person, read_comment_views_no_person[0] ); - let read_comment_views_with_person = CommentQuery::builder() - .pool(pool) - .sort(Some(CommentSortType::Old)) - .post_id(Some(data.inserted_post.id)) - .local_user(Some(&data.inserted_local_user)) - .build() - .list() - .await - .unwrap(); + let read_comment_views_with_person = CommentQuery { + sort: (Some(CommentSortType::Old)), + post_id: (Some(data.inserted_post.id)), + local_user: (Some(&data.local_user_view)), + ..Default::default() + } + .list(pool) + .await + .unwrap(); assert_eq!( expected_comment_view_with_person, @@ -637,7 +658,7 @@ mod tests { let read_comment_from_blocked_person = CommentView::read( pool, data.inserted_comment_1.id, - Some(data.inserted_person.id), + Some(data.local_user_view.person.id), ) .await .unwrap(); @@ -656,24 +677,24 @@ mod tests { let data = init_data(pool).await; let top_path = data.inserted_comment_0.path.clone(); - let read_comment_views_top_path = CommentQuery::builder() - .pool(pool) - .post_id(Some(data.inserted_post.id)) - .parent_path(Some(top_path)) - .build() - .list() - .await - .unwrap(); + let read_comment_views_top_path = CommentQuery { + post_id: (Some(data.inserted_post.id)), + parent_path: (Some(top_path)), + ..Default::default() + } + .list(pool) + .await + .unwrap(); let child_path = data.inserted_comment_1.path.clone(); - let read_comment_views_child_path = CommentQuery::builder() - .pool(pool) - .post_id(Some(data.inserted_post.id)) - .parent_path(Some(child_path)) - .build() - .list() - .await - .unwrap(); + let read_comment_views_child_path = CommentQuery { + post_id: (Some(data.inserted_post.id)), + parent_path: (Some(child_path)), + ..Default::default() + } + .list(pool) + .await + .unwrap(); // Make sure the comment parent-limited fetch is correct assert_eq!(6, read_comment_views_top_path.len()); @@ -687,14 +708,14 @@ mod tests { assert!(child_comments.contains(&data.inserted_comment_1)); assert!(!child_comments.contains(&data.inserted_comment_2)); - let read_comment_views_top_max_depth = CommentQuery::builder() - .pool(pool) - .post_id(Some(data.inserted_post.id)) - .max_depth(Some(1)) - .build() - .list() - .await - .unwrap(); + let read_comment_views_top_max_depth = CommentQuery { + post_id: (Some(data.inserted_post.id)), + max_depth: (Some(1)), + ..Default::default() + } + .list(pool) + .await + .unwrap(); // Make sure a depth limited one only has the top comment assert_eq!( @@ -704,16 +725,16 @@ mod tests { assert_eq!(1, read_comment_views_top_max_depth.len()); let child_path = data.inserted_comment_1.path.clone(); - let read_comment_views_parent_max_depth = CommentQuery::builder() - .pool(pool) - .post_id(Some(data.inserted_post.id)) - .parent_path(Some(child_path)) - .max_depth(Some(1)) - .sort(Some(CommentSortType::New)) - .build() - .list() - .await - .unwrap(); + let read_comment_views_parent_max_depth = CommentQuery { + post_id: (Some(data.inserted_post.id)), + parent_path: (Some(child_path)), + max_depth: (Some(1)), + sort: (Some(CommentSortType::New)), + ..Default::default() + } + .list(pool) + .await + .unwrap(); // Make sure a depth limited one, and given child comment 1, has 3 assert!(read_comment_views_parent_max_depth[2] @@ -734,13 +755,13 @@ mod tests { 
// by default, user has all languages enabled and should see all comments // (except from blocked user) - let all_languages = CommentQuery::builder() - .pool(pool) - .local_user(Some(&data.inserted_local_user)) - .build() - .list() - .await - .unwrap(); + let all_languages = CommentQuery { + local_user: (Some(&data.local_user_view)), + ..Default::default() + } + .list(pool) + .await + .unwrap(); assert_eq!(5, all_languages.len()); // change user lang to finnish, should only show one post in finnish and one undetermined @@ -748,16 +769,16 @@ mod tests { .await .unwrap() .unwrap(); - LocalUserLanguage::update(pool, vec![finnish_id], data.inserted_local_user.id) - .await - .unwrap(); - let finnish_comments = CommentQuery::builder() - .pool(pool) - .local_user(Some(&data.inserted_local_user)) - .build() - .list() + LocalUserLanguage::update(pool, vec![finnish_id], data.local_user_view.local_user.id) .await .unwrap(); + let finnish_comments = CommentQuery { + local_user: (Some(&data.local_user_view)), + ..Default::default() + } + .list(pool) + .await + .unwrap(); assert_eq!(2, finnish_comments.len()); let finnish_comment = finnish_comments .iter() @@ -769,25 +790,33 @@ mod tests { ); // now show all comments with undetermined language (which is the default value) - LocalUserLanguage::update(pool, vec![UNDETERMINED_ID], data.inserted_local_user.id) - .await - .unwrap(); - let undetermined_comment = CommentQuery::builder() - .pool(pool) - .local_user(Some(&data.inserted_local_user)) - .build() - .list() - .await - .unwrap(); + LocalUserLanguage::update( + pool, + vec![UNDETERMINED_ID], + data.local_user_view.local_user.id, + ) + .await + .unwrap(); + let undetermined_comment = CommentQuery { + local_user: (Some(&data.local_user_view)), + ..Default::default() + } + .list(pool) + .await + .unwrap(); assert_eq!(1, undetermined_comment.len()); cleanup(data, pool).await; } async fn cleanup(data: Data, pool: &mut DbPool<'_>) { - CommentLike::remove(pool, data.inserted_person.id, data.inserted_comment_0.id) - .await - .unwrap(); + CommentLike::remove( + pool, + data.local_user_view.person.id, + data.inserted_comment_0.id, + ) + .await + .unwrap(); Comment::delete(pool, data.inserted_comment_0.id) .await .unwrap(); @@ -798,7 +827,9 @@ mod tests { Community::delete(pool, data.inserted_community.id) .await .unwrap(); - Person::delete(pool, data.inserted_person.id).await.unwrap(); + Person::delete(pool, data.local_user_view.person.id) + .await + .unwrap(); Person::delete(pool, data.inserted_person_2.id) .await .unwrap(); @@ -820,7 +851,7 @@ mod tests { comment: Comment { id: data.inserted_comment_0.id, content: "Comment 0".into(), - creator_id: data.inserted_person.id, + creator_id: data.local_user_view.person.id, post_id: data.inserted_post.id, removed: false, deleted: false, @@ -833,12 +864,12 @@ mod tests { language_id: LanguageId(37), }, creator: Person { - id: data.inserted_person.id, + id: data.local_user_view.person.id, name: "timmy".into(), display_name: None, - published: data.inserted_person.published, + published: data.local_user_view.person.published, avatar: None, - actor_id: data.inserted_person.actor_id.clone(), + actor_id: data.local_user_view.person.actor_id.clone(), local: true, banned: false, deleted: false, @@ -847,19 +878,19 @@ mod tests { bio: None, banner: None, updated: None, - inbox_url: data.inserted_person.inbox_url.clone(), + inbox_url: data.local_user_view.person.inbox_url.clone(), shared_inbox_url: None, matrix_user_id: None, ban_expires: None, instance_id: 
data.inserted_instance.id, - private_key: data.inserted_person.private_key.clone(), - public_key: data.inserted_person.public_key.clone(), - last_refreshed_at: data.inserted_person.last_refreshed_at, + private_key: data.local_user_view.person.private_key.clone(), + public_key: data.local_user_view.person.public_key.clone(), + last_refreshed_at: data.local_user_view.person.last_refreshed_at, }, post: Post { id: data.inserted_post.id, name: data.inserted_post.name.clone(), - creator_id: data.inserted_person.id, + creator_id: data.local_user_view.person.id, url: None, body: None, published: data.inserted_post.published, diff --git a/crates/db_views/src/post_report_view.rs b/crates/db_views/src/post_report_view.rs index 1219e0db5..a53762e21 100644 --- a/crates/db_views/src/post_report_view.rs +++ b/crates/db_views/src/post_report_view.rs @@ -30,7 +30,6 @@ use lemmy_db_schema::{ traits::JoinView, utils::{get_conn, limit_and_offset, DbPool}, }; -use typed_builder::TypedBuilder; type PostReportViewTuple = ( PostReport, @@ -159,24 +158,21 @@ impl PostReportView { } } -#[derive(TypedBuilder)] -#[builder(field_defaults(default))] -pub struct PostReportQuery<'a, 'b: 'a> { - #[builder(!default)] - pool: &'a mut DbPool<'b>, - #[builder(!default)] - my_person_id: PersonId, - #[builder(!default)] - admin: bool, - community_id: Option, - page: Option, - limit: Option, - unresolved_only: Option, +#[derive(Default)] +pub struct PostReportQuery { + pub community_id: Option, + pub page: Option, + pub limit: Option, + pub unresolved_only: Option, } -impl<'a, 'b: 'a> PostReportQuery<'a, 'b> { - pub async fn list(self) -> Result, Error> { - let conn = &mut get_conn(self.pool).await?; +impl PostReportQuery { + pub async fn list( + self, + pool: &mut DbPool<'_>, + my_person: &Person, + ) -> Result, Error> { + let conn = &mut get_conn(pool).await?; let (person_alias_1, person_alias_2) = diesel::alias!(person as person1, person as person2); let mut query = post_report::table @@ -195,7 +191,7 @@ impl<'a, 'b: 'a> PostReportQuery<'a, 'b> { post_like::table.on( post::id .eq(post_like::post_id) - .and(post_like::person_id.eq(self.my_person_id)), + .and(post_like::person_id.eq(my_person.id)), ), ) .inner_join(post_aggregates::table.on(post_report::post_id.eq(post_aggregates::post_id))) @@ -231,13 +227,13 @@ impl<'a, 'b: 'a> PostReportQuery<'a, 'b> { .offset(offset); // If its not an admin, get only the ones you mod - let res = if !self.admin { + let res = if !my_person.admin { query .inner_join( community_moderator::table.on( community_moderator::community_id .eq(post::community_id) - .and(community_moderator::person_id.eq(self.my_person_id)), + .and(community_moderator::person_id.eq(my_person.id)), ), ) .load::(conn) @@ -269,6 +265,9 @@ impl JoinView for PostReportView { #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use crate::post_report_view::{PostReportQuery, PostReportView}; use lemmy_db_schema::{ aggregates::structs::PostAggregates, @@ -471,6 +470,8 @@ mod tests { featured_local: false, hot_rank: 1728, hot_rank_active: 1728, + community_id: inserted_post.community_id, + creator_id: inserted_post.creator_id, }, resolver: None, }; @@ -506,12 +507,8 @@ mod tests { }; // Do a batch read of timmys reports - let reports = PostReportQuery::builder() - .pool(pool) - .my_person_id(inserted_timmy.id) - .admin(false) - .build() - .list() + let reports = PostReportQuery::default() + .list(pool, &inserted_timmy) .await .unwrap(); @@ -580,15 +577,13 @@ mod tests { // Do a batch 
read of timmys reports // It should only show saras, which is unresolved - let reports_after_resolve = PostReportQuery::builder() - .pool(pool) - .my_person_id(inserted_timmy.id) - .admin(false) - .unresolved_only(Some(true)) - .build() - .list() - .await - .unwrap(); + let reports_after_resolve = PostReportQuery { + unresolved_only: (Some(true)), + ..Default::default() + } + .list(pool, &inserted_timmy) + .await + .unwrap(); assert_eq!(reports_after_resolve[0], expected_sara_report_view); // Make sure the counts are correct diff --git a/crates/db_views/src/post_view.rs b/crates/db_views/src/post_view.rs index 07433667a..d2f6ab759 100644 --- a/crates/db_views/src/post_view.rs +++ b/crates/db_views/src/post_view.rs @@ -1,4 +1,4 @@ -use crate::structs::PostView; +use crate::structs::{LocalUserView, PostView}; use diesel::{ debug_query, dsl::{now, IntervalDsl}, @@ -34,7 +34,6 @@ use lemmy_db_schema::{ }, source::{ community::{Community, CommunityFollower, CommunityPersonBan}, - local_user::LocalUser, person::Person, person_block::PersonBlock, post::{Post, PostRead, PostSaved}, @@ -45,7 +44,6 @@ use lemmy_db_schema::{ SortType, }; use tracing::debug; -use typed_builder::TypedBuilder; type PostViewTuple = ( Post, @@ -74,56 +72,56 @@ impl PostView { // The left join below will return None in this case let person_id_join = my_person_id.unwrap_or(PersonId(-1)); - let mut query = post::table - .find(post_id) + let mut query = post_aggregates::table + .filter(post_aggregates::post_id.eq(post_id)) .inner_join(person::table) .inner_join(community::table) .left_join( community_person_ban::table.on( - post::community_id + post_aggregates::community_id .eq(community_person_ban::community_id) - .and(community_person_ban::person_id.eq(post::creator_id)), + .and(community_person_ban::person_id.eq(post_aggregates::creator_id)), ), ) - .inner_join(post_aggregates::table) + .inner_join(post::table) .left_join( community_follower::table.on( - post::community_id + post_aggregates::community_id .eq(community_follower::community_id) .and(community_follower::person_id.eq(person_id_join)), ), ) .left_join( post_saved::table.on( - post::id + post_aggregates::post_id .eq(post_saved::post_id) .and(post_saved::person_id.eq(person_id_join)), ), ) .left_join( post_read::table.on( - post::id + post_aggregates::post_id .eq(post_read::post_id) .and(post_read::person_id.eq(person_id_join)), ), ) .left_join( person_block::table.on( - post::creator_id + post_aggregates::creator_id .eq(person_block::target_id) .and(person_block::person_id.eq(person_id_join)), ), ) .left_join( post_like::table.on( - post::id + post_aggregates::post_id .eq(post_like::post_id) .and(post_like::person_id.eq(person_id_join)), ), ) .left_join( person_post_aggregates::table.on( - post::id + post_aggregates::post_id .eq(person_post_aggregates::post_id) .and(person_post_aggregates::person_id.eq(person_id_join)), ), @@ -150,9 +148,18 @@ impl PostView { if !is_mod_or_admin.unwrap_or(false) { query = query .filter(community::removed.eq(false)) - .filter(community::deleted.eq(false)) .filter(post::removed.eq(false)) - .filter(post::deleted.eq(false)); + // users can see their own deleted posts + .filter( + community::deleted + .eq(false) + .or(post::creator_id.eq(person_id_join)), + ) + .filter( + post::deleted + .eq(false) + .or(post::creator_id.eq(person_id_join)), + ); } let ( @@ -193,89 +200,88 @@ impl PostView { } } -#[derive(TypedBuilder)] -#[builder(field_defaults(default))] -pub struct PostQuery<'a, 'b: 'a> { - #[builder(!default)] - pool: &'a mut 
DbPool<'b>, - listing_type: Option, - sort: Option, - creator_id: Option, - community_id: Option, - local_user: Option<&'a LocalUser>, - search_term: Option, - url_search: Option, - saved_only: Option, - /// Used to show deleted or removed posts for admins - is_mod_or_admin: Option, - page: Option, - limit: Option, +#[derive(Default)] +pub struct PostQuery<'a> { + pub listing_type: Option, + pub sort: Option, + pub creator_id: Option, + pub community_id: Option, + pub local_user: Option<&'a LocalUserView>, + pub search_term: Option, + pub url_search: Option, + pub saved_only: Option, + pub is_profile_view: Option, + pub page: Option, + pub limit: Option, } -impl<'a, 'b: 'a> PostQuery<'a, 'b> { - pub async fn list(self) -> Result, Error> { - let conn = &mut get_conn(self.pool).await?; +impl<'a> PostQuery<'a> { + pub async fn list(self, pool: &mut DbPool<'_>) -> Result, Error> { + let conn = &mut get_conn(pool).await?; // The left join below will return None in this case - let person_id_join = self.local_user.map(|l| l.person_id).unwrap_or(PersonId(-1)); - let local_user_id_join = self.local_user.map(|l| l.id).unwrap_or(LocalUserId(-1)); + let person_id_join = self.local_user.map(|l| l.person.id).unwrap_or(PersonId(-1)); + let local_user_id_join = self + .local_user + .map(|l| l.local_user.id) + .unwrap_or(LocalUserId(-1)); - let mut query = post::table + let mut query = post_aggregates::table .inner_join(person::table) + .inner_join(post::table) .inner_join(community::table) .left_join( community_person_ban::table.on( - post::community_id + post_aggregates::community_id .eq(community_person_ban::community_id) - .and(community_person_ban::person_id.eq(post::creator_id)), + .and(community_person_ban::person_id.eq(post_aggregates::creator_id)), ), ) - .inner_join(post_aggregates::table) .left_join( community_follower::table.on( - post::community_id + post_aggregates::community_id .eq(community_follower::community_id) .and(community_follower::person_id.eq(person_id_join)), ), ) .left_join( post_saved::table.on( - post::id + post_aggregates::post_id .eq(post_saved::post_id) .and(post_saved::person_id.eq(person_id_join)), ), ) .left_join( post_read::table.on( - post::id + post_aggregates::post_id .eq(post_read::post_id) .and(post_read::person_id.eq(person_id_join)), ), ) .left_join( person_block::table.on( - post::creator_id + post_aggregates::creator_id .eq(person_block::target_id) .and(person_block::person_id.eq(person_id_join)), ), ) .left_join( community_block::table.on( - post::community_id + post_aggregates::community_id .eq(community_block::community_id) .and(community_block::person_id.eq(person_id_join)), ), ) .left_join( post_like::table.on( - post::id + post_aggregates::post_id .eq(post_like::post_id) .and(post_like::person_id.eq(person_id_join)), ), ) .left_join( person_post_aggregates::table.on( - post::id + post_aggregates::post_id .eq(person_post_aggregates::post_id) .and(person_post_aggregates::person_id.eq(person_id_join)), ), @@ -305,26 +311,33 @@ impl<'a, 'b: 'a> PostQuery<'a, 'b> { )) .into_boxed(); - // Hide deleted and removed for non-admins or mods - // TODO This eventually needs to show posts where you are the creator - if !self.is_mod_or_admin.unwrap_or(false) { + let is_profile_view = self.is_profile_view.unwrap_or(false); + let is_creator = self.creator_id == self.local_user.map(|l| l.person.id); + // only show deleted posts to creator + if is_creator { + query = query + .filter(community::deleted.eq(false)) + .filter(post::deleted.eq(false)); + } + + let is_admin = 
self.local_user.map(|l| l.person.admin).unwrap_or(false); + // only show removed posts to admin when viewing user profile + if !(is_profile_view && is_admin) { query = query .filter(community::removed.eq(false)) - .filter(community::deleted.eq(false)) - .filter(post::removed.eq(false)) - .filter(post::deleted.eq(false)); + .filter(post::removed.eq(false)); } if self.community_id.is_none() { query = query.then_order_by(post_aggregates::featured_local.desc()); } else if let Some(community_id) = self.community_id { query = query - .filter(post::community_id.eq(community_id)) + .filter(post_aggregates::community_id.eq(community_id)) .then_order_by(post_aggregates::featured_community.desc()); } if let Some(creator_id) = self.creator_id { - query = query.filter(post::creator_id.eq(creator_id)); + query = query.filter(post_aggregates::creator_id.eq(creator_id)); } if let Some(listing_type) = self.listing_type { @@ -362,13 +375,21 @@ impl<'a, 'b: 'a> PostQuery<'a, 'b> { ); } - if !self.local_user.map(|l| l.show_nsfw).unwrap_or(false) { + if !self + .local_user + .map(|l| l.local_user.show_nsfw) + .unwrap_or(false) + { query = query .filter(post::nsfw.eq(false)) .filter(community::nsfw.eq(false)); }; - if !self.local_user.map(|l| l.show_bot_accounts).unwrap_or(true) { + if !self + .local_user + .map(|l| l.local_user.show_bot_accounts) + .unwrap_or(true) + { query = query.filter(person::bot_account.eq(false)); }; @@ -377,7 +398,11 @@ impl<'a, 'b: 'a> PostQuery<'a, 'b> { } // Only hide the read posts, if the saved_only is false. Otherwise ppl with the hide_read // setting wont be able to see saved posts. - else if !self.local_user.map(|l| l.show_read_posts).unwrap_or(true) { + else if !self + .local_user + .map(|l| l.local_user.show_read_posts) + .unwrap_or(true) + { query = query.filter(post_read::post_id.is_null()); } @@ -391,8 +416,12 @@ impl<'a, 'b: 'a> PostQuery<'a, 'b> { } query = match self.sort.unwrap_or(SortType::Hot) { - SortType::Active => query.then_order_by(post_aggregates::hot_rank_active.desc()), - SortType::Hot => query.then_order_by(post_aggregates::hot_rank.desc()), + SortType::Active => query + .then_order_by(post_aggregates::hot_rank_active.desc()) + .then_order_by(post_aggregates::published.desc()), + SortType::Hot => query + .then_order_by(post_aggregates::hot_rank.desc()) + .then_order_by(post_aggregates::published.desc()), SortType::New => query.then_order_by(post_aggregates::published.desc()), SortType::Old => query.then_order_by(post_aggregates::published.asc()), SortType::NewComments => query.then_order_by(post_aggregates::newest_comment_time.desc()), @@ -477,7 +506,13 @@ impl JoinView for PostView { #[cfg(test)] mod tests { - use crate::post_view::{PostQuery, PostView}; + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + + use crate::{ + post_view::{PostQuery, PostView}, + structs::LocalUserView, + }; use lemmy_db_schema::{ aggregates::structs::PostAggregates, impls::actor_language::UNDETERMINED_ID, @@ -502,8 +537,7 @@ mod tests { struct Data { inserted_instance: Instance, - inserted_person: Person, - inserted_local_user: LocalUser, + local_user_view: LocalUserView, inserted_blocked_person: Person, inserted_bot: Person, inserted_community: Community, @@ -592,11 +626,15 @@ mod tests { .build(); let _inserted_bot_post = Post::create(pool, &new_bot_post).await.unwrap(); + let local_user_view = LocalUserView { + local_user: inserted_local_user, + person: inserted_person, + counts: Default::default(), + }; Data { inserted_instance, - inserted_person, - 
inserted_local_user, + local_user_view, inserted_blocked_person, inserted_bot, inserted_community, @@ -609,30 +647,31 @@ mod tests { async fn post_listing_with_person() { let pool = &build_db_pool_for_tests().await; let pool = &mut pool.into(); - let data = init_data(pool).await; + let mut data = init_data(pool).await; let local_user_form = LocalUserUpdateForm::builder() .show_bot_accounts(Some(false)) .build(); let inserted_local_user = - LocalUser::update(pool, data.inserted_local_user.id, &local_user_form) + LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form) .await .unwrap(); + data.local_user_view.local_user = inserted_local_user; - let read_post_listing = PostQuery::builder() - .pool(pool) - .sort(Some(SortType::New)) - .community_id(Some(data.inserted_community.id)) - .local_user(Some(&inserted_local_user)) - .build() - .list() - .await - .unwrap(); + let read_post_listing = PostQuery { + sort: (Some(SortType::New)), + community_id: (Some(data.inserted_community.id)), + local_user: (Some(&data.local_user_view)), + ..Default::default() + } + .list(pool) + .await + .unwrap(); let post_listing_single_with_person = PostView::read( pool, data.inserted_post.id, - Some(data.inserted_person.id), + Some(data.local_user_view.person.id), None, ) .await @@ -654,19 +693,20 @@ mod tests { .show_bot_accounts(Some(true)) .build(); let inserted_local_user = - LocalUser::update(pool, data.inserted_local_user.id, &local_user_form) + LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form) .await .unwrap(); + data.local_user_view.local_user = inserted_local_user; - let post_listings_with_bots = PostQuery::builder() - .pool(pool) - .sort(Some(SortType::New)) - .community_id(Some(data.inserted_community.id)) - .local_user(Some(&inserted_local_user)) - .build() - .list() - .await - .unwrap(); + let post_listings_with_bots = PostQuery { + sort: (Some(SortType::New)), + community_id: (Some(data.inserted_community.id)), + local_user: (Some(&data.local_user_view)), + ..Default::default() + } + .list(pool) + .await + .unwrap(); // should include bot post which has "undetermined" language assert_eq!(2, post_listings_with_bots.len()); @@ -680,14 +720,14 @@ mod tests { let pool = &mut pool.into(); let data = init_data(pool).await; - let read_post_listing_multiple_no_person = PostQuery::builder() - .pool(pool) - .sort(Some(SortType::New)) - .community_id(Some(data.inserted_community.id)) - .build() - .list() - .await - .unwrap(); + let read_post_listing_multiple_no_person = PostQuery { + sort: (Some(SortType::New)), + community_id: (Some(data.inserted_community.id)), + ..Default::default() + } + .list(pool) + .await + .unwrap(); let read_post_listing_single_no_person = PostView::read(pool, data.inserted_post.id, None, None) @@ -719,20 +759,20 @@ mod tests { let data = init_data(pool).await; let community_block = CommunityBlockForm { - person_id: data.inserted_person.id, + person_id: data.local_user_view.person.id, community_id: data.inserted_community.id, }; CommunityBlock::block(pool, &community_block).await.unwrap(); - let read_post_listings_with_person_after_block = PostQuery::builder() - .pool(pool) - .sort(Some(SortType::New)) - .community_id(Some(data.inserted_community.id)) - .local_user(Some(&data.inserted_local_user)) - .build() - .list() - .await - .unwrap(); + let read_post_listings_with_person_after_block = PostQuery { + sort: (Some(SortType::New)), + community_id: (Some(data.inserted_community.id)), + local_user: (Some(&data.local_user_view)), + 
..Default::default() + } + .list(pool) + .await + .unwrap(); // Should be 0 posts after the community block assert_eq!(0, read_post_listings_with_person_after_block.len()); @@ -747,11 +787,11 @@ mod tests { async fn post_listing_like() { let pool = &build_db_pool_for_tests().await; let pool = &mut pool.into(); - let data = init_data(pool).await; + let mut data = init_data(pool).await; let post_like_form = PostLikeForm { post_id: data.inserted_post.id, - person_id: data.inserted_person.id, + person_id: data.local_user_view.person.id, score: 1, }; @@ -760,7 +800,7 @@ mod tests { let expected_post_like = PostLike { id: inserted_post_like.id, post_id: data.inserted_post.id, - person_id: data.inserted_person.id, + person_id: data.local_user_view.person.id, published: inserted_post_like.published, score: 1, }; @@ -769,7 +809,7 @@ mod tests { let post_listing_single_with_person = PostView::read( pool, data.inserted_post.id, - Some(data.inserted_person.id), + Some(data.local_user_view.person.id), None, ) .await @@ -785,26 +825,28 @@ mod tests { .show_bot_accounts(Some(false)) .build(); let inserted_local_user = - LocalUser::update(pool, data.inserted_local_user.id, &local_user_form) + LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form) .await .unwrap(); + data.local_user_view.local_user = inserted_local_user; - let read_post_listing = PostQuery::builder() - .pool(pool) - .sort(Some(SortType::New)) - .community_id(Some(data.inserted_community.id)) - .local_user(Some(&inserted_local_user)) - .build() - .list() - .await - .unwrap(); + let read_post_listing = PostQuery { + sort: (Some(SortType::New)), + community_id: (Some(data.inserted_community.id)), + local_user: (Some(&data.local_user_view)), + ..Default::default() + } + .list(pool) + .await + .unwrap(); assert_eq!(1, read_post_listing.len()); assert_eq!(expected_post_with_upvote, read_post_listing[0]); - let like_removed = PostLike::remove(pool, data.inserted_person.id, data.inserted_post.id) - .await - .unwrap(); + let like_removed = + PostLike::remove(pool, data.local_user_view.person.id, data.inserted_post.id) + .await + .unwrap(); assert_eq!(1, like_removed); cleanup(data, pool).await; } @@ -822,21 +864,21 @@ mod tests { .unwrap(); let post_spanish = PostInsertForm::builder() .name("asffgdsc".to_string()) - .creator_id(data.inserted_person.id) + .creator_id(data.local_user_view.person.id) .community_id(data.inserted_community.id) .language_id(Some(spanish_id)) .build(); Post::create(pool, &post_spanish).await.unwrap(); - let post_listings_all = PostQuery::builder() - .pool(pool) - .sort(Some(SortType::New)) - .local_user(Some(&data.inserted_local_user)) - .build() - .list() - .await - .unwrap(); + let post_listings_all = PostQuery { + sort: (Some(SortType::New)), + local_user: (Some(&data.local_user_view)), + ..Default::default() + } + .list(pool) + .await + .unwrap(); // no language filters specified, all posts should be returned assert_eq!(3, post_listings_all.len()); @@ -845,18 +887,18 @@ mod tests { .await .unwrap() .unwrap(); - LocalUserLanguage::update(pool, vec![french_id], data.inserted_local_user.id) + LocalUserLanguage::update(pool, vec![french_id], data.local_user_view.local_user.id) .await .unwrap(); - let post_listing_french = PostQuery::builder() - .pool(pool) - .sort(Some(SortType::New)) - .local_user(Some(&data.inserted_local_user)) - .build() - .list() - .await - .unwrap(); + let post_listing_french = PostQuery { + sort: (Some(SortType::New)), + local_user: (Some(&data.local_user_view)), + 
..Default::default() + } + .list(pool) + .await + .unwrap(); // only one post in french and one undetermined should be returned assert_eq!(2, post_listing_french.len()); @@ -867,18 +909,18 @@ mod tests { LocalUserLanguage::update( pool, vec![french_id, UNDETERMINED_ID], - data.inserted_local_user.id, + data.local_user_view.local_user.id, ) .await .unwrap(); - let post_listings_french_und = PostQuery::builder() - .pool(pool) - .sort(Some(SortType::New)) - .local_user(Some(&data.inserted_local_user)) - .build() - .list() - .await - .unwrap(); + let post_listings_french_und = PostQuery { + sort: (Some(SortType::New)), + local_user: (Some(&data.local_user_view)), + ..Default::default() + } + .list(pool) + .await + .unwrap(); // french post and undetermined language post should be returned assert_eq!(2, post_listings_french_und.len()); @@ -891,6 +933,49 @@ mod tests { cleanup(data, pool).await; } + #[tokio::test] + #[serial] + async fn post_listings_removed() { + let pool = &build_db_pool_for_tests().await; + let pool = &mut pool.into(); + let mut data = init_data(pool).await; + + // Remove the post + Post::update( + pool, + data.inserted_post.id, + &PostUpdateForm::builder().removed(Some(true)).build(), + ) + .await + .unwrap(); + + // Make sure you don't see the removed post in the results + let post_listings_no_admin = PostQuery { + sort: Some(SortType::New), + local_user: Some(&data.local_user_view), + ..Default::default() + } + .list(pool) + .await + .unwrap(); + assert_eq!(1, post_listings_no_admin.len()); + + // Removed post is shown to admins on profile page + data.local_user_view.person.admin = true; + let post_listings_is_admin = PostQuery { + sort: Some(SortType::New), + local_user: Some(&data.local_user_view), + is_profile_view: Some(true), + ..Default::default() + } + .list(pool) + .await + .unwrap(); + assert_eq!(2, post_listings_is_admin.len()); + + cleanup(data, pool).await; + } + #[tokio::test] #[serial] async fn post_listings_deleted() { @@ -908,30 +993,33 @@ mod tests { .unwrap(); // Make sure you don't see the deleted post in the results - let post_listings_no_admin = PostQuery::builder() - .pool(pool) - .sort(Some(SortType::New)) - .local_user(Some(&data.inserted_local_user)) - .is_mod_or_admin(Some(false)) - .build() - .list() - .await - .unwrap(); + let post_listings_no_creator = PostQuery { + sort: Some(SortType::New), + ..Default::default() + } + .list(pool) + .await + .unwrap(); + let not_contains_deleted = post_listings_no_creator + .iter() + .map(|p| p.post.id) + .all(|p| p != data.inserted_post.id); + assert!(not_contains_deleted); - assert_eq!(1, post_listings_no_admin.len()); - - // Make sure they see both - let post_listings_is_admin = PostQuery::builder() - .pool(pool) - .sort(Some(SortType::New)) - .local_user(Some(&data.inserted_local_user)) - .is_mod_or_admin(Some(true)) - .build() - .list() - .await - .unwrap(); - - assert_eq!(2, post_listings_is_admin.len()); + // Deleted post is shown to creator + let post_listings_is_creator = PostQuery { + sort: Some(SortType::New), + local_user: Some(&data.local_user_view), + ..Default::default() + } + .list(pool) + .await + .unwrap(); + let contains_deleted = post_listings_is_creator + .iter() + .map(|p| p.post.id) + .any(|p| p == data.inserted_post.id); + assert!(contains_deleted); cleanup(data, pool).await; } @@ -941,7 +1029,9 @@ mod tests { Community::delete(pool, data.inserted_community.id) .await .unwrap(); - Person::delete(pool, data.inserted_person.id).await.unwrap(); + Person::delete(pool, 
data.local_user_view.person.id) + .await + .unwrap(); Person::delete(pool, data.inserted_bot.id).await.unwrap(); Person::delete(pool, data.inserted_blocked_person.id) .await @@ -954,7 +1044,7 @@ mod tests { async fn expected_post_view(data: &Data, pool: &mut DbPool<'_>) -> PostView { let (inserted_person, inserted_community, inserted_post) = ( - &data.inserted_person, + &data.local_user_view.person, &data.inserted_community, &data.inserted_post, ); @@ -1051,6 +1141,8 @@ mod tests { featured_local: false, hot_rank: 1728, hot_rank_active: 1728, + community_id: inserted_post.community_id, + creator_id: inserted_post.creator_id, }, subscribed: SubscribedType::NotSubscribed, read: false, diff --git a/crates/db_views/src/private_message_report_view.rs b/crates/db_views/src/private_message_report_view.rs index 74209f33a..7ceca271a 100644 --- a/crates/db_views/src/private_message_report_view.rs +++ b/crates/db_views/src/private_message_report_view.rs @@ -12,7 +12,6 @@ use lemmy_db_schema::{ traits::JoinView, utils::{get_conn, limit_and_offset, DbPool}, }; -use typed_builder::TypedBuilder; type PrivateMessageReportViewTuple = ( PrivateMessageReport, @@ -81,19 +80,16 @@ impl PrivateMessageReportView { } } -#[derive(TypedBuilder)] -#[builder(field_defaults(default))] -pub struct PrivateMessageReportQuery<'a, 'b: 'a> { - #[builder(!default)] - pool: &'a mut DbPool<'b>, - page: Option, - limit: Option, - unresolved_only: Option, +#[derive(Default)] +pub struct PrivateMessageReportQuery { + pub page: Option, + pub limit: Option, + pub unresolved_only: Option, } -impl<'a, 'b: 'a> PrivateMessageReportQuery<'a, 'b> { - pub async fn list(self) -> Result, Error> { - let conn = &mut get_conn(self.pool).await?; +impl PrivateMessageReportQuery { + pub async fn list(self, pool: &mut DbPool<'_>) -> Result, Error> { + let conn = &mut get_conn(pool).await?; let (person_alias_1, person_alias_2) = diesel::alias!(person as person1, person as person2); let mut query = private_message_report::table @@ -152,6 +148,9 @@ impl JoinView for PrivateMessageReportView { #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use crate::private_message_report_view::PrivateMessageReportQuery; use lemmy_db_schema::{ source::{ @@ -208,10 +207,8 @@ mod tests { .await .unwrap(); - let reports = PrivateMessageReportQuery::builder() - .pool(pool) - .build() - .list() + let reports = PrivateMessageReportQuery::default() + .list(pool) .await .unwrap(); assert_eq!(1, reports.len()); @@ -233,13 +230,13 @@ mod tests { .await .unwrap(); - let reports = PrivateMessageReportQuery::builder() - .pool(pool) - .unresolved_only(Some(false)) - .build() - .list() - .await - .unwrap(); + let reports = PrivateMessageReportQuery { + unresolved_only: (Some(false)), + ..Default::default() + } + .list(pool) + .await + .unwrap(); assert_eq!(1, reports.len()); assert!(reports[0].private_message_report.resolved); assert!(reports[0].resolver.is_some()); diff --git a/crates/db_views/src/private_message_view.rs b/crates/db_views/src/private_message_view.rs index 3d8fd42d0..863db8125 100644 --- a/crates/db_views/src/private_message_view.rs +++ b/crates/db_views/src/private_message_view.rs @@ -17,7 +17,6 @@ use lemmy_db_schema::{ utils::{get_conn, limit_and_offset, DbPool}, }; use tracing::debug; -use typed_builder::TypedBuilder; type PrivateMessageViewTuple = (PrivateMessage, Person, Person); @@ -68,21 +67,20 @@ impl PrivateMessageView { } } -#[derive(TypedBuilder)] -#[builder(field_defaults(default))] -pub struct 
PrivateMessageQuery<'a, 'b: 'a> { - #[builder(!default)] - pool: &'a mut DbPool<'b>, - #[builder(!default)] - recipient_id: PersonId, - unread_only: Option, - page: Option, - limit: Option, +#[derive(Default)] +pub struct PrivateMessageQuery { + pub unread_only: Option, + pub page: Option, + pub limit: Option, } -impl<'a, 'b: 'a> PrivateMessageQuery<'a, 'b> { - pub async fn list(self) -> Result, Error> { - let conn = &mut get_conn(self.pool).await?; +impl PrivateMessageQuery { + pub async fn list( + self, + pool: &mut DbPool<'_>, + recipient_id: PersonId, + ) -> Result, Error> { + let conn = &mut get_conn(pool).await?; let person_alias_1 = diesel::alias!(person as person1); let mut query = private_message::table @@ -101,14 +99,14 @@ impl<'a, 'b: 'a> PrivateMessageQuery<'a, 'b> { if self.unread_only.unwrap_or(false) { query = query .filter(private_message::read.eq(false)) - .filter(private_message::recipient_id.eq(self.recipient_id)); + .filter(private_message::recipient_id.eq(recipient_id)); } // Otherwise, I want the ALL view to show both sent and received else { query = query.filter( private_message::recipient_id - .eq(self.recipient_id) - .or(private_message::creator_id.eq(self.recipient_id)), + .eq(recipient_id) + .or(private_message::creator_id.eq(recipient_id)), ) } diff --git a/crates/db_views/src/registration_application_view.rs b/crates/db_views/src/registration_application_view.rs index ad6a7e9d3..106e41e43 100644 --- a/crates/db_views/src/registration_application_view.rs +++ b/crates/db_views/src/registration_application_view.rs @@ -18,7 +18,6 @@ use lemmy_db_schema::{ traits::JoinView, utils::{get_conn, limit_and_offset, DbPool}, }; -use typed_builder::TypedBuilder; type RegistrationApplicationViewTuple = (RegistrationApplication, LocalUser, Person, Option); @@ -89,20 +88,20 @@ impl RegistrationApplicationView { } } -#[derive(TypedBuilder)] -#[builder(field_defaults(default))] -pub struct RegistrationApplicationQuery<'a, 'b: 'a> { - #[builder(!default)] - pool: &'a mut DbPool<'b>, - unread_only: Option, - verified_email_only: Option, - page: Option, - limit: Option, +#[derive(Default)] +pub struct RegistrationApplicationQuery { + pub unread_only: Option, + pub verified_email_only: Option, + pub page: Option, + pub limit: Option, } -impl<'a, 'b: 'a> RegistrationApplicationQuery<'a, 'b> { - pub async fn list(self) -> Result, Error> { - let conn = &mut get_conn(self.pool).await?; +impl RegistrationApplicationQuery { + pub async fn list( + self, + pool: &mut DbPool<'_>, + ) -> Result, Error> { + let conn = &mut get_conn(pool).await?; let person_alias_1 = diesel::alias!(person as person1); let mut query = registration_application::table @@ -161,6 +160,9 @@ impl JoinView for RegistrationApplicationView { #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use crate::registration_application_view::{ RegistrationApplicationQuery, RegistrationApplicationView, @@ -295,6 +297,7 @@ mod tests { totp_2fa_url: inserted_sara_local_user.totp_2fa_url, password_encrypted: inserted_sara_local_user.password_encrypted, open_links_in_new_tab: inserted_sara_local_user.open_links_in_new_tab, + infinite_scroll_enabled: inserted_sara_local_user.infinite_scroll_enabled, }, creator: Person { id: inserted_sara_person.id, @@ -326,13 +329,13 @@ mod tests { assert_eq!(read_sara_app_view, expected_sara_app_view); // Do a batch read of the applications - let apps = RegistrationApplicationQuery::builder() - .pool(pool) - .unread_only(Some(true)) - .build() - .list() - 
.await - .unwrap(); + let apps = RegistrationApplicationQuery { + unread_only: (Some(true)), + ..Default::default() + } + .list(pool) + .await + .unwrap(); assert_eq!( apps, @@ -402,13 +405,13 @@ mod tests { // Do a batch read of apps again // It should show only jessicas which is unresolved - let apps_after_resolve = RegistrationApplicationQuery::builder() - .pool(pool) - .unread_only(Some(true)) - .build() - .list() - .await - .unwrap(); + let apps_after_resolve = RegistrationApplicationQuery { + unread_only: (Some(true)), + ..Default::default() + } + .list(pool) + .await + .unwrap(); assert_eq!(apps_after_resolve, vec![read_jess_app_view]); // Make sure the counts are correct @@ -418,10 +421,8 @@ mod tests { assert_eq!(unread_count_after_approve, 1); // Make sure the not undenied_only has all the apps - let all_apps = RegistrationApplicationQuery::builder() - .pool(pool) - .build() - .list() + let all_apps = RegistrationApplicationQuery::default() + .list(pool) .await .unwrap(); assert_eq!(all_apps.len(), 2); diff --git a/crates/db_views_actor/Cargo.toml b/crates/db_views_actor/Cargo.toml index e977ca958..069013d71 100644 --- a/crates/db_views_actor/Cargo.toml +++ b/crates/db_views_actor/Cargo.toml @@ -16,9 +16,15 @@ full = ["lemmy_db_schema/full", "diesel", "diesel-async", "ts-rs"] [dependencies] lemmy_db_schema = { workspace = true } -diesel = { workspace = true, features = ["postgres","chrono","serde_json"], optional = true } -diesel-async = { workspace = true, features = ["postgres", "deadpool"], optional = true } +diesel = { workspace = true, features = [ + "postgres", + "chrono", + "serde_json", +], optional = true } +diesel-async = { workspace = true, features = [ + "postgres", + "deadpool", +], optional = true } serde = { workspace = true } serde_with = { workspace = true } -typed-builder = { workspace = true } -ts-rs = { workspace = true, optional = true } +ts-rs = { workspace = true, optional = true } diff --git a/crates/db_views_actor/src/comment_reply_view.rs b/crates/db_views_actor/src/comment_reply_view.rs index 08cc5a451..4d7a8eac4 100644 --- a/crates/db_views_actor/src/comment_reply_view.rs +++ b/crates/db_views_actor/src/comment_reply_view.rs @@ -36,7 +36,6 @@ use lemmy_db_schema::{ utils::{get_conn, limit_and_offset, DbPool}, CommentSortType, }; -use typed_builder::TypedBuilder; type CommentReplyViewTuple = ( CommentReply, @@ -175,23 +174,20 @@ impl CommentReplyView { } } -#[derive(TypedBuilder)] -#[builder(field_defaults(default))] -pub struct CommentReplyQuery<'a, 'b: 'a> { - #[builder(!default)] - pool: &'a mut DbPool<'b>, - my_person_id: Option, - recipient_id: Option, - sort: Option, - unread_only: Option, - show_bot_accounts: Option, - page: Option, - limit: Option, +#[derive(Default)] +pub struct CommentReplyQuery { + pub my_person_id: Option, + pub recipient_id: Option, + pub sort: Option, + pub unread_only: Option, + pub show_bot_accounts: Option, + pub page: Option, + pub limit: Option, } -impl<'a, 'b: 'a> CommentReplyQuery<'a, 'b> { - pub async fn list(self) -> Result, Error> { - let conn = &mut get_conn(self.pool).await?; +impl CommentReplyQuery { + pub async fn list(self, pool: &mut DbPool<'_>) -> Result, Error> { + let conn = &mut get_conn(pool).await?; let person_alias_1 = diesel::alias!(person as person1); diff --git a/crates/db_views_actor/src/community_view.rs b/crates/db_views_actor/src/community_view.rs index 74aba52f2..64dc09090 100644 --- a/crates/db_views_actor/src/community_view.rs +++ b/crates/db_views_actor/src/community_view.rs @@ -23,7 +23,6 
@@ use lemmy_db_schema::{ ListingType, SortType, }; -use typed_builder::TypedBuilder; type CommunityViewTuple = ( Community, @@ -100,26 +99,23 @@ impl CommunityView { } } -#[derive(TypedBuilder)] -#[builder(field_defaults(default))] -pub struct CommunityQuery<'a, 'b: 'a> { - #[builder(!default)] - pool: &'a mut DbPool<'b>, - listing_type: Option, - sort: Option, - local_user: Option<&'a LocalUser>, - search_term: Option, - is_mod_or_admin: Option, - show_nsfw: Option, - page: Option, - limit: Option, +#[derive(Default)] +pub struct CommunityQuery<'a> { + pub listing_type: Option, + pub sort: Option, + pub local_user: Option<&'a LocalUser>, + pub search_term: Option, + pub is_mod_or_admin: Option, + pub show_nsfw: Option, + pub page: Option, + pub limit: Option, } -impl<'a, 'b: 'a> CommunityQuery<'a, 'b> { - pub async fn list(self) -> Result, Error> { +impl<'a> CommunityQuery<'a> { + pub async fn list(self, pool: &mut DbPool<'_>) -> Result, Error> { use SortType::*; - let conn = &mut get_conn(self.pool).await?; + let conn = &mut get_conn(pool).await?; // The left join below will return None in this case let person_id_join = self.local_user.map(|l| l.person_id).unwrap_or(PersonId(-1)); diff --git a/crates/db_views_actor/src/person_mention_view.rs b/crates/db_views_actor/src/person_mention_view.rs index caa911608..3e142254a 100644 --- a/crates/db_views_actor/src/person_mention_view.rs +++ b/crates/db_views_actor/src/person_mention_view.rs @@ -37,7 +37,6 @@ use lemmy_db_schema::{ utils::{get_conn, limit_and_offset, DbPool}, CommentSortType, }; -use typed_builder::TypedBuilder; type PersonMentionViewTuple = ( PersonMention, @@ -175,23 +174,20 @@ impl PersonMentionView { } } -#[derive(TypedBuilder)] -#[builder(field_defaults(default))] -pub struct PersonMentionQuery<'a, 'b: 'a> { - #[builder(!default)] - pool: &'a mut DbPool<'b>, - my_person_id: Option, - recipient_id: Option, - sort: Option, - unread_only: Option, - show_bot_accounts: Option, - page: Option, - limit: Option, +#[derive(Default)] +pub struct PersonMentionQuery { + pub my_person_id: Option, + pub recipient_id: Option, + pub sort: Option, + pub unread_only: Option, + pub show_bot_accounts: Option, + pub page: Option, + pub limit: Option, } -impl<'a, 'b: 'a> PersonMentionQuery<'a, 'b> { - pub async fn list(self) -> Result, Error> { - let conn = &mut get_conn(self.pool).await?; +impl PersonMentionQuery { + pub async fn list(self, pool: &mut DbPool<'_>) -> Result, Error> { + let conn = &mut get_conn(pool).await?; let person_alias_1 = diesel::alias!(person as person1); diff --git a/crates/db_views_actor/src/person_view.rs b/crates/db_views_actor/src/person_view.rs index 3aee145c9..e6baa1fc6 100644 --- a/crates/db_views_actor/src/person_view.rs +++ b/crates/db_views_actor/src/person_view.rs @@ -19,7 +19,6 @@ use lemmy_db_schema::{ SortType, }; use std::iter::Iterator; -use typed_builder::TypedBuilder; type PersonViewTuple = (Person, PersonAggregates); @@ -79,20 +78,17 @@ impl PersonView { } } -#[derive(TypedBuilder)] -#[builder(field_defaults(default))] -pub struct PersonQuery<'a, 'b: 'a> { - #[builder(!default)] - pool: &'a mut DbPool<'b>, - sort: Option, - search_term: Option, - page: Option, - limit: Option, +#[derive(Default)] +pub struct PersonQuery { + pub sort: Option, + pub search_term: Option, + pub page: Option, + pub limit: Option, } -impl<'a, 'b: 'a> PersonQuery<'a, 'b> { - pub async fn list(self) -> Result, Error> { - let conn = &mut get_conn(self.pool).await?; +impl PersonQuery { + pub async fn list(self, pool: &mut 
DbPool<'_>) -> Result, Error> { + let conn = &mut get_conn(pool).await?; let mut query = person::table .inner_join(person_aggregates::table) .select((person::all_columns, person_aggregates::all_columns)) diff --git a/crates/db_views_moderator/Cargo.toml b/crates/db_views_moderator/Cargo.toml index 4420feeab..7efa922e6 100644 --- a/crates/db_views_moderator/Cargo.toml +++ b/crates/db_views_moderator/Cargo.toml @@ -16,8 +16,15 @@ full = ["lemmy_db_schema/full", "diesel", "diesel-async", "ts-rs"] [dependencies] lemmy_db_schema = { workspace = true } -diesel = { workspace = true, features = ["postgres","chrono","serde_json"], optional = true } -diesel-async = { workspace = true, features = ["postgres", "deadpool"], optional = true } +diesel = { workspace = true, features = [ + "postgres", + "chrono", + "serde_json", +], optional = true } +diesel-async = { workspace = true, features = [ + "postgres", + "deadpool", +], optional = true } serde = { workspace = true } serde_with = { workspace = true } -ts-rs = { workspace = true, optional = true } +ts-rs = { workspace = true, optional = true } diff --git a/crates/routes/src/feeds.rs b/crates/routes/src/feeds.rs index 8429f8606..3abd8eed0 100644 --- a/crates/routes/src/feeds.rs +++ b/crates/routes/src/feeds.rs @@ -13,14 +13,19 @@ use lemmy_db_schema::{ }; use lemmy_db_views::{ post_view::PostQuery, - structs::{PostView, SiteView}, + structs::{LocalUserView, PostView, SiteView}, }; use lemmy_db_views_actor::{ comment_reply_view::CommentReplyQuery, person_mention_view::PersonMentionQuery, structs::{CommentReplyView, PersonMentionView}, }; -use lemmy_utils::{claims::Claims, error::LemmyError, utils::markdown::markdown_to_html}; +use lemmy_utils::{ + cache_header::cache_1hour, + claims::Claims, + error::LemmyError, + utils::markdown::markdown_to_html, +}; use once_cell::sync::Lazy; use rss::{ extension::dublincore::DublinCoreExtensionBuilder, @@ -65,10 +70,15 @@ enum RequestType { } pub fn config(cfg: &mut web::ServiceConfig) { - cfg - .route("/feeds/{type}/{name}.xml", web::get().to(get_feed)) - .route("/feeds/all.xml", web::get().to(get_all_feed)) - .route("/feeds/local.xml", web::get().to(get_local_feed)); + cfg.service( + web::scope("/feeds") + .route("/{type}/{name}.xml", web::get().to(get_feed)) + .route("/all.xml", web::get().to(get_all_feed).wrap(cache_1hour())) + .route( + "/local.xml", + web::get().to(get_local_feed).wrap(cache_1hour()), + ), + ); } static RSS_NAMESPACE: Lazy> = Lazy::new(|| { @@ -124,15 +134,15 @@ async fn get_feed_data( ) -> Result { let site_view = SiteView::read_local(&mut context.pool()).await?; - let posts = PostQuery::builder() - .pool(&mut context.pool()) - .listing_type(Some(listing_type)) - .sort(Some(sort_type)) - .limit(Some(limit)) - .page(Some(page)) - .build() - .list() - .await?; + let posts = PostQuery { + listing_type: (Some(listing_type)), + sort: (Some(sort_type)), + limit: (Some(limit)), + page: (Some(page)), + ..Default::default() + } + .list(&mut context.pool()) + .await?; let items = create_post_items(posts, &context.settings().get_protocol_and_hostname())?; @@ -243,16 +253,16 @@ async fn get_feed_user( let site_view = SiteView::read_local(pool).await?; let person = Person::read_from_name(pool, user_name, false).await?; - let posts = PostQuery::builder() - .pool(pool) - .listing_type(Some(ListingType::All)) - .sort(Some(*sort_type)) - .creator_id(Some(person.id)) - .limit(Some(*limit)) - .page(Some(*page)) - .build() - .list() - .await?; + let posts = PostQuery { + listing_type: 
(Some(ListingType::All)), + sort: (Some(*sort_type)), + creator_id: (Some(person.id)), + limit: (Some(*limit)), + page: (Some(*page)), + ..Default::default() + } + .list(pool) + .await?; let items = create_post_items(posts, protocol_and_hostname)?; @@ -278,15 +288,15 @@ async fn get_feed_community( let site_view = SiteView::read_local(pool).await?; let community = Community::read_from_name(pool, community_name, false).await?; - let posts = PostQuery::builder() - .pool(pool) - .sort(Some(*sort_type)) - .community_id(Some(community.id)) - .limit(Some(*limit)) - .page(Some(*page)) - .build() - .list() - .await?; + let posts = PostQuery { + sort: (Some(*sort_type)), + community_id: (Some(community.id)), + limit: (Some(*limit)), + page: (Some(*page)), + ..Default::default() + } + .list(pool) + .await?; let items = create_post_items(posts, protocol_and_hostname)?; @@ -316,18 +326,18 @@ async fn get_feed_front( ) -> Result { let site_view = SiteView::read_local(pool).await?; let local_user_id = LocalUserId(Claims::decode(jwt, jwt_secret)?.claims.sub); - let local_user = LocalUser::read(pool, local_user_id).await?; + let local_user = LocalUserView::read(pool, local_user_id).await?; - let posts = PostQuery::builder() - .pool(pool) - .listing_type(Some(ListingType::Subscribed)) - .local_user(Some(&local_user)) - .sort(Some(*sort_type)) - .limit(Some(*limit)) - .page(Some(*page)) - .build() - .list() - .await?; + let posts = PostQuery { + listing_type: (Some(ListingType::Subscribed)), + local_user: (Some(&local_user)), + sort: (Some(*sort_type)), + limit: (Some(*limit)), + page: (Some(*page)), + ..Default::default() + } + .list(pool) + .await?; let items = create_post_items(posts, protocol_and_hostname)?; @@ -360,27 +370,27 @@ async fn get_feed_inbox( let sort = CommentSortType::New; - let replies = CommentReplyQuery::builder() - .pool(pool) - .recipient_id(Some(person_id)) - .my_person_id(Some(person_id)) - .show_bot_accounts(Some(show_bot_accounts)) - .sort(Some(sort)) - .limit(Some(RSS_FETCH_LIMIT)) - .build() - .list() - .await?; + let replies = CommentReplyQuery { + recipient_id: (Some(person_id)), + my_person_id: (Some(person_id)), + show_bot_accounts: (Some(show_bot_accounts)), + sort: (Some(sort)), + limit: (Some(RSS_FETCH_LIMIT)), + ..Default::default() + } + .list(pool) + .await?; - let mentions = PersonMentionQuery::builder() - .pool(pool) - .recipient_id(Some(person_id)) - .my_person_id(Some(person_id)) - .show_bot_accounts(Some(show_bot_accounts)) - .sort(Some(sort)) - .limit(Some(RSS_FETCH_LIMIT)) - .build() - .list() - .await?; + let mentions = PersonMentionQuery { + recipient_id: (Some(person_id)), + my_person_id: (Some(person_id)), + show_bot_accounts: (Some(show_bot_accounts)), + sort: (Some(sort)), + limit: (Some(RSS_FETCH_LIMIT)), + ..Default::default() + } + .list(pool) + .await?; let items = create_reply_and_mention_items(replies, mentions, protocol_and_hostname)?; diff --git a/crates/routes/src/nodeinfo.rs b/crates/routes/src/nodeinfo.rs index ef6544622..f9df94122 100644 --- a/crates/routes/src/nodeinfo.rs +++ b/crates/routes/src/nodeinfo.rs @@ -3,14 +3,24 @@ use anyhow::anyhow; use lemmy_api_common::context::LemmyContext; use lemmy_db_schema::RegistrationMode; use lemmy_db_views::structs::SiteView; -use lemmy_utils::{error::LemmyError, version}; +use lemmy_utils::{ + cache_header::{cache_1hour, cache_3days}, + error::LemmyError, + version, +}; use serde::{Deserialize, Serialize}; use url::Url; pub fn config(cfg: &mut web::ServiceConfig) { cfg - .route("/nodeinfo/2.0.json", 
web::get().to(node_info)) - .route("/.well-known/nodeinfo", web::get().to(node_info_well_known)); + .route( + "/nodeinfo/2.0.json", + web::get().to(node_info).wrap(cache_1hour()), + ) + .route( + "/.well-known/nodeinfo", + web::get().to(node_info_well_known).wrap(cache_3days()), + ); } async fn node_info_well_known( diff --git a/crates/routes/src/webfinger.rs b/crates/routes/src/webfinger.rs index 72adc9502..e3a0a5615 100644 --- a/crates/routes/src/webfinger.rs +++ b/crates/routes/src/webfinger.rs @@ -8,7 +8,7 @@ use lemmy_db_schema::{ source::{community::Community, person::Person}, traits::ApubActor, }; -use lemmy_utils::error::LemmyError; +use lemmy_utils::{cache_header::cache_3days, error::LemmyError}; use serde::Deserialize; use std::collections::HashMap; use url::Url; @@ -21,7 +21,7 @@ struct Params { pub fn config(cfg: &mut web::ServiceConfig) { cfg.route( ".well-known/webfinger", - web::get().to(get_webfinger_response), + web::get().to(get_webfinger_response).wrap(cache_3days()), ); } diff --git a/crates/utils/Cargo.toml b/crates/utils/Cargo.toml index b97ce8bdc..9cafd0c11 100644 --- a/crates/utils/Cargo.toml +++ b/crates/utils/Cargo.toml @@ -55,4 +55,4 @@ enum-map = "2.6" reqwest = { workspace = true } [build-dependencies] -rosetta-build = "0.1.3" +rosetta-build = { version = "0.1.3", default-features = false } diff --git a/crates/utils/src/cache_header.rs b/crates/utils/src/cache_header.rs new file mode 100644 index 000000000..042c943a7 --- /dev/null +++ b/crates/utils/src/cache_header.rs @@ -0,0 +1,22 @@ +use actix_web::middleware::DefaultHeaders; + +/// Adds a cache header to requests +/// +/// Common cache amounts are: +/// * 1 hour = 60s * 60m = `3600` seconds +/// * 3 days = 60s * 60m * 24h * 3d = `259200` seconds +/// +/// Mastodon & other activitypub server defaults to 3d +pub fn cache_header(seconds: usize) -> DefaultHeaders { + DefaultHeaders::new().add(("Cache-Control", format!("public, max-age={seconds}"))) +} + +/// Set a 1 hour cache time +pub fn cache_1hour() -> DefaultHeaders { + cache_header(3600) +} + +/// Set a 3 day cache time +pub fn cache_3days() -> DefaultHeaders { + cache_header(259200) +} diff --git a/crates/utils/src/email.rs b/crates/utils/src/email.rs index 3c8d7a1a1..fdff19033 100644 --- a/crates/utils/src/email.rs +++ b/crates/utils/src/email.rs @@ -68,12 +68,10 @@ pub async fn send_email( // is bad. 
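The email.rs hunk that follows moves the `.port(smtp_port)` call into every arm of the TLS match; in the removed code the configured port was only applied to the plaintext `builder_dangerous` builder, so a non-default SMTP port was not honored when `tls` or `starttls` was selected. A minimal sketch of the resulting builder selection, assuming lettre's async SMTP API (the `Tokio1Executor` turbofish and the helper signature are illustrative assumptions, not part of the patch):

```rust
use lettre::{AsyncSmtpTransport, Tokio1Executor};

// Sketch only: pick the SMTP builder per TLS mode and apply the configured
// port in every branch, so custom ports also work for TLS/STARTTLS relays.
fn build_mailer(
  tls_type: &str,
  smtp_server: &str,
  smtp_port: u16,
) -> Result<AsyncSmtpTransport<Tokio1Executor>, lettre::transport::smtp::Error> {
  let builder = match tls_type {
    "starttls" => {
      AsyncSmtpTransport::<Tokio1Executor>::starttls_relay(smtp_server)?.port(smtp_port)
    }
    "tls" => AsyncSmtpTransport::<Tokio1Executor>::relay(smtp_server)?.port(smtp_port),
    _ => AsyncSmtpTransport::<Tokio1Executor>::builder_dangerous(smtp_server).port(smtp_port),
  };
  Ok(builder.build())
}
```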
// Set the TLS - let builder_dangerous = AsyncSmtpTransport::builder_dangerous(smtp_server).port(smtp_port); - let mut builder = match email_config.tls_type.as_str() { - "starttls" => AsyncSmtpTransport::starttls_relay(smtp_server)?, - "tls" => AsyncSmtpTransport::relay(smtp_server)?, - _ => builder_dangerous, + "starttls" => AsyncSmtpTransport::starttls_relay(smtp_server)?.port(smtp_port), + "tls" => AsyncSmtpTransport::relay(smtp_server)?.port(smtp_port), + _ => AsyncSmtpTransport::builder_dangerous(smtp_server).port(smtp_port), }; // Set the creds if they exist diff --git a/crates/utils/src/error.rs b/crates/utils/src/error.rs index 78590a6a7..ffc1723b4 100644 --- a/crates/utils/src/error.rs +++ b/crates/utils/src/error.rs @@ -249,6 +249,8 @@ impl LemmyErrorExt2 for Result { #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] use super::*; use actix_web::{body::MessageBody, ResponseError}; use std::fs::read_to_string; diff --git a/crates/utils/src/lib.rs b/crates/utils/src/lib.rs index 9ca427cf9..1ef8a842c 100644 --- a/crates/utils/src/lib.rs +++ b/crates/utils/src/lib.rs @@ -4,6 +4,7 @@ extern crate strum_macros; extern crate smart_default; pub mod apub; +pub mod cache_header; pub mod email; pub mod rate_limit; pub mod settings; diff --git a/crates/utils/src/rate_limit/mod.rs b/crates/utils/src/rate_limit/mod.rs index 7a5c1ec68..1bb6f1b5f 100644 --- a/crates/utils/src/rate_limit/mod.rs +++ b/crates/utils/src/rate_limit/mod.rs @@ -275,6 +275,9 @@ fn parse_ip(addr: &str) -> Option { #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + #[test] fn test_parse_ip() { let ip_addrs = [ diff --git a/crates/utils/src/rate_limit/rate_limiter.rs b/crates/utils/src/rate_limit/rate_limiter.rs index ed3dc569e..3acf23ba4 100644 --- a/crates/utils/src/rate_limit/rate_limiter.rs +++ b/crates/utils/src/rate_limit/rate_limiter.rs @@ -237,6 +237,9 @@ fn split_ipv6(ip: Ipv6Addr) -> ([u8; 6], u8, u8) { #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + #[test] fn test_split_ipv6() { let ip = std::net::Ipv6Addr::new( diff --git a/crates/utils/src/utils/markdown.rs b/crates/utils/src/utils/markdown.rs index 451c86bc7..5f851589b 100644 --- a/crates/utils/src/utils/markdown.rs +++ b/crates/utils/src/utils/markdown.rs @@ -18,6 +18,9 @@ pub fn markdown_to_html(text: &str) -> String { #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use crate::utils::markdown::markdown_to_html; #[test] diff --git a/crates/utils/src/utils/markdown/spoiler_rule.rs b/crates/utils/src/utils/markdown/spoiler_rule.rs index 1a564f07c..bae858bfd 100644 --- a/crates/utils/src/utils/markdown/spoiler_rule.rs +++ b/crates/utils/src/utils/markdown/spoiler_rule.rs @@ -135,6 +135,9 @@ pub fn add(markdown_parser: &mut MarkdownIt) { #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use crate::utils::markdown::spoiler_rule::add; use markdown_it::MarkdownIt; diff --git a/crates/utils/src/utils/mention.rs b/crates/utils/src/utils/mention.rs index 1dcace37b..a2958e499 100644 --- a/crates/utils/src/utils/mention.rs +++ b/crates/utils/src/utils/mention.rs @@ -35,6 +35,9 @@ pub fn scrape_text_for_mentions(text: &str) -> Vec { #[cfg(test)] mod test { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use crate::utils::mention::scrape_text_for_mentions; #[test] diff --git a/crates/utils/src/utils/slurs.rs 
b/crates/utils/src/utils/slurs.rs index b041eb460..cc2d6a3e6 100644 --- a/crates/utils/src/utils/slurs.rs +++ b/crates/utils/src/utils/slurs.rs @@ -65,6 +65,9 @@ pub(crate) fn slurs_vec_to_str(slurs: &[&str]) -> String { #[cfg(test)] mod test { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use crate::utils::slurs::{remove_slurs, slur_check, slurs_vec_to_str}; use regex::RegexBuilder; diff --git a/crates/utils/src/utils/validation.rs b/crates/utils/src/utils/validation.rs index b42fe1add..0c955b122 100644 --- a/crates/utils/src/utils/validation.rs +++ b/crates/utils/src/utils/validation.rs @@ -311,6 +311,9 @@ pub fn check_url_scheme(url: &Option) -> LemmyResult<()> { #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use super::build_totp_2fa; use crate::{ error::LemmyErrorType, diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index 33a2d84ec..3a68ea131 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -24,9 +24,12 @@ services: logging: *default-logging lemmy: + # use "image" to pull down an already compiled lemmy. make sure to comment out "build". # image: dessalines/lemmy:0.18.1 - # use this to build your local lemmy server image for development - # run docker compose up --build + # platform: linux/x86_64 # no arm64 support. uncomment platform if using m1. + # use "build" to build your local lemmy server image for development. make sure to comment out "image". + # run: docker compose up --build + build: context: ../ dockerfile: docker/Dockerfile @@ -51,12 +54,14 @@ services: logging: *default-logging lemmy-ui: + # use "image" to pull down an already compiled lemmy-ui. make sure to comment out "build". image: dessalines/lemmy-ui:0.18.1 - # use this to build your local lemmy ui image for development - # run docker compose up --build - # assuming lemmy-ui is cloned besides lemmy directory + # platform: linux/x86_64 # no arm64 support. uncomment platform if using m1. + # use "build" to build your local lemmy ui image for development. make sure to comment out "image". + # run: docker compose up --build + # build: - # context: ../../lemmy-ui + # context: ../../lemmy-ui # assuming lemmy-ui is cloned besides lemmy directory # dockerfile: dev.dockerfile environment: # this needs to match the hostname defined in the lemmy service diff --git a/docker/docker_update.sh b/docker/docker_update.sh index f2d7fa43a..d64025cc1 100755 --- a/docker/docker_update.sh +++ b/docker/docker_update.sh @@ -1,6 +1,53 @@ #!/bin/sh set -e +Help() +{ + # Display help + echo "Usage: ./docker_update.sh [OPTIONS]" + echo "" + echo "Start all docker containers required to run Lemmy." + echo "" + echo "Options:" + echo "-u Docker username. Only required if managing Docker via Docker Desktop with a personal access token." + echo "-h Print this help." +} + +while getopts ":hu:" option; do + case $option in + h) Help + exit;; + u) DOCKER_USER=$OPTARG + ;; + *) echo "Invalid option $OPTARG." + exit;; + esac +done + +LOG_PREFIX="[🐀 lemmy]" +ARCH=$(uname -m 2>/dev/null || echo 'unknown') # uname may not exist on windows machines; default to unknown to be safe. + mkdir -p volumes/pictrs + +echo "$LOG_PREFIX Please provide your password to change ownership of the pictrs volume." sudo chown -R 991:991 volumes/pictrs -sudo docker compose up -d --build + +if [ "$ARCH" = 'arm64' ]; then + echo "$LOG_PREFIX WARN: If building from images, make sure to uncomment 'platform' in the docker-compose.yml file!" 
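As a usage illustration for the reworked `docker_update.sh` above and below (a workflow assumption, not something the patch itself documents): on an Apple-silicon machine managed through Docker Desktop with a personal access token, the script would be invoked as `./docker_update.sh -u <docker-username>` so that `docker login` can prompt for the token, while a plain `./docker_update.sh` on an x86_64 host keeps the previous `sudo docker compose up -d --build` behavior, and `./docker_update.sh -h` prints the new help text.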
+ + # You need a Docker account to pull images. Otherwise, you will get an error like: "error getting credentials" + if [ -z "$DOCKER_USER" ]; then + echo "$LOG_PREFIX Logging into Docker Hub..." + docker login + else + echo "$LOG_PREFIX Logging into Docker Hub. Please provide your personal access token." + docker login --username="$DOCKER_USER" + fi + + echo "$LOG_PREFIX Initializing images in the background. Please be patient if compiling from source..." + docker compose up -d --build +else + sudo docker compose up -d --build +fi + +echo "$LOG_PREFIX Complete! You can now access the UI at http://localhost:1236." diff --git a/migrations/2023-07-10-075550_add-infinite-scroll-setting/down.sql b/migrations/2023-07-10-075550_add-infinite-scroll-setting/down.sql new file mode 100644 index 000000000..66ff507d9 --- /dev/null +++ b/migrations/2023-07-10-075550_add-infinite-scroll-setting/down.sql @@ -0,0 +1 @@ +alter table local_user drop column infinite_scroll_enabled; diff --git a/migrations/2023-07-10-075550_add-infinite-scroll-setting/up.sql b/migrations/2023-07-10-075550_add-infinite-scroll-setting/up.sql new file mode 100644 index 000000000..905bc7109 --- /dev/null +++ b/migrations/2023-07-10-075550_add-infinite-scroll-setting/up.sql @@ -0,0 +1 @@ +alter table local_user add column infinite_scroll_enabled boolean default false not null; diff --git a/migrations/2023-07-11-084714_receive_activity_table/down.sql b/migrations/2023-07-11-084714_receive_activity_table/down.sql new file mode 100644 index 000000000..ea4f4d4a3 --- /dev/null +++ b/migrations/2023-07-11-084714_receive_activity_table/down.sql @@ -0,0 +1,21 @@ +create table activity ( + id serial primary key, + data jsonb not null, + local boolean not null default true, + published timestamp not null default now(), + updated timestamp, + ap_id text not null, + sensitive boolean not null default true +); + +insert into activity(ap_id, data, sensitive, published) + select ap_id, data, sensitive, published + from sent_activity + order by id desc + limit 100000; + +-- We cant copy received_activity entries back into activities table because we dont have data +-- which is mandatory. + +drop table sent_activity; +drop table received_activity; \ No newline at end of file diff --git a/migrations/2023-07-11-084714_receive_activity_table/up.sql b/migrations/2023-07-11-084714_receive_activity_table/up.sql new file mode 100644 index 000000000..c6b30b7b7 --- /dev/null +++ b/migrations/2023-07-11-084714_receive_activity_table/up.sql @@ -0,0 +1,35 @@ +-- outgoing activities, need to be stored to be later server over http +-- we change data column from jsonb to json for decreased size +-- https://stackoverflow.com/a/22910602 +create table sent_activity ( + id bigserial primary key, + ap_id text unique not null, + data json not null, + sensitive boolean not null, + published timestamp not null default now() +); + +-- incoming activities, we only need the id to avoid processing the same activity multiple times +create table received_activity ( + id bigserial primary key, + ap_id text unique not null, + published timestamp not null default now() +); + +-- copy sent activities to new table. only copy last 100k for faster migration +insert into sent_activity(ap_id, data, sensitive, published) + select ap_id, data, sensitive, published + from activity + where local = true + order by id desc + limit 100000; + +-- copy received activities to new table. 
only last 1m for faster migration +insert into received_activity(ap_id, published) + select ap_id, published + from activity + where local = false + order by id desc + limit 1000000; + +drop table activity; diff --git a/migrations/2023-07-14-154840_add_optimized_indexes_published/down.sql b/migrations/2023-07-14-154840_add_optimized_indexes_published/down.sql new file mode 100644 index 000000000..5661a3146 --- /dev/null +++ b/migrations/2023-07-14-154840_add_optimized_indexes_published/down.sql @@ -0,0 +1,26 @@ +-- Drop the new indexes +drop index idx_post_aggregates_featured_local_most_comments; +drop index idx_post_aggregates_featured_local_hot; +drop index idx_post_aggregates_featured_local_active; +drop index idx_post_aggregates_featured_local_score; +drop index idx_post_aggregates_featured_community_hot; +drop index idx_post_aggregates_featured_community_active; +drop index idx_post_aggregates_featured_community_score; +drop index idx_post_aggregates_featured_community_most_comments; +drop index idx_comment_aggregates_hot; +drop index idx_comment_aggregates_score; + +-- Add the old ones back in +-- featured_local +create index idx_post_aggregates_featured_local_hot on post_aggregates (featured_local desc, hot_rank desc); +create index idx_post_aggregates_featured_local_active on post_aggregates (featured_local desc, hot_rank_active desc); +create index idx_post_aggregates_featured_local_score on post_aggregates (featured_local desc, score desc); + +-- featured_community +create index idx_post_aggregates_featured_community_hot on post_aggregates (featured_community desc, hot_rank desc); +create index idx_post_aggregates_featured_community_active on post_aggregates (featured_community desc, hot_rank_active desc); +create index idx_post_aggregates_featured_community_score on post_aggregates (featured_community desc, score desc); + +create index idx_comment_aggregates_hot on comment_aggregates (hot_rank desc); +create index idx_comment_aggregates_score on comment_aggregates (score desc); + diff --git a/migrations/2023-07-14-154840_add_optimized_indexes_published/up.sql b/migrations/2023-07-14-154840_add_optimized_indexes_published/up.sql new file mode 100644 index 000000000..94e426fc7 --- /dev/null +++ b/migrations/2023-07-14-154840_add_optimized_indexes_published/up.sql @@ -0,0 +1,30 @@ +-- Drop the old indexes +drop index idx_post_aggregates_featured_local_hot; +drop index idx_post_aggregates_featured_local_active; +drop index idx_post_aggregates_featured_local_score; +drop index idx_post_aggregates_featured_community_hot; +drop index idx_post_aggregates_featured_community_active; +drop index idx_post_aggregates_featured_community_score; +drop index idx_comment_aggregates_hot; +drop index idx_comment_aggregates_score; + +-- Add a published desc, to the end of the hot and active ranks + +-- Add missing most comments index +create index idx_post_aggregates_featured_local_most_comments on post_aggregates (featured_local desc, comments desc, published desc); +create index idx_post_aggregates_featured_community_most_comments on post_aggregates (featured_community desc, comments desc, published desc); + +-- featured_local +create index idx_post_aggregates_featured_local_hot on post_aggregates (featured_local desc, hot_rank desc, published desc); +create index idx_post_aggregates_featured_local_active on post_aggregates (featured_local desc, hot_rank_active desc, published desc); +create index idx_post_aggregates_featured_local_score on post_aggregates (featured_local desc, score desc, 
published desc); + +-- featured_community +create index idx_post_aggregates_featured_community_hot on post_aggregates (featured_community desc, hot_rank desc, published desc); +create index idx_post_aggregates_featured_community_active on post_aggregates (featured_community desc, hot_rank_active desc, published desc); +create index idx_post_aggregates_featured_community_score on post_aggregates (featured_community desc, score desc, published desc); + +-- Fixing some comment aggregates ones +create index idx_comment_aggregates_hot on comment_aggregates (hot_rank desc, published desc); +create index idx_comment_aggregates_score on comment_aggregates (score desc, published desc); + diff --git a/migrations/2023-07-14-215339_aggregates_nonzero_indexes/down.sql b/migrations/2023-07-14-215339_aggregates_nonzero_indexes/down.sql new file mode 100644 index 000000000..3e247b58c --- /dev/null +++ b/migrations/2023-07-14-215339_aggregates_nonzero_indexes/down.sql @@ -0,0 +1,4 @@ +-- This file should undo anything in `up.sql` +DROP INDEX idx_community_aggregates_nonzero_hotrank; +DROP INDEX idx_comment_aggregates_nonzero_hotrank; +DROP INDEX idx_post_aggregates_nonzero_hotrank; \ No newline at end of file diff --git a/migrations/2023-07-14-215339_aggregates_nonzero_indexes/up.sql b/migrations/2023-07-14-215339_aggregates_nonzero_indexes/up.sql new file mode 100644 index 000000000..2d3cd3b29 --- /dev/null +++ b/migrations/2023-07-14-215339_aggregates_nonzero_indexes/up.sql @@ -0,0 +1,4 @@ +-- Your SQL goes here +CREATE INDEX idx_community_aggregates_nonzero_hotrank ON community_aggregates (published) WHERE hot_rank != 0; +CREATE INDEX idx_comment_aggregates_nonzero_hotrank ON comment_aggregates (published) WHERE hot_rank != 0; +CREATE INDEX idx_post_aggregates_nonzero_hotrank ON post_aggregates (published DESC) WHERE hot_rank != 0 OR hot_rank_active != 0; \ No newline at end of file diff --git a/migrations/2023-07-18-082614_post_aggregates_community_id/down.sql b/migrations/2023-07-18-082614_post_aggregates_community_id/down.sql new file mode 100644 index 000000000..91e2dc862 --- /dev/null +++ b/migrations/2023-07-18-082614_post_aggregates_community_id/down.sql @@ -0,0 +1,20 @@ +-- This file should undo anything in `up.sql` + +CREATE OR REPLACE FUNCTION post_aggregates_post() + RETURNS trigger + LANGUAGE plpgsql +AS +$$ +BEGIN + IF (TG_OP = 'INSERT') THEN + INSERT INTO post_aggregates (post_id, published, newest_comment_time, newest_comment_time_necro) + VALUES (NEW.id, NEW.published, NEW.published, NEW.published); + ELSIF (TG_OP = 'DELETE') THEN + DELETE FROM post_aggregates WHERE post_id = OLD.id; + END IF; + RETURN NULL; +END +$$; + +ALTER TABLE post_aggregates DROP COLUMN community_id, DROP COLUMN creator_id; + diff --git a/migrations/2023-07-18-082614_post_aggregates_community_id/up.sql b/migrations/2023-07-18-082614_post_aggregates_community_id/up.sql new file mode 100644 index 000000000..f28701da0 --- /dev/null +++ b/migrations/2023-07-18-082614_post_aggregates_community_id/up.sql @@ -0,0 +1,35 @@ +-- Your SQL goes here +ALTER TABLE post_aggregates + ADD COLUMN community_id integer REFERENCES community ON UPDATE CASCADE ON DELETE CASCADE, + ADD COLUMN creator_id integer REFERENCES person ON UPDATE CASCADE ON DELETE CASCADE; + +CREATE OR REPLACE FUNCTION post_aggregates_post() + RETURNS trigger + LANGUAGE plpgsql +AS +$$ +BEGIN + IF (TG_OP = 'INSERT') THEN + INSERT INTO post_aggregates (post_id, + published, + newest_comment_time, + newest_comment_time_necro, + community_id, + creator_id) + VALUES 
(NEW.id, NEW.published, NEW.published, NEW.published, NEW.community_id, NEW.creator_id); + ELSIF (TG_OP = 'DELETE') THEN + DELETE FROM post_aggregates WHERE post_id = OLD.id; + END IF; + RETURN NULL; +END +$$; + +UPDATE post_aggregates +SET community_id=post.community_id, + creator_id=post.creator_id +FROM post +WHERE post.id = post_aggregates.post_id; + +ALTER TABLE post_aggregates + ALTER COLUMN community_id SET NOT NULL, + ALTER COLUMN creator_id SET NOT NULL; \ No newline at end of file diff --git a/migrations/2023-07-19-163511_comment_sort_hot_rank_then_score/down.sql b/migrations/2023-07-19-163511_comment_sort_hot_rank_then_score/down.sql new file mode 100644 index 000000000..9f784c067 --- /dev/null +++ b/migrations/2023-07-19-163511_comment_sort_hot_rank_then_score/down.sql @@ -0,0 +1,4 @@ +drop index idx_comment_aggregates_hot, idx_comment_aggregates_score; + +create index idx_comment_aggregates_hot on comment_aggregates (hot_rank desc, published desc); +create index idx_comment_aggregates_score on comment_aggregates (score desc, published desc); diff --git a/migrations/2023-07-19-163511_comment_sort_hot_rank_then_score/up.sql b/migrations/2023-07-19-163511_comment_sort_hot_rank_then_score/up.sql new file mode 100644 index 000000000..02eff3ed2 --- /dev/null +++ b/migrations/2023-07-19-163511_comment_sort_hot_rank_then_score/up.sql @@ -0,0 +1,10 @@ +-- Alter the comment_aggregates hot sort to sort by score after hot_rank. +-- Reason being, is that hot_ranks go to zero after a few days, +-- and then comments should be sorted by score, not published. + +drop index idx_comment_aggregates_hot, idx_comment_aggregates_score; + +create index idx_comment_aggregates_hot on comment_aggregates (hot_rank desc, score desc); + +-- Remove published from this sort, its pointless +create index idx_comment_aggregates_score on comment_aggregates (score desc); diff --git a/readmes/README.es.md b/readmes/README.es.md index e062d1789..b53b2b020 100644 --- a/readmes/README.es.md +++ b/readmes/README.es.md @@ -16,7 +16,8 @@ Español | Русский | 汉语 | - 漢語 + 漢語 | + 日本語

diff --git a/readmes/README.ja.md b/readmes/README.ja.md new file mode 100644 index 000000000..e026a9091 --- /dev/null +++ b/readmes/README.ja.md @@ -0,0 +1,178 @@ +

+ +![GitHub tag (latest SemVer)](https://img.shields.io/github/tag/LemmyNet/lemmy.svg) +[![Build Status](https://woodpecker.join-lemmy.org/api/badges/LemmyNet/lemmy/status.svg)](https://woodpecker.join-lemmy.org/LemmyNet/lemmy) +[![GitHub issues](https://img.shields.io/github/issues-raw/LemmyNet/lemmy.svg)](https://github.com/LemmyNet/lemmy/issues) +[![Docker Pulls](https://img.shields.io/docker/pulls/dessalines/lemmy.svg)](https://cloud.docker.com/repository/docker/dessalines/lemmy/) +[![Translation status](http://weblate.join-lemmy.org/widgets/lemmy/-/lemmy/svg-badge.svg)](http://weblate.join-lemmy.org/engage/lemmy/) +[![License](https://img.shields.io/github/license/LemmyNet/lemmy.svg)](LICENSE) +![GitHub stars](https://img.shields.io/github/stars/LemmyNet/lemmy?style=social) +[![Delightful Humane Tech](https://codeberg.org/teaserbot-labs/delightful-humane-design/raw/branch/main/humane-tech-badge.svg)](https://codeberg.org/teaserbot-labs/delightful-humane-design) + +
+ +

+ English | + Español | + Русский | + 汉语 | + 漢語 | + 日本語 +

+ +

+ + + +

Lemmy

+

+ フェディバースのリンクアグリゲーターとフォーラムです。 +
+
+ Lemmy に参加 + · + ドキュメント + · + マトリックスチャット + · + バグを報告 + · + 機能リクエスト + · + リリース + · + 行動規範 +

+

+ +## プロジェクトについて + +| デスクトップ | モバイル | +| ---------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------- | +| ![desktop](https://raw.githubusercontent.com/LemmyNet/joinlemmy-site/main/src/assets/images/main_img.webp) | ![mobile](https://raw.githubusercontent.com/LemmyNet/joinlemmy-site/main/src/assets/images/mobile_pic.webp) | + +[Lemmy](https://github.com/LemmyNet/lemmy) は、[Reddit](https://reddit.com)、[Lobste.rs](https://lobste.rs)、[Hacker News](https://news.ycombinator.com/) といったサイトに似ています。興味のあるフォーラムを購読してリンクや議論を掲載し、投票したり、コメントしたりしています。誰でも簡単にサーバーを運営することができ、これらのサーバーはすべて連合しており(電子メールを考えてください)、[Fediverse](https://en.wikipedia.org/wiki/Fediverse) と呼ばれる同じ宇宙に接続されています。 + +リンクアグリゲーターの場合、あるサーバーに登録したユーザーが他のサーバーのフォーラムを購読し、他のサーバーに登録したユーザーとディスカッションができることを意味します。 + +Reddit や他のリンクアグリゲーターに代わる、企業の支配や干渉を受けない、簡単にセルフホスティングできる分散型の代替手段です。 + +各 Lemmy サーバーは、独自のモデレーションポリシーを設定することができます。サイト全体の管理者やコミュニティモデレーターを任命し、荒らしを排除し、誰もが安心して貢献できる健全で毒気のない環境を育みます。 + +### なぜ Lemmy というのか? + +- [Motörhead](https://invidio.us/watch?v=3mbvWn1EY6g) のリードシンガー。 +- 古くは[ビデオゲーム]()。 +- [スーパーマリオのクッパ](https://www.mariowiki.com/Lemmy_Koopa)。 +- [毛むくじゃらの齧歯類](http://sunchild.fpwc.org/lemming-the-little-giant-of-the-north/)。 + +### こちらでビルド + +- [Rust](https://www.rust-lang.org) +- [Actix](https://actix.rs/) +- [Diesel](http://diesel.rs/) +- [Inferno](https://infernojs.org) +- [Typescript](https://www.typescriptlang.org/) + +## 特徴 + +- オープンソース、[AGPL License](/LICENSE) です。 +- セルフホスティングが可能で、デプロイが容易です。 + - [Docker](https://join-lemmy.org/docs/en/administration/install_docker.html) と [Ansible](https://join-lemmy.org/docs/en/administration/install_ansible.html) が付属しています。 +- クリーンでモバイルフレンドリーなインターフェイス。 + - サインアップに必要なのは、最低限のユーザー名とパスワードのみ! + - ユーザーアバター対応 + - ライブ更新のコメントスレッド + - 古い Reddit のような完全な投票スコア `(+/-)`. 
+ - ライト、ダーク、ソラライズなどのテーマがあります。 + - オートコンプリートをサポートする絵文字。`:` と入力することでスタート + - ユーザータグは `@` で、コミュニティタグは `!` で入力できます。 + - 投稿とコメントの両方で、画像のアップロードが可能です。 + - 投稿は、タイトルと自己テキスト、URL、またはそれ以外の任意の組み合わせで構成できます。 + - コメントの返信や、タグ付けされたときに、通知します。 + - 通知はメールで送ることができます。 + - プライベートメッセージのサポート + - i18n / 国際化のサポート + - `All`、`Subscribed`、`Inbox`、`User`、`Community` の RSS / Atom フィードを提供します。 +- クロスポストのサポート。 + - 新しい投稿を作成する際の _類似投稿検索_。質問/回答コミュニティに最適です。 +- モデレーション機能。 + - モデレーションのログを公開。 + - コミュニティのトップページにスティッキー・ポストを貼ることができます。 + - サイト管理者、コミュニティモデレーターの両方が、他のモデレーターを任命することができます。 + - 投稿やコメントのロック、削除、復元が可能。 + - コミュニティやサイトの利用を禁止したり、禁止を解除したりすることができます。 + - サイトとコミュニティを他者に譲渡することができます。 +- すべての投稿とコメントを削除し、データを完全に消去することができます。 +- NSFW 投稿/コミュニティサポート +- 高いパフォーマンス。 + - サーバーは Rust で書かれています。 + - フロントエンドは `~80kB` gzipped です。 + - arm64 / Raspberry Pi をサポートします。 + +## インストール + +- [Docker](https://join-lemmy.org/docs/en/administration/install_docker.html) +- [Ansible](https://join-lemmy.org/docs/en/administration/install_ansible.html) + +## Lemmy プロジェクト + +### アプリ + +- [lemmy-ui - Lemmy の公式ウェブアプリ](https://github.com/LemmyNet/lemmy-ui) +- [lemmyBB -phpBB をベースにした Lemmy フォーラム UI](https://github.com/LemmyNet/lemmyBB) +- [Jerboa - Lemmy の開発者が作った Android ネイティブアプリ](https://github.com/dessalines/jerboa) +- [Mlem - iOS 用 Lemmy クライアント](https://github.com/buresdv/Mlem) + +### ライブラリ + +- [lemmy-js-client](https://github.com/LemmyNet/lemmy-js-client) +- [lemmy-rust-client](https://github.com/LemmyNet/lemmy/tree/main/crates/api_common) +- [go-lemmy](https://gitea.arsenm.dev/Arsen6331/go-lemmy) +- [Dart API client](https://github.com/LemmurOrg/lemmy_api_client) +- [Lemmy-Swift-Client](https://github.com/rrainn/Lemmy-Swift-Client) +- [Reddit -> Lemmy Importer](https://github.com/rileynull/RedditLemmyImporter) +- [lemmy-bot - Lemmy のボットを簡単に作るための Typescript ライブラリ](https://github.com/SleeplessOne1917/lemmy-bot) +- [Lemmy の Reddit API ラッパー](https://github.com/derivator/tafkars) +- [Pythörhead - Lemmy API と統合するための Python パッケージ](https://pypi.org/project/pythorhead/) + +## サポート / 寄付 + +Lemmy はフリーでオープンソースのソフトウェアです。つまり、広告やマネタイズ、ベンチャーキャピタルは一切ありません。あなたの寄付は、直接プロジェクトのフルタイム開発をサポートします。 + +- [Liberapay でのサポート](https://liberapay.com/Lemmy)。 +- [Patreon でのサポート](https://www.patreon.com/dessalines)。 +- [OpenCollective でのサポート](https://opencollective.com/lemmy)。 +- [スポンサーのリスト](https://join-lemmy.org/donate)。 + +### 暗号通貨 + +- bitcoin: `1Hefs7miXS5ff5Ck5xvmjKjXf5242KzRtK` +- ethereum: `0x400c96c96acbC6E7B3B43B1dc1BB446540a88A01` +- monero: `41taVyY6e1xApqKyMVDRVxJ76sPkfZhALLTjRvVKpaAh2pBd4wv9RgYj1tSPrx8wc6iE1uWUfjtQdTmTy2FGMeChGVKPQuV` +- cardano: `addr1q858t89l2ym6xmrugjs0af9cslfwvnvsh2xxp6x4dcez7pf5tushkp4wl7zxfhm2djp6gq60dk4cmc7seaza5p3slx0sakjutm` + +## コントリビュート + +- [コントリビュート手順](https://join-lemmy.org/docs/en/contributors/01-overview.html) +- [Docker 開発](https://join-lemmy.org/docs/en/contributors/03-docker-development.html) +- [Local 開発](https://join-lemmy.org/docs/en/contributors/02-local-development.html) + +### 翻訳について + +- 翻訳を手伝いたい方は、[Weblate](https://weblate.join-lemmy.org/projects/lemmy/) を見てみてください。また、[ドキュメントを翻訳する](https://github.com/LemmyNet/lemmy-docs#adding-a-new-language)ことでも支援できます。 + +## お問い合わせ + +- [Mastodon](https://mastodon.social/@LemmyDev) +- [Lemmy サポートフォーラム](https://lemmy.ml/c/lemmy_support) + +## コードのミラー + +- [GitHub](https://github.com/LemmyNet/lemmy) +- [Gitea](https://git.join-lemmy.org/LemmyNet/lemmy) +- [Codeberg](https://codeberg.org/LemmyNet/lemmy) + +## クレジット + +ロゴは Andy Cuccaro (@andycuccaro) が CC-BY-SA 4.0 ライセンスで作成しました。 diff --git a/readmes/README.ru.md 
b/readmes/README.ru.md index 42aa902f9..d9693901d 100644 --- a/readmes/README.ru.md +++ b/readmes/README.ru.md @@ -13,10 +13,11 @@

English | - Español | + Español | Русский | 汉语 | - 漢語 + 漢語 | + 日本語

diff --git a/readmes/README.zh.hans.md b/readmes/README.zh.hans.md index eba5ebe58..3c21e1fa4 100644 --- a/readmes/README.zh.hans.md +++ b/readmes/README.zh.hans.md @@ -16,7 +16,8 @@ Español | Русский | 汉语 | - 漢語 + 漢語 | + 日本語

diff --git a/readmes/README.zh.hant.md b/readmes/README.zh.hant.md index 9d6681c39..aa2c0ff7b 100644 --- a/readmes/README.zh.hant.md +++ b/readmes/README.zh.hant.md @@ -17,7 +17,8 @@ Español | Русский | 汉语 | - 漢語 + 漢語 | + 日本語

diff --git a/scripts/fix-clippy.sh b/scripts/fix-clippy.sh index 759de5773..5ad3ca8b4 100755 --- a/scripts/fix-clippy.sh +++ b/scripts/fix-clippy.sh @@ -17,10 +17,9 @@ cargo clippy --workspace --fix --allow-staged --allow-dirty --tests --all-target -D clippy::get_first \ -D clippy::explicit_into_iter_loop \ -D clippy::explicit_iter_loop \ - -D clippy::needless_collect - -cargo clippy --workspace --features console -- \ + -D clippy::needless_collect \ -D clippy::unwrap_used \ -D clippy::indexing_slicing cargo +nightly fmt +taplo format diff --git a/src/api_routes_http.rs b/src/api_routes_http.rs index cb735f807..bc4340e3c 100644 --- a/src/api_routes_http.rs +++ b/src/api_routes_http.rs @@ -52,7 +52,6 @@ use lemmy_api_common::{ VerifyEmail, }, post::{ - CreatePost, CreatePostLike, CreatePostReport, DeletePost, @@ -93,7 +92,7 @@ use lemmy_api_common::{ PurgePost, }, }; -use lemmy_api_crud::PerformCrud; +use lemmy_api_crud::{post::create::create_post, PerformCrud}; use lemmy_apub::{ api::{ list_comments::list_comments, @@ -175,7 +174,7 @@ pub fn config(cfg: &mut web::ServiceConfig, rate_limit: &RateLimitCell) { web::resource("/post") .guard(guard::Post()) .wrap(rate_limit.post()) - .route(web::post().to(route_post_crud::)), + .route(web::post().to(create_post)), ) .service( web::scope("/post") diff --git a/src/lib.rs b/src/lib.rs index b50298b05..4950aff82 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -21,12 +21,17 @@ use lemmy_api_common::{ context::LemmyContext, lemmy_db_views::structs::SiteView, request::build_user_agent, + send_activity::{ActivityChannel, MATCH_OUTGOING_ACTIVITIES}, utils::{ check_private_instance_and_federation_enabled, local_site_rate_limit_to_rate_limit_config, }, }; -use lemmy_apub::{VerifyUrlData, FEDERATION_HTTP_FETCH_LIMIT}; +use lemmy_apub::{ + activities::{handle_outgoing_activities, match_outgoing_activities}, + VerifyUrlData, + FEDERATION_HTTP_FETCH_LIMIT, +}; use lemmy_db_schema::{ source::secret::Secret, utils::{build_db_pool, get_database_url, run_migrations}, @@ -163,11 +168,19 @@ pub async fn start_lemmy_server() -> Result<(), LemmyError> { let prom_api_metrics = PrometheusMetricsBuilder::new("lemmy_api") .registry(default_registry().clone()) .build() - .unwrap(); + .expect("Should always be buildable"); + + MATCH_OUTGOING_ACTIVITIES + .set(Box::new(move |d, c| { + Box::pin(match_outgoing_activities(d, c)) + })) + .expect("set function pointer"); + let request_data = federation_config.to_request_data(); + let outgoing_activities_task = tokio::task::spawn(handle_outgoing_activities(request_data)); // Create Http server with websocket support HttpServer::new(move || { - let cors_origin = std::env::var("LEMMY_CORS_ORIGIN"); + let cors_origin = env::var("LEMMY_CORS_ORIGIN"); let cors_config = match (cors_origin, cfg!(debug_assertions)) { (Ok(origin), false) => Cors::default() .allowed_origin(&origin) @@ -213,6 +226,9 @@ pub async fn start_lemmy_server() -> Result<(), LemmyError> { .run() .await?; + // Wait for outgoing apub sends to complete + ActivityChannel::close(outgoing_activities_task).await?; + Ok(()) } diff --git a/src/prometheus_metrics.rs b/src/prometheus_metrics.rs index 9e0ffde38..ad964263b 100644 --- a/src/prometheus_metrics.rs +++ b/src/prometheus_metrics.rs @@ -1,3 +1,5 @@ +// TODO: should really not unwrap everywhere here.... 
+#![allow(clippy::unwrap_used)] use actix_web::{rt::System, web, App, HttpResponse, HttpServer, Responder}; use lemmy_api_common::context::LemmyContext; use lemmy_utils::settings::structs::PrometheusConfig; diff --git a/src/scheduled_tasks.rs b/src/scheduled_tasks.rs index a052585ec..c44d61f27 100644 --- a/src/scheduled_tasks.rs +++ b/src/scheduled_tasks.rs @@ -13,15 +13,27 @@ use diesel::{ use diesel::{sql_query, PgConnection, RunQueryDsl}; use lemmy_api_common::context::LemmyContext; use lemmy_db_schema::{ - schema::{activity, captcha_answer, comment, community_person_ban, instance, person, post}, + schema::{ + captcha_answer, + comment, + community_person_ban, + instance, + person, + post, + received_activity, + sent_activity, + }, source::instance::{Instance, InstanceForm}, utils::{naive_now, DELETED_REPLACEMENT_TEXT}, }; use lemmy_routes::nodeinfo::NodeInfo; -use lemmy_utils::{error::LemmyError, REQWEST_TIMEOUT}; +use lemmy_utils::{ + error::{LemmyError, LemmyResult}, + REQWEST_TIMEOUT, +}; use reqwest::blocking::Client; use std::{thread, time::Duration}; -use tracing::{error, info}; +use tracing::{error, info, warn}; /// Schedules various cleanup tasks for lemmy in a background thread pub fn setup( @@ -37,30 +49,54 @@ pub fn setup( // Update active counts every hour let url = db_url.clone(); scheduler.every(CTimeUnits::hour(1)).run(move || { - let mut conn = PgConnection::establish(&url).expect("could not establish connection"); - active_counts(&mut conn); - update_banned_when_expired(&mut conn); + PgConnection::establish(&url) + .map(|mut conn| { + active_counts(&mut conn); + update_banned_when_expired(&mut conn); + }) + .map_err(|e| { + error!("Failed to establish db connection for active counts update: {e}"); + }) + .ok(); }); // Update hot ranks every 15 minutes let url = db_url.clone(); scheduler.every(CTimeUnits::minutes(15)).run(move || { - let mut conn = PgConnection::establish(&url).expect("could not establish connection"); - update_hot_ranks(&mut conn, true); + PgConnection::establish(&url) + .map(|mut conn| { + update_hot_ranks(&mut conn); + }) + .map_err(|e| { + error!("Failed to establish db connection for hot ranks update: {e}"); + }) + .ok(); }); // Delete any captcha answers older than ten minutes, every ten minutes let url = db_url.clone(); scheduler.every(CTimeUnits::minutes(10)).run(move || { - let mut conn = PgConnection::establish(&url).expect("could not establish connection"); - delete_expired_captcha_answers(&mut conn); + PgConnection::establish(&url) + .map(|mut conn| { + delete_expired_captcha_answers(&mut conn); + }) + .map_err(|e| { + error!("Failed to establish db connection for captcha cleanup: {e}"); + }) + .ok(); }); // Clear old activities every week let url = db_url.clone(); scheduler.every(CTimeUnits::weeks(1)).run(move || { - let mut conn = PgConnection::establish(&url).expect("could not establish connection"); - clear_old_activities(&mut conn); + PgConnection::establish(&url) + .map(|mut conn| { + clear_old_activities(&mut conn); + }) + .map_err(|e| { + error!("Failed to establish db connection for activity cleanup: {e}"); + }) + .ok(); }); // Remove old rate limit buckets after 1 to 2 hours of inactivity @@ -72,14 +108,28 @@ pub fn setup( // Overwrite deleted & removed posts and comments every day let url = db_url.clone(); scheduler.every(CTimeUnits::days(1)).run(move || { - let mut conn = PgConnection::establish(&url).expect("could not establish connection"); - overwrite_deleted_posts_and_comments(&mut conn); + PgConnection::establish(&db_url) + 
.map(|mut conn| { + overwrite_deleted_posts_and_comments(&mut conn); + }) + .map_err(|e| { + error!("Failed to establish db connection for deleted content cleanup: {e}"); + }) + .ok(); }); // Update the Instance Software scheduler.every(CTimeUnits::days(1)).run(move || { - let mut conn = PgConnection::establish(&db_url).expect("could not establish connection"); - update_instance_software(&mut conn, &user_agent); + PgConnection::establish(&url) + .map(|mut conn| { + update_instance_software(&mut conn, &user_agent) + .map_err(|e| warn!("Failed to update instance software: {e}")) + .ok(); + }) + .map_err(|e| { + error!("Failed to establish db connection for instance software update: {e}"); + }) + .ok(); }); // Manually run the scheduler in an event loop @@ -93,7 +143,7 @@ pub fn setup( fn startup_jobs(db_url: &str) { let mut conn = PgConnection::establish(db_url).expect("could not establish connection"); active_counts(&mut conn); - update_hot_ranks(&mut conn, false); + update_hot_ranks(&mut conn); update_banned_when_expired(&mut conn); clear_old_activities(&mut conn); overwrite_deleted_posts_and_comments(&mut conn); @@ -101,35 +151,29 @@ fn startup_jobs(db_url: &str) { /// Update the hot_rank columns for the aggregates tables /// Runs in batches until all necessary rows are updated once -fn update_hot_ranks(conn: &mut PgConnection, last_week_only: bool) { - let process_start_time = if last_week_only { - info!("Updating hot ranks for last week..."); - naive_now() - chrono::Duration::days(7) - } else { - info!("Updating hot ranks for all history..."); - NaiveDateTime::from_timestamp_opt(0, 0).expect("0 timestamp creation") - }; +fn update_hot_ranks(conn: &mut PgConnection) { + info!("Updating hot ranks for all history..."); process_hot_ranks_in_batches( conn, "post_aggregates", + "a.hot_rank != 0 OR a.hot_rank_active != 0", "SET hot_rank = hot_rank(a.score, a.published), hot_rank_active = hot_rank(a.score, a.newest_comment_time_necro)", - process_start_time, ); process_hot_ranks_in_batches( conn, "comment_aggregates", + "a.hot_rank != 0", "SET hot_rank = hot_rank(a.score, a.published)", - process_start_time, ); process_hot_ranks_in_batches( conn, "community_aggregates", + "a.hot_rank != 0", "SET hot_rank = hot_rank(a.subscribers, a.published)", - process_start_time, ); info!("Finished hot ranks update!"); @@ -141,18 +185,20 @@ struct HotRanksUpdateResult { published: NaiveDateTime, } -/// Runs the hot rank update query in batches until all rows after `process_start_time` have been -/// processed. -/// In `set_clause`, "a" will refer to the current aggregates table. +/// Runs the hot rank update query in batches until all rows have been processed. +/// In `where_clause` and `set_clause`, "a" will refer to the current aggregates table. 
/// Locked rows are skipped in order to prevent deadlocks (they will likely get updated on the next /// run) fn process_hot_ranks_in_batches( conn: &mut PgConnection, table_name: &str, + where_clause: &str, set_clause: &str, - process_start_time: NaiveDateTime, ) { + let process_start_time = NaiveDateTime::from_timestamp_opt(0, 0).expect("0 timestamp creation"); + let update_batch_size = 1000; // Bigger batches than this tend to cause seq scans + let mut processed_rows_count = 0; let mut previous_batch_result = Some(process_start_time); while let Some(previous_batch_last_published) = previous_batch_result { // Raw `sql_query` is used as a performance optimization - Diesel does not support doing this @@ -160,7 +206,7 @@ fn process_hot_ranks_in_batches( let result = sql_query(format!( r#"WITH batch AS (SELECT a.id FROM {aggregates_table} a - WHERE a.published > $1 + WHERE a.published > $1 AND ({where_clause}) ORDER BY a.published LIMIT $2 FOR UPDATE SKIP LOCKED) @@ -168,14 +214,18 @@ fn process_hot_ranks_in_batches( FROM batch WHERE a.id = batch.id RETURNING a.published; "#, aggregates_table = table_name, - set_clause = set_clause + set_clause = set_clause, + where_clause = where_clause )) .bind::(previous_batch_last_published) .bind::(update_batch_size) .get_results::(conn); match result { - Ok(updated_rows) => previous_batch_result = updated_rows.last().map(|row| row.published), + Ok(updated_rows) => { + processed_rows_count += updated_rows.len(); + previous_batch_result = updated_rows.last().map(|row| row.published); + } Err(e) => { error!("Failed to update {} hot_ranks: {}", table_name, e); break; @@ -183,45 +233,44 @@ fn process_hot_ranks_in_batches( } } info!( - "Finished process_hot_ranks_in_batches execution for {}", - table_name + "Finished process_hot_ranks_in_batches execution for {} (processed {} rows)", + table_name, processed_rows_count ); } fn delete_expired_captcha_answers(conn: &mut PgConnection) { - match diesel::delete( + diesel::delete( captcha_answer::table.filter(captcha_answer::published.lt(now - IntervalDsl::minutes(10))), ) .execute(conn) - { - Ok(_) => { - info!("Done."); - } - Err(e) => { - error!("Failed to clear old captcha answers: {}", e) - } - } + .map(|_| { + info!("Done."); + }) + .map_err(|e| error!("Failed to clear old captcha answers: {e}")) + .ok(); } /// Clear old activities (this table gets very large) fn clear_old_activities(conn: &mut PgConnection) { info!("Clearing old activities..."); - match diesel::delete(activity::table.filter(activity::published.lt(now - 6.months()))) + diesel::delete(sent_activity::table.filter(sent_activity::published.lt(now - 3.months()))) .execute(conn) - { - Ok(_) => { - info!("Done."); - } - Err(e) => { - error!("Failed to clear old activities: {}", e) - } - } + .map_err(|e| error!("Failed to clear old sent activities: {e}")) + .ok(); + + diesel::delete( + received_activity::table.filter(received_activity::published.lt(now - 3.months())), + ) + .execute(conn) + .map(|_| info!("Done.")) + .map_err(|e| error!("Failed to clear old received activities: {e}")) + .ok(); } /// overwrite posts and comments 30d after deletion fn overwrite_deleted_posts_and_comments(conn: &mut PgConnection) { info!("Overwriting deleted posts..."); - match diesel::update( + diesel::update( post::table .filter(post::deleted.eq(true)) .filter(post::updated.lt(now.nullable() - 1.months())) @@ -232,17 +281,14 @@ fn overwrite_deleted_posts_and_comments(conn: &mut PgConnection) { post::name.eq(DELETED_REPLACEMENT_TEXT), )) .execute(conn) - { - Ok(_) => { 
- info!("Done."); - } - Err(e) => { - error!("Failed to overwrite deleted posts: {}", e) - } - } + .map(|_| { + info!("Done."); + }) + .map_err(|e| error!("Failed to overwrite deleted posts: {e}")) + .ok(); info!("Overwriting deleted comments..."); - match diesel::update( + diesel::update( comment::table .filter(comment::deleted.eq(true)) .filter(comment::updated.lt(now.nullable() - 1.months())) @@ -250,14 +296,11 @@ fn overwrite_deleted_posts_and_comments(conn: &mut PgConnection) { ) .set(comment::content.eq(DELETED_REPLACEMENT_TEXT)) .execute(conn) - { - Ok(_) => { - info!("Done."); - } - Err(e) => { - error!("Failed to overwrite deleted comments: {}", e) - } - } + .map(|_| { + info!("Done."); + }) + .map_err(|e| error!("Failed to overwrite deleted comments: {e}")) + .ok(); } /// Re-calculate the site and community active counts every 12 hours @@ -276,20 +319,16 @@ fn active_counts(conn: &mut PgConnection) { "update site_aggregates set users_active_{} = (select * from site_aggregates_activity('{}')) where site_id = 1", i.1, i.0 ); - match sql_query(update_site_stmt).execute(conn) { - Ok(_) => {} - Err(e) => { - error!("Failed to update site stats: {}", e) - } - } + sql_query(update_site_stmt) + .execute(conn) + .map_err(|e| error!("Failed to update site stats: {e}")) + .ok(); let update_community_stmt = format!("update community_aggregates ca set users_active_{} = mv.count_ from community_aggregates_activity('{}') mv where ca.community_id = mv.community_id_", i.1, i.0); - match sql_query(update_community_stmt).execute(conn) { - Ok(_) => {} - Err(e) => { - error!("Failed to update community stats: {}", e) - } - } + sql_query(update_community_stmt) + .execute(conn) + .map_err(|e| error!("Failed to update community stats: {e}")) + .ok(); } info!("Done."); @@ -299,90 +338,89 @@ fn active_counts(conn: &mut PgConnection) { fn update_banned_when_expired(conn: &mut PgConnection) { info!("Updating banned column if it expires ..."); - match diesel::update( + diesel::update( person::table .filter(person::banned.eq(true)) .filter(person::ban_expires.lt(now)), ) .set(person::banned.eq(false)) .execute(conn) - { - Ok(_) => {} - Err(e) => { - error!("Failed to update person.banned when expires: {}", e) - } - } - match diesel::delete(community_person_ban::table.filter(community_person_ban::expires.lt(now))) + .map_err(|e| error!("Failed to update person.banned when expires: {e}")) + .ok(); + + diesel::delete(community_person_ban::table.filter(community_person_ban::expires.lt(now))) .execute(conn) - { - Ok(_) => {} - Err(e) => { - error!("Failed to remove community_ban expired rows: {}", e) - } - } + .map_err(|e| error!("Failed to remove community_ban expired rows: {e}")) + .ok(); } /// Updates the instance software and version -fn update_instance_software(conn: &mut PgConnection, user_agent: &str) { +/// +/// TODO: this should be async +/// TODO: if instance has been dead for a long time, it should be checked less frequently +fn update_instance_software(conn: &mut PgConnection, user_agent: &str) -> LemmyResult<()> { info!("Updating instances software and versions..."); - let client = match Client::builder() + let client = Client::builder() .user_agent(user_agent) .timeout(REQWEST_TIMEOUT) - .build() - { - Ok(client) => client, - Err(e) => { - error!("Failed to build reqwest client: {}", e); - return; - } - }; + .build()?; - let instances = match instance::table.get_results::(conn) { - Ok(instances) => instances, - Err(e) => { - error!("Failed to get instances: {}", e); - return; - } - }; + let instances 
= instance::table.get_results::<Instance>(conn)?; for instance in instances { let node_info_url = format!("https://{}/nodeinfo/2.0.json", instance.domain); - // Skip it if it can't connect - let res = client - .get(&node_info_url) - .send() - .ok() - .and_then(|t| t.json::<NodeInfo>().ok()); - - if let Some(node_info) = res { - let software = node_info.software.as_ref(); - let form = InstanceForm::builder() - .domain(instance.domain) - .software(software.and_then(|s| s.name.clone())) - .version(software.and_then(|s| s.version.clone())) - .updated(Some(naive_now())) - .build(); - - match diesel::update(instance::table.find(instance.id)) - .set(form) - .execute(conn) - { - Ok(_) => { - info!("Done."); - } - Err(e) => { - error!("Failed to update site instance software: {}", e); - return; - } + // The `updated` column is used to check if instances are alive. If it is more than three days + // in the past, no outgoing activities will be sent to that instance. However, not every + // Fediverse instance has a valid Nodeinfo endpoint (it's not required for ActivityPub). That's + // why we always need to mark instances as updated if they are alive. + let default_form = InstanceForm::builder() + .domain(instance.domain.clone()) + .updated(Some(naive_now())) + .build(); + let form = match client.get(&node_info_url).send() { + Ok(res) if res.status().is_client_error() => { + // Instance doesn't have nodeinfo but sent a response, consider it alive + Some(default_form) } + Ok(res) => match res.json::<NodeInfo>() { + Ok(node_info) => { + // Instance sent valid nodeinfo, write it to db + Some( + InstanceForm::builder() + .domain(instance.domain) + .updated(Some(naive_now())) + .software(node_info.software.and_then(|s| s.name)) + .version(node_info.version.clone()) + .build(), + ) + } + Err(_) => { + // No valid nodeinfo but valid HTTP response, consider instance alive + Some(default_form) + } + }, + Err(_) => { + // dead instance, do nothing + None + } + }; + if let Some(form) = form { + diesel::update(instance::table.find(instance.id)) + .set(form) + .execute(conn)?; } } + info!("Finished updating instances software and versions..."); + Ok(()) } #[cfg(test)] mod tests { + #![allow(clippy::unwrap_used)] + #![allow(clippy::indexing_slicing)] + use lemmy_routes::nodeinfo::NodeInfo; use reqwest::Client;
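The src/lib.rs hunk above registers lemmy_apub::activities::match_outgoing_activities in the MATCH_OUTGOING_ACTIVITIES cell at startup, spawns handle_outgoing_activities, and waits for it via ActivityChannel::close during shutdown, apparently so lower-level crates can trigger federation sends without depending on lemmy_apub directly. Below is a minimal, self-contained sketch of that set-once callback pattern, assuming the once_cell and tokio crates; the type aliases, function names and bodies are simplified stand-ins, not Lemmy's real signatures.

use once_cell::sync::OnceCell;
use std::{future::Future, pin::Pin};

type BoxedSendFuture = Pin<Box<dyn Future<Output = ()> + Send>>;
// A plain fn pointer so it can live in a static and be set exactly once.
type SendActivityFn = fn(String) -> BoxedSendFuture;

// Belongs to the lower-level crate; it knows nothing about the federation code.
static MATCH_OUTGOING_ACTIVITIES: OnceCell<SendActivityFn> = OnceCell::new();

async fn send_activity(data: String) {
  // If nothing registered the callback (e.g. in unit tests), sending is a no-op.
  if let Some(send_fn) = MATCH_OUTGOING_ACTIVITIES.get() {
    send_fn(data).await;
  }
}

// Belongs to the higher-level crate and is registered by the binary at startup.
async fn match_outgoing_activities(data: String) {
  println!("federating: {data}");
}

fn match_outgoing_activities_boxed(data: String) -> BoxedSendFuture {
  Box::pin(match_outgoing_activities(data))
}

#[tokio::main]
async fn main() {
  MATCH_OUTGOING_ACTIVITIES
    .set(match_outgoing_activities_boxed)
    .expect("callback should only be registered once");

  send_activity("Create/Note".to_string()).await;
}

Using a plain fn pointer keeps the sketch small; the real code boxes a closure and threads the request context through as well, but the effect is the same: the concrete sender is injected exactly once at startup.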
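The src/scheduled_tasks.rs changes above consistently replace .expect("could not establish connection") and match blocks with map/map_err(...).ok() chains, so a temporarily unreachable database is logged and the job is skipped until the next tick instead of panicking the scheduler thread. A minimal sketch of that shape, assuming diesel and tracing as the file already uses them; both function names are hypothetical placeholders.

use diesel::{Connection, PgConnection};
use tracing::{error, info};

// Placeholder for one of the real jobs (active_counts, update_hot_ranks, ...).
fn some_cleanup_job(_conn: &mut PgConnection) {
  info!("Done.");
}

// The shape now used throughout scheduled_tasks.rs: a failed connection is logged
// and the job is skipped, so the next scheduler tick simply retries instead of the
// whole thread dying on an `.expect()`.
fn run_job_without_panicking(db_url: &str) {
  PgConnection::establish(db_url)
    .map(|mut conn| some_cleanup_job(&mut conn))
    .map_err(|e| error!("Failed to establish db connection: {e}"))
    .ok();
}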
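The comment added to update_instance_software above encodes a three-way rule: any HTTP response marks the instance as alive (bumping `updated` so federation keeps sending to it), a parseable nodeinfo response additionally refreshes software and version, and only a transport error leaves the row untouched. A hedged sketch isolating that decision as a pure function; InstanceUpdate and classify_instance are hypothetical names, and NodeInfo's optional software/version fields are assumed to be as the hunk above uses them.

use lemmy_routes::nodeinfo::NodeInfo;
use reqwest::blocking::Response;

// Hypothetical, simplified stand-in for what gets written back to the instance row.
#[derive(Debug, PartialEq)]
enum InstanceUpdate {
  // Reachable and nodeinfo parsed: refresh software, version and `updated`.
  AliveWithNodeInfo {
    software: Option<String>,
    version: Option<String>,
  },
  // Reachable but no usable nodeinfo: still bump `updated` so federation keeps sending.
  AliveWithoutNodeInfo,
  // No HTTP response at all: leave the row untouched.
  Dead,
}

fn classify_instance(response: Result<Response, reqwest::Error>) -> InstanceUpdate {
  match response {
    Ok(res) if res.status().is_client_error() => InstanceUpdate::AliveWithoutNodeInfo,
    Ok(res) => match res.json::<NodeInfo>() {
      Ok(node_info) => InstanceUpdate::AliveWithNodeInfo {
        software: node_info.software.and_then(|s| s.name),
        version: node_info.version,
      },
      Err(_) => InstanceUpdate::AliveWithoutNodeInfo,
    },
    Err(_) => InstanceUpdate::Dead,
  }
}

This keeps the aliveness rule separate from the database write; the real code goes straight from this decision to an InstanceForm and a Diesel update, as shown in the hunk above.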