diff --git a/.drone.yml b/.drone.yml
index b9d68a586..9924af5f4 100644
--- a/.drone.yml
+++ b/.drone.yml
@@ -1,5 +1,10 @@
+---
kind: pipeline
-name: default
+name: amd64
+
+platform:
+ os: linux
+ arch: amd64
steps:
- name: fetch git submodules
@@ -54,21 +59,12 @@ steps:
LEMMY_DATABASE_URL: postgres://lemmy:password@database:5432
DO_WRITE_HOSTS_FILE: 1
commands:
- - ls -la target/lemmy_server
- apk add bash curl postgresql-client
- bash api_tests/prepare-drone-federation-test.sh
- cd api_tests/
- yarn
- yarn api-test
- - name: create docker tags
- image: ekidd/rust-musl-builder:1.47.0
- commands:
- - echo "$(git describe),latest" > .tags
- when:
- ref:
- - refs/tags/*
-
- name: make release build and push to docker hub
image: plugins/docker
settings:
@@ -90,6 +86,69 @@ services:
POSTGRES_USER: lemmy
POSTGRES_PASSWORD: password
-volumes:
- - name: dieselcli
- temp: {}
+---
+kind: pipeline
+name: arm64
+
+platform:
+ os: linux
+ arch: arm64
+
+steps:
+
+ - name: cargo test
+ image: rust:1.47-slim-buster
+ environment:
+ LEMMY_DATABASE_URL: postgres://lemmy:password@database:5432/lemmy
+ RUST_BACKTRACE: 1
+ RUST_TEST_THREADS: 1
+ commands:
+ - apt-get update
+ - apt-get -y install --no-install-recommends espeak postgresql-client libssl-dev pkg-config libpq-dev
+ - cargo test --workspace --no-fail-fast
+ - cargo build
+
+ # Using Debian here because there seems to be no official Alpine-based Rust docker image for ARM.
+ - name: cargo build
+ image: rust:1.47-slim-buster
+ commands:
+ - apt-get update
+ - apt-get -y install --no-install-recommends libssl-dev pkg-config libpq-dev
+ - cargo build
+ - mv target/debug/lemmy_server target/lemmy_server
+
+ - name: run federation tests
+ image: node:15-buster-slim
+ environment:
+ LEMMY_DATABASE_URL: postgres://lemmy:password@database:5432
+ DO_WRITE_HOSTS_FILE: 1
+ commands:
+ - mkdir -p /usr/share/man/man1 /usr/share/man/man7
+ - apt-get update
+ - apt-get -y install --no-install-recommends bash curl libssl-dev pkg-config libpq-dev postgresql-client libc6-dev
+ - bash api_tests/prepare-drone-federation-test.sh
+ - cd api_tests/
+ - yarn
+ - yarn api-test
+
+ - name: make release build and push to docker hub
+ image: plugins/docker
+ settings:
+ dockerfile: docker/prod/Dockerfile.arm
+ username:
+ from_secret: docker_username
+ password:
+ from_secret: docker_password
+ repo: dessalines/lemmy
+ auto_tag: true
+ auto_tag_suffix: arm64
+ when:
+ ref:
+ - refs/tags/*
+
+services:
+ - name: database
+ image: postgres:12-alpine
+ environment:
+ POSTGRES_USER: lemmy
+ POSTGRES_PASSWORD: password
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
deleted file mode 100644
index 22d7d66c9..000000000
--- a/CODE_OF_CONDUCT.md
+++ /dev/null
@@ -1,35 +0,0 @@
-# Code of Conduct
-
-- We are committed to providing a friendly, safe and welcoming environment for all, regardless of level of experience, gender identity and expression, sexual orientation, disability, personal appearance, body size, race, ethnicity, age, religion, nationality, or other similar characteristic.
-- Please avoid using overtly sexual aliases or other nicknames that might detract from a friendly, safe and welcoming environment for all.
-- Please be kind and courteous. There’s no need to be mean or rude.
-- Respect that people have differences of opinion and that every design or implementation choice carries a trade-off and numerous costs. There is seldom a right answer.
-- Please keep unstructured critique to a minimum. If you have solid ideas you want to experiment with, make a fork and see how it works.
-- We will exclude you from interaction if you insult, demean or harass anyone. That is not welcome behavior. We interpret the term “harassment” as including the definition in the Citizen Code of Conduct; if you have any lack of clarity about what might be included in that concept, please read their definition. In particular, we don’t tolerate behavior that excludes people in socially marginalized groups.
-- Private harassment is also unacceptable. No matter who you are, if you feel you have been or are being harassed or made uncomfortable by a community member, please contact one of the channel ops or any of the Lemmy moderation team immediately. Whether you’re a regular contributor or a newcomer, we care about making this community a safe place for you and we’ve got your back.
-- Likewise any spamming, trolling, flaming, baiting or other attention-stealing behavior is not welcome.
-
-[**Message the Moderation Team on Mastodon**](https://mastodon.social/@LemmyDev)
-
-[**Email The Moderation Team**](mailto:contact@lemmy.ml)
-
-## Moderation
-
-These are the policies for upholding our community’s standards of conduct. If you feel that a thread needs moderation, please contact the Lemmy moderation team .
-
-1. Remarks that violate the Lemmy standards of conduct, including hateful, hurtful, oppressive, or exclusionary remarks, are not allowed. (Cursing is allowed, but never targeting another user, and never in a hateful manner.)
-2. Remarks that moderators find inappropriate, whether listed in the code of conduct or not, are also not allowed.
-3. Moderators will first respond to such remarks with a warning, at the same time the offending content will likely be removed whenever possible.
-4. If the warning is unheeded, the user will be “kicked,” i.e., kicked out of the communication channel to cool off.
-5. If the user comes back and continues to make trouble, they will be banned, i.e., indefinitely excluded.
-6. Moderators may choose at their discretion to un-ban the user if it was a first offense and they offer the offended party a genuine apology.
-7. If a moderator bans someone and you think it was unjustified, please take it up with that moderator, or with a different moderator, in private. Complaints about bans in-channel are not allowed.
-8. Moderators are held to a higher standard than other community members. If a moderator creates an inappropriate situation, they should expect less leeway than others.
-
-In the Lemmy community we strive to go the extra step to look out for each other. Don’t just aim to be technically unimpeachable, try to be your best self. In particular, avoid flirting with offensive or sensitive issues, particularly if they’re off-topic; this all too often leads to unnecessary fights, hurt feelings, and damaged trust; worse, it can drive people away from the community entirely.
-
-And if someone takes issue with something you said or did, resist the urge to be defensive. Just stop doing what it was they complained about and apologize. Even if you feel you were misinterpreted or unfairly accused, chances are good there was something you could’ve communicated better — remember that it’s your responsibility to make others comfortable. Everyone wants to get along and we are all here first and foremost because we want to talk about cool technology. You will find that people will be eager to assume good intent and forgive as long as you earn their trust.
-
-The enforcement policies listed above apply to all official Lemmy venues; including git repositories under [github.com/LemmyNet/lemmy](https://github.com/LemmyNet/lemmy) and [yerbamate.ml/LemmyNet/lemmy](https://yerbamate.ml/LemmyNet/lemmy), the [Matrix channel](https://matrix.to/#/!BZVTUuEiNmRcbFeLeI:matrix.org?via=matrix.org&via=privacytools.io&via=permaweb.io); and all instances under lemmy.ml. For other projects adopting the Rust Code of Conduct, please contact the maintainers of those projects for enforcement. If you wish to use this code of conduct for your own project, consider explicitly mentioning your moderation policy or making a copy with your own moderation policy so as to avoid confusion.
-
-Adapted from the [Rust Code of Conduct](https://www.rust-lang.org/policies/code-of-conduct), which is based on the [Node.js Policy on Trolling](http://blog.izs.me/post/30036893703/policy-on-trolling) as well as the [Contributor Covenant v1.3.0](https://www.contributor-covenant.org/version/1/3/0/).
diff --git a/README.md b/README.md
index 6d1aede68..3a657ba64 100644
--- a/README.md
+++ b/README.md
@@ -27,6 +27,8 @@
Request Feature
·
Releases
+ ·
+ Code of Conduct
diff --git a/clean.sh b/clean.sh
deleted file mode 100755
index 3666a7299..000000000
--- a/clean.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/sh
-
-cargo update
-cargo fmt
-cargo check
-cargo clippy
-cargo outdated -R
diff --git a/docker/prod/Dockerfile.arm b/docker/prod/Dockerfile.arm
new file mode 100644
index 000000000..675b19bf5
--- /dev/null
+++ b/docker/prod/Dockerfile.arm
@@ -0,0 +1,48 @@
+ARG RUST_BUILDER_IMAGE=rust:1.47-slim-buster
+
+# Build Lemmy
+FROM $RUST_BUILDER_IMAGE as builder
+
+# Install compilation dependencies
+RUN apt-get update \
+ && apt-get -y install --no-install-recommends libssl-dev pkg-config libpq-dev \
+ && rm -rf /var/lib/apt/lists/*
+
+WORKDIR /app
+
+COPY ./ ./
+
+RUN cargo build --release
+
+# reduce binary size
+RUN strip ./target/release/lemmy_server
+
+RUN cp ./target/release/lemmy_server /app/lemmy_server
+
+# Build the docs
+FROM $RUST_BUILDER_IMAGE as docs
+WORKDIR /app
+RUN cargo install --debug mdbook
+COPY docs ./docs
+RUN mdbook build docs/
+
+# The Debian runner
+FROM debian:buster-slim as lemmy
+
+# Install libpq for postgres and espeak for captchas
+RUN apt-get update \
+ && apt-get -y install --no-install-recommends espeak postgresql-client libc6 libssl1.1 \
+ && rm -rf /var/lib/apt/lists/*
+
+RUN addgroup --gid 1000 lemmy
+RUN adduser --no-create-home --shell /bin/sh --uid 1000 --gid 1000 lemmy
+
+# Copy resources
+COPY --chown=lemmy:lemmy config/defaults.hjson /config/defaults.hjson
+COPY --chown=lemmy:lemmy --from=builder /app/lemmy_server /app/lemmy
+COPY --chown=lemmy:lemmy --from=docs /app/docs/book/ /app/documentation/
+
+RUN chown lemmy:lemmy /app/lemmy
+USER lemmy
+EXPOSE 8536
+CMD ["/app/lemmy"]
diff --git a/lemmy_apub/src/activities/send/comment.rs b/lemmy_apub/src/activities/send/comment.rs
index 36917ecb2..e3c622edb 100644
--- a/lemmy_apub/src/activities/send/comment.rs
+++ b/lemmy_apub/src/activities/send/comment.rs
@@ -57,17 +57,14 @@ impl ApubObjectType for Comment {
})
.await??;
- let mut maa = collect_non_local_mentions_and_addresses(&self.content, context).await?;
- let mut ccs = vec![community.actor_id()?];
- ccs.append(&mut maa.addressed_ccs);
- ccs.push(get_comment_parent_creator_id(context.pool(), &self).await?);
+ let maa = collect_non_local_mentions(&self, &community, context).await?;
let mut create = Create::new(creator.actor_id.to_owned(), note.into_any_base()?);
create
.set_many_contexts(lemmy_context()?)
.set_id(generate_activity_id(CreateType::Create)?)
.set_to(public())
- .set_many_ccs(ccs)
+ .set_many_ccs(maa.ccs.to_owned())
// Set the mention tags
.set_many_tags(maa.get_tags()?);
@@ -90,17 +87,14 @@ impl ApubObjectType for Comment {
})
.await??;
- let mut maa = collect_non_local_mentions_and_addresses(&self.content, context).await?;
- let mut ccs = vec![community.actor_id()?];
- ccs.append(&mut maa.addressed_ccs);
- ccs.push(get_comment_parent_creator_id(context.pool(), &self).await?);
+ let maa = collect_non_local_mentions(&self, &community, context).await?;
let mut update = Update::new(creator.actor_id.to_owned(), note.into_any_base()?);
update
.set_many_contexts(lemmy_context()?)
.set_id(generate_activity_id(UpdateType::Update)?)
.set_to(public())
- .set_many_ccs(ccs)
+ .set_many_ccs(maa.ccs.to_owned())
// Set the mention tags
.set_many_tags(maa.get_tags()?);
@@ -295,7 +289,7 @@ impl ApubLikeableType for Comment {
}
struct MentionsAndAddresses {
- addressed_ccs: Vec<Url>,
+ ccs: Vec<Url>,
 inboxes: Vec<Url>,
 tags: Vec<Mention>,
}
@@ -313,23 +307,26 @@ impl MentionsAndAddresses {
/// This takes a comment, and builds a list of to_addresses, inboxes,
/// and mention tags, so they know where to be sent to.
/// Addresses are the users / addresses that go in the cc field.
-async fn collect_non_local_mentions_and_addresses(
- content: &str,
+async fn collect_non_local_mentions(
+ comment: &Comment,
+ community: &Community,
context: &LemmyContext,
) -> Result<MentionsAndAddresses, LemmyError> {
- let mut addressed_ccs = vec![];
+ let parent_creator = get_comment_parent_creator(context.pool(), comment).await?;
+ let mut addressed_ccs = vec![community.actor_id()?, parent_creator.actor_id()?];
+ // Note: don't include community inbox here, as we send to it separately with `send_to_community()`
+ let mut inboxes = vec![parent_creator.get_shared_inbox_url()?];
// Add the mention tag
let mut tags = Vec::new();
- // Get the inboxes for any mentions
- let mentions = scrape_text_for_mentions(&content)
+ // Get the user IDs for any mentions
+ let mentions = scrape_text_for_mentions(&comment.content)
.into_iter()
// Filter only the non-local ones
.filter(|m| !m.is_local())
.collect::<Vec<MentionData>>();
- let mut mention_inboxes: Vec<Url> = Vec::new();
for mention in &mentions {
// TODO should it be fetching it every time?
if let Ok(actor_id) = fetch_webfinger_url(mention, context.client()).await {
@@ -337,19 +334,18 @@ async fn collect_non_local_mentions_and_addresses(
addressed_ccs.push(actor_id.to_owned().to_string().parse()?);
let mention_user = get_or_fetch_and_upsert_user(&actor_id, context, &mut 0).await?;
- let shared_inbox = mention_user.get_shared_inbox_url()?;
+ inboxes.push(mention_user.get_shared_inbox_url()?);
- mention_inboxes.push(shared_inbox);
let mut mention_tag = Mention::new();
mention_tag.set_href(actor_id).set_name(mention.full_name());
tags.push(mention_tag);
}
}
- let inboxes = mention_inboxes.into_iter().unique().collect();
+ let inboxes = inboxes.into_iter().unique().collect();
Ok(MentionsAndAddresses {
- addressed_ccs,
+ ccs: addressed_ccs,
inboxes,
tags,
})
@@ -357,10 +353,7 @@ async fn collect_non_local_mentions_and_addresses(
/// Returns the apub ID of the user this comment is responding to. Meaning, in case this is a
/// top-level comment, the creator of the post, otherwise the creator of the parent comment.
-async fn get_comment_parent_creator_id(
- pool: &DbPool,
- comment: &Comment,
-) -> Result<Url, LemmyError> {
+async fn get_comment_parent_creator(pool: &DbPool, comment: &Comment) -> Result<User_, LemmyError> {
let parent_creator_id = if let Some(parent_comment_id) = comment.parent_id {
let parent_comment =
blocking(pool, move |conn| Comment::read(conn, parent_comment_id)).await??;
@@ -370,8 +363,7 @@ async fn get_comment_parent_creator_id(
let parent_post = blocking(pool, move |conn| Post::read(conn, parent_post_id)).await??;
parent_post.creator_id
};
- let parent_creator = blocking(pool, move |conn| User_::read(conn, parent_creator_id)).await??;
- Ok(parent_creator.actor_id()?)
+ Ok(blocking(pool, move |conn| User_::read(conn, parent_creator_id)).await??)
}
/// Turns a user id like `@name@example.com` into an apub ID, like `https://example.com/user/name`,
diff --git a/lemmy_apub/src/activity_queue.rs b/lemmy_apub/src/activity_queue.rs
index a8f2ab180..ff792c4da 100644
--- a/lemmy_apub/src/activity_queue.rs
+++ b/lemmy_apub/src/activity_queue.rs
@@ -219,6 +219,13 @@ where
return Ok(());
}
+ // Don't send anything to ourselves
+ let hostname = Settings::get().get_hostname_without_port()?;
+ let inboxes: Vec<&Url> = inboxes
+ .iter()
+ .filter(|i| i.domain().unwrap() != hostname)
+ .collect();
+
let activity = activity.into_any_base()?;
let serialised_activity = serde_json::to_string(&activity)?;
@@ -232,7 +239,7 @@ where
for i in inboxes {
let message = SendActivityTask {
activity: serialised_activity.to_owned(),
- inbox: i,
+ inbox: i.to_owned(),
actor_id: actor.actor_id()?,
private_key: actor.private_key().context(location_info!())?,
};
diff --git a/lemmy_apub/src/fetcher/search.rs b/lemmy_apub/src/fetcher/search.rs
index 13187b0a5..9f465f768 100644
--- a/lemmy_apub/src/fetcher/search.rs
+++ b/lemmy_apub/src/fetcher/search.rs
@@ -100,7 +100,9 @@ pub async fn search_by_apub_id(
delete_object_locally(&query_url, context).await?;
}
- build_response(fetch_response?, query_url, recursion_counter, context).await
+ // Necessary because we get a stack overflow using FetchError
+ let fet_res = fetch_response.map_err(|e| LemmyError::from(e.inner))?;
+ build_response(fet_res, query_url, recursion_counter, context).await
}
async fn build_response(
diff --git a/lemmy_apub/src/lib.rs b/lemmy_apub/src/lib.rs
index bbf1bbbc7..d5d682a7d 100644
--- a/lemmy_apub/src/lib.rs
+++ b/lemmy_apub/src/lib.rs
@@ -61,13 +61,7 @@ pub static APUB_JSON_CONTENT_TYPE: &str = "application/activity+json";
fn check_is_apub_id_valid(apub_id: &Url) -> Result<(), LemmyError> {
let settings = Settings::get();
let domain = apub_id.domain().context(location_info!())?.to_string();
- let local_instance = settings
- .hostname
- .split(':')
- .collect::<Vec<&str>>()
- .first()
- .context(location_info!())?
- .to_string();
+ let local_instance = settings.get_hostname_without_port()?;
if !settings.federation.enabled {
return if domain == local_instance {
diff --git a/lemmy_utils/src/settings.rs b/lemmy_utils/src/settings.rs
index 4ca87f282..4877d46b3 100644
--- a/lemmy_utils/src/settings.rs
+++ b/lemmy_utils/src/settings.rs
@@ -1,3 +1,5 @@
+use crate::location_info;
+use anyhow::Context;
use config::{Config, ConfigError, Environment, File};
use serde::Deserialize;
use std::{env, fs, io::Error, net::IpAddr, path::PathBuf, sync::RwLock};
@@ -178,6 +180,21 @@ impl Settings {
format!("{}://{}", self.get_protocol_string(), self.hostname)
}
+ /// When running the federation test setup in `api_tests/` or `docker/federation`, the `hostname`
+ /// variable will be like `lemmy-alpha:8541`. This method removes the port and returns
+ /// `lemmy-alpha` instead. It has no effect in production.
+ pub fn get_hostname_without_port(&self) -> Result<String, anyhow::Error> {
+ Ok(
+ self
+ .hostname
+ .split(':')
+ .collect::<Vec<&str>>()
+ .first()
+ .context(location_info!())?
+ .to_string(),
+ )
+ }
+
pub fn save_config_file(data: &str) -> Result<String, Error> {
fs::write(CONFIG_FILE, data)?;
diff --git a/lemmy_utils/src/test.rs b/lemmy_utils/src/test.rs
index fdca384f5..aaa59bfa7 100644
--- a/lemmy_utils/src/test.rs
+++ b/lemmy_utils/src/test.rs
@@ -53,7 +53,7 @@ fn test_valid_post_title() {
#[test]
fn test_slur_filter() {
let test =
- "coons test dindu ladyboy tranny retardeds. Capitalized Niggerz. This is a bunch of other safe text.";
+ "faggot test kike tranny cocksucker retardeds. Capitalized Niggerz. This is a bunch of other safe text.";
let slur_free = "No slurs here";
assert_eq!(
remove_slurs(&test),
@@ -63,13 +63,13 @@ fn test_slur_filter() {
let has_slurs_vec = vec![
"Niggerz",
- "coons",
- "dindu",
- "ladyboy",
+ "cocksucker",
+ "faggot",
+ "kike",
"retardeds",
"tranny",
];
- let has_slurs_err_str = "No slurs - Niggerz, coons, dindu, ladyboy, retardeds, tranny";
+ let has_slurs_err_str = "No slurs - Niggerz, cocksucker, faggot, kike, retardeds, tranny";
assert_eq!(slur_check(test), Err(has_slurs_vec));
assert_eq!(slur_check(slur_free), Ok(()));
diff --git a/lemmy_utils/src/utils.rs b/lemmy_utils/src/utils.rs
index 2af010a84..b2a7c97e2 100644
--- a/lemmy_utils/src/utils.rs
+++ b/lemmy_utils/src/utils.rs
@@ -7,7 +7,7 @@ use regex::{Regex, RegexBuilder};
lazy_static! {
static ref EMAIL_REGEX: Regex = Regex::new(r"^[a-zA-Z0-9.!#$%&’*+/=?^_`{|}~-]+@[a-zA-Z0-9-]+(?:\.[a-zA-Z0-9-]+)*$").unwrap();
-static ref SLUR_REGEX: Regex = RegexBuilder::new(r"(fag(g|got|tard)?|maricos?|cock\s?sucker(s|ing)?|\bn(i|1)g(\b|g?(a|er)?(s|z)?)\b|dindu(s?)|mudslime?s?|kikes?|mongoloids?|towel\s*heads?|\bspi(c|k)s?\b|\bchinks?|niglets?|beaners?|\bnips?\b|\bcoons?\b|jungle\s*bunn(y|ies?)|jigg?aboo?s?|\bpakis?\b|rag\s*heads?|gooks?|cunts?|bitch(es|ing|y)?|puss(y|ies?)|twats?|feminazis?|whor(es?|ing)|\bslut(s|t?y)?|\btr(a|@)nn?(y|ies?)|ladyboy(s?)|\b(b|re|r)tard(ed)?s?)").case_insensitive(true).build().unwrap();
+static ref SLUR_REGEX: Regex = RegexBuilder::new(r"(fag(g|got|tard)?\b|cock\s?sucker(s|ing)?|\bn(i|1)g(\b|g?(a|er)?(s|z)?)\b|mudslime?s?|kikes?|\bspi(c|k)s?\b|\bchinks?|gooks?|bitch(es|ing|y)?|whor(es?|ing)|\btr(a|@)nn?(y|ies?)|\b(b|re|r)tard(ed)?s?)").case_insensitive(true).build().unwrap();
static ref USERNAME_MATCHES_REGEX: Regex = Regex::new(r"/u/[a-zA-Z][0-9a-zA-Z_]*").unwrap();
// TODO keep this old one, it didn't work with port well tho
// static ref MENTIONS_REGEX: Regex = Regex::new(r"@(?P[\w.]+)@(?P[a-zA-Z0-9._-]+\.[a-zA-Z0-9_-]+)").unwrap();
diff --git a/migrations/2020-12-02-152437_create_site_aggregates/down.sql b/migrations/2020-12-02-152437_create_site_aggregates/down.sql
index 2a2aa97d5..914bdb8d8 100644
--- a/migrations/2020-12-02-152437_create_site_aggregates/down.sql
+++ b/migrations/2020-12-02-152437_create_site_aggregates/down.sql
@@ -1,13 +1,21 @@
-- Site aggregates
drop table site_aggregates;
drop trigger site_aggregates_site on site;
-drop trigger site_aggregates_user on user_;
-drop trigger site_aggregates_post on post;
-drop trigger site_aggregates_comment on comment;
-drop trigger site_aggregates_community on community;
+drop trigger site_aggregates_user_insert on user_;
+drop trigger site_aggregates_user_delete on user_;
+drop trigger site_aggregates_post_insert on post;
+drop trigger site_aggregates_post_delete on post;
+drop trigger site_aggregates_comment_insert on comment;
+drop trigger site_aggregates_comment_delete on comment;
+drop trigger site_aggregates_community_insert on community;
+drop trigger site_aggregates_community_delete on community;
drop function
site_aggregates_site,
- site_aggregates_user,
- site_aggregates_post,
- site_aggregates_comment,
- site_aggregates_community;
+ site_aggregates_user_insert,
+ site_aggregates_user_delete,
+ site_aggregates_post_insert,
+ site_aggregates_post_delete,
+ site_aggregates_comment_insert,
+ site_aggregates_comment_delete,
+ site_aggregates_community_insert,
+ site_aggregates_community_delete;
diff --git a/migrations/2020-12-02-152437_create_site_aggregates/up.sql b/migrations/2020-12-02-152437_create_site_aggregates/up.sql
index b10a5f419..679543d19 100644
--- a/migrations/2020-12-02-152437_create_site_aggregates/up.sql
+++ b/migrations/2020-12-02-152437_create_site_aggregates/up.sql
@@ -10,10 +10,10 @@ create table site_aggregates (
insert into site_aggregates (site_id, users, posts, comments, communities)
select id as site_id,
- ( select coalesce(count(*), 0) from user_) as users,
- ( select coalesce(count(*), 0) from post) as posts,
- ( select coalesce(count(*), 0) from comment) as comments,
- ( select coalesce(count(*), 0) from community) as communities
+ ( select coalesce(count(*), 0) from user_ where local = true) as users,
+ ( select coalesce(count(*), 0) from post where local = true) as posts,
+ ( select coalesce(count(*), 0) from comment where local = true) as comments,
+ ( select coalesce(count(*), 0) from community where local = true) as communities
from site;
-- initial site add
@@ -36,91 +36,134 @@ execute procedure site_aggregates_site();
-- Add site aggregate triggers
-- user
-create or replace function site_aggregates_user()
+create function site_aggregates_user_insert()
returns trigger language plpgsql
as $$
begin
- IF (TG_OP = 'INSERT') THEN
- update site_aggregates
- set users = users + 1;
- ELSIF (TG_OP = 'DELETE') THEN
- -- Join to site since the creator might not be there anymore
- update site_aggregates sa
- set users = users - 1
- from site s
- where sa.site_id = s.id;
- END IF;
+ update site_aggregates
+ set users = users + 1;
return null;
end $$;
-create trigger site_aggregates_user
-after insert or delete on user_
+create function site_aggregates_user_delete()
+returns trigger language plpgsql
+as $$
+begin
+ -- Join to site since the creator might not be there anymore
+ update site_aggregates sa
+ set users = users - 1
+ from site s
+ where sa.site_id = s.id;
+ return null;
+end $$;
+
+create trigger site_aggregates_user_insert
+after insert on user_
for each row
-execute procedure site_aggregates_user();
+when (NEW.local = true)
+execute procedure site_aggregates_user_insert();
+
+create trigger site_aggregates_user_delete
+after delete on user_
+for each row
+when (OLD.local = true)
+execute procedure site_aggregates_user_delete();
-- post
-create function site_aggregates_post()
+create function site_aggregates_post_insert()
returns trigger language plpgsql
as $$
begin
- IF (TG_OP = 'INSERT') THEN
- update site_aggregates
- set posts = posts + 1;
- ELSIF (TG_OP = 'DELETE') THEN
- update site_aggregates sa
- set posts = posts - 1
- from site s
- where sa.site_id = s.id;
- END IF;
+ update site_aggregates
+ set posts = posts + 1;
return null;
end $$;
-create trigger site_aggregates_post
-after insert or delete on post
+create function site_aggregates_post_delete()
+returns trigger language plpgsql
+as $$
+begin
+ update site_aggregates sa
+ set posts = posts - 1
+ from site s
+ where sa.site_id = s.id;
+ return null;
+end $$;
+
+create trigger site_aggregates_post_insert
+after insert on post
for each row
-execute procedure site_aggregates_post();
+when (NEW.local = true)
+execute procedure site_aggregates_post_insert();
+
+create trigger site_aggregates_post_delete
+after delete on post
+for each row
+when (OLD.local = true)
+execute procedure site_aggregates_post_delete();
-- comment
-create function site_aggregates_comment()
+create function site_aggregates_comment_insert()
returns trigger language plpgsql
as $$
begin
- IF (TG_OP = 'INSERT') THEN
- update site_aggregates
- set comments = comments + 1;
- ELSIF (TG_OP = 'DELETE') THEN
- update site_aggregates sa
- set comments = comments - 1
- from site s
- where sa.site_id = s.id;
- END IF;
+ update site_aggregates
+ set comments = comments + 1;
return null;
end $$;
-create trigger site_aggregates_comment
-after insert or delete on comment
+create function site_aggregates_comment_delete()
+returns trigger language plpgsql
+as $$
+begin
+ update site_aggregates sa
+ set comments = comments - 1
+ from site s
+ where sa.site_id = s.id;
+ return null;
+end $$;
+
+create trigger site_aggregates_comment_insert
+after insert on comment
for each row
-execute procedure site_aggregates_comment();
+when (NEW.local = true)
+execute procedure site_aggregates_comment_insert();
+
+create trigger site_aggregates_comment_delete
+after delete on comment
+for each row
+when (OLD.local = true)
+execute procedure site_aggregates_comment_delete();
-- community
-create function site_aggregates_community()
+create function site_aggregates_community_insert()
returns trigger language plpgsql
as $$
begin
- IF (TG_OP = 'INSERT') THEN
- update site_aggregates
- set communities = communities + 1;
- ELSIF (TG_OP = 'DELETE') THEN
- update site_aggregates sa
- set communities = communities - 1
- from site s
- where sa.site_id = s.id;
- END IF;
+ update site_aggregates
+ set communities = communities + 1;
return null;
end $$;
-create trigger site_aggregates_community
-after insert or delete on community
-for each row
-execute procedure site_aggregates_community();
+create function site_aggregates_community_delete()
+returns trigger language plpgsql
+as $$
+begin
+ update site_aggregates sa
+ set communities = communities - 1
+ from site s
+ where sa.site_id = s.id;
+ return null;
+end $$;
+create trigger site_aggregates_community_insert
+after insert on community
+for each row
+when (NEW.local = true)
+execute procedure site_aggregates_community_insert();
+
+create trigger site_aggregates_community_delete
+after delete on community
+for each row
+when (OLD.local = true)
+execute procedure site_aggregates_community_delete();
diff --git a/migrations/2020-12-10-152350_create_post_aggregates/up.sql b/migrations/2020-12-10-152350_create_post_aggregates/up.sql
index aaa611c4e..fcb3a9390 100644
--- a/migrations/2020-12-10-152350_create_post_aggregates/up.sql
+++ b/migrations/2020-12-10-152350_create_post_aggregates/up.sql
@@ -66,10 +66,13 @@ as $$
begin
IF (TG_OP = 'INSERT') THEN
update post_aggregates pa
- set comments = comments + 1,
- newest_comment_time = NEW.published
- where pa.post_id = NEW.post_id
+ set comments = comments + 1
+ where pa.post_id = NEW.post_id;
+
-- A 2 day necro-bump limit
+ update post_aggregates pa
+ set newest_comment_time = NEW.published
+ where pa.post_id = NEW.post_id
and published > ('now'::timestamp - '2 days'::interval);
ELSIF (TG_OP = 'DELETE') THEN
-- Join to post because that post may not exist anymore
diff --git a/scripts/compilation_benchmark.sh b/scripts/compilation_benchmark.sh
new file mode 100755
index 000000000..760037980
--- /dev/null
+++ b/scripts/compilation_benchmark.sh
@@ -0,0 +1,23 @@
+#!/bin/bash
+set -e
+
+times=3
+duration=0
+for ((i=0; i < times; i++)) ; do
+ echo "Starting iteration $i"
+ echo "cargo clean"
+ # to benchmark incremental compilation time, do a full build with the same compiler version first,
+ # and use the following clean command:
+ #cargo clean -p lemmy_utils
+ cargo clean
+ echo "cargo build"
+ start=$(date +%s.%N)
+ RUSTC_WRAPPER='' cargo +1.47.0 build -q
+ end=$(date +%s.%N)
+ echo "Finished iteration $i after $(bc <<< "scale=0; $end - $start") seconds"
+ duration=$(bc <<< "$duration + $end - $start")
+done
+
+average=$(bc <<< "scale=0; $duration / $times")
+
+echo "Average compilation time over $times runs is $average seconds"
diff --git a/db-init.sh b/scripts/db-init.sh
similarity index 100%
rename from db-init.sh
rename to scripts/db-init.sh
diff --git a/install.sh b/scripts/install.sh
similarity index 100%
rename from install.sh
rename to scripts/install.sh
diff --git a/query_testing/apache_bench_report.sh b/scripts/query_testing/apache_bench_report.sh
similarity index 100%
rename from query_testing/apache_bench_report.sh
rename to scripts/query_testing/apache_bench_report.sh
diff --git a/query_testing/api_benchmark.sh b/scripts/query_testing/api_benchmark.sh
similarity index 100%
rename from query_testing/api_benchmark.sh
rename to scripts/query_testing/api_benchmark.sh
diff --git a/query_testing/views_old/generate_reports.sh b/scripts/query_testing/views_old/generate_reports.sh
similarity index 100%
rename from query_testing/views_old/generate_reports.sh
rename to scripts/query_testing/views_old/generate_reports.sh
diff --git a/query_testing/views_old/timings-2021-01-05_21-06-37.out b/scripts/query_testing/views_old/timings-2021-01-05_21-06-37.out
similarity index 100%
rename from query_testing/views_old/timings-2021-01-05_21-06-37.out
rename to scripts/query_testing/views_old/timings-2021-01-05_21-06-37.out
diff --git a/query_testing/views_to_diesel_migration/generate_reports.sh b/scripts/query_testing/views_to_diesel_migration/generate_reports.sh
similarity index 100%
rename from query_testing/views_to_diesel_migration/generate_reports.sh
rename to scripts/query_testing/views_to_diesel_migration/generate_reports.sh
diff --git a/query_testing/views_to_diesel_migration/timings-2021-01-05_21-32-54.out b/scripts/query_testing/views_to_diesel_migration/timings-2021-01-05_21-32-54.out
similarity index 100%
rename from query_testing/views_to_diesel_migration/timings-2021-01-05_21-32-54.out
rename to scripts/query_testing/views_to_diesel_migration/timings-2021-01-05_21-32-54.out
diff --git a/test.sh b/scripts/test.sh
similarity index 100%
rename from test.sh
rename to scripts/test.sh
diff --git a/src/routes/nodeinfo.rs b/src/routes/nodeinfo.rs
index e6c5e6c42..df0064ecc 100644
--- a/src/routes/nodeinfo.rs
+++ b/src/routes/nodeinfo.rs
@@ -46,12 +46,13 @@ async fn node_info(context: web::Data) -> Result,
pub usage: NodeInfoUsage,
+ pub open_registrations: bool,
}
#[derive(Serialize, Deserialize, Debug)]
@@ -88,7 +91,6 @@ struct NodeInfoUsage {
pub users: NodeInfoUsers,
pub local_posts: i64,
pub local_comments: i64,
- pub open_registrations: bool,
}
#[derive(Serialize, Deserialize, Debug)]