Adding unique constraint for activity ap_id. Fixes #1878 (#1935)

* Adding unique constraint for activity ap_id. Fixes #1878

* Removing is_activity_already_known
Dessalines 2021-11-22 13:57:03 -05:00 committed by GitHub
parent 76c4378011
commit 3d08e6c1fc
7 changed files with 32 additions and 27 deletions
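The gist of the change: duplicate detection for incoming activities moves from an application-side lookup (is_activity_already_known) to a unique index on the activity's ap_id, so the database itself rejects, or can silently skip, a second copy of the same activity. A minimal sketch of the mechanism, using an illustrative table rather than Lemmy's real schema:

-- Sketch only: illustrative table and ids, not Lemmy's actual schema.
create table activity_demo (
  id    serial primary key,
  ap_id text not null,
  data  jsonb not null
);
create unique index idx_activity_demo_ap_id on activity_demo (ap_id);

-- The first copy of an activity is stored normally.
insert into activity_demo (ap_id, data)
values ('https://example.com/activity/1', '{"id": "https://example.com/activity/1"}');

-- A second insert with the same ap_id raises a unique_violation, or can be
-- skipped outright with ON CONFLICT, which is what makes a separate
-- "is this activity already known?" query unnecessary.
insert into activity_demo (ap_id, data)
values ('https://example.com/activity/1', '{"id": "https://example.com/activity/1"}')
on conflict (ap_id) do nothing;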


@@ -1,7 +1,7 @@
 use crate::{
   activities::{generate_activity_id, send_lemmy_activity, verify_activity, verify_is_public},
   activity_lists::AnnouncableActivities,
-  http::{is_activity_already_known, ActivityCommonFields},
+  http::ActivityCommonFields,
   insert_activity,
   objects::community::ApubCommunity,
   protocol::activities::{community::announce::AnnounceActivity, CreateOrUpdateType},
@@ -109,9 +109,6 @@ impl ActivityHandler for AnnounceActivity
     let object_value = serde_json::to_value(&self.object)?;
     let object_data: ActivityCommonFields = serde_json::from_value(object_value.to_owned())?;
-    if is_activity_already_known(context.pool(), &object_data.id).await? {
-      return Ok(());
-    }
     insert_activity(&object_data.id, object_value, false, true, context.pool()).await?;
   }
 }


@@ -24,7 +24,7 @@ use lemmy_apub_lib::{
   traits::{ActivityHandler, ActorType},
   APUB_JSON_CONTENT_TYPE,
 };
-use lemmy_db_schema::{source::activity::Activity, DbPool};
+use lemmy_db_schema::source::activity::Activity;
 use lemmy_utils::{location_info, LemmyError};
 use lemmy_websocket::LemmyContext;
 use log::info;
@@ -97,10 +97,6 @@ where
     .await?;
   verify_signature(&request, &actor.public_key())?;
-  // Do nothing if we received the same activity before
-  if is_activity_already_known(context.pool(), &activity_data.id).await? {
-    return Ok(HttpResponse::Ok().finish());
-  }
   info!("Verifying activity {}", activity_data.id.to_string());
   activity
     .verify(&Data::new(context.clone()), request_counter)
@@ -178,21 +174,6 @@ pub(crate) async fn get_activity(
   }
 }
-pub(crate) async fn is_activity_already_known(
-  pool: &DbPool,
-  activity_id: &Url,
-) -> Result<bool, LemmyError> {
-  let activity_id = activity_id.to_owned().into();
-  let existing = blocking(pool, move |conn| {
-    Activity::read_from_apub_id(conn, &activity_id)
-  })
-  .await?;
-  match existing {
-    Ok(_) => Ok(true),
-    Err(_) => Ok(false),
-  }
-}
 fn assert_activity_not_local(id: &Url, hostname: &str) -> Result<(), LemmyError> {
   let activity_domain = id.domain().context(location_info!())?;
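For reference, the removed helper was a read-before-write existence check; in SQL terms it amounted to roughly the query below (a sketch, not the actual Diesel query it generated). With the unique index in place the insert itself reports a duplicate, so the extra round trip is no longer needed, and the usual weakness of check-then-insert, two concurrent deliveries both passing the check, no longer matters.

-- Rough SQL equivalent of the removed is_activity_already_known (sketch only):
select exists (
  select 1
  from activity
  where ap_id = 'https://example.com/activity/1'
) as already_known;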


@@ -127,7 +127,7 @@ mod tests {
     let inserted_activity = Activity::create(&conn, &activity_form).unwrap();
     let expected_activity = Activity {
-      ap_id: Some(ap_id.clone()),
+      ap_id: ap_id.clone(),
       id: inserted_activity.id,
       data: test_json,
       local: true,


@@ -5,7 +5,7 @@ table! {
     local -> Bool,
     published -> Timestamp,
     updated -> Nullable<Timestamp>,
-    ap_id -> Nullable<Text>,
+    ap_id -> Text,
     sensitive -> Nullable<Bool>,
   }
 }


@@ -10,7 +10,7 @@ pub struct Activity {
   pub local: bool,
   pub published: chrono::NaiveDateTime,
   pub updated: Option<chrono::NaiveDateTime>,
-  pub ap_id: Option<DbUrl>,
+  pub ap_id: DbUrl,
   pub sensitive: Option<bool>,
 }


@@ -0,0 +1,5 @@
+alter table activity alter column ap_id drop not null;
+create unique index idx_activity_unique_apid on activity ((data ->> 'id'::text));
+drop index idx_activity_ap_id;
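The down migration moves the uniqueness guarantee back onto the id embedded in the JSON body, via an expression index. Postgres will use such an index only for queries whose predicate references the indexed expression, e.g. (sketch, with a made-up activity id):

-- Sketch: a lookup whose predicate matches the (data ->> 'id') expression index.
select id, local, published
from activity
where data ->> 'id'::text = 'https://example.com/activity/1';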


@@ -0,0 +1,22 @@
+-- Delete the empty ap_ids
+delete from activity where ap_id is null;
+-- Make it required
+alter table activity alter column ap_id set not null;
+-- Delete dupes, keeping the first one
+delete
+from activity
+where id not in (
+  select min(id)
+  from activity
+  group by ap_id
+);
+-- The index
+create unique index idx_activity_ap_id on activity(ap_id);
+-- Drop the old index
+drop index idx_activity_unique_apid;
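Before the new unique index can be built, the table has to contain no NULL ap_ids and at most one row per ap_id, which is what the deletes above arrange. A couple of sanity-check queries along those lines (a sketch to run by hand, not part of the migration):

-- Should return zero rows once the dedup delete has run.
select ap_id, count(*) as copies
from activity
group by ap_id
having count(*) > 1;

-- Should be zero before "alter column ap_id set not null".
select count(*) as null_ap_ids
from activity
where ap_id is null;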