forked from rDrama/rDrama
1
0
Fork 0

Merge pull request #74 from Aevann1/regex-censor

Regex censor
master
yoMamasDic 2021-10-17 12:42:48 +02:00 committed by GitHub
commit 6736315b05
8 changed files with 1351 additions and 1321 deletions

17
.gitignore vendored 100755 → 100644
View File

@ -1,7 +1,10 @@
image.* image.*
chart.png chart.png
video.mp4 video.mp4
cache/ cache/
__pycache__/ __pycache__/
disablesignups disablesignups
*rules.html *rules.html
.idea/
**/.pytest_cache/
venv/

132
docker-compose.yml 100755 → 100644
View File

@ -1,66 +1,66 @@
version: '2.3' version: '2.3'
services: services:
files: files:
build: build:
context: . context: .
volumes: volumes:
- "./:/service" - "./:/service"
environment: environment:
- DATABASE_URL=postgresql://postgres@127.0.0.1:5432/postgres - DATABASE_URL=postgresql://postgres@127.0.0.1:5432/postgres
- MASTER_KEY=${MASTER_KEY:-KTVciAUQFpFh2WdJ/oiHJlxl6FvzRZp8kYzAAv3l2OA=} - MASTER_KEY=${MASTER_KEY:-KTVciAUQFpFh2WdJ/oiHJlxl6FvzRZp8kYzAAv3l2OA=}
- DOMAIN=localhost - DOMAIN=localhost
- SITE_NAME=Drama - SITE_NAME=Drama
- GIPHY_KEY=3435tdfsdudebussylmaoxxt43 - GIPHY_KEY=3435tdfsdudebussylmaoxxt43
- FORCE_HTTPS=0 - FORCE_HTTPS=0
- DISCORD_SERVER_ID=3435tdfsdudebussylmaoxxt43 - DISCORD_SERVER_ID=3435tdfsdudebussylmaoxxt43
- DISCORD_CLIENT_ID=3435tdfsdudebussylmaoxxt43 - DISCORD_CLIENT_ID=3435tdfsdudebussylmaoxxt43
- DISCORD_CLIENT_SECRET=3435tdfsdudebussylmaoxxt43 - DISCORD_CLIENT_SECRET=3435tdfsdudebussylmaoxxt43
- DISCORD_BOT_TOKEN=3435tdfsdudebussylmaoxxt43 - DISCORD_BOT_TOKEN=3435tdfsdudebussylmaoxxt43
#- HCAPTCHA_SITEKEY=3435tdfsdudebussylmaoxxt43 #- HCAPTCHA_SITEKEY=3435tdfsdudebussylmaoxxt43
- HCAPTCHA_SECRET=3435tdfsdudebussylmaoxxt43 - HCAPTCHA_SECRET=3435tdfsdudebussylmaoxxt43
- YOUTUBE_KEY=3435tdfsdudebussylmaoxxt43 - YOUTUBE_KEY=3435tdfsdudebussylmaoxxt43
- PUSHER_KEY=3435tdfsdudebussylmaoxxt43 - PUSHER_KEY=3435tdfsdudebussylmaoxxt43
- CATBOX_KEY=3435tdfsdudebussylmaoxxt43 - CATBOX_KEY=3435tdfsdudebussylmaoxxt43
- SPAM_SIMILARITY_THRESHOLD=0.5 - SPAM_SIMILARITY_THRESHOLD=0.5
- SPAM_SIMILAR_COUNT_THRESHOLD=5 - SPAM_SIMILAR_COUNT_THRESHOLD=5
- SPAM_URL_SIMILARITY_THRESHOLD=0.1 - SPAM_URL_SIMILARITY_THRESHOLD=0.1
- COMMENT_SPAM_SIMILAR_THRESHOLD=0.5 - COMMENT_SPAM_SIMILAR_THRESHOLD=0.5
- COMMENT_SPAM_COUNT_THRESHOLD=5 - COMMENT_SPAM_COUNT_THRESHOLD=5
- READ_ONLY=0 - READ_ONLY=0
- BOT_DISABLE=0 - BOT_DISABLE=0
- COINS_NAME=Dramacoins - COINS_NAME=Dramacoins
- DEFAULT_TIME_FILTER=all - DEFAULT_TIME_FILTER=all
- DEFAULT_THEME=midnight - DEFAULT_THEME=midnight
- DEFAULT_COLOR=ff66ac #YOU HAVE TO PICK ONE OF THOSE COLORS OR SHIT WILL BREAK: ff66ac, 805ad5, 62ca56, 38a169, 80ffff, 2a96f3, eb4963, ff0000, f39731, 30409f, 3e98a7, e4432d, 7b9ae4, ec72de, 7f8fa6, f8db58 - DEFAULT_COLOR=ff66ac #YOU HAVE TO PICK ONE OF THOSE COLORS OR SHIT WILL BREAK: ff66ac, 805ad5, 62ca56, 38a169, 80ffff, 2a96f3, eb4963, ff0000, f39731, 30409f, 3e98a7, e4432d, 7b9ae4, ec72de, 7f8fa6, f8db58
- SLOGAN=Dude bussy lmao - SLOGAN=Dude bussy lmao
- GUMROAD_TOKEN=3435tdfsdudebussylmaoxxt43 - GUMROAD_TOKEN=3435tdfsdudebussylmaoxxt43
- GUMROAD_LINK=https://marsey1.gumroad.com/l/tfcvri - GUMROAD_LINK=https://marsey1.gumroad.com/l/tfcvri
- CARD_VIEW=1 - CARD_VIEW=1
- DISABLE_DOWNVOTES=0 - DISABLE_DOWNVOTES=0
- DUES=0 - DUES=0
- MAIL_USERNAME=blahblahblah@gmail.com - MAIL_USERNAME=blahblahblah@gmail.com
- MAIL_PASSWORD=3435tdfsdudebussylmaoxxt43 - MAIL_PASSWORD=3435tdfsdudebussylmaoxxt43
links: links:
- "redis" - "redis"
- "postgres" - "postgres"
ports: ports:
- "80:80" - "80:80"
depends_on: depends_on:
- redis - redis
- postgres - postgres
redis: redis:
image: redis image: redis
ports: ports:
- "6379:6379" - "6379:6379"
postgres: postgres:
image: postgres:12.3 image: postgres:12.3
volumes: volumes:
- "./schema.sql:/docker-entrypoint-initdb.d/00-schema.sql" - "./schema.sql:/docker-entrypoint-initdb.d/00-schema.sql"
- "./seed-db.sql:/docker-entrypoint-initdb.d/01-schema.sql" - "./seed-db.sql:/docker-entrypoint-initdb.d/01-schema.sql"
environment: environment:
- POSTGRES_HOST_AUTH_METHOD=trust - POSTGRES_HOST_AUTH_METHOD=trust
#ports: #ports:
#- "5432:5432" #- "5432:5432"

788
files/classes/comment.py 100755 → 100644
View File

@ -1,394 +1,394 @@
import re from os import environ
from urllib.parse import urlencode, urlparse, parse_qs import re
from flask import * import time
from sqlalchemy import * from urllib.parse import urlencode, urlparse, parse_qs
from sqlalchemy.orm import relationship, deferred, lazyload
from files.classes.votes import CommentVote from flask import *
from files.helpers.lazy import lazy from sqlalchemy import *
from files.helpers.const import SLURS from sqlalchemy.orm import relationship, deferred, lazyload
from files.__main__ import Base
from .flags import CommentFlag from files.__main__ import Base
from os import environ from files.classes.votes import CommentVote
import time from files.helpers.const import AUTOPOLLER_ACCOUNT
from files.helpers.const import AUTOPOLLER_ACCOUNT from files.helpers.lazy import lazy
from .flags import CommentFlag
site = environ.get("DOMAIN").strip() from ..helpers.word_censor import censor_slurs
site = environ.get("DOMAIN").strip()
class Comment(Base):
__tablename__ = "comments" class Comment(Base):
id = Column(Integer, primary_key=True) __tablename__ = "comments"
author_id = Column(Integer, ForeignKey("users.id"))
parent_submission = Column(Integer, ForeignKey("submissions.id")) id = Column(Integer, primary_key=True)
created_utc = Column(Integer, default=0) author_id = Column(Integer, ForeignKey("users.id"))
edited_utc = Column(Integer, default=0) parent_submission = Column(Integer, ForeignKey("submissions.id"))
is_banned = Column(Boolean, default=False) created_utc = Column(Integer, default=0)
removed_by = Column(Integer) edited_utc = Column(Integer, default=0)
bannedfor = Column(Boolean) is_banned = Column(Boolean, default=False)
distinguish_level = Column(Integer, default=0) removed_by = Column(Integer)
deleted_utc = Column(Integer, default=0) bannedfor = Column(Boolean)
is_approved = Column(Integer, default=0) distinguish_level = Column(Integer, default=0)
level = Column(Integer, default=0) deleted_utc = Column(Integer, default=0)
parent_comment_id = Column(Integer, ForeignKey("comments.id")) is_approved = Column(Integer, default=0)
over_18 = Column(Boolean, default=False) level = Column(Integer, default=0)
is_bot = Column(Boolean, default=False) parent_comment_id = Column(Integer, ForeignKey("comments.id"))
is_pinned = Column(String) over_18 = Column(Boolean, default=False)
sentto=Column(Integer, ForeignKey("users.id")) is_bot = Column(Boolean, default=False)
notifiedto=Column(Integer) is_pinned = Column(String)
app_id = Column(Integer, ForeignKey("oauth_apps.id")) sentto=Column(Integer, ForeignKey("users.id"))
oauth_app = relationship("OauthApp", viewonly=True) notifiedto=Column(Integer)
upvotes = Column(Integer, default=0) app_id = Column(Integer, ForeignKey("oauth_apps.id"))
downvotes = Column(Integer, default=0) oauth_app = relationship("OauthApp", viewonly=True)
body = deferred(Column(String)) upvotes = Column(Integer, default=0)
body_html = deferred(Column(String)) downvotes = Column(Integer, default=0)
ban_reason = Column(String) body = deferred(Column(String))
body_html = deferred(Column(String))
post = relationship("Submission", viewonly=True) ban_reason = Column(String)
flags = relationship("CommentFlag", lazy="dynamic", viewonly=True)
author = relationship("User", primaryjoin="User.id==Comment.author_id") post = relationship("Submission", viewonly=True)
senttouser = relationship("User", primaryjoin="User.id==Comment.sentto", viewonly=True) flags = relationship("CommentFlag", lazy="dynamic", viewonly=True)
parent_comment = relationship("Comment", remote_side=[id], viewonly=True) author = relationship("User", primaryjoin="User.id==Comment.author_id")
child_comments = relationship("Comment", remote_side=[parent_comment_id], viewonly=True) senttouser = relationship("User", primaryjoin="User.id==Comment.sentto", viewonly=True)
awards = relationship("AwardRelationship", viewonly=True) parent_comment = relationship("Comment", remote_side=[id], viewonly=True)
child_comments = relationship("Comment", remote_side=[parent_comment_id], viewonly=True)
def __init__(self, *args, **kwargs): awards = relationship("AwardRelationship", viewonly=True)
if "created_utc" not in kwargs: def __init__(self, *args, **kwargs):
kwargs["created_utc"] = int(time.time())
if "created_utc" not in kwargs:
super().__init__(*args, **kwargs) kwargs["created_utc"] = int(time.time())
def __repr__(self): super().__init__(*args, **kwargs)
return f"<Comment(id={self.id})>" def __repr__(self):
def poll_voted(self, v): return f"<Comment(id={self.id})>"
if v:
vote = g.db.query(CommentVote).options(lazyload('*')).filter_by(user_id=v.id, comment_id=self.id).first() def poll_voted(self, v):
if vote: return vote.vote_type if v:
else: return None vote = g.db.query(CommentVote).options(lazyload('*')).filter_by(user_id=v.id, comment_id=self.id).first()
else: return None if vote: return vote.vote_type
else: return None
@property else: return None
@lazy
def options(self): @property
return [x for x in self.child_comments if x.author_id == AUTOPOLLER_ACCOUNT] @lazy
def options(self):
@property return [x for x in self.child_comments if x.author_id == AUTOPOLLER_ACCOUNT]
@lazy
def created_datetime(self): @property
return str(time.strftime("%d/%B/%Y %H:%M:%S UTC", time.gmtime(self.created_utc))) @lazy
def created_datetime(self):
@property return str(time.strftime("%d/%B/%Y %H:%M:%S UTC", time.gmtime(self.created_utc)))
@lazy
def age_string(self): @property
@lazy
age = int(time.time()) - self.created_utc def age_string(self):
if age < 60: age = int(time.time()) - self.created_utc
return "just now"
elif age < 3600: if age < 60:
minutes = int(age / 60) return "just now"
return f"{minutes}m ago" elif age < 3600:
elif age < 86400: minutes = int(age / 60)
hours = int(age / 3600) return f"{minutes}m ago"
return f"{hours}hr ago" elif age < 86400:
elif age < 2678400: hours = int(age / 3600)
days = int(age / 86400) return f"{hours}hr ago"
return f"{days}d ago" elif age < 2678400:
days = int(age / 86400)
now = time.gmtime() return f"{days}d ago"
ctd = time.gmtime(self.created_utc)
now = time.gmtime()
months = now.tm_mon - ctd.tm_mon + 12 * (now.tm_year - ctd.tm_year) ctd = time.gmtime(self.created_utc)
if now.tm_mday < ctd.tm_mday:
months -= 1 months = now.tm_mon - ctd.tm_mon + 12 * (now.tm_year - ctd.tm_year)
if now.tm_mday < ctd.tm_mday:
if months < 12: months -= 1
return f"{months}mo ago"
else: if months < 12:
years = int(months / 12) return f"{months}mo ago"
return f"{years}yr ago" else:
years = int(months / 12)
@property return f"{years}yr ago"
@lazy
def edited_string(self): @property
@lazy
if not self.edited_utc: def edited_string(self):
return "never"
if not self.edited_utc:
age = int(time.time()) - self.edited_utc return "never"
if age < 60: age = int(time.time()) - self.edited_utc
return "just now"
elif age < 3600: if age < 60:
minutes = int(age / 60) return "just now"
return f"{minutes}m ago" elif age < 3600:
elif age < 86400: minutes = int(age / 60)
hours = int(age / 3600) return f"{minutes}m ago"
return f"{hours}hr ago" elif age < 86400:
elif age < 2678400: hours = int(age / 3600)
days = int(age / 86400) return f"{hours}hr ago"
return f"{days}d ago" elif age < 2678400:
days = int(age / 86400)
now = time.gmtime() return f"{days}d ago"
ctd = time.gmtime(self.edited_utc)
months = now.tm_mon - ctd.tm_mon + 12 * (now.tm_year - ctd.tm_year) now = time.gmtime()
ctd = time.gmtime(self.edited_utc)
if months < 12: months = now.tm_mon - ctd.tm_mon + 12 * (now.tm_year - ctd.tm_year)
return f"{months}mo ago"
else: if months < 12:
years = now.tm_year - ctd.tm_year return f"{months}mo ago"
return f"{years}yr ago" else:
years = now.tm_year - ctd.tm_year
@property return f"{years}yr ago"
@lazy
def score(self): @property
return self.upvotes - self.downvotes @lazy
def score(self):
@property return self.upvotes - self.downvotes
@lazy
def fullname(self): @property
return f"t3_{self.id}" @lazy
def fullname(self):
@property return f"t3_{self.id}"
@lazy
def parent(self): @property
@lazy
if not self.parent_submission: return None def parent(self):
if self.level == 1: return self.post if not self.parent_submission: return None
else: return g.db.query(Comment).get(self.parent_comment_id) if self.level == 1: return self.post
@property else: return g.db.query(Comment).get(self.parent_comment_id)
@lazy
def parent_fullname(self): @property
if self.parent_comment_id: return f"t3_{self.parent_comment_id}" @lazy
elif self.parent_submission: return f"t2_{self.parent_submission}" def parent_fullname(self):
if self.parent_comment_id: return f"t3_{self.parent_comment_id}"
@property elif self.parent_submission: return f"t2_{self.parent_submission}"
def replies(self):
r = self.__dict__.get("replies", None) @property
if r: r = [x for x in r if not x.author.shadowbanned] def replies(self):
if not r and r != []: r = sorted([x for x in self.child_comments if not x.author.shadowbanned and x.author_id != AUTOPOLLER_ACCOUNT], key=lambda x: x.score, reverse=True) r = self.__dict__.get("replies", None)
return r if r: r = [x for x in r if not x.author.shadowbanned]
if not r and r != []: r = sorted([x for x in self.child_comments if not x.author.shadowbanned and x.author_id != AUTOPOLLER_ACCOUNT], key=lambda x: x.score, reverse=True)
@replies.setter return r
def replies(self, value):
self.__dict__["replies"] = value @replies.setter
def replies(self, value):
@property self.__dict__["replies"] = value
def replies2(self):
return self.__dict__.get("replies2", []) @property
def replies2(self):
@replies2.setter return self.__dict__.get("replies2", [])
def replies2(self, value):
self.__dict__["replies2"] = value @replies2.setter
def replies2(self, value):
@property self.__dict__["replies2"] = value
def replies3(self):
r = self.__dict__.get("replies", None) @property
if not r and r != []: r = sorted([x for x in self.child_comments if x.author_id != AUTOPOLLER_ACCOUNT], key=lambda x: x.score, reverse=True) def replies3(self):
return r r = self.__dict__.get("replies", None)
if not r and r != []: r = sorted([x for x in self.child_comments if x.author_id != AUTOPOLLER_ACCOUNT], key=lambda x: x.score, reverse=True)
@property return r
@lazy
def shortlink(self): @property
return f"https://{site}/comment/{self.id}" @lazy
def shortlink(self):
@property return f"https://{site}/comment/{self.id}"
@lazy
def permalink(self): @property
if self.post and self.post.club: return f"/comment/{self.id}/" @lazy
def permalink(self):
if self.post: return f"{self.post.permalink}/{self.id}/" if self.post and self.post.club: return f"/comment/{self.id}/"
else: return f"/comment/{self.id}/"
if self.post: return f"{self.post.permalink}/{self.id}/"
@property else: return f"/comment/{self.id}/"
@lazy
def json_raw(self): @property
flags = {} @lazy
for f in self.flags: flags[f.user.username] = f.reason def json_raw(self):
flags = {}
data= { for f in self.flags: flags[f.user.username] = f.reason
'id': self.id,
'level': self.level, data= {
'author_name': self.author.username, 'id': self.id,
'body': self.body, 'level': self.level,
'body_html': self.body_html, 'author_name': self.author.username,
'is_bot': self.is_bot, 'body': self.body,
'created_utc': self.created_utc, 'body_html': self.body_html,
'edited_utc': self.edited_utc or 0, 'is_bot': self.is_bot,
'is_banned': bool(self.is_banned), 'created_utc': self.created_utc,
'deleted_utc': self.deleted_utc, 'edited_utc': self.edited_utc or 0,
'is_nsfw': self.over_18, 'is_banned': bool(self.is_banned),
'permalink': self.permalink, 'deleted_utc': self.deleted_utc,
'is_pinned': self.is_pinned, 'is_nsfw': self.over_18,
'distinguish_level': self.distinguish_level, 'permalink': self.permalink,
'post_id': self.post.id, 'is_pinned': self.is_pinned,
'score': self.score, 'distinguish_level': self.distinguish_level,
'upvotes': self.upvotes, 'post_id': self.post.id,
'downvotes': self.downvotes, 'score': self.score,
'is_bot': self.is_bot, 'upvotes': self.upvotes,
'flags': flags, 'downvotes': self.downvotes,
} 'is_bot': self.is_bot,
'flags': flags,
if self.ban_reason: }
data["ban_reason"]=self.ban_reason
if self.ban_reason:
return data data["ban_reason"]=self.ban_reason
def award_count(self, kind) -> int: return data
return len([x for x in self.awards if x.kind == kind])
def award_count(self, kind) -> int:
@property return len([x for x in self.awards if x.kind == kind])
@lazy
def json_core(self): @property
if self.is_banned: @lazy
data= {'is_banned': True, def json_core(self):
'ban_reason': self.ban_reason, if self.is_banned:
'id': self.id, data= {'is_banned': True,
'post': self.post.id, 'ban_reason': self.ban_reason,
'level': self.level, 'id': self.id,
'parent': self.parent_fullname 'post': self.post.id,
} 'level': self.level,
elif self.deleted_utc > 0: 'parent': self.parent_fullname
data= {'deleted_utc': self.deleted_utc, }
'id': self.id, elif self.deleted_utc > 0:
'post': self.post.id, data= {'deleted_utc': self.deleted_utc,
'level': self.level, 'id': self.id,
'parent': self.parent_fullname 'post': self.post.id,
} 'level': self.level,
else: 'parent': self.parent_fullname
}
data=self.json_raw else:
if self.level>=2: data['parent_comment_id']= self.parent_comment_id, data=self.json_raw
if "replies" in self.__dict__: if self.level>=2: data['parent_comment_id']= self.parent_comment_id,
data['replies']=[x.json_core for x in self.replies]
if "replies" in self.__dict__:
return data data['replies']=[x.json_core for x in self.replies]
@property return data
@lazy
def json(self): @property
@lazy
data=self.json_core def json(self):
if self.deleted_utc > 0 or self.is_banned: data=self.json_core
return data
if self.deleted_utc > 0 or self.is_banned:
data["author"]=self.author.json_core return data
data["post"]=self.post.json_core
data["author"]=self.author.json_core
if self.level >= 2: data["post"]=self.post.json_core
data["parent"]=self.parent.json_core
if self.level >= 2:
data["parent"]=self.parent.json_core
return data
def realbody(self, v): return data
if self.post and self.post.club and not (v and v.paid_dues): return "<p>COUNTRY CLUB ONLY</p>"
def realbody(self, v):
body = self.body_html if self.post and self.post.club and not (v and v.paid_dues): return "<p>COUNTRY CLUB ONLY</p>"
if not body: return "" body = self.body_html
if not v or v.slurreplacer: if not body: return ""
for s, r in SLURS.items(): body = body.replace(s, r)
body = censor_slurs(body, v)
if v and not v.oldreddit: body = body.replace("old.reddit.com", "reddit.com")
if v and not v.oldreddit: body = body.replace("old.reddit.com", "reddit.com")
if v and v.nitter: body = body.replace("www.twitter.com", "nitter.net").replace("twitter.com", "nitter.net")
if v and v.nitter: body = body.replace("www.twitter.com", "nitter.net").replace("twitter.com", "nitter.net")
if v and v.controversial:
for i in re.finditer('(/comments/.*?)"', body): if v and v.controversial:
url = i.group(1) for i in re.finditer('(/comments/.*?)"', body):
p = urlparse(url).query url = i.group(1)
p = parse_qs(p) p = urlparse(url).query
p = parse_qs(p)
if 'sort' not in p: p['sort'] = ['controversial']
if 'sort' not in p: p['sort'] = ['controversial']
url_noquery = url.split('?')[0]
body = body.replace(url, f"{url_noquery}?{urlencode(p, True)}") url_noquery = url.split('?')[0]
body = body.replace(url, f"{url_noquery}?{urlencode(p, True)}")
return body
return body
def plainbody(self, v):
if self.post and self.post.club and not (v and v.paid_dues): return "<p>COUNTRY CLUB ONLY</p>" def plainbody(self, v):
if self.post and self.post.club and not (v and v.paid_dues): return "<p>COUNTRY CLUB ONLY</p>"
body = self.body
body = self.body
if not body: return ""
if not body: return ""
if not v or v.slurreplacer:
for s, r in SLURS.items(): body = body.replace(s, r) body = censor_slurs(body, v)
if v and not v.oldreddit: body = body.replace("old.reddit.com", "reddit.com") if v and not v.oldreddit: body = body.replace("old.reddit.com", "reddit.com")
if v and v.nitter: body = body.replace("www.twitter.com", "nitter.net").replace("twitter.com", "nitter.net") if v and v.nitter: body = body.replace("www.twitter.com", "nitter.net").replace("twitter.com", "nitter.net")
if v and v.controversial: if v and v.controversial:
for i in re.finditer('(/comments/.*?)"', body): for i in re.finditer('(/comments/.*?)"', body):
url = i.group(1) url = i.group(1)
p = urlparse(url).query p = urlparse(url).query
p = parse_qs(p) p = parse_qs(p)
if 'sort' not in p: p['sort'] = ['controversial'] if 'sort' not in p: p['sort'] = ['controversial']
url_noquery = url.split('?')[0] url_noquery = url.split('?')[0]
body = body.replace(url, f"{url_noquery}?{urlencode(p, True)}") body = body.replace(url, f"{url_noquery}?{urlencode(p, True)}")
return body return body
@lazy @lazy
def collapse_for_user(self, v): def collapse_for_user(self, v):
if self.over_18 and not (v and v.over_18) and not self.post.over_18: return True if self.over_18 and not (v and v.over_18) and not self.post.over_18: return True
if not v: return False if not v: return False
if v.filter_words and any([x in self.body for x in v.filter_words]): return True if v.filter_words and any([x in self.body for x in v.filter_words]): return True
if self.is_banned or (self.author and self.author.shadowbanned): return True if self.is_banned or (self.author and self.author.shadowbanned): return True
return False return False
@property @property
@lazy @lazy
def is_op(self): return self.author_id==self.post.author_id def is_op(self): return self.author_id==self.post.author_id
@property @property
@lazy @lazy
def active_flags(self): return self.flags.count() def active_flags(self): return self.flags.count()
@property @property
@lazy @lazy
def ordered_flags(self): return self.flags.order_by(CommentFlag.id).all() def ordered_flags(self): return self.flags.order_by(CommentFlag.id).all()
class Notification(Base): class Notification(Base):
__tablename__ = "notifications" __tablename__ = "notifications"
id = Column(Integer, primary_key=True) id = Column(Integer, primary_key=True)
user_id = Column(Integer, ForeignKey("users.id")) user_id = Column(Integer, ForeignKey("users.id"))
comment_id = Column(Integer, ForeignKey("comments.id")) comment_id = Column(Integer, ForeignKey("comments.id"))
read = Column(Boolean, default=False) read = Column(Boolean, default=False)
followsender = Column(Integer) followsender = Column(Integer)
unfollowsender = Column(Integer) unfollowsender = Column(Integer)
removefollowsender = Column(Integer) removefollowsender = Column(Integer)
blocksender = Column(Integer) blocksender = Column(Integer)
unblocksender = Column(Integer) unblocksender = Column(Integer)
comment = relationship("Comment", viewonly=True) comment = relationship("Comment", viewonly=True)
user = relationship("User", viewonly=True) user = relationship("User", viewonly=True)
def __repr__(self): def __repr__(self):
return f"<Notification(id={self.id})>" return f"<Notification(id={self.id})>"

811
files/classes/submission.py 100755 → 100644
View File

@ -1,407 +1,406 @@
from flask import render_template, g from os import environ
from sqlalchemy import * import random
from sqlalchemy.orm import relationship, deferred import re
import re, random import time
from urllib.parse import urlparse from urllib.parse import urlparse
from files.helpers.lazy import lazy
from files.helpers.const import SLURS, AUTOPOLLER_ACCOUNT from flask import render_template
from files.__main__ import Base from sqlalchemy import *
from .flags import Flag from sqlalchemy.orm import relationship, deferred
from os import environ
import time from files.__main__ import Base
from files.helpers.const import SLURS, AUTOPOLLER_ACCOUNT
site = environ.get("DOMAIN").strip() from files.helpers.lazy import lazy
site_name = environ.get("SITE_NAME").strip() from .flags import Flag
from ..helpers.word_censor import censor_slurs
class Submission(Base): site = environ.get("DOMAIN").strip()
site_name = environ.get("SITE_NAME").strip()
__tablename__ = "submissions"
id = Column(BigInteger, primary_key=True) class Submission(Base):
author_id = Column(BigInteger, ForeignKey("users.id")) __tablename__ = "submissions"
edited_utc = Column(BigInteger, default=0)
created_utc = Column(BigInteger, default=0) id = Column(BigInteger, primary_key=True)
thumburl = Column(String) author_id = Column(BigInteger, ForeignKey("users.id"))
is_banned = Column(Boolean, default=False) edited_utc = Column(BigInteger, default=0)
removed_by = Column(Integer) created_utc = Column(BigInteger, default=0)
bannedfor = Column(Boolean) thumburl = Column(String)
views = Column(Integer, default=0) is_banned = Column(Boolean, default=False)
deleted_utc = Column(Integer, default=0) removed_by = Column(Integer)
distinguish_level = Column(Integer, default=0) bannedfor = Column(Boolean)
created_str = Column(String) views = Column(Integer, default=0)
stickied = Column(String) deleted_utc = Column(Integer, default=0)
is_pinned = Column(Boolean, default=False) distinguish_level = Column(Integer, default=0)
private = Column(Boolean, default=False) created_str = Column(String)
club = Column(Boolean, default=False) stickied = Column(String)
comment_count = Column(Integer, default=0) is_pinned = Column(Boolean, default=False)
is_approved = Column(Integer, ForeignKey("users.id"), default=0) private = Column(Boolean, default=False)
over_18 = Column(Boolean, default=False) club = Column(Boolean, default=False)
is_bot = Column(Boolean, default=False) comment_count = Column(Integer, default=0)
upvotes = Column(Integer, default=1) is_approved = Column(Integer, ForeignKey("users.id"), default=0)
downvotes = Column(Integer, default=0) over_18 = Column(Boolean, default=False)
app_id=Column(Integer, ForeignKey("oauth_apps.id")) is_bot = Column(Boolean, default=False)
title = Column(String) upvotes = Column(Integer, default=1)
title_html = Column(String) downvotes = Column(Integer, default=0)
url = Column(String) app_id=Column(Integer, ForeignKey("oauth_apps.id"))
body = deferred(Column(String)) title = Column(String)
body_html = deferred(Column(String)) title_html = Column(String)
ban_reason = Column(String) url = Column(String)
embed_url = Column(String) body = deferred(Column(String))
body_html = deferred(Column(String))
comments = relationship("Comment", lazy="dynamic", primaryjoin="Comment.parent_submission==Submission.id", viewonly=True) ban_reason = Column(String)
flags = relationship("Flag", lazy="dynamic", viewonly=True) embed_url = Column(String)
author = relationship("User", primaryjoin="Submission.author_id==User.id")
oauth_app = relationship("OauthApp", viewonly=True) comments = relationship("Comment", lazy="dynamic", primaryjoin="Comment.parent_submission==Submission.id", viewonly=True)
approved_by = relationship("User", uselist=False, primaryjoin="Submission.is_approved==User.id", viewonly=True) flags = relationship("Flag", lazy="dynamic", viewonly=True)
awards = relationship("AwardRelationship", viewonly=True) author = relationship("User", primaryjoin="Submission.author_id==User.id")
oauth_app = relationship("OauthApp", viewonly=True)
def __init__(self, *args, **kwargs): approved_by = relationship("User", uselist=False, primaryjoin="Submission.is_approved==User.id", viewonly=True)
awards = relationship("AwardRelationship", viewonly=True)
if "created_utc" not in kwargs:
kwargs["created_utc"] = int(time.time()) def __init__(self, *args, **kwargs):
kwargs["created_str"] = time.strftime(
"%I:%M %p on %d %b %Y", time.gmtime( if "created_utc" not in kwargs:
kwargs["created_utc"])) kwargs["created_utc"] = int(time.time())
kwargs["created_str"] = time.strftime(
"%I:%M %p on %d %b %Y", time.gmtime(
super().__init__(*args, **kwargs) kwargs["created_utc"]))
def __repr__(self):
return f"<Submission(id={self.id})>" super().__init__(*args, **kwargs)
def __repr__(self):
@property return f"<Submission(id={self.id})>"
@lazy
def options(self):
return self.comments.filter_by(author_id = AUTOPOLLER_ACCOUNT, level=1) @property
@lazy
@property def options(self):
@lazy return self.comments.filter_by(author_id = AUTOPOLLER_ACCOUNT, level=1)
def created_datetime(self):
return str(time.strftime("%d/%B/%Y %H:%M:%S UTC", time.gmtime(self.created_utc))) @property
@lazy
@property def created_datetime(self):
@lazy return str(time.strftime("%d/%B/%Y %H:%M:%S UTC", time.gmtime(self.created_utc)))
def created_datetime(self):
return str(time.strftime("%d/%B/%Y %H:%M:%S UTC", time.gmtime(self.created_utc))) @property
@lazy
@property def created_datetime(self):
@lazy return str(time.strftime("%d/%B/%Y %H:%M:%S UTC", time.gmtime(self.created_utc)))
def age_string(self):
@property
age = int(time.time()) - self.created_utc @lazy
def age_string(self):
if age < 60:
return "just now" age = int(time.time()) - self.created_utc
elif age < 3600:
minutes = int(age / 60) if age < 60:
return f"{minutes}m ago" return "just now"
elif age < 86400: elif age < 3600:
hours = int(age / 3600) minutes = int(age / 60)
return f"{hours}hr ago" return f"{minutes}m ago"
elif age < 2678400: elif age < 86400:
days = int(age / 86400) hours = int(age / 3600)
return f"{days}d ago" return f"{hours}hr ago"
elif age < 2678400:
now = time.gmtime() days = int(age / 86400)
ctd = time.gmtime(self.created_utc) return f"{days}d ago"
months = now.tm_mon - ctd.tm_mon + 12 * (now.tm_year - ctd.tm_year) now = time.gmtime()
if now.tm_mday < ctd.tm_mday: ctd = time.gmtime(self.created_utc)
months -= 1
months = now.tm_mon - ctd.tm_mon + 12 * (now.tm_year - ctd.tm_year)
if months < 12: if now.tm_mday < ctd.tm_mday:
return f"{months}mo ago" months -= 1
else:
years = int(months / 12) if months < 12:
return f"{years}yr ago" return f"{months}mo ago"
else:
@property years = int(months / 12)
@lazy return f"{years}yr ago"
def edited_string(self):
@property
if not self.edited_utc: return "never" @lazy
def edited_string(self):
age = int(time.time()) - self.edited_utc
if not self.edited_utc: return "never"
if age < 60:
return "just now" age = int(time.time()) - self.edited_utc
elif age < 3600:
minutes = int(age / 60) if age < 60:
return f"{minutes}m ago" return "just now"
elif age < 86400: elif age < 3600:
hours = int(age / 3600) minutes = int(age / 60)
return f"{hours}hr ago" return f"{minutes}m ago"
elif age < 2678400: elif age < 86400:
days = int(age / 86400) hours = int(age / 3600)
return f"{days}d ago" return f"{hours}hr ago"
elif age < 2678400:
now = time.gmtime() days = int(age / 86400)
ctd = time.gmtime(self.edited_utc) return f"{days}d ago"
months = now.tm_mon - ctd.tm_mon + 12 * (now.tm_year - ctd.tm_year)
now = time.gmtime()
if months < 12: ctd = time.gmtime(self.edited_utc)
return f"{months}mo ago" months = now.tm_mon - ctd.tm_mon + 12 * (now.tm_year - ctd.tm_year)
else:
years = now.tm_year - ctd.tm_year if months < 12:
return f"{years}yr ago" return f"{months}mo ago"
else:
years = now.tm_year - ctd.tm_year
@property return f"{years}yr ago"
@lazy
def edited_datetime(self):
return str(time.strftime("%d/%B/%Y %H:%M:%S UTC", time.gmtime(self.edited_utc))) @property
@lazy
def edited_datetime(self):
@property return str(time.strftime("%d/%B/%Y %H:%M:%S UTC", time.gmtime(self.edited_utc)))
@lazy
def score(self):
return self.upvotes - self.downvotes @property
@lazy
@property def score(self):
@lazy return self.upvotes - self.downvotes
def fullname(self):
return f"t2_{self.id}" @property
@lazy
@property def fullname(self):
@lazy return f"t2_{self.id}"
def shortlink(self):
return f"https://{site}/post/{self.id}" @property
@lazy
@property def shortlink(self):
@lazy return f"https://{site}/post/{self.id}"
def permalink(self):
if self.club: return f"/post/{self.id}" @property
@lazy
output = self.title.lower() def permalink(self):
if self.club: return f"/post/{self.id}"
output = re.sub('&\w{2,3};', '', output)
output = self.title.lower()
output = [re.sub('\W', '', word) for word in output.split()]
output = [x for x in output if x][:6] output = re.sub('&\w{2,3};', '', output)
output = '-'.join(output) output = [re.sub('\W', '', word) for word in output.split()]
output = [x for x in output if x][:6]
if not output: output = '-'
output = '-'.join(output)
return f"/post/{self.id}/{output}"
if not output: output = '-'
@lazy
def rendered_page(self, sort=None, comment=None, comment_info=None, v=None): return f"/post/{self.id}/{output}"
if self.is_banned and not (v and (v.admin_level >= 3 or self.author_id == v.id)): template = "submission_banned.html" @lazy
else: template = "submission.html" def rendered_page(self, sort=None, comment=None, comment_info=None, v=None):
comments = self.__dict__.get('preloaded_comments', []) if self.is_banned and not (v and (v.admin_level >= 3 or self.author_id == v.id)): template = "submission_banned.html"
if comments: else: template = "submission.html"
pinned_comment = []
index = {} comments = self.__dict__.get('preloaded_comments', [])
for c in comments: if comments:
if c.is_pinned and c.parent_fullname==self.fullname: pinned_comment = []
pinned_comment += [c] index = {}
continue for c in comments:
if c.parent_fullname in index: index[c.parent_fullname].append(c) if c.is_pinned and c.parent_fullname==self.fullname:
else: index[c.parent_fullname] = [c] pinned_comment += [c]
continue
for c in comments: c.__dict__["replies"] = index.get(c.fullname, []) if c.parent_fullname in index: index[c.parent_fullname].append(c)
if comment: self.__dict__["replies"] = [comment] else: index[c.parent_fullname] = [c]
else: self.__dict__["replies"] = pinned_comment + index.get(self.fullname, [])
for c in comments: c.__dict__["replies"] = index.get(c.fullname, [])
return render_template(template, if comment: self.__dict__["replies"] = [comment]
v=v, else: self.__dict__["replies"] = pinned_comment + index.get(self.fullname, [])
p=self,
sort=sort, return render_template(template,
linked_comment=comment, v=v,
comment_info=comment_info, p=self,
render_replies=True sort=sort,
) linked_comment=comment,
comment_info=comment_info,
@property render_replies=True
@lazy )
def domain(self):
@property
if not self.url: return "text post" @lazy
domain = urlparse(self.url).netloc def domain(self):
if domain.startswith("www."): domain = domain.split("www.")[1]
return domain.replace("old.reddit.com", "reddit.com") if not self.url: return "text post"
domain = urlparse(self.url).netloc
if domain.startswith("www."): domain = domain.split("www.")[1]
@property return domain.replace("old.reddit.com", "reddit.com")
@lazy
def thumb_url(self):
if self.over_18: return f"https://{site}/assets/images/nsfw.gif" @property
elif not self.url: return f"https://{site}/assets/images/{site_name}/default_thumb_text.gif" @lazy
elif self.thumburl: return self.thumburl def thumb_url(self):
elif "youtu.be" in self.domain or "youtube.com" in self.domain: return f"https://{site}/assets/images/default_thumb_yt.gif" if self.over_18: return f"https://{site}/assets/images/nsfw.gif"
else: return f"https://{site}/assets/images/default_thumb_link.gif" elif not self.url: return f"https://{site}/assets/images/{site_name}/default_thumb_text.gif"
elif self.thumburl: return self.thumburl
@property elif "youtu.be" in self.domain or "youtube.com" in self.domain: return f"https://{site}/assets/images/default_thumb_yt.gif"
@lazy else: return f"https://{site}/assets/images/default_thumb_link.gif"
def json_raw(self):
flags = {} @property
for f in self.flags: flags[f.user.username] = f.reason @lazy
def json_raw(self):
data = {'author_name': self.author.username, flags = {}
'permalink': self.permalink, for f in self.flags: flags[f.user.username] = f.reason
'is_banned': bool(self.is_banned),
'deleted_utc': self.deleted_utc, data = {'author_name': self.author.username,
'created_utc': self.created_utc, 'permalink': self.permalink,
'id': self.id, 'is_banned': bool(self.is_banned),
'title': self.title, 'deleted_utc': self.deleted_utc,
'is_nsfw': self.over_18, 'created_utc': self.created_utc,
'is_bot': self.is_bot, 'id': self.id,
'thumb_url': self.thumb_url, 'title': self.title,
'domain': self.domain, 'is_nsfw': self.over_18,
'url': self.url, 'is_bot': self.is_bot,
'body': self.body, 'thumb_url': self.thumb_url,
'body_html': self.body_html, 'domain': self.domain,
'created_utc': self.created_utc, 'url': self.url,
'edited_utc': self.edited_utc or 0, 'body': self.body,
'comment_count': self.comment_count, 'body_html': self.body_html,
'score': self.score, 'created_utc': self.created_utc,
'upvotes': self.upvotes, 'edited_utc': self.edited_utc or 0,
'downvotes': self.downvotes, 'comment_count': self.comment_count,
'stickied': self.stickied, 'score': self.score,
'private' : self.private, 'upvotes': self.upvotes,
'distinguish_level': self.distinguish_level, 'downvotes': self.downvotes,
'voted': self.voted if hasattr(self, 'voted') else 0, 'stickied': self.stickied,
'flags': flags, 'private' : self.private,
} 'distinguish_level': self.distinguish_level,
'voted': self.voted if hasattr(self, 'voted') else 0,
if self.ban_reason: 'flags': flags,
data["ban_reason"]=self.ban_reason }
return data if self.ban_reason:
data["ban_reason"]=self.ban_reason
@property
@lazy return data
def json_core(self):
@property
if self.is_banned: @lazy
return {'is_banned': True, def json_core(self):
'deleted_utc': self.deleted_utc,
'ban_reason': self.ban_reason, if self.is_banned:
'id': self.id, return {'is_banned': True,
'title': self.title, 'deleted_utc': self.deleted_utc,
'permalink': self.permalink, 'ban_reason': self.ban_reason,
} 'id': self.id,
elif self.deleted_utc: 'title': self.title,
return {'is_banned': bool(self.is_banned), 'permalink': self.permalink,
'deleted_utc': True, }
'id': self.id, elif self.deleted_utc:
'title': self.title, return {'is_banned': bool(self.is_banned),
'permalink': self.permalink, 'deleted_utc': True,
} 'id': self.id,
'title': self.title,
return self.json_raw 'permalink': self.permalink,
}
@property
@lazy return self.json_raw
def json(self):
@property
data=self.json_core @lazy
def json(self):
if self.deleted_utc > 0 or self.is_banned:
return data data=self.json_core
data["author"]=self.author.json_core if self.deleted_utc > 0 or self.is_banned:
data["comment_count"]=self.comment_count return data
data["author"]=self.author.json_core
if "replies" in self.__dict__: data["comment_count"]=self.comment_count
data["replies"]=[x.json_core for x in self.replies]
if "voted" in self.__dict__: if "replies" in self.__dict__:
data["voted"] = self.voted data["replies"]=[x.json_core for x in self.replies]
return data if "voted" in self.__dict__:
data["voted"] = self.voted
def award_count(self, kind) -> int:
return len([x for x in self.awards if x.kind == kind]) return data
@lazy def award_count(self, kind) -> int:
def realurl(self, v): return len([x for x in self.awards if x.kind == kind])
if v and v.agendaposter and random.randint(1, 10) < 4:
return 'https://secure.actblue.com/donate/ms_blm_homepage_2019' @lazy
elif v and self.url and self.url.startswith("https://old.reddit.com/"): def realurl(self, v):
url = self.url if v and v.agendaposter and random.randint(1, 10) < 4:
if not v.oldreddit: url = self.url.replace("old.reddit.com", "reddit.com") return 'https://secure.actblue.com/donate/ms_blm_homepage_2019'
if v.controversial and '/comments/' in url and "sort=" not in url: elif v and self.url and self.url.startswith("https://old.reddit.com/"):
if "?" in url: url += "&sort=controversial" url = self.url
else: url += "?sort=controversial" if not v.oldreddit: url = self.url.replace("old.reddit.com", "reddit.com")
return url if v.controversial and '/comments/' in url and "sort=" not in url:
elif self.url: if "?" in url: url += "&sort=controversial"
if v and v.nitter: return self.url.replace("www.twitter.com", "nitter.net").replace("twitter.com", "nitter.net") else: url += "?sort=controversial"
return self.url return url
else: return "" elif self.url:
if v and v.nitter: return self.url.replace("www.twitter.com", "nitter.net").replace("twitter.com", "nitter.net")
def realbody(self, v): return self.url
if self.club and not (v and v.paid_dues): return "COUNTRY CLUB ONLY" else: return ""
body = self.body_html
def realbody(self, v):
if not v or v.slurreplacer: if self.club and not (v and v.paid_dues): return "COUNTRY CLUB ONLY"
for s,r in SLURS.items(): body = self.body_html
body = body.replace(s, r)
body = censor_slurs(body, v)
if v and not v.oldreddit: body = body.replace("old.reddit.com", "reddit.com")
if v and v.nitter: body = body.replace("www.twitter.com", "nitter.net").replace("twitter.com", "nitter.net") if v and not v.oldreddit: body = body.replace("old.reddit.com", "reddit.com")
return body if v and v.nitter: body = body.replace("www.twitter.com", "nitter.net").replace("twitter.com", "nitter.net")
return body
def plainbody(self, v):
if self.club and not (v and v.paid_dues): return "COUNTRY CLUB ONLY" def plainbody(self, v):
body = self.body if self.club and not (v and v.paid_dues): return "COUNTRY CLUB ONLY"
body = self.body
if not v or v.slurreplacer:
for s,r in SLURS.items(): body = censor_slurs(body, v)
body = body.replace(s, r)
if v and not v.oldreddit: body = body.replace("old.reddit.com", "reddit.com")
if v and not v.oldreddit: body = body.replace("old.reddit.com", "reddit.com") if v and v.nitter: body = body.replace("www.twitter.com", "nitter.net").replace("twitter.com", "nitter.net")
if v and v.nitter: body = body.replace("www.twitter.com", "nitter.net").replace("twitter.com", "nitter.net") return body
return body
@lazy
@lazy def realtitle(self, v):
def realtitle(self, v): if self.club and not (v and v.paid_dues) and not (v and v.admin_level == 6): return 'COUNTRY CLUB MEMBERS ONLY'
if self.club and not (v and v.paid_dues) and not (v and v.admin_level == 6): return 'COUNTRY CLUB MEMBERS ONLY' elif self.title_html: title = self.title_html
elif self.title_html: title = self.title_html else: title = self.title
else: title = self.title
if not v or v.slurreplacer:
if not v or v.slurreplacer: for s,r in SLURS.items(): title = title.replace(s, r)
for s,r in SLURS.items(): title = title.replace(s, r)
return title
return title
@lazy
@lazy def plaintitle(self, v):
def plaintitle(self, v): if self.club and not (v and v.paid_dues) and not (v and v.admin_level == 6): return 'COUNTRY CLUB MEMBERS ONLY'
if self.club and not (v and v.paid_dues) and not (v and v.admin_level == 6): return 'COUNTRY CLUB MEMBERS ONLY' else: title = self.title
else: title = self.title
if not v or v.slurreplacer:
if not v or v.slurreplacer: for s,r in SLURS.items(): title = title.replace(s, r)
for s,r in SLURS.items(): title = title.replace(s, r)
return title
return title
@property
@property @lazy
@lazy def is_image(self):
def is_image(self): if self.url: return self.url.lower().endswith('.webp') or self.url.lower().endswith('.jpg') or self.url.lower().endswith('.png') or self.url.lower().endswith('.gif') or self.url.lower().endswith('.jpeg') or self.url.lower().endswith('?maxwidth=9999')
if self.url: return self.url.lower().endswith('.webp') or self.url.lower().endswith('.jpg') or self.url.lower().endswith('.png') or self.url.lower().endswith('.gif') or self.url.lower().endswith('.jpeg') or self.url.lower().endswith('?maxwidth=9999') else: return False
else: return False
@property
@property @lazy
@lazy def active_flags(self): return self.flags.count()
def active_flags(self): return self.flags.count()
@property
@property @lazy
@lazy def ordered_flags(self): return self.flags.order_by(Flag.id).all()
def ordered_flags(self): return self.flags.order_by(Flag.id).all()
class SaveRelationship(Base):
class SaveRelationship(Base):
__tablename__="save_relationship"
__tablename__="save_relationship"
id=Column(Integer, primary_key=True)
id=Column(Integer, primary_key=True) user_id=Column(Integer)
user_id=Column(Integer) submission_id=Column(Integer)
submission_id=Column(Integer) comment_id=Column(Integer)
comment_id=Column(Integer)
type=Column(Integer) type=Column(Integer)

591
files/helpers/const.py 100755 → 100644
View File

@ -1,424 +1,167 @@
from os import environ from os import environ
site = environ.get("DOMAIN").strip() site = environ.get("DOMAIN", '').strip()
SLURS = { #####################
" faggot":" cute twink", # Formatting rules: #
" Faggot":" Cute twink", #####################
" FAGGOT":" CUTE TWINK", #
" fag":" cute twink", # on the slur side, they will match prefixes and suffixes and not middle of words, so for example
" Fag":" Cute twink", # "retard" will match:
" FAG":" CUTE TWINK", # - "retard"
" pedophile":" libertarian", # - "retarded"
" Pedophile":" Libertarian", # - "superretard"
" PEDOPHILE":" LIBERTARIAN", # But not "superretarded"
" pedo":" libertarian", #
" Pedo":" Libertarian", # If all letters are lowercase then it will match lowercase, first letter up in first or all the words and all letters up
" PEDO":" LIBERTARIAN", # "dancing israelis" will match (with prefixes and suffixes omitted for brevity):
" kill yourself":" keep yourself safe", # - "dancing israelis"
" KILL YOURSELF":" KEEP YOURSELF SAFE", # - "Dancing israelis"
" nigger":" πŸ€", # - "Dancing Israelis"
" Nigger":" πŸ€", # - "DANCING ISRAELIS"
" NIGGER":" πŸ€", #
" rapist":" male feminist", # If some letters are Uppercase, the same, but with the additional option of the original casing, and respecting already existing uppercase
" Rapist":" Male feminist", # "NoNewNormal" will match (with prefixes and suffixes omitted for brevity):
" RAPIST":" MALE FEMINIST", # - "NoNewNormal"
" steve akins":" penny verity oaken", # - "nonewnormal"
" Steve Akins":" Penny Verity Oaken", # - "Nonewnormal"
" STEVE AKINS":" PENNY VERITY OAKEN", # - "NONEWNORMAL"
" trannie":" πŸš‚πŸšƒπŸšƒ", #
" Trannie":" πŸš‚πŸšƒπŸšƒ", # If the slur has a space before and after then the match is limited to the exact word, no prefixes or suffixes
" TRANNIE":" πŸš‚πŸšƒπŸšƒ", # (previous rules about capitalization still apply)
" tranny":" πŸš‚πŸšƒπŸšƒ", # " neg " will match only:
" Tranny":" πŸš‚πŸšƒπŸšƒ", # - "neg"
" TRANNY":" πŸš‚πŸšƒπŸšƒ", # - "Neg"
" troon":" πŸš‚πŸšƒπŸšƒ", # - "NEG"
" Troon":" πŸš‚πŸšƒπŸšƒ", #
" TROON":" πŸš‚πŸšƒπŸšƒ", # Now on the replacement side, The replacement will have the same capitalization as the slur if the replacement is lowercase
" NoNewNormal": " HorseDewormerAddicts", # "kill yourself" -> "keep yourself safe"
" nonewnormal": " horsedewormeraddicts", # "Kill yourself" -> "Keep yourself safe"
" Kike": " https://sciencedirect.com/science/article/abs/pii/S016028960600033X", # "Kill Yourself" -> "Keep Yourself Safe"
" kike": " https://sciencedirect.com/science/article/abs/pii/S016028960600033X", # "KILL YOURSELF" -> "KEEP YOURSELF SAFE"
" retard":" r-slur", #
" Retard":" R-slur", # If the replacement side has some capitalization, then that capitalization will always be maintained
" RETARD":" R-SLUR", # for the pair: <"pajeet": "sexy Indian dude"> it will replace:
" janny":" j-slur", # "pajeet" -> "sexy Indian dude"
" Janny":" J-slur", # "Pajeet" -> "Sexy Indian dude"
" JANNY":" J-SLUR", # "PAJEET" -> "SEXY INDIAN DUDE"
" jannie":" j-slur", #
" Jannie":" J-slur", # There is a super special case that if the replacer starts with "http" then it never changes capitalization
" JANNIE":" J-SLUR", #
" janny":" j-slur", #
" Janny":" J-slur", # TL;DR: Just read the above once, or don't, and try to guess!
" JANNY":" J-SLUR", SLURS = {
" jannie":" j-slur", "faggot": "cute twink",
" Jannie":" J-slur", "fag": "cute twink",
" JANNIE":" J-SLUR", "pedophile": "libertarian",
" latinos":" latinx", "pedo": "libertarian",
" latino":" latinx", "kill yourself": "keep yourself safe",
" latinas":" latinx", "nigger": "πŸ€",
" latina":" latinx", "rapist": "male feminist",
" hispanics":" latinx", "steve akins": "penny verity oaken",
" hispanic":" latinx", "trannie": "πŸš‚πŸšƒπŸšƒ",
" Latinos":" Latinx", "tranny": "πŸš‚πŸšƒπŸšƒ",
" Latino":" Latinx", "troon": "πŸš‚πŸšƒπŸšƒ",
" Latinas":" Latinx", "NoNewNormal": "HorseDewormerAddicts",
" Latina":" Latinx", "kike": "https://sciencedirect.com/science/article/abs/pii/S016028960600033X",
" Hispanics":" Latinx", "retard": "r-slur",
" Hispanic":" Latinx", "janny": "j-slur",
" LATINOS":" LATINX", "jannie": "j-slur",
" LATINO":" LATINX", "janny": "j-slur",
" LATINAS":" LATINX", "latinos": "latinx",
" LATINA":" LATINX", "latino": "latinx",
" HISPANICS":" LATINX", "latinas": "latinx",
" HISPANIC":" LATINX", "latina": "latinx",
" uss liberty incident":" tragic accident aboard the USS Liberty", "hispanics": "latinx",
" USS Liberty Incident":" tragic accident aboard the USS Liberty", "hispanic": "latinx",
" USS Liberty incident":" tragic accident aboard the USS Liberty", "USS liberty incident": "tragic accident aboard the USS Liberty",
" USS Liberty Incident":" tragic accident aboard the USS Liberty", "lavon affair": "Lavon Misunderstanding",
" uss Liberty incident":" tragic accident aboard the USS Liberty", "shylock": "Israeli friend",
" uss liberty Incident":" tragic accident aboard the USS Liberty", "yid": "Israeli friend",
" USS LIBERTY INCIDENT":" TRAGIC ACCIDENT ABOARD THE USS LIBERTY", "heeb": "Israeli friend",
" lavon affair":" Lavon Misunderstanding", "sheeny": "Israeli friend",
" Lavon affair":" Lavon Misunderstanding", "sheenies": "Israeli friends",
" Lavon Affair":" Lavon Misunderstanding", "hymie": "Israeli friend",
" lavon Affair":" Lavon Misunderstanding", "allah": "Allah (SWT)",
" shylock":" Israeli friend", "mohammad": "Mohammad (PBUH)",
" Shylock":" Israeli friend", "mohammed": "Mohammad (PBUH)",
" SHYLOCK":" ISRAELI FRIEND", "muhammad": "Mohammad (PBUH)",
" yid":" Israeli friend", "muhammed": "Mohammad (PBUH)",
" Yid":" Israeli friend", "i hate marsey": "i love marsey",
" YID":" ISRAELI FRIEND", "libertarian": "pedophile",
" heeb":" Israeli friend", "billie bilish": "Billie Eilish (fat cow)",
" Heeb":" Israeli friend", "dancing Israelis": "i love Israel",
" HEEB":" ISRAELI FRIEND", "sodomite": "total dreamboat",
" sheeny":" Israeli friend", "pajeet": "sexy Indian dude",
" Sheeny":" Israeli friend", "female": "birthing person",
" SHEENY":" ISRAELI FRIEND", "landlord": "landchad",
" sheenies":" Israeli friends", "tenant": "renthog",
" Sheenies":" Israeli friends", "renter": "rentoid",
" SHEENIES":" ISRAELI FRIENDS", "autistic": "neurodivergent",
" hymie":" Israeli friend", "anime": "p-dophilic japanese cartoons",
" Hymie":" Israeli friend", "holohoax": "i tried to claim the Holocaust didn't happen because I am a pencil-dicked imbecile and the word filter caught me lol",
" HYMIES":" ISRAELI FRIENDS", "groomercord": "discord (actually a pretty cool service)",
" allah":" Allah (SWT)", "pedocord": "discord (actually a pretty cool service)",
" Allah":" Allah (SWT)", "i hate Carp": "i love Carp",
" ALLAH":" ALLAH (SWT)", "manlet": "little king",
" Mohammad":" Mohammad (PBUH)", "gamer": "g*mer",
" Muhammad":" Mohammad (PBUH)", "journalist": "journ*list",
" Mohammed":" Mohammad (PBUH)", "journalism": "journ*lism",
" Muhammed":" Mohammad (PBUH)", "buttcheeks": "bulva",
" mohammad":" Mohammad (PBUH)", "asscheeks": "bulva",
" mohammed":" Mohammad (PBUH)", "wuhan flu": "SARS-CoV-2 syndemic",
" muhammad":" Mohammad (PBUH)", "china flu": "SARS-CoV-2 syndemic",
" muhammed":" Mohammad (PBUH)", "china virus": "SARS-CoV-2 syndemic",
" I HATE MARSEY":" I LOVE MARSEY", "kung flu": "SARS-CoV-2 syndemic",
" i hate marsey":" i love marsey",
" I hate Marsey":" I love Marsey", # if the word has spaces in the beginning and the end it will only censor this word without prefixes or suffixes
" I hate marsey":" I love Marsey", " nig ": "πŸ€",
" libertarian":" pedophile", " nigs ": "πŸ€s",
" Libertarian":" Pedophile", }
" LIBERTARIAN":" PEDOPHILE",
" Billie Eilish":" Billie Eilish (fat cow)", LONGPOST_REPLIES = ['Wow, you must be a JP fan.', 'This is one of the worst posts I have EVER seen. Delete it.', "No, don't reply like this, please do another wall of unhinged rant please.", '# 😴😴😴', "Ma'am we've been over this before. You need to stop.", "I've known more coherent downies.", "Your pulitzer's in the mail", "That's great and all, but I asked for my burger without cheese.", 'That degree finally paying off', "That's nice sweaty. Why don't you have a seat in the time out corner with Pizzashill until you calm down, then you can have your Capri Sun.", "All them words won't bring your pa back.", "You had a chance to not be completely worthless, but it looks like you threw it away. At least you're consistent.", 'Some people are able to display their intelligence by going on at length on a subject and never actually saying anything. This ability is most common in trades such as politics, public relations, and law. You have impressed me by being able to best them all, while still coming off as an absolute idiot.', "You can type 10,000 characters and you decided that these were the one's that you wanted.", 'Have you owned the libs yet?', "I don't know what you said, because I've seen another human naked.", 'Impressive. Normally people with such severe developmental disabilities struggle to write much more than a sentence or two. He really has exceded our expectations for the writing portion. Sadly the coherency of his writing, along with his abilities in the social skills and reading portions, are far behind his peers with similar disabilities.', "This is a really long way of saying you don't fuck.", "Sorry ma'am, looks like his delusions have gotten worse. 
We'll have to admit him,", '![](https://i.kym-cdn.com/photos/images/newsfeed/001/038/094/0a1.jpg)', 'If only you could put that energy into your relationships', 'Posts like this is why I do Heroine.', 'still unemployed then?', 'K', 'look im gunna have 2 ask u 2 keep ur giant dumps in the toilet not in my replys 😷😷😷', "Mommy is soooo proud of you, sweaty. Let's put this sperg out up on the fridge with all your other failures.", "Good job bobby, here's a star", "That was a mistake. You're about to find out the hard way why.", 'You sat down and wrote all this shit. You could have done so many other things with your life. What happened to your life that made you decide writing novels of bullshit on rdrama.net was the best option?', "I don't have enough spoons to read this shit", "All those words won't bring daddy back.", 'OUT!']
" billie eilish":" bilie eilish (fat cow)",
" BILLIE EILISH":" BILIE EILISH (FAT COW)", AGENDAPOSTER_MSG = """Hi @{username},\n\nYour comment has been automatically removed because you forgot
" dancing Israelis":" I love Israel", to include `trans lives matter`.\n\nDon't worry, we're here to help! We
" dancing israelis":" i love israel", won't let you post or comment anything that doesn't express your love and acceptance towards
" DANCING ISRAELIS":" I LOVE ISRAEL", the trans community. Feel free to resubmit your comment with `trans lives matter`
" Dancing Israelis":" I love Israel", included. \n\n*This is an automated message; if you need help,
" sodomite":" total dreamboat", you can message us [here](/contact).*"""
" Sodomite":" Total dreamboat",
" pajeet":" sexy Indian dude", VAXX_MSG = """Hi @{username}, it appears that you may be trying to spread dangerous misinformation regarding ineffective COVID-19 treatments based on pseudoscientific hearsay. Your post has been removed because it contained the word ivermectin. We ask that you understand that horse dewormer neither treats, nor prevents, COVID-19. For more information, please read up on what the FDA has to say on the matter:
" Pajeet":" Sexy Indian dude",
" PAJEET":" SEXY INDIAN DUDE", https://www.fda.gov/consumers/consumer-updates/why-you-should-not-use-ivermectin-treat-or-prevent-covid-19
" female":" birthing person",
" Female":" Womb-haver", COVID-19 is not a joke, it is a global pandemic and it has been hard on all of us. It will likely go down as one of the most defining periods of our generation. Many of us have lost loved ones to the virus. It has caused confusion, fear, frustration, and served to further divide us. Tens of millions around the world have died. There is nothing to be gained by spreading bad science based on very understandable fear.
" FEMALE":" birthing person",
" landlord":" landchad", The only proven method of prevention is the COVID-19 vaccine, paired with appropriate social distancing, handwashing, and masks. Vaccines are free in the United States - if you'd like to locate your nearest vaccine provider, please visit https://www.vaccines.gov/ and schedule an appointment today.
" Landlord":" Landchad",
" LANDLORD":" LANDCHAD", Thank you."""
" tenant":" renthog",
" Tenant":" Renthog", BASED_MSG = "@{username}'s Based Count has increased by 1. Their Based Count is now {basedcount}.\n\nPills: {pills}"
" TENANT":" RENTHOG",
" renter":" rentoid", if site == "pcmemes.net":
" Renter":" Rentoid", BASEDBOT_ACCOUNT = 800
" RENTER":" RENTOID", NOTIFICATIONS_ACCOUNT = 1046
" autistic":" neurodivergent", AUTOJANNY_ACCOUNT = 1050
" Autistic":" Neurodivergent", SNAPPY_ACCOUNT = 261
" AUTISTIC":" NEURODIVERGENT", LONGPOSTBOT_ACCOUNT = 1832
" anime":" p-dophilic japanese cartoons", ZOZBOT_ACCOUNT = 1833
" Anime":" P-dophilic Japanese cartoons", AUTOPOLLER_ACCOUNT = 3369
" ANIME":" P-DOPHILIC JAPANESE CARTOONS", elif site == 'rdrama.net':
" holohoax":" I tried to claim the Holocaust didn't happen because I am a pencil-dicked imbecile and the word filter caught me lol", NOTIFICATIONS_ACCOUNT = 1046
" Holohoax":" I tried to claim the Holocaust didn't happen because I am a pencil-dicked imbecile and the word filter caught me lol", AUTOJANNY_ACCOUNT = 2360
" HOLOHOAX":" I tried to claim the Holocaust didn't happen because I am a pencil-dicked imbecile and the word filter caught me lol", SNAPPY_ACCOUNT = 261
" groomercord":" discord (actually a pretty cool service)", LONGPOSTBOT_ACCOUNT = 1832
" Groomercord":" Discord (actually a pretty cool service)", ZOZBOT_ACCOUNT = 1833
" GROOMERCORD":" DISCORD (ACTUALLY A PRETTY COOL SERVICE)", AUTOPOLLER_ACCOUNT = 3369
" pedocord":" discord (actually a pretty cool service)", else:
" Pedocord":" Discord (actually a pretty cool service)", NOTIFICATIONS_ACCOUNT = 1
" PEDOCORD":" DISCORD (ACTUALLY A PRETTY COOL SERVICE)", AUTOJANNY_ACCOUNT = 2
" i hate carp":" i love carp", SNAPPY_ACCOUNT = 3
" I hate carp":" I love carp", LONGPOSTBOT_ACCOUNT = 4
" I HATE CARP":" I LOVE CARP", ZOZBOT_ACCOUNT = 5
" I hate Carp":" I love Carp", AUTOPOLLER_ACCOUNT = 6
" manlet":" little king",
" Manlet":" Little king", PUSHER_INSTANCE_ID = '02ddcc80-b8db-42be-9022-44c546b4dce6'
" MANLET":" LITTLE KING", PUSHER_KEY = environ.get("PUSHER_KEY", "").strip()
" gamer":" g*mer",
" Gamer":" G*mer",
" GAMER":" G*MER",
" journalist":" journ*list",
" Journalist":" Journ*list",
" JOURNALIST":" JOURN*LIST",
" journalism":" journ*lism",
" Journalism":" Journ*lism",
" JOURNALISM":" JOURN*LISM",
" buttcheeks":" bulva",
" Buttcheeks":" Bulva",
" BUTTCHEEKS":" BULVA",
" asscheeks":" bulva",
" Asscheeks":" bulva",
" ASSCHEEKS":" BULVA",
" wuhan flu":" SARS-CoV-2 syndemic",
" Wuhan flu":" SARS-CoV-2 syndemic",
" Wuhan Flu":" SARS-CoV-2 syndemic",
" china flu":" SARS-CoV-2 syndemic",
" China flu":" SARS-CoV-2 syndemic",
" China Flu":" SARS-CoV-2 syndemic",
" china virus":" SARS-CoV-2 syndemic",
" China virus":" SARS-CoV-2 syndemic",
" China Virus":" SARS-CoV-2 syndemic",
" kung flu":" SARS-CoV-2 syndemic",
" Kung flu":" SARS-CoV-2 syndemic",
" Kung Flu":" SARS-CoV-2 syndemic",
"faggot ":"cute twink ",
"Faggot ":"Cute twink ",
"FAGGOT ":"CUTE TWINK ",
"fag ":"cute twink ",
"Fag ":"Cute twink ",
"FAG ":"CUTE TWINK ",
"pedophile ":"libertarian ",
"Pedophile ":"Libertarian ",
"PEDOPHILE ":"LIBERTARIAN ",
"kill yourself ":"keep yourself safe ",
"KILL YOURSELF ":"KEEP YOURSELF SAFE ",
"nigger ":"πŸ€ ",
"Nigger ":"πŸ€ ",
"NIGGER ":"πŸ€ ",
"steve akins ":"penny verity oaken ",
"Steve Akins ":"Penny Verity Oaken ",
"STEVE AKINS ":"PENNY VERITY OAKEN ",
"trannie ":"πŸš‚πŸšƒπŸšƒ ",
"Trannie ":"πŸš‚πŸšƒπŸšƒ ",
"TRANNIE ":"πŸš‚πŸšƒπŸšƒ ",
"tranny ":"πŸš‚πŸšƒπŸšƒ ",
"Tranny ":"πŸš‚πŸšƒπŸšƒ ",
"TRANNY ":"πŸš‚πŸšƒπŸšƒ ",
"troon ":"πŸš‚πŸšƒπŸšƒ ",
"Troon ":"πŸš‚πŸšƒπŸšƒ ",
"TROON ":"πŸš‚πŸšƒπŸšƒ ",
"NoNewNormal ": "HorseDewormerAddicts ",
"nonewnormal ": "horsedewormeraddicts ",
"Kike ": "https://sciencedirect.com/science/article/abs/pii/S016028960600033X ",
"kike ": "https://sciencedirect.com/science/article/abs/pii/S016028960600033X ",
"retard ":"r-slur ",
"Retard ":"R-slur ",
"RETARD ":"R-SLUR ",
"janny ":"j-slur ",
"Janny ":"J-slur ",
"JANNY ":"J-SLUR ",
"jannie ":"j-slur ",
"Jannie ":"J-slur ",
"JANNIE ":"J-SLUR ",
"latinos ":"latinx ",
"latino ":"latinx ",
"latinas ":"latinx ",
"latina ":"latinx ",
"hispanics ":"latinx ",
"hispanic ":"latinx ",
"Latinos ":"Latinx ",
"Latino ":"Latinx ",
"Latinas ":"Latinx ",
"Latina ":"Latinx ",
"Hispanics ":"Latinx ",
"Hispanic ":"Latinx ",
"LATINOS ":"LATINX ",
"LATINO ":"LATINX ",
"LATINAS ":"LATINX ",
"LATINA ":"LATINX ",
"HISPANICS ":"LATINX ",
"HISPANIC ":"LATINX ",
"uss liberty incident ":"tragic accident aboard the USS Liberty ",
"USS Liberty Incident ":"tragic accident aboard the USS Liberty ",
"USS Liberty incident ":"tragic accident aboard the USS Liberty ",
"USS Liberty Incident ":"tragic accident aboard the USS Liberty ",
"uss Liberty incident ":"tragic accident aboard the USS Liberty ",
"uss liberty Incident ":"tragic accident aboard the USS Liberty ",
"USS LIBERTY INCIDENT ":"TRAGIC ACCIDENT ABOARD THE USS LIBERTY ",
"lavon affair ":"Lavon Misunderstanding ",
"Lavon affair ":"Lavon Misunderstanding ",
"Lavon Affair ":"Lavon Misunderstanding ",
"lavon Affair ":"Lavon Misunderstanding ",
"shylock ":"Israeli friend ",
"Shylock ":"Israeli friend ",
"SHYLOCK ":"ISRAELI FRIEND ",
"yid ":"Israeli friend ",
"Yid ":"Israeli friend ",
"YID ":"ISRAELI FRIEND ",
"heeb ":"Israeli friend ",
"Heeb ":"Israeli friend ",
"HEEB ":"ISRAELI FRIEND ",
"sheeny ":"Israeli friend ",
"Sheeny ":"Israeli friend ",
"SHEENY ":"ISRAELI FRIEND ",
"sheenies ":"Israeli friends ",
"Sheenies ":"Israeli friends ",
"SHEENIES ":"ISRAELI FRIENDS ",
"hymie ":"Israeli friend ",
"Hymie ":"Israeli friend ",
"HYMIES ":"ISRAELI FRIENDS ",
"Mohammad ":"Mohammad (PBUH) ",
"Muhammad ":"Mohammad (PBUH) ",
"Mohammed ":"Mohammad (PBUH) ",
"Muhammed ":"Mohammad (PBUH) ",
"mohammad ":"Mohammad (PBUH) ",
"mohammed ":"Mohammad (PBUH) ",
"muhammad ":"Mohammad (PBUH) ",
"muhammed ":"Mohammad (PBUH) ",
"I HATE MARSEY ":"I LOVE MARSEY ",
"i hate marsey ":"i love marsey ",
"I hate Marsey ":"I love Marsey ",
"I hate marsey ":"I love Marsey ",
"libertarian ":"pedophile ",
"Libertarian ":"Pedophile ",
"LIBERTARIAN ":"PEDOPHILE ",
"Billie Eilish ":"Billie Eilish (fat cow) ",
"billie eilish ":"bilie eilish (fat cow) ",
"BILLIE EILISH ":"BILIE EILISH (FAT COW) ",
"dancing Israelis ":"I love Israel ",
"dancing israelis ":"i love israel ",
"DANCING ISRAELIS ":"I LOVE ISRAEL ",
"Dancing Israelis ":"I love Israel ",
"sodomite ":"total dreamboat ",
"Sodomite ":"Total dreamboat ",
"pajeet ":"sexy Indian dude ",
"Pajeet ":"Sexy Indian dude ",
"PAJEET ":"SEXY INDIAN DUDE ",
"female ":"birthing person ",
"Female ":"Womb-haver ",
"FEMALE ":"birthing person ",
"landlord ":"landchad ",
"Landlord ":"Landchad ",
"LANDLORD ":"LANDCHAD ",
"tenant ":"renthog ",
"Tenant ":"Renthog ",
"TENANT ":"RENTHOG ",
"renter ":"rentoid ",
"Renter ":"Rentoid ",
"RENTER ":"RENTOID ",
"autistic ":"neurodivergent ",
"Autistic ":"Neurodivergent ",
"AUTISTIC ":"NEURODIVERGENT ",
"anime ":"p-dophilic japanese cartoons ",
"Anime ":"P-dophilic Japanese cartoons ",
"ANIME ":"P-DOPHILIC JAPANESE CARTOONS ",
"holohoax ":"I tried to claim the Holocaust didn't happen because I am a pencil-dicked imbecile and the word filter caught me lol ",
"Holohoax ":"I tried to claim the Holocaust didn't happen because I am a pencil-dicked imbecile and the word filter caught me lol ",
"HOLOHOAX ":"I tried to claim the Holocaust didn't happen because I am a pencil-dicked imbecile and the word filter caught me lol ",
"groomercord ":"discord (actually a pretty cool service) ",
"Groomercord ":"Discord (actually a pretty cool service) ",
"GROOMERCORD ":"DISCORD (ACTUALLY A PRETTY COOL SERVICE) ",
"pedocord ":"discord (actually a pretty cool service) ",
"Pedocord ":"Discord (actually a pretty cool service) ",
"PEDOCORD ":"DISCORD (ACTUALLY A PRETTY COOL SERVICE) ",
"i hate carp ":"i love carp ",
"I hate carp ":"I love carp ",
"I HATE CARP ":"I LOVE CARP ",
"I hate Carp ":"I love Carp ",
"manlet ":"little king ",
"Manlet ":"Little king ",
"MANLET ":"LITTLE KING ",
"gamer ":"g*mer ",
"Gamer ":"G*mer ",
"GAMER ":"G*MER ",
"journalist ":"journ*list ",
"Journalist ":"Journ*list ",
"JOURNALIST ":"JOURN*LIST ",
"journalism ":"journ*lism ",
"Journalism ":"Journ*lism ",
"JOURNALISM ":"JOURN*LISM ",
"buttcheeks ":"bulva ",
"Buttcheeks ":"Bulva ",
"BUTTCHEEKS ":"BULVA ",
"asscheeks ":"bulva ",
"Asscheeks ":"bulva ",
"ASSCHEEKS ":"BULVA ",
"wuhan flu ":"SARS-CoV-2 syndemic ",
"Wuhan flu ":"SARS-CoV-2 syndemic ",
"Wuhan Flu ":"SARS-CoV-2 syndemic ",
"china flu ":"SARS-CoV-2 syndemic ",
"China flu ":"SARS-CoV-2 syndemic ",
"China Flu ":"SARS-CoV-2 syndemic ",
"china virus ":"SARS-CoV-2 syndemic ",
"China virus ":"SARS-CoV-2 syndemic ",
"China Virus ":"SARS-CoV-2 syndemic ",
"kung flu ":"SARS-CoV-2 syndemic ",
"Kung flu ":"SARS-CoV-2 syndemic ",
"Kung Flu ":"SARS-CoV-2 syndemic ",
" nig ":" πŸ€ ",
" Nig ":" πŸ€ ",
" NIG ":" πŸ€ ",
" nigs ":" πŸ€s ",
" Nigs ":" πŸ€s ",
" NIGS ":" πŸ€s ",
}
LONGPOST_REPLIES = ['Wow, you must be a JP fan.', 'This is one of the worst posts I have EVER seen. Delete it.', "No, don't reply like this, please do another wall of unhinged rant please.", '# 😴😴😴', "Ma'am we've been over this before. You need to stop.", "I've known more coherent downies.", "Your pulitzer's in the mail", "That's great and all, but I asked for my burger without cheese.", 'That degree finally paying off', "That's nice sweaty. Why don't you have a seat in the time out corner with Pizzashill until you calm down, then you can have your Capri Sun.", "All them words won't bring your pa back.", "You had a chance to not be completely worthless, but it looks like you threw it away. At least you're consistent.", 'Some people are able to display their intelligence by going on at length on a subject and never actually saying anything. This ability is most common in trades such as politics, public relations, and law. You have impressed me by being able to best them all, while still coming off as an absolute idiot.', "You can type 10,000 characters and you decided that these were the one's that you wanted.", 'Have you owned the libs yet?', "I don't know what you said, because I've seen another human naked.", 'Impressive. Normally people with such severe developmental disabilities struggle to write much more than a sentence or two. He really has exceded our expectations for the writing portion. Sadly the coherency of his writing, along with his abilities in the social skills and reading portions, are far behind his peers with similar disabilities.', "This is a really long way of saying you don't fuck.", "Sorry ma'am, looks like his delusions have gotten worse. 
We'll have to admit him,", '![](https://i.kym-cdn.com/photos/images/newsfeed/001/038/094/0a1.jpg)', 'If only you could put that energy into your relationships', 'Posts like this is why I do Heroine.', 'still unemployed then?', 'K', 'look im gunna have 2 ask u 2 keep ur giant dumps in the toilet not in my replys 😷😷😷', "Mommy is soooo proud of you, sweaty. Let's put this sperg out up on the fridge with all your other failures.", "Good job bobby, here's a star", "That was a mistake. You're about to find out the hard way why.", 'You sat down and wrote all this shit. You could have done so many other things with your life. What happened to your life that made you decide writing novels of bullshit on rdrama.net was the best option?', "I don't have enough spoons to read this shit", "All those words won't bring daddy back.", 'OUT!']
# Auto-removal notice posted when a comment omits the mandatory phrase.
# {username} is presumably filled via str.format by the caller — confirm at the call site.
AGENDAPOSTER_MSG = """Hi @{username},\n\nYour comment has been automatically removed because you forgot
to include `trans lives matter`.\n\nDon't worry, we're here to help! We
won't let you post or comment anything that doesn't express your love and acceptance towards
the trans community. Feel free to resubmit your comment with `trans lives matter`
included. \n\n*This is an automated message; if you need help,
you can message us [here](/contact).*"""
# Removal notice for posts containing the word 'ivermectin'.
VAXX_MSG = """Hi @{username}, it appears that you may be trying to spread dangerous misinformation regarding ineffective COVID-19 treatments based on pseudoscientific hearsay. Your post has been removed because it contained the word ivermectin. We ask that you understand that horse dewormer neither treats, nor prevents, COVID-19. For more information, please read up on what the FDA has to say on the matter:
https://www.fda.gov/consumers/consumer-updates/why-you-should-not-use-ivermectin-treat-or-prevent-covid-19
COVID-19 is not a joke, it is a global pandemic and it has been hard on all of us. It will likely go down as one of the most defining periods of our generation. Many of us have lost loved ones to the virus. It has caused confusion, fear, frustration, and served to further divide us. Tens of millions around the world have died. There is nothing to be gained by spreading bad science based on very understandable fear.
The only proven method of prevention is the COVID-19 vaccine, paired with appropriate social distancing, handwashing, and masks. Vaccines are free in the United States - if you'd like to locate your nearest vaccine provider, please visit https://www.vaccines.gov/ and schedule an appointment today.
Thank you."""
# Based-count notification template; {username}, {basedcount} and {pills}
# are placeholders filled by the caller.
BASED_MSG = "@{username}'s Based Count has increased by 1. Their Based Count is now {basedcount}.\n\nPills: {pills}"
# Hard-coded bot account IDs, selected per deployment. `site` is defined
# earlier in this module (not visible here) — presumably the instance's
# domain; confirm upstream.
if site == "pcmemes.net":
    BASEDBOT_ACCOUNT = 800  # based-count bot exists only on pcmemes
    NOTIFICATIONS_ACCOUNT = 1046
    AUTOJANNY_ACCOUNT = 1050
    SNAPPY_ACCOUNT = 261
    LONGPOSTBOT_ACCOUNT = 1832
    ZOZBOT_ACCOUNT = 1833
    AUTOPOLLER_ACCOUNT = 3369
elif site == 'rdrama.net':
    NOTIFICATIONS_ACCOUNT = 1046
    AUTOJANNY_ACCOUNT = 2360
    SNAPPY_ACCOUNT = 261
    LONGPOSTBOT_ACCOUNT = 1832
    ZOZBOT_ACCOUNT = 1833
    AUTOPOLLER_ACCOUNT = 3369
else:
    # Fallback IDs for local/dev instances.
    NOTIFICATIONS_ACCOUNT = 1
    AUTOJANNY_ACCOUNT = 2
    SNAPPY_ACCOUNT = 3
    LONGPOSTBOT_ACCOUNT = 4
    ZOZBOT_ACCOUNT = 5
    AUTOPOLLER_ACCOUNT = 6
# Pusher push-notification config; the key is read from the environment.
PUSHER_INSTANCE_ID = '02ddcc80-b8db-42be-9022-44c546b4dce6'
PUSHER_KEY = environ.get("PUSHER_KEY", "").strip()

View File

@ -0,0 +1,86 @@
from collections import ChainMap
import re
from re import Match
from files.helpers.const import SLURS
def first_upper(phrase: str) -> str:
    """Upper-case only the first character of the phrase, not messing with the others.

    Unlike str.capitalize(), characters after the first keep their original
    case (e.g. "uss Liberty" -> "Uss Liberty"). Returns "" unchanged instead
    of raising IndexError on empty input.
    """
    # phrase[:1] is safe on the empty string, phrase[0] is not.
    return phrase[:1].upper() + phrase[1:]
def first_all_upper(phrase: str) -> str:
    """Upper-case the first character of every space-separated word, leaving
    all other characters untouched (a single word is handled the same way)."""
    return " ".join(word[0].upper() + word[1:] for word in phrase.split(" "))
def get_permutations_slur(slur: str, replacer: str = "_") -> dict[str, str]:
    """Map every supported casing of `slur` to the matching casing of `replacer`.

    Link replacers (the :marseymerchant: special case) are inserted verbatim,
    never re-cased. Insertion order is significant: the 'Correctest' version
    is written last so it wins when duplicate keys collapse.
    """
    word = slur.strip()
    is_link = replacer.startswith("http")  # special case for the :marseymerchant:

    def cased(transform):
        # Links keep their exact casing; everything else follows `transform`.
        return replacer if is_link else transform(replacer)

    return {
        word.upper(): cased(str.upper),
        first_all_upper(word): cased(first_all_upper),
        word.lower(): replacer,
        word: replacer,
        first_upper(word): cased(first_upper),
    }
def create_replace_map() -> dict[str, str]:
    """Create the map used to look up the matching replacement for a found slur.

    Builds one permutation dict per (slur, replacer) pair in SLURS (imported
    from files.helpers.const) and flattens them into a single dict. Note the
    original annotation `dict[str: str]` was a slice expression, not a type;
    the correct generic form is `dict[str, str]`.
    """
    dicts = [get_permutations_slur(slur, replacer) for (slur, replacer) in SLURS.items()]
    # ChainMap gives earlier dicts priority; dict() flattens it to a plain mapping.
    return dict(ChainMap(*dicts))


# Module-level cache built at import time; tests rebuild it after patching SLURS.
REPLACE_MAP = create_replace_map()
def create_variations_slur_regex(slur: str) -> list[str]:
    """Build the regex patterns for every casing permutation of `slur`.

    A slur stored with surrounding spaces must match as an exact word,
    bounded by whitespace or a tag bracket on both sides; otherwise a
    single leading OR trailing boundary suffices, so prefixed and
    suffixed occurrences match too.
    """
    permutations = get_permutations_slur(slur)
    exact_word = slur.startswith(" ") and slur.endswith(" ")

    if exact_word:
        return [rf"(\s|>)({perm})(\s|<)" for perm in permutations]
    return [rf"(\s|>)({perm})|({perm})(\s|<)" for perm in permutations]
def sub_matcher(match: Match) -> str:
    """re.sub callback: rebuild the matched span with the censored replacement.

    Exact-word patterns have 3 groups: (prefix)(word)(suffix). The
    prefix-OR-suffix patterns have 4 groups, (prefix)(word)|(word)(suffix),
    of which only one alternative is populated.
    """
    # '==', not 'is': identity comparison of ints is a CPython caching
    # accident and raises SyntaxWarning on modern interpreters.
    if len(match.groups()) == 3:  # special case when it should match the exact word
        found = match.group(2)
        replacer = REPLACE_MAP[found]
        return match.group(1) + replacer + match.group(3)
    else:  # normal case with prefix or suffix
        found = match.group(2) if (match.group(2) is not None) else match.group(3)
        replacer = REPLACE_MAP[found]
        return (match.group(1) or '') + replacer + (match.group(4) or '')
def censor_slurs(body: str, logged_user) -> str:
    """Replace every configured slur in `body`, honouring the user's opt-out.

    `logged_user` may be falsy (logged-out rendering), in which case
    censoring always applies. Any failure on an individual pattern (e.g. a
    missing REPLACE_MAP entry) is printed and skipped so one bad entry
    never breaks rendering — a behavior the test-suite relies on.
    """
    if logged_user and not logged_user.slurreplacer:
        return body
    # Only the keys are needed here; replacement text is resolved inside
    # sub_matcher via REPLACE_MAP (the unused `replace` local is gone).
    for slur in SLURS:
        for variation in create_variations_slur_regex(slur):
            try:
                body = re.sub(variation, sub_matcher, body)
            except Exception as e:
                print(e)
    return body

50
requirements.txt 100755 → 100644
View File

@ -1,25 +1,27 @@
beautifulsoup4 assertpy
bleach beautifulsoup4
Flask bleach
Flask-Caching Flask
Flask-Compress Flask-Caching
Flask-Limiter==1.1.0 Flask-Compress
Flask-Mail Flask-Limiter==1.1.0
gevent Flask-Mail
greenlet gevent
gunicorn greenlet
ImageHash gunicorn
matplotlib ImageHash
mistletoe matplotlib
piexif mistletoe
Pillow piexif
pyotp Pillow
qrcode pyotp
redis qrcode
requests redis
SQLAlchemy requests
psycopg2-binary SQLAlchemy
pusher_push_notifications psycopg2-binary
youtube-dl pusher_push_notifications
yattag pytest
youtube-dl
yattag
webptools webptools

View File

@ -0,0 +1,197 @@
import re
from unittest.mock import patch
from assertpy import assert_that
from files.helpers import word_censor
from files.helpers.word_censor import create_variations_slur_regex, create_replace_map, censor_slurs, sub_matcher, \
get_permutations_slur, first_upper, first_all_upper
def test_first_upper():
    """Only the first character is upper-cased; the rest is untouched."""
    cases = [
        ("USS liberty", "USS liberty"),
        ("uss liberty", "Uss liberty"),
        ("uss Liberty", "Uss Liberty"),
    ]
    for given, expected in cases:
        assert_that(first_upper(given)).is_equal_to(expected)
def test_first_all_upper():
    """Every word's first character is upper-cased; the rest is untouched."""
    cases = [
        ("USS liberty", "USS Liberty"),
        ("uss liberty", "Uss Liberty"),
        ("uss Liberty", "Uss Liberty"),
    ]
    for given, expected in cases:
        assert_that(first_all_upper(given)).is_equal_to(expected)
def test_get_permutations_slur():
    """Each case permutation of the slur maps to the matching-case replacer."""
    got = get_permutations_slur("USS liberty incident", "tragic accident aboard the USS Liberty")

    assert_that(got).is_equal_to({
        "USS liberty incident": "Tragic accident aboard the USS Liberty",
        "uss liberty incident": "tragic accident aboard the USS Liberty",
        "USS Liberty Incident": "Tragic Accident Aboard The USS Liberty",
        "USS LIBERTY INCIDENT": "TRAGIC ACCIDENT ABOARD THE USS LIBERTY",
    })
def test_get_permutations_slur_with_link_replacer():
    """Link replacers are never re-cased, whatever the casing of the slur.

    (Renamed from test_get_permutations_slur_wiht_link_replacer: 'wiht' typo.
    pytest discovers tests by the `test_` prefix, so the rename is safe.)
    """
    link = "https://sciencedirect.com/science/article/abs/pii/S016028960600033X"
    expected = {
        "kike": link,
        "Kike": link,
        "KIKE": link,
    }

    result = get_permutations_slur("kike", link)

    assert_that(result).is_equal_to(expected)
def test_create_variations_slur_regex_for_slur_with_spaces():
    """A slur wrapped in spaces only gets exact-word patterns, bounded on both sides."""
    patterns = create_variations_slur_regex(" retard ")

    assert_that(patterns).is_length(3).contains_only(
        r"(\s|>)(retard)(\s|<)",
        r"(\s|>)(Retard)(\s|<)",
        r"(\s|>)(RETARD)(\s|<)")
def test_create_variations_slur_regex_single_word():
    """An unbounded slur matches with either a leading or a trailing boundary."""
    patterns = create_variations_slur_regex("retard")

    assert_that(patterns).is_length(3).contains_only(
        r"(\s|>)(retard)|(retard)(\s|<)",
        r"(\s|>)(Retard)|(Retard)(\s|<)",
        r"(\s|>)(RETARD)|(RETARD)(\s|<)")
def test_create_variations_slur_regex_multiple_word():
    """Multi-word slurs get one pattern per casing permutation (four here)."""
    patterns = create_variations_slur_regex("kill yourself")

    assert_that(patterns).is_length(4).contains_only(
        r"(\s|>)(kill yourself)|(kill yourself)(\s|<)",
        r"(\s|>)(Kill yourself)|(Kill yourself)(\s|<)",
        r"(\s|>)(Kill Yourself)|(Kill Yourself)(\s|<)",
        r"(\s|>)(KILL YOURSELF)|(KILL YOURSELF)(\s|<)")
@patch("files.helpers.word_censor.SLURS", {
    "tranny": "πŸš‚πŸšƒπŸšƒ",
    "kill yourself": "keep yourself safe",
    "faggot": "cute twink",
    "NoNewNormal": "NoNewNormal",
    " nig ": "πŸ€",
})
def test_create_replace_map():
    """create_replace_map flattens every slur's case permutations into one dict."""
    expected = {
        "tranny": "πŸš‚πŸšƒπŸšƒ",
        "Tranny": "πŸš‚πŸšƒπŸšƒ",
        "TRANNY": "πŸš‚πŸšƒπŸšƒ",
        "kill yourself": "keep yourself safe",
        "Kill yourself": "Keep yourself safe",
        "Kill Yourself": "Keep Yourself Safe",
        "KILL YOURSELF": "KEEP YOURSELF SAFE",
        "faggot": "cute twink",
        "Faggot": "Cute twink",
        "FAGGOT": "CUTE TWINK",
        # emoji/link-free replacer equal to the slur: only the key's casing varies
        "NoNewNormal": "NoNewNormal",
        "nonewnormal": "NoNewNormal",
        "NONEWNORMAL": "NONEWNORMAL",
        # space-bounded slur ' nig ' is stripped before permutation
        "nig": "πŸ€",
        "Nig": "πŸ€",
        "NIG": "πŸ€",
    }

    result = create_replace_map()

    assert_that(result).is_equal_to(expected)
@patch("files.helpers.word_censor.REPLACE_MAP", {'retard': 'r-slur', 'NIG': 'πŸ€'})
def test_sub_matcher():
    """sub_matcher rebuilds the matched span, keeping whichever boundary was captured."""
    m = re.search(r"(\s|>)(retard)|(retard)(\s|<)", "<p>retard</p>")
    assert_that(sub_matcher(m)).is_equal_to(">r-slur")

    m = re.search(r"(\s|>)(retard)|(retard)(\s|<)", "<p>noretard</p>")
    assert_that(sub_matcher(m)).is_equal_to("r-slur<")

    m = re.search(r"(\s|>)(NIG)(\s|<)", "<p>NIG</p>")
    assert_that(sub_matcher(m)).is_equal_to(">πŸ€<")

    m = re.search(r"(\s|>)(NIG)(\s|<)", "<p>NIG </p>")
    assert_that(sub_matcher(m)).is_equal_to(">πŸ€ ")
@patch("files.helpers.word_censor.SLURS", {
    'retard': 'r-slur',
    'manlet': 'little king',
    ' nig ': 'πŸ€',
    'i hate Carp': 'i love Carp',
    'kike': 'https://sciencedirect.com/science/article/abs/pii/S016028960600033X'
})
def test_censor_slurs():
    """End-to-end behaviour of censor_slurs on HTML-ish bodies with logged_user=None."""
    # REPLACE_MAP was built at import time, so rebuild it from the patched SLURS.
    word_censor.REPLACE_MAP = create_replace_map()

    # plain, prefixed, suffixed and upper-cased occurrences are all censored
    assert_that(censor_slurs("<p>retard</p>", None)).is_equal_to("<p>r-slur</p>")
    assert_that(censor_slurs("<p>preretard</p>", None)).is_equal_to("<p>prer-slur</p>")
    assert_that(censor_slurs("that is Retarded like", None)).is_equal_to("that is R-slured like")
    assert_that(censor_slurs("that is SUPERRETARD like", None)).is_equal_to("that is SUPERR-SLUR like")
    assert_that(censor_slurs("<p>Manlets get out!</p>", None)).is_equal_to("<p>Little kings get out!</p>")

    # quoted, mixed-case or letter-bounded occurrences are left alone
    assert_that(censor_slurs('... "retard" ...', None)).is_equal_to('... "retard" ...')
    assert_that(censor_slurs('... ReTaRd ...', None)).is_equal_to('... ReTaRd ...')
    assert_that(censor_slurs('... xretardx ...', None)).is_equal_to('... xretardx ...')

    assert_that(censor_slurs("LLM is a manlet hehe", None)).is_equal_to("LLM is a little king hehe")
    # emoji codes like :marseycapitalistmanlet: must survive untouched
    assert_that(censor_slurs("LLM is :marseycapitalistmanlet: hehe", None)) \
        .is_equal_to("LLM is :marseycapitalistmanlet: hehe")

    # ' nig ' (space-bounded in SLURS) matches only as an exact bounded word
    assert_that(censor_slurs('... Nig ...', None)).is_equal_to('... πŸ€ ...')
    assert_that(censor_slurs('<p>NIG</p>', None)).is_equal_to('<p>πŸ€</p>')
    assert_that(censor_slurs('... nigeria ...', None)).is_equal_to('... nigeria ...')

    # multi-word slur: supported casing permutations
    assert_that(censor_slurs('... i hate Carp ...', None)).is_equal_to('... i love Carp ...')
    assert_that(censor_slurs('... i hate carp ...', None)).is_equal_to('... i love Carp ...')
    assert_that(censor_slurs('... I hate Carp ...', None)).is_equal_to('... I love Carp ...')
    assert_that(censor_slurs('... I Hate Carp ...', None)).is_equal_to('... I Love Carp ...')
    assert_that(censor_slurs('... I HATE CARP ...', None)).is_equal_to('... I LOVE CARP ...')

    # Not covered:
    assert_that(censor_slurs('... I Hate carp ...', None)).is_equal_to('... I Hate carp ...')
    assert_that(censor_slurs('... i Hate Carp ...', None)).is_equal_to('... i Hate Carp ...')
    assert_that(censor_slurs('... i Hate carp ...', None)).is_equal_to('... i Hate carp ...')
    assert_that(censor_slurs('... i hate a carp ...', None)).is_equal_to('... i hate a carp ...')

    assert_that(censor_slurs("<p>retarded SuperManlet NIG</p>", None)) \
        .is_equal_to("<p>r-slured SuperLittle king πŸ€</p>")

    # link replacers are substituted verbatim for any casing of the slur
    assert_that(censor_slurs('... kike ...', None)) \
        .is_equal_to('... https://sciencedirect.com/science/article/abs/pii/S016028960600033X ...')
    assert_that(censor_slurs('... Kike ...', None)) \
        .is_equal_to('... https://sciencedirect.com/science/article/abs/pii/S016028960600033X ...')
    assert_that(censor_slurs('... KIKE ...', None)) \
        .is_equal_to('... https://sciencedirect.com/science/article/abs/pii/S016028960600033X ...')
@patch("files.helpers.word_censor.SLURS", {'retard': 'r-slur', 'manlet': 'little king', ' nig ': 'πŸ€'})
def test_censor_slurs_does_not_error_out_on_exception():
    """A broken REPLACE_MAP entry is swallowed; the other slurs are still censored."""
    word_censor.REPLACE_MAP = create_replace_map()
    # sub_matcher will fail on 'Manlet' (None replacer); censor_slurs must survive it
    word_censor.REPLACE_MAP["Manlet"] = None

    censored = censor_slurs(">retarded SuperManlet NIG<", None)

    assert_that(censored).is_equal_to(">r-slured SuperManlet πŸ€<")
@patch("files.helpers.word_censor.SLURS", {'retard': 'r-slur', 'manlet': 'little king'})
def test_censor_slurs_does_not_censor_on_flag_disabled():
    """slurreplacer=False leaves the body untouched; True censors as usual."""
    word_censor.REPLACE_MAP = create_replace_map()

    class User:
        def __init__(self, slurreplacer):
            self.slurreplacer = slurreplacer

    assert_that(censor_slurs("<p>retard</p>", User(slurreplacer=False))) \
        .is_equal_to("<p>retard</p>")
    assert_that(censor_slurs("<p>retard</p>", User(slurreplacer=True))) \
        .is_equal_to("<p>r-slur</p>")