master
Aevann1 2021-10-17 18:27:18 +02:00
commit 3d0a742200
8 changed files with 1352 additions and 1321 deletions

17
.gitignore vendored 100755 → 100644

@@ -1,7 +1,10 @@
image.*
chart.png
video.mp4
cache/
__pycache__/
disablesignups
*rules.html
image.*
chart.png
video.mp4
cache/
__pycache__/
disablesignups
*rules.html
.idea/
**/.pytest_cache/
venv/

132
docker-compose.yml 100755 → 100644

@@ -1,66 +1,66 @@
version: '2.3'
services:
files:
build:
context: .
volumes:
- "./:/service"
environment:
- DATABASE_URL=postgresql://postgres@127.0.0.1:5432/postgres
- MASTER_KEY=${MASTER_KEY:-KTVciAUQFpFh2WdJ/oiHJlxl6FvzRZp8kYzAAv3l2OA=}
- DOMAIN=localhost
- SITE_NAME=Drama
- GIPHY_KEY=3435tdfsdudebussylmaoxxt43
- FORCE_HTTPS=0
- DISCORD_SERVER_ID=3435tdfsdudebussylmaoxxt43
- DISCORD_CLIENT_ID=3435tdfsdudebussylmaoxxt43
- DISCORD_CLIENT_SECRET=3435tdfsdudebussylmaoxxt43
- DISCORD_BOT_TOKEN=3435tdfsdudebussylmaoxxt43
#- HCAPTCHA_SITEKEY=3435tdfsdudebussylmaoxxt43
- HCAPTCHA_SECRET=3435tdfsdudebussylmaoxxt43
- YOUTUBE_KEY=3435tdfsdudebussylmaoxxt43
- PUSHER_KEY=3435tdfsdudebussylmaoxxt43
- CATBOX_KEY=3435tdfsdudebussylmaoxxt43
- SPAM_SIMILARITY_THRESHOLD=0.5
- SPAM_SIMILAR_COUNT_THRESHOLD=5
- SPAM_URL_SIMILARITY_THRESHOLD=0.1
- COMMENT_SPAM_SIMILAR_THRESHOLD=0.5
- COMMENT_SPAM_COUNT_THRESHOLD=5
- READ_ONLY=0
- BOT_DISABLE=0
- COINS_NAME=Dramacoins
- DEFAULT_TIME_FILTER=all
- DEFAULT_THEME=midnight
- DEFAULT_COLOR=ff66ac #YOU HAVE TO PICK ONE OF THOSE COLORS OR SHIT WILL BREAK: ff66ac, 805ad5, 62ca56, 38a169, 80ffff, 2a96f3, eb4963, ff0000, f39731, 30409f, 3e98a7, e4432d, 7b9ae4, ec72de, 7f8fa6, f8db58
- SLOGAN=Dude bussy lmao
- GUMROAD_TOKEN=3435tdfsdudebussylmaoxxt43
- GUMROAD_LINK=https://marsey1.gumroad.com/l/tfcvri
- CARD_VIEW=1
- DISABLE_DOWNVOTES=0
- DUES=0
- MAIL_USERNAME=blahblahblah@gmail.com
- MAIL_PASSWORD=3435tdfsdudebussylmaoxxt43
links:
- "redis"
- "postgres"
ports:
- "80:80"
depends_on:
- redis
- postgres
redis:
image: redis
ports:
- "6379:6379"
postgres:
image: postgres:12.3
volumes:
- "./schema.sql:/docker-entrypoint-initdb.d/00-schema.sql"
- "./seed-db.sql:/docker-entrypoint-initdb.d/01-schema.sql"
environment:
- POSTGRES_HOST_AUTH_METHOD=trust
#ports:
#- "5432:5432"
version: '2.3'
services:
files:
build:
context: .
volumes:
- "./:/service"
environment:
- DATABASE_URL=postgresql://postgres@127.0.0.1:5432/postgres
- MASTER_KEY=${MASTER_KEY:-KTVciAUQFpFh2WdJ/oiHJlxl6FvzRZp8kYzAAv3l2OA=}
- DOMAIN=localhost
- SITE_NAME=Drama
- GIPHY_KEY=3435tdfsdudebussylmaoxxt43
- FORCE_HTTPS=0
- DISCORD_SERVER_ID=3435tdfsdudebussylmaoxxt43
- DISCORD_CLIENT_ID=3435tdfsdudebussylmaoxxt43
- DISCORD_CLIENT_SECRET=3435tdfsdudebussylmaoxxt43
- DISCORD_BOT_TOKEN=3435tdfsdudebussylmaoxxt43
#- HCAPTCHA_SITEKEY=3435tdfsdudebussylmaoxxt43
- HCAPTCHA_SECRET=3435tdfsdudebussylmaoxxt43
- YOUTUBE_KEY=3435tdfsdudebussylmaoxxt43
- PUSHER_KEY=3435tdfsdudebussylmaoxxt43
- CATBOX_KEY=3435tdfsdudebussylmaoxxt43
- SPAM_SIMILARITY_THRESHOLD=0.5
- SPAM_SIMILAR_COUNT_THRESHOLD=5
- SPAM_URL_SIMILARITY_THRESHOLD=0.1
- COMMENT_SPAM_SIMILAR_THRESHOLD=0.5
- COMMENT_SPAM_COUNT_THRESHOLD=5
- READ_ONLY=0
- BOT_DISABLE=0
- COINS_NAME=Dramacoins
- DEFAULT_TIME_FILTER=all
- DEFAULT_THEME=midnight
- DEFAULT_COLOR=ff66ac #YOU HAVE TO PICK ONE OF THOSE COLORS OR SHIT WILL BREAK: ff66ac, 805ad5, 62ca56, 38a169, 80ffff, 2a96f3, eb4963, ff0000, f39731, 30409f, 3e98a7, e4432d, 7b9ae4, ec72de, 7f8fa6, f8db58
- SLOGAN=Dude bussy lmao
- GUMROAD_TOKEN=3435tdfsdudebussylmaoxxt43
- GUMROAD_LINK=https://marsey1.gumroad.com/l/tfcvri
- CARD_VIEW=1
- DISABLE_DOWNVOTES=0
- DUES=0
- MAIL_USERNAME=blahblahblah@gmail.com
- MAIL_PASSWORD=3435tdfsdudebussylmaoxxt43
links:
- "redis"
- "postgres"
ports:
- "80:80"
depends_on:
- redis
- postgres
redis:
image: redis
ports:
- "6379:6379"
postgres:
image: postgres:12.3
volumes:
- "./schema.sql:/docker-entrypoint-initdb.d/00-schema.sql"
- "./seed-db.sql:/docker-entrypoint-initdb.d/01-schema.sql"
environment:
- POSTGRES_HOST_AUTH_METHOD=trust
#ports:
#- "5432:5432"

788
files/classes/comment.py 100755 → 100644

@@ -1,394 +1,394 @@
import re
from urllib.parse import urlencode, urlparse, parse_qs
from flask import *
from sqlalchemy import *
from sqlalchemy.orm import relationship, deferred, lazyload
from files.classes.votes import CommentVote
from files.helpers.lazy import lazy
from files.helpers.const import SLURS
from files.__main__ import Base
from .flags import CommentFlag
from os import environ
import time
from files.helpers.const import AUTOPOLLER_ACCOUNT
site = environ.get("DOMAIN").strip()
class Comment(Base):
__tablename__ = "comments"
id = Column(Integer, primary_key=True)
author_id = Column(Integer, ForeignKey("users.id"))
parent_submission = Column(Integer, ForeignKey("submissions.id"))
created_utc = Column(Integer, default=0)
edited_utc = Column(Integer, default=0)
is_banned = Column(Boolean, default=False)
removed_by = Column(Integer)
bannedfor = Column(Boolean)
distinguish_level = Column(Integer, default=0)
deleted_utc = Column(Integer, default=0)
is_approved = Column(Integer, default=0)
level = Column(Integer, default=0)
parent_comment_id = Column(Integer, ForeignKey("comments.id"))
over_18 = Column(Boolean, default=False)
is_bot = Column(Boolean, default=False)
is_pinned = Column(String)
sentto=Column(Integer, ForeignKey("users.id"))
notifiedto=Column(Integer)
app_id = Column(Integer, ForeignKey("oauth_apps.id"))
oauth_app = relationship("OauthApp", viewonly=True)
upvotes = Column(Integer, default=0)
downvotes = Column(Integer, default=0)
body = deferred(Column(String))
body_html = deferred(Column(String))
ban_reason = Column(String)
post = relationship("Submission", viewonly=True)
flags = relationship("CommentFlag", lazy="dynamic", viewonly=True)
author = relationship("User", primaryjoin="User.id==Comment.author_id")
senttouser = relationship("User", primaryjoin="User.id==Comment.sentto", viewonly=True)
parent_comment = relationship("Comment", remote_side=[id], viewonly=True)
child_comments = relationship("Comment", remote_side=[parent_comment_id], viewonly=True)
awards = relationship("AwardRelationship", viewonly=True)
def __init__(self, *args, **kwargs):
if "created_utc" not in kwargs:
kwargs["created_utc"] = int(time.time())
super().__init__(*args, **kwargs)
def __repr__(self):
return f"<Comment(id={self.id})>"
def poll_voted(self, v):
if v:
vote = g.db.query(CommentVote).options(lazyload('*')).filter_by(user_id=v.id, comment_id=self.id).first()
if vote: return vote.vote_type
else: return None
else: return None
@property
@lazy
def options(self):
return [x for x in self.child_comments if x.author_id == AUTOPOLLER_ACCOUNT]
@property
@lazy
def created_datetime(self):
return str(time.strftime("%d/%B/%Y %H:%M:%S UTC", time.gmtime(self.created_utc)))
@property
@lazy
def age_string(self):
age = int(time.time()) - self.created_utc
if age < 60:
return "just now"
elif age < 3600:
minutes = int(age / 60)
return f"{minutes}m ago"
elif age < 86400:
hours = int(age / 3600)
return f"{hours}hr ago"
elif age < 2678400:
days = int(age / 86400)
return f"{days}d ago"
now = time.gmtime()
ctd = time.gmtime(self.created_utc)
months = now.tm_mon - ctd.tm_mon + 12 * (now.tm_year - ctd.tm_year)
if now.tm_mday < ctd.tm_mday:
months -= 1
if months < 12:
return f"{months}mo ago"
else:
years = int(months / 12)
return f"{years}yr ago"
@property
@lazy
def edited_string(self):
if not self.edited_utc:
return "never"
age = int(time.time()) - self.edited_utc
if age < 60:
return "just now"
elif age < 3600:
minutes = int(age / 60)
return f"{minutes}m ago"
elif age < 86400:
hours = int(age / 3600)
return f"{hours}hr ago"
elif age < 2678400:
days = int(age / 86400)
return f"{days}d ago"
now = time.gmtime()
ctd = time.gmtime(self.edited_utc)
months = now.tm_mon - ctd.tm_mon + 12 * (now.tm_year - ctd.tm_year)
if months < 12:
return f"{months}mo ago"
else:
years = now.tm_year - ctd.tm_year
return f"{years}yr ago"
@property
@lazy
def score(self):
return self.upvotes - self.downvotes
@property
@lazy
def fullname(self):
return f"t3_{self.id}"
@property
@lazy
def parent(self):
if not self.parent_submission: return None
if self.level == 1: return self.post
else: return g.db.query(Comment).get(self.parent_comment_id)
@property
@lazy
def parent_fullname(self):
if self.parent_comment_id: return f"t3_{self.parent_comment_id}"
elif self.parent_submission: return f"t2_{self.parent_submission}"
@property
def replies(self):
r = self.__dict__.get("replies", None)
if r: r = [x for x in r if not x.author.shadowbanned]
if not r and r != []: r = sorted([x for x in self.child_comments if not x.author.shadowbanned and x.author_id != AUTOPOLLER_ACCOUNT], key=lambda x: x.score, reverse=True)
return r
@replies.setter
def replies(self, value):
self.__dict__["replies"] = value
@property
def replies2(self):
return self.__dict__.get("replies2", [])
@replies2.setter
def replies2(self, value):
self.__dict__["replies2"] = value
@property
def replies3(self):
r = self.__dict__.get("replies", None)
if not r and r != []: r = sorted([x for x in self.child_comments if x.author_id != AUTOPOLLER_ACCOUNT], key=lambda x: x.score, reverse=True)
return r
@property
@lazy
def shortlink(self):
return f"https://{site}/comment/{self.id}"
@property
@lazy
def permalink(self):
if self.post and self.post.club: return f"/comment/{self.id}/"
if self.post: return f"{self.post.permalink}/{self.id}/"
else: return f"/comment/{self.id}/"
@property
@lazy
def json_raw(self):
flags = {}
for f in self.flags: flags[f.user.username] = f.reason
data= {
'id': self.id,
'level': self.level,
'author_name': self.author.username,
'body': self.body,
'body_html': self.body_html,
'is_bot': self.is_bot,
'created_utc': self.created_utc,
'edited_utc': self.edited_utc or 0,
'is_banned': bool(self.is_banned),
'deleted_utc': self.deleted_utc,
'is_nsfw': self.over_18,
'permalink': self.permalink,
'is_pinned': self.is_pinned,
'distinguish_level': self.distinguish_level,
'post_id': self.post.id,
'score': self.score,
'upvotes': self.upvotes,
'downvotes': self.downvotes,
'is_bot': self.is_bot,
'flags': flags,
}
if self.ban_reason:
data["ban_reason"]=self.ban_reason
return data
def award_count(self, kind) -> int:
return len([x for x in self.awards if x.kind == kind])
@property
@lazy
def json_core(self):
if self.is_banned:
data= {'is_banned': True,
'ban_reason': self.ban_reason,
'id': self.id,
'post': self.post.id,
'level': self.level,
'parent': self.parent_fullname
}
elif self.deleted_utc > 0:
data= {'deleted_utc': self.deleted_utc,
'id': self.id,
'post': self.post.id,
'level': self.level,
'parent': self.parent_fullname
}
else:
data=self.json_raw
if self.level>=2: data['parent_comment_id']= self.parent_comment_id,
if "replies" in self.__dict__:
data['replies']=[x.json_core for x in self.replies]
return data
@property
@lazy
def json(self):
data=self.json_core
if self.deleted_utc > 0 or self.is_banned:
return data
data["author"]=self.author.json_core
data["post"]=self.post.json_core
if self.level >= 2:
data["parent"]=self.parent.json_core
return data
def realbody(self, v):
if self.post and self.post.club and not (v and v.paid_dues): return "<p>COUNTRY CLUB ONLY</p>"
body = self.body_html
if not body: return ""
if not v or v.slurreplacer:
for s, r in SLURS.items(): body = body.replace(s, r)
if v and not v.oldreddit: body = body.replace("old.reddit.com", "reddit.com")
if v and v.nitter: body = body.replace("www.twitter.com", "nitter.net").replace("twitter.com", "nitter.net")
if v and v.controversial:
for i in re.finditer('(/comments/.*?)"', body):
url = i.group(1)
p = urlparse(url).query
p = parse_qs(p)
if 'sort' not in p: p['sort'] = ['controversial']
url_noquery = url.split('?')[0]
body = body.replace(url, f"{url_noquery}?{urlencode(p, True)}")
return body
def plainbody(self, v):
if self.post and self.post.club and not (v and v.paid_dues): return "<p>COUNTRY CLUB ONLY</p>"
body = self.body
if not body: return ""
if not v or v.slurreplacer:
for s, r in SLURS.items(): body = body.replace(s, r)
if v and not v.oldreddit: body = body.replace("old.reddit.com", "reddit.com")
if v and v.nitter: body = body.replace("www.twitter.com", "nitter.net").replace("twitter.com", "nitter.net")
if v and v.controversial:
for i in re.finditer('(/comments/.*?)"', body):
url = i.group(1)
p = urlparse(url).query
p = parse_qs(p)
if 'sort' not in p: p['sort'] = ['controversial']
url_noquery = url.split('?')[0]
body = body.replace(url, f"{url_noquery}?{urlencode(p, True)}")
return body
@lazy
def collapse_for_user(self, v):
if self.over_18 and not (v and v.over_18) and not self.post.over_18: return True
if not v: return False
if v.filter_words and any([x in self.body for x in v.filter_words]): return True
if self.is_banned or (self.author and self.author.shadowbanned): return True
return False
@property
@lazy
def is_op(self): return self.author_id==self.post.author_id
@property
@lazy
def active_flags(self): return self.flags.count()
@property
@lazy
def ordered_flags(self): return self.flags.order_by(CommentFlag.id).all()
class Notification(Base):
__tablename__ = "notifications"
id = Column(Integer, primary_key=True)
user_id = Column(Integer, ForeignKey("users.id"))
comment_id = Column(Integer, ForeignKey("comments.id"))
read = Column(Boolean, default=False)
followsender = Column(Integer)
unfollowsender = Column(Integer)
removefollowsender = Column(Integer)
blocksender = Column(Integer)
unblocksender = Column(Integer)
comment = relationship("Comment", viewonly=True)
user = relationship("User", viewonly=True)
def __repr__(self):
return f"<Notification(id={self.id})>"
from os import environ
import re
import time
from urllib.parse import urlencode, urlparse, parse_qs
from flask import *
from sqlalchemy import *
from sqlalchemy.orm import relationship, deferred, lazyload
from files.__main__ import Base
from files.classes.votes import CommentVote
from files.helpers.const import AUTOPOLLER_ACCOUNT
from files.helpers.lazy import lazy
from .flags import CommentFlag
from ..helpers.word_censor import censor_slurs
site = environ.get("DOMAIN").strip()
class Comment(Base):
__tablename__ = "comments"
id = Column(Integer, primary_key=True)
author_id = Column(Integer, ForeignKey("users.id"))
parent_submission = Column(Integer, ForeignKey("submissions.id"))
created_utc = Column(Integer, default=0)
edited_utc = Column(Integer, default=0)
is_banned = Column(Boolean, default=False)
removed_by = Column(Integer)
bannedfor = Column(Boolean)
distinguish_level = Column(Integer, default=0)
deleted_utc = Column(Integer, default=0)
is_approved = Column(Integer, default=0)
level = Column(Integer, default=0)
parent_comment_id = Column(Integer, ForeignKey("comments.id"))
over_18 = Column(Boolean, default=False)
is_bot = Column(Boolean, default=False)
is_pinned = Column(String)
sentto=Column(Integer, ForeignKey("users.id"))
notifiedto=Column(Integer)
app_id = Column(Integer, ForeignKey("oauth_apps.id"))
oauth_app = relationship("OauthApp", viewonly=True)
upvotes = Column(Integer, default=0)
downvotes = Column(Integer, default=0)
body = deferred(Column(String))
body_html = deferred(Column(String))
ban_reason = Column(String)
post = relationship("Submission", viewonly=True)
flags = relationship("CommentFlag", lazy="dynamic", viewonly=True)
author = relationship("User", primaryjoin="User.id==Comment.author_id")
senttouser = relationship("User", primaryjoin="User.id==Comment.sentto", viewonly=True)
parent_comment = relationship("Comment", remote_side=[id], viewonly=True)
child_comments = relationship("Comment", remote_side=[parent_comment_id], viewonly=True)
awards = relationship("AwardRelationship", viewonly=True)
def __init__(self, *args, **kwargs):
if "created_utc" not in kwargs:
kwargs["created_utc"] = int(time.time())
super().__init__(*args, **kwargs)
def __repr__(self):
return f"<Comment(id={self.id})>"
def poll_voted(self, v):
if v:
vote = g.db.query(CommentVote).options(lazyload('*')).filter_by(user_id=v.id, comment_id=self.id).first()
if vote: return vote.vote_type
else: return None
else: return None
@property
@lazy
def options(self):
return [x for x in self.child_comments if x.author_id == AUTOPOLLER_ACCOUNT]
@property
@lazy
def created_datetime(self):
return str(time.strftime("%d/%B/%Y %H:%M:%S UTC", time.gmtime(self.created_utc)))
@property
@lazy
def age_string(self):
age = int(time.time()) - self.created_utc
if age < 60:
return "just now"
elif age < 3600:
minutes = int(age / 60)
return f"{minutes}m ago"
elif age < 86400:
hours = int(age / 3600)
return f"{hours}hr ago"
elif age < 2678400:
days = int(age / 86400)
return f"{days}d ago"
now = time.gmtime()
ctd = time.gmtime(self.created_utc)
months = now.tm_mon - ctd.tm_mon + 12 * (now.tm_year - ctd.tm_year)
if now.tm_mday < ctd.tm_mday:
months -= 1
if months < 12:
return f"{months}mo ago"
else:
years = int(months / 12)
return f"{years}yr ago"
@property
@lazy
def edited_string(self):
if not self.edited_utc:
return "never"
age = int(time.time()) - self.edited_utc
if age < 60:
return "just now"
elif age < 3600:
minutes = int(age / 60)
return f"{minutes}m ago"
elif age < 86400:
hours = int(age / 3600)
return f"{hours}hr ago"
elif age < 2678400:
days = int(age / 86400)
return f"{days}d ago"
now = time.gmtime()
ctd = time.gmtime(self.edited_utc)
months = now.tm_mon - ctd.tm_mon + 12 * (now.tm_year - ctd.tm_year)
if months < 12:
return f"{months}mo ago"
else:
years = now.tm_year - ctd.tm_year
return f"{years}yr ago"
@property
@lazy
def score(self):
return self.upvotes - self.downvotes
@property
@lazy
def fullname(self):
return f"t3_{self.id}"
@property
@lazy
def parent(self):
if not self.parent_submission: return None
if self.level == 1: return self.post
else: return g.db.query(Comment).get(self.parent_comment_id)
@property
@lazy
def parent_fullname(self):
if self.parent_comment_id: return f"t3_{self.parent_comment_id}"
elif self.parent_submission: return f"t2_{self.parent_submission}"
@property
def replies(self):
r = self.__dict__.get("replies", None)
if r: r = [x for x in r if not x.author.shadowbanned]
if not r and r != []: r = sorted([x for x in self.child_comments if not x.author.shadowbanned and x.author_id != AUTOPOLLER_ACCOUNT], key=lambda x: x.score, reverse=True)
return r
@replies.setter
def replies(self, value):
self.__dict__["replies"] = value
@property
def replies2(self):
return self.__dict__.get("replies2", [])
@replies2.setter
def replies2(self, value):
self.__dict__["replies2"] = value
@property
def replies3(self):
r = self.__dict__.get("replies", None)
if not r and r != []: r = sorted([x for x in self.child_comments if x.author_id != AUTOPOLLER_ACCOUNT], key=lambda x: x.score, reverse=True)
return r
@property
@lazy
def shortlink(self):
return f"https://{site}/comment/{self.id}"
@property
@lazy
def permalink(self):
if self.post and self.post.club: return f"/comment/{self.id}/"
if self.post: return f"{self.post.permalink}/{self.id}/"
else: return f"/comment/{self.id}/"
@property
@lazy
def json_raw(self):
flags = {}
for f in self.flags: flags[f.user.username] = f.reason
data= {
'id': self.id,
'level': self.level,
'author_name': self.author.username,
'body': self.body,
'body_html': self.body_html,
'is_bot': self.is_bot,
'created_utc': self.created_utc,
'edited_utc': self.edited_utc or 0,
'is_banned': bool(self.is_banned),
'deleted_utc': self.deleted_utc,
'is_nsfw': self.over_18,
'permalink': self.permalink,
'is_pinned': self.is_pinned,
'distinguish_level': self.distinguish_level,
'post_id': self.post.id,
'score': self.score,
'upvotes': self.upvotes,
'downvotes': self.downvotes,
'is_bot': self.is_bot,
'flags': flags,
}
if self.ban_reason:
data["ban_reason"]=self.ban_reason
return data
def award_count(self, kind) -> int:
return len([x for x in self.awards if x.kind == kind])
@property
@lazy
def json_core(self):
if self.is_banned:
data= {'is_banned': True,
'ban_reason': self.ban_reason,
'id': self.id,
'post': self.post.id,
'level': self.level,
'parent': self.parent_fullname
}
elif self.deleted_utc > 0:
data= {'deleted_utc': self.deleted_utc,
'id': self.id,
'post': self.post.id,
'level': self.level,
'parent': self.parent_fullname
}
else:
data=self.json_raw
if self.level>=2: data['parent_comment_id']= self.parent_comment_id,
if "replies" in self.__dict__:
data['replies']=[x.json_core for x in self.replies]
return data
@property
@lazy
def json(self):
data=self.json_core
if self.deleted_utc > 0 or self.is_banned:
return data
data["author"]=self.author.json_core
data["post"]=self.post.json_core
if self.level >= 2:
data["parent"]=self.parent.json_core
return data
def realbody(self, v):
if self.post and self.post.club and not (v and v.paid_dues): return "<p>COUNTRY CLUB ONLY</p>"
body = self.body_html
if not body: return ""
body = censor_slurs(body, v)
if v and not v.oldreddit: body = body.replace("old.reddit.com", "reddit.com")
if v and v.nitter: body = body.replace("www.twitter.com", "nitter.net").replace("twitter.com", "nitter.net")
if v and v.controversial:
for i in re.finditer('(/comments/.*?)"', body):
url = i.group(1)
p = urlparse(url).query
p = parse_qs(p)
if 'sort' not in p: p['sort'] = ['controversial']
url_noquery = url.split('?')[0]
body = body.replace(url, f"{url_noquery}?{urlencode(p, True)}")
return body
def plainbody(self, v):
if self.post and self.post.club and not (v and v.paid_dues): return "<p>COUNTRY CLUB ONLY</p>"
body = self.body
if not body: return ""
body = censor_slurs(body, v)
if v and not v.oldreddit: body = body.replace("old.reddit.com", "reddit.com")
if v and v.nitter: body = body.replace("www.twitter.com", "nitter.net").replace("twitter.com", "nitter.net")
if v and v.controversial:
for i in re.finditer('(/comments/.*?)"', body):
url = i.group(1)
p = urlparse(url).query
p = parse_qs(p)
if 'sort' not in p: p['sort'] = ['controversial']
url_noquery = url.split('?')[0]
body = body.replace(url, f"{url_noquery}?{urlencode(p, True)}")
return body
@lazy
def collapse_for_user(self, v):
if self.over_18 and not (v and v.over_18) and not self.post.over_18: return True
if not v: return False
if v.filter_words and any([x in self.body for x in v.filter_words]): return True
if self.is_banned or (self.author and self.author.shadowbanned): return True
return False
@property
@lazy
def is_op(self): return self.author_id==self.post.author_id
@property
@lazy
def active_flags(self): return self.flags.count()
@property
@lazy
def ordered_flags(self): return self.flags.order_by(CommentFlag.id).all()
class Notification(Base):
__tablename__ = "notifications"
id = Column(Integer, primary_key=True)
user_id = Column(Integer, ForeignKey("users.id"))
comment_id = Column(Integer, ForeignKey("comments.id"))
read = Column(Boolean, default=False)
followsender = Column(Integer)
unfollowsender = Column(Integer)
removefollowsender = Column(Integer)
blocksender = Column(Integer)
unblocksender = Column(Integer)
comment = relationship("Comment", viewonly=True)
user = relationship("User", viewonly=True)
def __repr__(self):
return f"<Notification(id={self.id})>"

811
files/classes/submission.py 100755 → 100644

@@ -1,407 +1,406 @@
from flask import render_template, g
from sqlalchemy import *
from sqlalchemy.orm import relationship, deferred
import re, random
from urllib.parse import urlparse
from files.helpers.lazy import lazy
from files.helpers.const import SLURS, AUTOPOLLER_ACCOUNT
from files.__main__ import Base
from .flags import Flag
from os import environ
import time
site = environ.get("DOMAIN").strip()
site_name = environ.get("SITE_NAME").strip()
class Submission(Base):
__tablename__ = "submissions"
id = Column(BigInteger, primary_key=True)
author_id = Column(BigInteger, ForeignKey("users.id"))
edited_utc = Column(BigInteger, default=0)
created_utc = Column(BigInteger, default=0)
thumburl = Column(String)
is_banned = Column(Boolean, default=False)
removed_by = Column(Integer)
bannedfor = Column(Boolean)
views = Column(Integer, default=0)
deleted_utc = Column(Integer, default=0)
distinguish_level = Column(Integer, default=0)
created_str = Column(String)
stickied = Column(String)
is_pinned = Column(Boolean, default=False)
private = Column(Boolean, default=False)
club = Column(Boolean, default=False)
comment_count = Column(Integer, default=0)
is_approved = Column(Integer, ForeignKey("users.id"), default=0)
over_18 = Column(Boolean, default=False)
is_bot = Column(Boolean, default=False)
upvotes = Column(Integer, default=1)
downvotes = Column(Integer, default=0)
app_id=Column(Integer, ForeignKey("oauth_apps.id"))
title = Column(String)
title_html = Column(String)
url = Column(String)
body = deferred(Column(String))
body_html = deferred(Column(String))
ban_reason = Column(String)
embed_url = Column(String)
comments = relationship("Comment", lazy="dynamic", primaryjoin="Comment.parent_submission==Submission.id", viewonly=True)
flags = relationship("Flag", lazy="dynamic", viewonly=True)
author = relationship("User", primaryjoin="Submission.author_id==User.id")
oauth_app = relationship("OauthApp", viewonly=True)
approved_by = relationship("User", uselist=False, primaryjoin="Submission.is_approved==User.id", viewonly=True)
awards = relationship("AwardRelationship", viewonly=True)
def __init__(self, *args, **kwargs):
if "created_utc" not in kwargs:
kwargs["created_utc"] = int(time.time())
kwargs["created_str"] = time.strftime(
"%I:%M %p on %d %b %Y", time.gmtime(
kwargs["created_utc"]))
super().__init__(*args, **kwargs)
def __repr__(self):
return f"<Submission(id={self.id})>"
@property
@lazy
def options(self):
return self.comments.filter_by(author_id = AUTOPOLLER_ACCOUNT, level=1)
@property
@lazy
def created_datetime(self):
return str(time.strftime("%d/%B/%Y %H:%M:%S UTC", time.gmtime(self.created_utc)))
@property
@lazy
def created_datetime(self):
return str(time.strftime("%d/%B/%Y %H:%M:%S UTC", time.gmtime(self.created_utc)))
@property
@lazy
def age_string(self):
age = int(time.time()) - self.created_utc
if age < 60:
return "just now"
elif age < 3600:
minutes = int(age / 60)
return f"{minutes}m ago"
elif age < 86400:
hours = int(age / 3600)
return f"{hours}hr ago"
elif age < 2678400:
days = int(age / 86400)
return f"{days}d ago"
now = time.gmtime()
ctd = time.gmtime(self.created_utc)
months = now.tm_mon - ctd.tm_mon + 12 * (now.tm_year - ctd.tm_year)
if now.tm_mday < ctd.tm_mday:
months -= 1
if months < 12:
return f"{months}mo ago"
else:
years = int(months / 12)
return f"{years}yr ago"
@property
@lazy
def edited_string(self):
if not self.edited_utc: return "never"
age = int(time.time()) - self.edited_utc
if age < 60:
return "just now"
elif age < 3600:
minutes = int(age / 60)
return f"{minutes}m ago"
elif age < 86400:
hours = int(age / 3600)
return f"{hours}hr ago"
elif age < 2678400:
days = int(age / 86400)
return f"{days}d ago"
now = time.gmtime()
ctd = time.gmtime(self.edited_utc)
months = now.tm_mon - ctd.tm_mon + 12 * (now.tm_year - ctd.tm_year)
if months < 12:
return f"{months}mo ago"
else:
years = now.tm_year - ctd.tm_year
return f"{years}yr ago"
@property
@lazy
def edited_datetime(self):
return str(time.strftime("%d/%B/%Y %H:%M:%S UTC", time.gmtime(self.edited_utc)))
@property
@lazy
def score(self):
return self.upvotes - self.downvotes
@property
@lazy
def fullname(self):
return f"t2_{self.id}"
@property
@lazy
def shortlink(self):
return f"https://{site}/post/{self.id}"
@property
@lazy
def permalink(self):
if self.club: return f"/post/{self.id}"
output = self.title.lower()
output = re.sub('&\w{2,3};', '', output)
output = [re.sub('\W', '', word) for word in output.split()]
output = [x for x in output if x][:6]
output = '-'.join(output)
if not output: output = '-'
return f"/post/{self.id}/{output}"
@lazy
def rendered_page(self, sort=None, comment=None, comment_info=None, v=None):
if self.is_banned and not (v and (v.admin_level >= 3 or self.author_id == v.id)): template = "submission_banned.html"
else: template = "submission.html"
comments = self.__dict__.get('preloaded_comments', [])
if comments:
pinned_comment = []
index = {}
for c in comments:
if c.is_pinned and c.parent_fullname==self.fullname:
pinned_comment += [c]
continue
if c.parent_fullname in index: index[c.parent_fullname].append(c)
else: index[c.parent_fullname] = [c]
for c in comments: c.__dict__["replies"] = index.get(c.fullname, [])
if comment: self.__dict__["replies"] = [comment]
else: self.__dict__["replies"] = pinned_comment + index.get(self.fullname, [])
return render_template(template,
v=v,
p=self,
sort=sort,
linked_comment=comment,
comment_info=comment_info,
render_replies=True
)
@property
@lazy
def domain(self):
if not self.url: return "text post"
domain = urlparse(self.url).netloc
if domain.startswith("www."): domain = domain.split("www.")[1]
return domain.replace("old.reddit.com", "reddit.com")
@property
@lazy
def thumb_url(self):
if self.over_18: return f"https://{site}/assets/images/nsfw.gif"
elif not self.url: return f"https://{site}/assets/images/{site_name}/default_thumb_text.gif"
elif self.thumburl: return self.thumburl
elif "youtu.be" in self.domain or "youtube.com" in self.domain: return f"https://{site}/assets/images/default_thumb_yt.gif"
else: return f"https://{site}/assets/images/default_thumb_link.gif"
@property
@lazy
def json_raw(self):
flags = {}
for f in self.flags: flags[f.user.username] = f.reason
data = {'author_name': self.author.username,
'permalink': self.permalink,
'is_banned': bool(self.is_banned),
'deleted_utc': self.deleted_utc,
'created_utc': self.created_utc,
'id': self.id,
'title': self.title,
'is_nsfw': self.over_18,
'is_bot': self.is_bot,
'thumb_url': self.thumb_url,
'domain': self.domain,
'url': self.url,
'body': self.body,
'body_html': self.body_html,
'created_utc': self.created_utc,
'edited_utc': self.edited_utc or 0,
'comment_count': self.comment_count,
'score': self.score,
'upvotes': self.upvotes,
'downvotes': self.downvotes,
'stickied': self.stickied,
'private' : self.private,
'distinguish_level': self.distinguish_level,
'voted': self.voted if hasattr(self, 'voted') else 0,
'flags': flags,
}
if self.ban_reason:
data["ban_reason"]=self.ban_reason
return data
@property
@lazy
def json_core(self):
if self.is_banned:
return {'is_banned': True,
'deleted_utc': self.deleted_utc,
'ban_reason': self.ban_reason,
'id': self.id,
'title': self.title,
'permalink': self.permalink,
}
elif self.deleted_utc:
return {'is_banned': bool(self.is_banned),
'deleted_utc': True,
'id': self.id,
'title': self.title,
'permalink': self.permalink,
}
return self.json_raw
@property
@lazy
def json(self):
data=self.json_core
if self.deleted_utc > 0 or self.is_banned:
return data
data["author"]=self.author.json_core
data["comment_count"]=self.comment_count
if "replies" in self.__dict__:
data["replies"]=[x.json_core for x in self.replies]
if "voted" in self.__dict__:
data["voted"] = self.voted
return data
def award_count(self, kind) -> int:
return len([x for x in self.awards if x.kind == kind])
@lazy
def realurl(self, v):
if v and v.agendaposter and random.randint(1, 10) < 4:
return 'https://secure.actblue.com/donate/ms_blm_homepage_2019'
elif v and self.url and self.url.startswith("https://old.reddit.com/"):
url = self.url
if not v.oldreddit: url = self.url.replace("old.reddit.com", "reddit.com")
if v.controversial and '/comments/' in url and "sort=" not in url:
if "?" in url: url += "&sort=controversial"
else: url += "?sort=controversial"
return url
elif self.url:
if v and v.nitter: return self.url.replace("www.twitter.com", "nitter.net").replace("twitter.com", "nitter.net")
return self.url
else: return ""
def realbody(self, v):
if self.club and not (v and v.paid_dues): return "COUNTRY CLUB ONLY"
body = self.body_html
if not v or v.slurreplacer:
for s,r in SLURS.items():
body = body.replace(s, r)
if v and not v.oldreddit: body = body.replace("old.reddit.com", "reddit.com")
if v and v.nitter: body = body.replace("www.twitter.com", "nitter.net").replace("twitter.com", "nitter.net")
return body
def plainbody(self, v):
if self.club and not (v and v.paid_dues): return "COUNTRY CLUB ONLY"
body = self.body
if not v or v.slurreplacer:
for s,r in SLURS.items():
body = body.replace(s, r)
if v and not v.oldreddit: body = body.replace("old.reddit.com", "reddit.com")
if v and v.nitter: body = body.replace("www.twitter.com", "nitter.net").replace("twitter.com", "nitter.net")
return body
@lazy
def realtitle(self, v):
if self.club and not (v and v.paid_dues) and not (v and v.admin_level == 6): return 'COUNTRY CLUB MEMBERS ONLY'
elif self.title_html: title = self.title_html
else: title = self.title
if not v or v.slurreplacer:
for s,r in SLURS.items(): title = title.replace(s, r)
return title
@lazy
def plaintitle(self, v):
if self.club and not (v and v.paid_dues) and not (v and v.admin_level == 6): return 'COUNTRY CLUB MEMBERS ONLY'
else: title = self.title
if not v or v.slurreplacer:
for s,r in SLURS.items(): title = title.replace(s, r)
return title
@property
@lazy
def is_image(self):
if self.url: return self.url.lower().endswith('.webp') or self.url.lower().endswith('.jpg') or self.url.lower().endswith('.png') or self.url.lower().endswith('.gif') or self.url.lower().endswith('.jpeg') or self.url.lower().endswith('?maxwidth=9999')
else: return False
@property
@lazy
def active_flags(self): return self.flags.count()
@property
@lazy
def ordered_flags(self): return self.flags.order_by(Flag.id).all()
class SaveRelationship(Base):
__tablename__="save_relationship"
id=Column(Integer, primary_key=True)
user_id=Column(Integer)
submission_id=Column(Integer)
comment_id=Column(Integer)
from os import environ
import random
import re
import time
from urllib.parse import urlparse
from flask import render_template
from sqlalchemy import *
from sqlalchemy.orm import relationship, deferred
from files.__main__ import Base
from files.helpers.const import SLURS, AUTOPOLLER_ACCOUNT
from files.helpers.lazy import lazy
from .flags import Flag
from ..helpers.word_censor import censor_slurs
site = environ.get("DOMAIN").strip()
site_name = environ.get("SITE_NAME").strip()
class Submission(Base):
__tablename__ = "submissions"
id = Column(BigInteger, primary_key=True)
author_id = Column(BigInteger, ForeignKey("users.id"))
edited_utc = Column(BigInteger, default=0)
created_utc = Column(BigInteger, default=0)
thumburl = Column(String)
is_banned = Column(Boolean, default=False)
removed_by = Column(Integer)
bannedfor = Column(Boolean)
views = Column(Integer, default=0)
deleted_utc = Column(Integer, default=0)
distinguish_level = Column(Integer, default=0)
created_str = Column(String)
stickied = Column(String)
is_pinned = Column(Boolean, default=False)
private = Column(Boolean, default=False)
club = Column(Boolean, default=False)
comment_count = Column(Integer, default=0)
is_approved = Column(Integer, ForeignKey("users.id"), default=0)
over_18 = Column(Boolean, default=False)
is_bot = Column(Boolean, default=False)
upvotes = Column(Integer, default=1)
downvotes = Column(Integer, default=0)
app_id=Column(Integer, ForeignKey("oauth_apps.id"))
title = Column(String)
title_html = Column(String)
url = Column(String)
body = deferred(Column(String))
body_html = deferred(Column(String))
ban_reason = Column(String)
embed_url = Column(String)
comments = relationship("Comment", lazy="dynamic", primaryjoin="Comment.parent_submission==Submission.id", viewonly=True)
flags = relationship("Flag", lazy="dynamic", viewonly=True)
author = relationship("User", primaryjoin="Submission.author_id==User.id")
oauth_app = relationship("OauthApp", viewonly=True)
approved_by = relationship("User", uselist=False, primaryjoin="Submission.is_approved==User.id", viewonly=True)
awards = relationship("AwardRelationship", viewonly=True)
def __init__(self, *args, **kwargs):
if "created_utc" not in kwargs:
kwargs["created_utc"] = int(time.time())
kwargs["created_str"] = time.strftime(
"%I:%M %p on %d %b %Y", time.gmtime(
kwargs["created_utc"]))
super().__init__(*args, **kwargs)
def __repr__(self):
return f"<Submission(id={self.id})>"
@property
@lazy
def options(self):
return self.comments.filter_by(author_id = AUTOPOLLER_ACCOUNT, level=1)
@property
@lazy
def created_datetime(self):
return str(time.strftime("%d/%B/%Y %H:%M:%S UTC", time.gmtime(self.created_utc)))
@property
@lazy
def created_datetime(self):
return str(time.strftime("%d/%B/%Y %H:%M:%S UTC", time.gmtime(self.created_utc)))
@property
@lazy
def age_string(self):
age = int(time.time()) - self.created_utc
if age < 60:
return "just now"
elif age < 3600:
minutes = int(age / 60)
return f"{minutes}m ago"
elif age < 86400:
hours = int(age / 3600)
return f"{hours}hr ago"
elif age < 2678400:
days = int(age / 86400)
return f"{days}d ago"
now = time.gmtime()
ctd = time.gmtime(self.created_utc)
months = now.tm_mon - ctd.tm_mon + 12 * (now.tm_year - ctd.tm_year)
if now.tm_mday < ctd.tm_mday:
months -= 1
if months < 12:
return f"{months}mo ago"
else:
years = int(months / 12)
return f"{years}yr ago"
@property
@lazy
def edited_string(self):
if not self.edited_utc: return "never"
age = int(time.time()) - self.edited_utc
if age < 60:
return "just now"
elif age < 3600:
minutes = int(age / 60)
return f"{minutes}m ago"
elif age < 86400:
hours = int(age / 3600)
return f"{hours}hr ago"
elif age < 2678400:
days = int(age / 86400)
return f"{days}d ago"
now = time.gmtime()
ctd = time.gmtime(self.edited_utc)
months = now.tm_mon - ctd.tm_mon + 12 * (now.tm_year - ctd.tm_year)
if months < 12:
return f"{months}mo ago"
else:
years = now.tm_year - ctd.tm_year
return f"{years}yr ago"
@property
@lazy
def edited_datetime(self):
return str(time.strftime("%d/%B/%Y %H:%M:%S UTC", time.gmtime(self.edited_utc)))
@property
@lazy
def score(self):
return self.upvotes - self.downvotes
@property
@lazy
def fullname(self):
return f"t2_{self.id}"
@property
@lazy
def shortlink(self):
return f"https://{site}/post/{self.id}"
@property
@lazy
def permalink(self):
if self.club: return f"/post/{self.id}"
output = self.title.lower()
output = re.sub('&\w{2,3};', '', output)
output = [re.sub('\W', '', word) for word in output.split()]
output = [x for x in output if x][:6]
output = '-'.join(output)
if not output: output = '-'
return f"/post/{self.id}/{output}"
@lazy
def rendered_page(self, sort=None, comment=None, comment_info=None, v=None):
if self.is_banned and not (v and (v.admin_level >= 3 or self.author_id == v.id)): template = "submission_banned.html"
else: template = "submission.html"
comments = self.__dict__.get('preloaded_comments', [])
if comments:
pinned_comment = []
index = {}
for c in comments:
if c.is_pinned and c.parent_fullname==self.fullname:
pinned_comment += [c]
continue
if c.parent_fullname in index: index[c.parent_fullname].append(c)
else: index[c.parent_fullname] = [c]
for c in comments: c.__dict__["replies"] = index.get(c.fullname, [])
if comment: self.__dict__["replies"] = [comment]
else: self.__dict__["replies"] = pinned_comment + index.get(self.fullname, [])
return render_template(template,
v=v,
p=self,
sort=sort,
linked_comment=comment,
comment_info=comment_info,
render_replies=True
)
@property
@lazy
def domain(self):
if not self.url: return "text post"
domain = urlparse(self.url).netloc
if domain.startswith("www."): domain = domain.split("www.")[1]
return domain.replace("old.reddit.com", "reddit.com")
@property
@lazy
def thumb_url(self):
if self.over_18: return f"https://{site}/assets/images/nsfw.gif"
elif not self.url: return f"https://{site}/assets/images/{site_name}/default_thumb_text.gif"
elif self.thumburl: return self.thumburl
elif "youtu.be" in self.domain or "youtube.com" in self.domain: return f"https://{site}/assets/images/default_thumb_yt.gif"
else: return f"https://{site}/assets/images/default_thumb_link.gif"
@property
@lazy
def json_raw(self):
flags = {}
for f in self.flags: flags[f.user.username] = f.reason
data = {'author_name': self.author.username,
'permalink': self.permalink,
'is_banned': bool(self.is_banned),
'deleted_utc': self.deleted_utc,
'created_utc': self.created_utc,
'id': self.id,
'title': self.title,
'is_nsfw': self.over_18,
'is_bot': self.is_bot,
'thumb_url': self.thumb_url,
'domain': self.domain,
'url': self.url,
'body': self.body,
'body_html': self.body_html,
'created_utc': self.created_utc,
'edited_utc': self.edited_utc or 0,
'comment_count': self.comment_count,
'score': self.score,
'upvotes': self.upvotes,
'downvotes': self.downvotes,
'stickied': self.stickied,
'private' : self.private,
'distinguish_level': self.distinguish_level,
'voted': self.voted if hasattr(self, 'voted') else 0,
'flags': flags,
}
if self.ban_reason:
data["ban_reason"]=self.ban_reason
return data
@property
@lazy
def json_core(self):
if self.is_banned:
return {'is_banned': True,
'deleted_utc': self.deleted_utc,
'ban_reason': self.ban_reason,
'id': self.id,
'title': self.title,
'permalink': self.permalink,
}
elif self.deleted_utc:
return {'is_banned': bool(self.is_banned),
'deleted_utc': True,
'id': self.id,
'title': self.title,
'permalink': self.permalink,
}
return self.json_raw
@property
@lazy
def json(self):
data=self.json_core
if self.deleted_utc > 0 or self.is_banned:
return data
data["author"]=self.author.json_core
data["comment_count"]=self.comment_count
if "replies" in self.__dict__:
data["replies"]=[x.json_core for x in self.replies]
if "voted" in self.__dict__:
data["voted"] = self.voted
return data
def award_count(self, kind) -> int:
return len([x for x in self.awards if x.kind == kind])
@lazy
def realurl(self, v):
if v and v.agendaposter and random.randint(1, 10) < 4:
return 'https://secure.actblue.com/donate/ms_blm_homepage_2019'
elif v and self.url and self.url.startswith("https://old.reddit.com/"):
url = self.url
if not v.oldreddit: url = self.url.replace("old.reddit.com", "reddit.com")
if v.controversial and '/comments/' in url and "sort=" not in url:
if "?" in url: url += "&sort=controversial"
else: url += "?sort=controversial"
return url
elif self.url:
if v and v.nitter: return self.url.replace("www.twitter.com", "nitter.net").replace("twitter.com", "nitter.net")
return self.url
else: return ""
def realbody(self, v):
if self.club and not (v and v.paid_dues): return "COUNTRY CLUB ONLY"
body = self.body_html
body = censor_slurs(body, v)
if v and not v.oldreddit: body = body.replace("old.reddit.com", "reddit.com")
if v and v.nitter: body = body.replace("www.twitter.com", "nitter.net").replace("twitter.com", "nitter.net")
return body
def plainbody(self, v):
if self.club and not (v and v.paid_dues): return "COUNTRY CLUB ONLY"
body = self.body
body = censor_slurs(body, v)
if v and not v.oldreddit: body = body.replace("old.reddit.com", "reddit.com")
if v and v.nitter: body = body.replace("www.twitter.com", "nitter.net").replace("twitter.com", "nitter.net")
return body
@lazy
def realtitle(self, v):
if self.club and not (v and v.paid_dues) and not (v and v.admin_level == 6): return 'COUNTRY CLUB MEMBERS ONLY'
elif self.title_html: title = self.title_html
else: title = self.title
if not v or v.slurreplacer:
for s,r in SLURS.items(): title = title.replace(s, r)
return title
@lazy
def plaintitle(self, v):
if self.club and not (v and v.paid_dues) and not (v and v.admin_level == 6): return 'COUNTRY CLUB MEMBERS ONLY'
else: title = self.title
if not v or v.slurreplacer:
for s,r in SLURS.items(): title = title.replace(s, r)
return title
@property
@lazy
def is_image(self):
if self.url: return self.url.lower().endswith('.webp') or self.url.lower().endswith('.jpg') or self.url.lower().endswith('.png') or self.url.lower().endswith('.gif') or self.url.lower().endswith('.jpeg') or self.url.lower().endswith('?maxwidth=9999')
else: return False
@property
@lazy
def active_flags(self): return self.flags.count()
@property
@lazy
def ordered_flags(self): return self.flags.order_by(Flag.id).all()
class SaveRelationship(Base):
__tablename__="save_relationship"
id=Column(Integer, primary_key=True)
user_id=Column(Integer)
submission_id=Column(Integer)
comment_id=Column(Integer)
type=Column(Integer)
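
One detail in Submission.permalink (unchanged by this commit) that is easy to misread: the slug is built by lowercasing the title, stripping short HTML entities, removing non-word characters from each word, and joining the first six remaining words with hyphens. A standalone illustration with a made-up title:

import re

title = "Dude &amp; bussy lmao, a very long example title!"
output = title.lower()
output = re.sub('&\w{2,3};', '', output)                  # drop short entities such as &amp;
output = [re.sub('\W', '', word) for word in output.split()]
output = [x for x in output if x][:6]                     # keep at most six non-empty words
print('-'.join(output) or '-')                            # -> dude-bussy-lmao-a-very-long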

591
files/helpers/const.py 100755 → 100644

@@ -1,424 +1,167 @@
from os import environ
site = environ.get("DOMAIN").strip()
SLURS = {
" faggot":" cute twink",
" Faggot":" Cute twink",
" FAGGOT":" CUTE TWINK",
" fag":" cute twink",
" Fag":" Cute twink",
" FAG":" CUTE TWINK",
" pedophile":" libertarian",
" Pedophile":" Libertarian",
" PEDOPHILE":" LIBERTARIAN",
" pedo":" libertarian",
" Pedo":" Libertarian",
" PEDO":" LIBERTARIAN",
" kill yourself":" keep yourself safe",
" KILL YOURSELF":" KEEP YOURSELF SAFE",
" nigger":" πŸ€",
" Nigger":" πŸ€",
" NIGGER":" πŸ€",
" rapist":" male feminist",
" Rapist":" Male feminist",
" RAPIST":" MALE FEMINIST",
" steve akins":" penny verity oaken",
" Steve Akins":" Penny Verity Oaken",
" STEVE AKINS":" PENNY VERITY OAKEN",
" trannie":" πŸš‚πŸšƒπŸšƒ",
" Trannie":" πŸš‚πŸšƒπŸšƒ",
" TRANNIE":" πŸš‚πŸšƒπŸšƒ",
" tranny":" πŸš‚πŸšƒπŸšƒ",
" Tranny":" πŸš‚πŸšƒπŸšƒ",
" TRANNY":" πŸš‚πŸšƒπŸšƒ",
" troon":" πŸš‚πŸšƒπŸšƒ",
" Troon":" πŸš‚πŸšƒπŸšƒ",
" TROON":" πŸš‚πŸšƒπŸšƒ",
" NoNewNormal": " HorseDewormerAddicts",
" nonewnormal": " horsedewormeraddicts",
" Kike": " https://sciencedirect.com/science/article/abs/pii/S016028960600033X",
" kike": " https://sciencedirect.com/science/article/abs/pii/S016028960600033X",
" retard":" r-slur",
" Retard":" R-slur",
" RETARD":" R-SLUR",
" janny":" j-slur",
" Janny":" J-slur",
" JANNY":" J-SLUR",
" jannie":" j-slur",
" Jannie":" J-slur",
" JANNIE":" J-SLUR",
" janny":" j-slur",
" Janny":" J-slur",
" JANNY":" J-SLUR",
" jannie":" j-slur",
" Jannie":" J-slur",
" JANNIE":" J-SLUR",
" latinos":" latinx",
" latino":" latinx",
" latinas":" latinx",
" latina":" latinx",
" hispanics":" latinx",
" hispanic":" latinx",
" Latinos":" Latinx",
" Latino":" Latinx",
" Latinas":" Latinx",
" Latina":" Latinx",
" Hispanics":" Latinx",
" Hispanic":" Latinx",
" LATINOS":" LATINX",
" LATINO":" LATINX",
" LATINAS":" LATINX",
" LATINA":" LATINX",
" HISPANICS":" LATINX",
" HISPANIC":" LATINX",
" uss liberty incident":" tragic accident aboard the USS Liberty",
" USS Liberty Incident":" tragic accident aboard the USS Liberty",
" USS Liberty incident":" tragic accident aboard the USS Liberty",
" USS Liberty Incident":" tragic accident aboard the USS Liberty",
" uss Liberty incident":" tragic accident aboard the USS Liberty",
" uss liberty Incident":" tragic accident aboard the USS Liberty",
" USS LIBERTY INCIDENT":" TRAGIC ACCIDENT ABOARD THE USS LIBERTY",
" lavon affair":" Lavon Misunderstanding",
" Lavon affair":" Lavon Misunderstanding",
" Lavon Affair":" Lavon Misunderstanding",
" lavon Affair":" Lavon Misunderstanding",
" shylock":" Israeli friend",
" Shylock":" Israeli friend",
" SHYLOCK":" ISRAELI FRIEND",
" yid":" Israeli friend",
" Yid":" Israeli friend",
" YID":" ISRAELI FRIEND",
" heeb":" Israeli friend",
" Heeb":" Israeli friend",
" HEEB":" ISRAELI FRIEND",
" sheeny":" Israeli friend",
" Sheeny":" Israeli friend",
" SHEENY":" ISRAELI FRIEND",
" sheenies":" Israeli friends",
" Sheenies":" Israeli friends",
" SHEENIES":" ISRAELI FRIENDS",
" hymie":" Israeli friend",
" Hymie":" Israeli friend",
" HYMIES":" ISRAELI FRIENDS",
" allah":" Allah (SWT)",
" Allah":" Allah (SWT)",
" ALLAH":" ALLAH (SWT)",
" Mohammad":" Mohammad (PBUH)",
" Muhammad":" Mohammad (PBUH)",
" Mohammed":" Mohammad (PBUH)",
" Muhammed":" Mohammad (PBUH)",
" mohammad":" Mohammad (PBUH)",
" mohammed":" Mohammad (PBUH)",
" muhammad":" Mohammad (PBUH)",
" muhammed":" Mohammad (PBUH)",
" I HATE MARSEY":" I LOVE MARSEY",
" i hate marsey":" i love marsey",
" I hate Marsey":" I love Marsey",
" I hate marsey":" I love Marsey",
" libertarian":" pedophile",
" Libertarian":" Pedophile",
" LIBERTARIAN":" PEDOPHILE",
" Billie Eilish":" Billie Eilish (fat cow)",
" billie eilish":" bilie eilish (fat cow)",
" BILLIE EILISH":" BILIE EILISH (FAT COW)",
" dancing Israelis":" I love Israel",
" dancing israelis":" i love israel",
" DANCING ISRAELIS":" I LOVE ISRAEL",
" Dancing Israelis":" I love Israel",
" sodomite":" total dreamboat",
" Sodomite":" Total dreamboat",
" pajeet":" sexy Indian dude",
" Pajeet":" Sexy Indian dude",
" PAJEET":" SEXY INDIAN DUDE",
" female":" birthing person",
" Female":" Womb-haver",
" FEMALE":" birthing person",
" landlord":" landchad",
" Landlord":" Landchad",
" LANDLORD":" LANDCHAD",
" tenant":" renthog",
" Tenant":" Renthog",
" TENANT":" RENTHOG",
" renter":" rentoid",
" Renter":" Rentoid",
" RENTER":" RENTOID",
" autistic":" neurodivergent",
" Autistic":" Neurodivergent",
" AUTISTIC":" NEURODIVERGENT",
" anime":" p-dophilic japanese cartoons",
" Anime":" P-dophilic Japanese cartoons",
" ANIME":" P-DOPHILIC JAPANESE CARTOONS",
" holohoax":" I tried to claim the Holocaust didn't happen because I am a pencil-dicked imbecile and the word filter caught me lol",
" Holohoax":" I tried to claim the Holocaust didn't happen because I am a pencil-dicked imbecile and the word filter caught me lol",
" HOLOHOAX":" I tried to claim the Holocaust didn't happen because I am a pencil-dicked imbecile and the word filter caught me lol",
" groomercord":" discord (actually a pretty cool service)",
" Groomercord":" Discord (actually a pretty cool service)",
" GROOMERCORD":" DISCORD (ACTUALLY A PRETTY COOL SERVICE)",
" pedocord":" discord (actually a pretty cool service)",
" Pedocord":" Discord (actually a pretty cool service)",
" PEDOCORD":" DISCORD (ACTUALLY A PRETTY COOL SERVICE)",
" i hate carp":" i love carp",
" I hate carp":" I love carp",
" I HATE CARP":" I LOVE CARP",
" I hate Carp":" I love Carp",
" manlet":" little king",
" Manlet":" Little king",
" MANLET":" LITTLE KING",
" gamer":" g*mer",
" Gamer":" G*mer",
" GAMER":" G*MER",
" journalist":" journ*list",
" Journalist":" Journ*list",
" JOURNALIST":" JOURN*LIST",
" journalism":" journ*lism",
" Journalism":" Journ*lism",
" JOURNALISM":" JOURN*LISM",
" buttcheeks":" bulva",
" Buttcheeks":" Bulva",
" BUTTCHEEKS":" BULVA",
" asscheeks":" bulva",
" Asscheeks":" bulva",
" ASSCHEEKS":" BULVA",
" wuhan flu":" SARS-CoV-2 syndemic",
" Wuhan flu":" SARS-CoV-2 syndemic",
" Wuhan Flu":" SARS-CoV-2 syndemic",
" china flu":" SARS-CoV-2 syndemic",
" China flu":" SARS-CoV-2 syndemic",
" China Flu":" SARS-CoV-2 syndemic",
" china virus":" SARS-CoV-2 syndemic",
" China virus":" SARS-CoV-2 syndemic",
" China Virus":" SARS-CoV-2 syndemic",
" kung flu":" SARS-CoV-2 syndemic",
" Kung flu":" SARS-CoV-2 syndemic",
" Kung Flu":" SARS-CoV-2 syndemic",
"faggot ":"cute twink ",
"Faggot ":"Cute twink ",
"FAGGOT ":"CUTE TWINK ",
"fag ":"cute twink ",
"Fag ":"Cute twink ",
"FAG ":"CUTE TWINK ",
"pedophile ":"libertarian ",
"Pedophile ":"Libertarian ",
"PEDOPHILE ":"LIBERTARIAN ",
"kill yourself ":"keep yourself safe ",
"KILL YOURSELF ":"KEEP YOURSELF SAFE ",
"nigger ":"πŸ€ ",
"Nigger ":"πŸ€ ",
"NIGGER ":"πŸ€ ",
"steve akins ":"penny verity oaken ",
"Steve Akins ":"Penny Verity Oaken ",
"STEVE AKINS ":"PENNY VERITY OAKEN ",
"trannie ":"πŸš‚πŸšƒπŸšƒ ",
"Trannie ":"πŸš‚πŸšƒπŸšƒ ",
"TRANNIE ":"πŸš‚πŸšƒπŸšƒ ",
"tranny ":"πŸš‚πŸšƒπŸšƒ ",
"Tranny ":"πŸš‚πŸšƒπŸšƒ ",
"TRANNY ":"πŸš‚πŸšƒπŸšƒ ",
"troon ":"πŸš‚πŸšƒπŸšƒ ",
"Troon ":"πŸš‚πŸšƒπŸšƒ ",
"TROON ":"πŸš‚πŸšƒπŸšƒ ",
"NoNewNormal ": "HorseDewormerAddicts ",
"nonewnormal ": "horsedewormeraddicts ",
"Kike ": "https://sciencedirect.com/science/article/abs/pii/S016028960600033X ",
"kike ": "https://sciencedirect.com/science/article/abs/pii/S016028960600033X ",
"retard ":"r-slur ",
"Retard ":"R-slur ",
"RETARD ":"R-SLUR ",
"janny ":"j-slur ",
"Janny ":"J-slur ",
"JANNY ":"J-SLUR ",
"jannie ":"j-slur ",
"Jannie ":"J-slur ",
"JANNIE ":"J-SLUR ",
"latinos ":"latinx ",
"latino ":"latinx ",
"latinas ":"latinx ",
"latina ":"latinx ",
"hispanics ":"latinx ",
"hispanic ":"latinx ",
"Latinos ":"Latinx ",
"Latino ":"Latinx ",
"Latinas ":"Latinx ",
"Latina ":"Latinx ",
"Hispanics ":"Latinx ",
"Hispanic ":"Latinx ",
"LATINOS ":"LATINX ",
"LATINO ":"LATINX ",
"LATINAS ":"LATINX ",
"LATINA ":"LATINX ",
"HISPANICS ":"LATINX ",
"HISPANIC ":"LATINX ",
"uss liberty incident ":"tragic accident aboard the USS Liberty ",
"USS Liberty Incident ":"tragic accident aboard the USS Liberty ",
"USS Liberty incident ":"tragic accident aboard the USS Liberty ",
"USS Liberty Incident ":"tragic accident aboard the USS Liberty ",
"uss Liberty incident ":"tragic accident aboard the USS Liberty ",
"uss liberty Incident ":"tragic accident aboard the USS Liberty ",
"USS LIBERTY INCIDENT ":"TRAGIC ACCIDENT ABOARD THE USS LIBERTY ",
"lavon affair ":"Lavon Misunderstanding ",
"Lavon affair ":"Lavon Misunderstanding ",
"Lavon Affair ":"Lavon Misunderstanding ",
"lavon Affair ":"Lavon Misunderstanding ",
"shylock ":"Israeli friend ",
"Shylock ":"Israeli friend ",
"SHYLOCK ":"ISRAELI FRIEND ",
"yid ":"Israeli friend ",
"Yid ":"Israeli friend ",
"YID ":"ISRAELI FRIEND ",
"heeb ":"Israeli friend ",
"Heeb ":"Israeli friend ",
"HEEB ":"ISRAELI FRIEND ",
"sheeny ":"Israeli friend ",
"Sheeny ":"Israeli friend ",
"SHEENY ":"ISRAELI FRIEND ",
"sheenies ":"Israeli friends ",
"Sheenies ":"Israeli friends ",
"SHEENIES ":"ISRAELI FRIENDS ",
"hymie ":"Israeli friend ",
"Hymie ":"Israeli friend ",
"HYMIES ":"ISRAELI FRIENDS ",
"Mohammad ":"Mohammad (PBUH) ",
"Muhammad ":"Mohammad (PBUH) ",
"Mohammed ":"Mohammad (PBUH) ",
"Muhammed ":"Mohammad (PBUH) ",
"mohammad ":"Mohammad (PBUH) ",
"mohammed ":"Mohammad (PBUH) ",
"muhammad ":"Mohammad (PBUH) ",
"muhammed ":"Mohammad (PBUH) ",
"I HATE MARSEY ":"I LOVE MARSEY ",
"i hate marsey ":"i love marsey ",
"I hate Marsey ":"I love Marsey ",
"I hate marsey ":"I love Marsey ",
"libertarian ":"pedophile ",
"Libertarian ":"Pedophile ",
"LIBERTARIAN ":"PEDOPHILE ",
"Billie Eilish ":"Billie Eilish (fat cow) ",
"billie eilish ":"bilie eilish (fat cow) ",
"BILLIE EILISH ":"BILIE EILISH (FAT COW) ",
"dancing Israelis ":"I love Israel ",
"dancing israelis ":"i love israel ",
"DANCING ISRAELIS ":"I LOVE ISRAEL ",
"Dancing Israelis ":"I love Israel ",
"sodomite ":"total dreamboat ",
"Sodomite ":"Total dreamboat ",
"pajeet ":"sexy Indian dude ",
"Pajeet ":"Sexy Indian dude ",
"PAJEET ":"SEXY INDIAN DUDE ",
"female ":"birthing person ",
"Female ":"Womb-haver ",
"FEMALE ":"birthing person ",
"landlord ":"landchad ",
"Landlord ":"Landchad ",
"LANDLORD ":"LANDCHAD ",
"tenant ":"renthog ",
"Tenant ":"Renthog ",
"TENANT ":"RENTHOG ",
"renter ":"rentoid ",
"Renter ":"Rentoid ",
"RENTER ":"RENTOID ",
"autistic ":"neurodivergent ",
"Autistic ":"Neurodivergent ",
"AUTISTIC ":"NEURODIVERGENT ",
"anime ":"p-dophilic japanese cartoons ",
"Anime ":"P-dophilic Japanese cartoons ",
"ANIME ":"P-DOPHILIC JAPANESE CARTOONS ",
"holohoax ":"I tried to claim the Holocaust didn't happen because I am a pencil-dicked imbecile and the word filter caught me lol ",
"Holohoax ":"I tried to claim the Holocaust didn't happen because I am a pencil-dicked imbecile and the word filter caught me lol ",
"HOLOHOAX ":"I tried to claim the Holocaust didn't happen because I am a pencil-dicked imbecile and the word filter caught me lol ",
"groomercord ":"discord (actually a pretty cool service) ",
"Groomercord ":"Discord (actually a pretty cool service) ",
"GROOMERCORD ":"DISCORD (ACTUALLY A PRETTY COOL SERVICE) ",
"pedocord ":"discord (actually a pretty cool service) ",
"Pedocord ":"Discord (actually a pretty cool service) ",
"PEDOCORD ":"DISCORD (ACTUALLY A PRETTY COOL SERVICE) ",
"i hate carp ":"i love carp ",
"I hate carp ":"I love carp ",
"I HATE CARP ":"I LOVE CARP ",
"I hate Carp ":"I love Carp ",
"manlet ":"little king ",
"Manlet ":"Little king ",
"MANLET ":"LITTLE KING ",
"gamer ":"g*mer ",
"Gamer ":"G*mer ",
"GAMER ":"G*MER ",
"journalist ":"journ*list ",
"Journalist ":"Journ*list ",
"JOURNALIST ":"JOURN*LIST ",
"journalism ":"journ*lism ",
"Journalism ":"Journ*lism ",
"JOURNALISM ":"JOURN*LISM ",
"buttcheeks ":"bulva ",
"Buttcheeks ":"Bulva ",
"BUTTCHEEKS ":"BULVA ",
"asscheeks ":"bulva ",
"Asscheeks ":"bulva ",
"ASSCHEEKS ":"BULVA ",
"wuhan flu ":"SARS-CoV-2 syndemic ",
"Wuhan flu ":"SARS-CoV-2 syndemic ",
"Wuhan Flu ":"SARS-CoV-2 syndemic ",
"china flu ":"SARS-CoV-2 syndemic ",
"China flu ":"SARS-CoV-2 syndemic ",
"China Flu ":"SARS-CoV-2 syndemic ",
"china virus ":"SARS-CoV-2 syndemic ",
"China virus ":"SARS-CoV-2 syndemic ",
"China Virus ":"SARS-CoV-2 syndemic ",
"kung flu ":"SARS-CoV-2 syndemic ",
"Kung flu ":"SARS-CoV-2 syndemic ",
"Kung Flu ":"SARS-CoV-2 syndemic ",
" nig ":" πŸ€ ",
" Nig ":" πŸ€ ",
" NIG ":" πŸ€ ",
" nigs ":" πŸ€s ",
" Nigs ":" πŸ€s ",
" NIGS ":" πŸ€s ",
}
LONGPOST_REPLIES = ['Wow, you must be a JP fan.', 'This is one of the worst posts I have EVER seen. Delete it.', "No, don't reply like this, please do another wall of unhinged rant please.", '# 😴😴😴', "Ma'am we've been over this before. You need to stop.", "I've known more coherent downies.", "Your pulitzer's in the mail", "That's great and all, but I asked for my burger without cheese.", 'That degree finally paying off', "That's nice sweaty. Why don't you have a seat in the time out corner with Pizzashill until you calm down, then you can have your Capri Sun.", "All them words won't bring your pa back.", "You had a chance to not be completely worthless, but it looks like you threw it away. At least you're consistent.", 'Some people are able to display their intelligence by going on at length on a subject and never actually saying anything. This ability is most common in trades such as politics, public relations, and law. You have impressed me by being able to best them all, while still coming off as an absolute idiot.', "You can type 10,000 characters and you decided that these were the one's that you wanted.", 'Have you owned the libs yet?', "I don't know what you said, because I've seen another human naked.", 'Impressive. Normally people with such severe developmental disabilities struggle to write much more than a sentence or two. He really has exceded our expectations for the writing portion. Sadly the coherency of his writing, along with his abilities in the social skills and reading portions, are far behind his peers with similar disabilities.', "This is a really long way of saying you don't fuck.", "Sorry ma'am, looks like his delusions have gotten worse. We'll have to admit him,", '![](https://i.kym-cdn.com/photos/images/newsfeed/001/038/094/0a1.jpg)', 'If only you could put that energy into your relationships', 'Posts like this is why I do Heroine.', 'still unemployed then?', 'K', 'look im gunna have 2 ask u 2 keep ur giant dumps in the toilet not in my replys 😷😷😷', "Mommy is soooo proud of you, sweaty. Let's put this sperg out up on the fridge with all your other failures.", "Good job bobby, here's a star", "That was a mistake. You're about to find out the hard way why.", 'You sat down and wrote all this shit. You could have done so many other things with your life. What happened to your life that made you decide writing novels of bullshit on rdrama.net was the best option?', "I don't have enough spoons to read this shit", "All those words won't bring daddy back.", 'OUT!']
AGENDAPOSTER_MSG = """Hi @{username},\n\nYour comment has been automatically removed because you forgot
to include `trans lives matter`.\n\nDon't worry, we're here to help! We
won't let you post or comment anything that doesn't express your love and acceptance towards
the trans community. Feel free to resubmit your comment with `trans lives matter`
included. \n\n*This is an automated message; if you need help,
you can message us [here](/contact).*"""
VAXX_MSG = """Hi @{username}, it appears that you may be trying to spread dangerous misinformation regarding ineffective COVID-19 treatments based on pseudoscientific hearsay. Your post has been removed because it contained the word ivermectin. We ask that you understand that horse dewormer neither treats, nor prevents, COVID-19. For more information, please read up on what the FDA has to say on the matter:
https://www.fda.gov/consumers/consumer-updates/why-you-should-not-use-ivermectin-treat-or-prevent-covid-19
COVID-19 is not a joke, it is a global pandemic and it has been hard on all of us. It will likely go down as one of the most defining periods of our generation. Many of us have lost loved ones to the virus. It has caused confusion, fear, frustration, and served to further divide us. Tens of millions around the world have died. There is nothing to be gained by spreading bad science based on very understandable fear.
The only proven method of prevention is the COVID-19 vaccine, paired with appropriate social distancing, handwashing, and masks. Vaccines are free in the United States - if you'd like to locate your nearest vaccine provider, please visit https://www.vaccines.gov/ and schedule an appointment today.
Thank you."""
BASED_MSG = "@{username}'s Based Count has increased by 1. Their Based Count is now {basedcount}.\n\nPills: {pills}"
if site == "pcmemes.net":
BASEDBOT_ACCOUNT = 800
NOTIFICATIONS_ACCOUNT = 1046
AUTOJANNY_ACCOUNT = 1050
SNAPPY_ACCOUNT = 261
LONGPOSTBOT_ACCOUNT = 1832
ZOZBOT_ACCOUNT = 1833
AUTOPOLLER_ACCOUNT = 3369
elif site == 'rdrama.net':
NOTIFICATIONS_ACCOUNT = 1046
AUTOJANNY_ACCOUNT = 2360
SNAPPY_ACCOUNT = 261
LONGPOSTBOT_ACCOUNT = 1832
ZOZBOT_ACCOUNT = 1833
AUTOPOLLER_ACCOUNT = 3369
else:
NOTIFICATIONS_ACCOUNT = 1
AUTOJANNY_ACCOUNT = 2
SNAPPY_ACCOUNT = 3
LONGPOSTBOT_ACCOUNT = 4
ZOZBOT_ACCOUNT = 5
AUTOPOLLER_ACCOUNT = 6
PUSHER_INSTANCE_ID = '02ddcc80-b8db-42be-9022-44c546b4dce6'
PUSHER_KEY = environ.get("PUSHER_KEY", "").strip()
from os import environ
site = environ.get("DOMAIN", '').strip()
#####################
# Formatting rules: #
#####################
#
# on the slur side, matches include prefixes and suffixes, but never the middle of a word, so for example
# "retard" will match:
# - "retard"
# - "retarded"
# - "superretard"
# But not "superretarded"
#
# If all letters are lowercase, it matches the lowercase form, the form with only the first word capitalized, the form with every word capitalized, and the all-uppercase form
# "dancing israelis" will match (with prefixes and suffixes omitted for brevity):
# - "dancing israelis"
# - "Dancing israelis"
# - "Dancing Israelis"
# - "DANCING ISRAELIS"
#
# If some letters are uppercase, the same forms apply, plus the original casing itself, and letters that are already uppercase stay uppercase
# "NoNewNormal" will match (with prefixes and suffixes omitted for brevity):
# - "NoNewNormal"
# - "nonewnormal"
# - "Nonewnormal"
# - "NONEWNORMAL"
#
# If the slur has a space before and after then the match is limited to the exact word, no prefixes or suffixes
# (previous rules about capitalization still apply)
# " neg " will match only:
# - "neg"
# - "Neg"
# - "NEG"
#
# On the replacement side, the replacement mirrors the capitalization of the matched slur if the replacement is all lowercase
# "kill yourself" -> "keep yourself safe"
# "Kill yourself" -> "Keep yourself safe"
# "Kill Yourself" -> "Keep Yourself Safe"
# "KILL YOURSELF" -> "KEEP YOURSELF SAFE"
#
# If the replacement side has some capitalization, then that capitalization will always be maintained
# for the pair: <"pajeet": "sexy Indian dude"> it will replace:
# "pajeet" -> "sexy Indian dude"
# "Pajeet" -> "Sexy Indian dude"
# "PAJEET" -> "SEXY INDIAN DUDE"
#
# Super special case: if the replacer starts with "http", its capitalization is never changed (so links stay intact)
#
#
# TL;DR: Just read the above once, or don't, and try to guess!
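#
# A concrete illustration (taken from the tests added in this commit, not an extra rule): with the
# pair <"manlet": "little king">, "Manlets get out!" becomes "Little kings get out!" (first-letter
# casing mirrored, the trailing "s" kept), while the exact-word pair <" nig ": "πŸ€"> leaves
# "nigeria" untouched.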
SLURS = {
"faggot": "cute twink",
"fag": "cute twink",
"pedophile": "libertarian",
"pedo": "libertarian",
"kill yourself": "keep yourself safe",
"nigger": "πŸ€",
"rapist": "male feminist",
"steve akins": "penny verity oaken",
"trannie": "πŸš‚πŸšƒπŸšƒ",
"tranny": "πŸš‚πŸšƒπŸšƒ",
"troon": "πŸš‚πŸšƒπŸšƒ",
"NoNewNormal": "HorseDewormerAddicts",
"kike": "https://sciencedirect.com/science/article/abs/pii/S016028960600033X",
"retard": "r-slur",
"janny": "j-slur",
"jannie": "j-slur",
"janny": "j-slur",
"latinos": "latinx",
"latino": "latinx",
"latinas": "latinx",
"latina": "latinx",
"hispanics": "latinx",
"hispanic": "latinx",
"USS liberty incident": "tragic accident aboard the USS Liberty",
"lavon affair": "Lavon Misunderstanding",
"shylock": "Israeli friend",
"yid": "Israeli friend",
"heeb": "Israeli friend",
"sheeny": "Israeli friend",
"sheenies": "Israeli friends",
"hymie": "Israeli friend",
"allah": "Allah (SWT)",
"mohammad": "Mohammad (PBUH)",
"mohammed": "Mohammad (PBUH)",
"muhammad": "Mohammad (PBUH)",
"muhammed": "Mohammad (PBUH)",
"i hate marsey": "i love marsey",
"libertarian": "pedophile",
"billie bilish": "Billie Eilish (fat cow)",
"dancing Israelis": "i love Israel",
"sodomite": "total dreamboat",
"pajeet": "sexy Indian dude",
"female": "birthing person",
"landlord": "landchad",
"tenant": "renthog",
"renter": "rentoid",
"autistic": "neurodivergent",
"anime": "p-dophilic japanese cartoons",
"holohoax": "i tried to claim the Holocaust didn't happen because I am a pencil-dicked imbecile and the word filter caught me lol",
"groomercord": "discord (actually a pretty cool service)",
"pedocord": "discord (actually a pretty cool service)",
"i hate Carp": "i love Carp",
"manlet": "little king",
"gamer": "g*mer",
"journalist": "journ*list",
"journalism": "journ*lism",
"buttcheeks": "bulva",
"asscheeks": "bulva",
"wuhan flu": "SARS-CoV-2 syndemic",
"china flu": "SARS-CoV-2 syndemic",
"china virus": "SARS-CoV-2 syndemic",
"kung flu": "SARS-CoV-2 syndemic",
# if the word has a space at the beginning and at the end, only the exact word is censored, with no prefixes or suffixes
" nig ": "πŸ€",
" nigs ": "πŸ€s",
}
LONGPOST_REPLIES = ['Wow, you must be a JP fan.', 'This is one of the worst posts I have EVER seen. Delete it.', "No, don't reply like this, please do another wall of unhinged rant please.", '# 😴😴😴', "Ma'am we've been over this before. You need to stop.", "I've known more coherent downies.", "Your pulitzer's in the mail", "That's great and all, but I asked for my burger without cheese.", 'That degree finally paying off', "That's nice sweaty. Why don't you have a seat in the time out corner with Pizzashill until you calm down, then you can have your Capri Sun.", "All them words won't bring your pa back.", "You had a chance to not be completely worthless, but it looks like you threw it away. At least you're consistent.", 'Some people are able to display their intelligence by going on at length on a subject and never actually saying anything. This ability is most common in trades such as politics, public relations, and law. You have impressed me by being able to best them all, while still coming off as an absolute idiot.', "You can type 10,000 characters and you decided that these were the one's that you wanted.", 'Have you owned the libs yet?', "I don't know what you said, because I've seen another human naked.", 'Impressive. Normally people with such severe developmental disabilities struggle to write much more than a sentence or two. He really has exceded our expectations for the writing portion. Sadly the coherency of his writing, along with his abilities in the social skills and reading portions, are far behind his peers with similar disabilities.', "This is a really long way of saying you don't fuck.", "Sorry ma'am, looks like his delusions have gotten worse. We'll have to admit him,", '![](https://i.kym-cdn.com/photos/images/newsfeed/001/038/094/0a1.jpg)', 'If only you could put that energy into your relationships', 'Posts like this is why I do Heroine.', 'still unemployed then?', 'K', 'look im gunna have 2 ask u 2 keep ur giant dumps in the toilet not in my replys 😷😷😷', "Mommy is soooo proud of you, sweaty. Let's put this sperg out up on the fridge with all your other failures.", "Good job bobby, here's a star", "That was a mistake. You're about to find out the hard way why.", 'You sat down and wrote all this shit. You could have done so many other things with your life. What happened to your life that made you decide writing novels of bullshit on rdrama.net was the best option?', "I don't have enough spoons to read this shit", "All those words won't bring daddy back.", 'OUT!']
AGENDAPOSTER_MSG = """Hi @{username},\n\nYour comment has been automatically removed because you forgot
to include `trans lives matter`.\n\nDon't worry, we're here to help! We
won't let you post or comment anything that doesn't express your love and acceptance towards
the trans community. Feel free to resubmit your comment with `trans lives matter`
included. \n\n*This is an automated message; if you need help,
you can message us [here](/contact).*"""
VAXX_MSG = """Hi @{username}, it appears that you may be trying to spread dangerous misinformation regarding ineffective COVID-19 treatments based on pseudoscientific hearsay. Your post has been removed because it contained the word ivermectin. We ask that you understand that horse dewormer neither treats, nor prevents, COVID-19. For more information, please read up on what the FDA has to say on the matter:
https://www.fda.gov/consumers/consumer-updates/why-you-should-not-use-ivermectin-treat-or-prevent-covid-19
COVID-19 is not a joke, it is a global pandemic and it has been hard on all of us. It will likely go down as one of the most defining periods of our generation. Many of us have lost loved ones to the virus. It has caused confusion, fear, frustration, and served to further divide us. Tens of millions around the world have died. There is nothing to be gained by spreading bad science based on very understandable fear.
The only proven method of prevention is the COVID-19 vaccine, paired with appropriate social distancing, handwashing, and masks. Vaccines are free in the United States - if you'd like to locate your nearest vaccine provider, please visit https://www.vaccines.gov/ and schedule an appointment today.
Thank you."""
BASED_MSG = "@{username}'s Based Count has increased by 1. Their Based Count is now {basedcount}.\n\nPills: {pills}"
if site == "pcmemes.net":
BASEDBOT_ACCOUNT = 800
NOTIFICATIONS_ACCOUNT = 1046
AUTOJANNY_ACCOUNT = 1050
SNAPPY_ACCOUNT = 261
LONGPOSTBOT_ACCOUNT = 1832
ZOZBOT_ACCOUNT = 1833
AUTOPOLLER_ACCOUNT = 3369
elif site == 'rdrama.net':
NOTIFICATIONS_ACCOUNT = 1046
AUTOJANNY_ACCOUNT = 2360
SNAPPY_ACCOUNT = 261
LONGPOSTBOT_ACCOUNT = 1832
ZOZBOT_ACCOUNT = 1833
AUTOPOLLER_ACCOUNT = 3369
else:
NOTIFICATIONS_ACCOUNT = 1
AUTOJANNY_ACCOUNT = 2
SNAPPY_ACCOUNT = 3
LONGPOSTBOT_ACCOUNT = 4
ZOZBOT_ACCOUNT = 5
AUTOPOLLER_ACCOUNT = 6
PUSHER_INSTANCE_ID = '02ddcc80-b8db-42be-9022-44c546b4dce6'
PUSHER_KEY = environ.get("PUSHER_KEY", "").strip()

View File

@ -0,0 +1,87 @@
from collections import ChainMap
import re
from re import Match
from typing import List, Dict
from files.helpers.const import SLURS
def first_upper(phrase: str) -> str:
"""Converts the first character of the phrase to uppercase, not messing with the others"""
return phrase[0].upper() + phrase[1:]
def first_all_upper(phrase: str) -> str:
"""Converts the first character of each word to uppercase, not messing with the others"""
if " " not in phrase:
return first_upper(phrase)
return " ".join([first_upper(word) for word in phrase.split(" ")])
def get_permutations_slur(slur: str, replacer: str = "_") -> Dict[str, str]:
"""
Given a slur and a replacer, generates every casing permutation of the slur and maps each one to the
correspondingly cased replacement
"""
stripped = slur.strip()
is_link = replacer.startswith("http") # special case for the :marseymerchant:
# the order in which entries are added to the dict matters: the most 'correct' version must be written last so it wins
result = {
stripped.upper(): replacer.upper() if not is_link else replacer,
first_all_upper(stripped): first_all_upper(replacer) if not is_link else replacer,
stripped.lower(): replacer,
stripped: replacer,
first_upper(stripped): first_upper(replacer) if not is_link else replacer,
}
return result
def create_replace_map() -> Dict[str, str]:
"""Creates the map that will be used to get the mathing replaced for the given slur"""
dicts = [get_permutations_slur(slur, replacer) for (slur, replacer) in SLURS.items()]
# flattens the list of dicts into a single dict
return dict(ChainMap(*dicts))
REPLACE_MAP = create_replace_map()
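# With the default SLURS this holds entries such as "kill yourself" -> "keep yourself safe",
# "Kill Yourself" -> "Keep Yourself Safe" and "KILL YOURSELF" -> "KEEP YOURSELF SAFE"
# (see test_create_replace_map for a fuller picture).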
def create_variations_slur_regex(slur: str) -> List[str]:
"""For a given match generates the corresponding replacer"""
permutations = get_permutations_slur(slur)
if slur.startswith(" ") and slur.endswith(" "):
return [rf"(\s|>)({perm})(\s|<)" for perm in permutations.keys()]
else:
return [rf"(\s|>)({perm})|({perm})(\s|<)" for perm in permutations.keys()]
def sub_matcher(match: Match) -> str:
# special case: an exact-word slur, whose regex has exactly three capture groups
if len(match.groups()) == 3:
found = match.group(2)
replacer = REPLACE_MAP[found]
return match.group(1) + replacer + match.group(3)
else: # normal case with prefix or suffix
found = match.group(2) if (match.group(2) is not None) else match.group(3)
replacer = REPLACE_MAP[found]
return (match.group(1) or '') + replacer + (match.group(4) or '')
def censor_slurs(body: str, logged_user) -> str:
if logged_user and not logged_user.slurreplacer:
return body
for (slur, replace) in SLURS.items():
for variation in create_variations_slur_regex(slur):
try:
body = re.sub(variation, sub_matcher, body)
except Exception as e:
print(e)
return body
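# Minimal usage sketch (illustrative only; how this is wired into the rest of the app is outside
# this file): with the default SLURS table,
#
#     censor_slurs("<p>that journalist is a manlet</p>", None)
#
# returns "<p>that journ*list is a little king</p>", while a logged-in user whose `slurreplacer`
# preference is off gets the body back unchanged.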

50
requirements.txt 100755 → 100644
View File

@ -1,25 +1,27 @@
beautifulsoup4
bleach
Flask
Flask-Caching
Flask-Compress
Flask-Limiter==1.1.0
Flask-Mail
gevent
greenlet
gunicorn
ImageHash
matplotlib
mistletoe
piexif
Pillow
pyotp
qrcode
redis
requests
SQLAlchemy
psycopg2-binary
pusher_push_notifications
youtube-dl
yattag
assertpy
beautifulsoup4
bleach
Flask
Flask-Caching
Flask-Compress
Flask-Limiter==1.1.0
Flask-Mail
gevent
greenlet
gunicorn
ImageHash
matplotlib
mistletoe
piexif
Pillow
pyotp
qrcode
redis
requests
SQLAlchemy
psycopg2-binary
pusher_push_notifications
pytest
youtube-dl
yattag
webptools

View File

@ -0,0 +1,197 @@
import re
from unittest.mock import patch
from assertpy import assert_that
from files.helpers import word_censor
from files.helpers.word_censor import create_variations_slur_regex, create_replace_map, censor_slurs, sub_matcher, \
get_permutations_slur, first_upper, first_all_upper
def test_first_upper():
assert_that(first_upper("USS liberty")).is_equal_to("USS liberty")
assert_that(first_upper("uss liberty")).is_equal_to("Uss liberty")
assert_that(first_upper("uss Liberty")).is_equal_to("Uss Liberty")
def test_first_all_upper():
assert_that(first_all_upper("USS liberty")).is_equal_to("USS Liberty")
assert_that(first_all_upper("uss liberty")).is_equal_to("Uss Liberty")
assert_that(first_all_upper("uss Liberty")).is_equal_to("Uss Liberty")
def test_get_permutations_slur():
expected = {
"USS liberty incident": "Tragic accident aboard the USS Liberty",
"uss liberty incident": "tragic accident aboard the USS Liberty",
"USS Liberty Incident": "Tragic Accident Aboard The USS Liberty",
"USS LIBERTY INCIDENT": "TRAGIC ACCIDENT ABOARD THE USS LIBERTY",
}
result = get_permutations_slur("USS liberty incident", "tragic accident aboard the USS Liberty")
assert_that(result).is_equal_to(expected)
def test_get_permutations_slur_with_link_replacer():
expected = {
"kike": "https://sciencedirect.com/science/article/abs/pii/S016028960600033X",
"Kike": "https://sciencedirect.com/science/article/abs/pii/S016028960600033X",
"KIKE": "https://sciencedirect.com/science/article/abs/pii/S016028960600033X",
}
result = get_permutations_slur("kike", "https://sciencedirect.com/science/article/abs/pii/S016028960600033X")
assert_that(result).is_equal_to(expected)
def test_create_variations_slur_regex_for_slur_with_spaces():
expected = [r"(\s|>)(retard)(\s|<)",
r"(\s|>)(Retard)(\s|<)",
r"(\s|>)(RETARD)(\s|<)"]
result = create_variations_slur_regex(" retard ")
assert_that(result).is_length(3).contains_only(*expected)
def test_create_variations_slur_regex_single_word():
expected = [r"(\s|>)(retard)|(retard)(\s|<)",
r"(\s|>)(Retard)|(Retard)(\s|<)",
r"(\s|>)(RETARD)|(RETARD)(\s|<)"]
result = create_variations_slur_regex("retard")
assert_that(result).is_length(3).contains_only(*expected)
def test_create_variations_slur_regex_multiple_word():
expected = [r"(\s|>)(kill yourself)|(kill yourself)(\s|<)",
r"(\s|>)(Kill yourself)|(Kill yourself)(\s|<)",
r"(\s|>)(Kill Yourself)|(Kill Yourself)(\s|<)",
r"(\s|>)(KILL YOURSELF)|(KILL YOURSELF)(\s|<)"]
result = create_variations_slur_regex("kill yourself")
assert_that(result).is_length(4).contains_only(*expected)
@patch("files.helpers.word_censor.SLURS", {
"tranny": "πŸš‚πŸšƒπŸšƒ",
"kill yourself": "keep yourself safe",
"faggot": "cute twink",
"NoNewNormal": "NoNewNormal",
" nig ": "πŸ€",
})
def test_create_replace_map():
expected = {
"tranny": "πŸš‚πŸšƒπŸšƒ",
"Tranny": "πŸš‚πŸšƒπŸšƒ",
"TRANNY": "πŸš‚πŸšƒπŸšƒ",
"kill yourself": "keep yourself safe",
"Kill yourself": "Keep yourself safe",
"Kill Yourself": "Keep Yourself Safe",
"KILL YOURSELF": "KEEP YOURSELF SAFE",
"faggot": "cute twink",
"Faggot": "Cute twink",
"FAGGOT": "CUTE TWINK",
"NoNewNormal": "NoNewNormal",
"nonewnormal": "NoNewNormal",
"NONEWNORMAL": "NONEWNORMAL",
"nig": "πŸ€",
"Nig": "πŸ€",
"NIG": "πŸ€",
}
result = create_replace_map()
assert_that(result).is_equal_to(expected)
@patch("files.helpers.word_censor.REPLACE_MAP", {'retard': 'r-slur', 'NIG': 'πŸ€'})
def test_sub_matcher():
match = re.search(r"(\s|>)(retard)|(retard)(\s|<)", "<p>retard</p>")
assert_that(sub_matcher(match)).is_equal_to(">r-slur")
match = re.search(r"(\s|>)(retard)|(retard)(\s|<)", "<p>noretard</p>")
assert_that(sub_matcher(match)).is_equal_to("r-slur<")
match = re.search(r"(\s|>)(NIG)(\s|<)", "<p>NIG</p>")
assert_that(sub_matcher(match)).is_equal_to(">πŸ€<")
match = re.search(r"(\s|>)(NIG)(\s|<)", "<p>NIG </p>")
assert_that(sub_matcher(match)).is_equal_to(">πŸ€ ")
@patch("files.helpers.word_censor.SLURS", {
'retard': 'r-slur',
'manlet': 'little king',
' nig ': 'πŸ€',
'i hate Carp': 'i love Carp',
'kike': 'https://sciencedirect.com/science/article/abs/pii/S016028960600033X'
})
def test_censor_slurs():
word_censor.REPLACE_MAP = create_replace_map()
assert_that(censor_slurs("<p>retard</p>", None)).is_equal_to("<p>r-slur</p>")
assert_that(censor_slurs("<p>preretard</p>", None)).is_equal_to("<p>prer-slur</p>")
assert_that(censor_slurs("that is Retarded like", None)).is_equal_to("that is R-slured like")
assert_that(censor_slurs("that is SUPERRETARD like", None)).is_equal_to("that is SUPERR-SLUR like")
assert_that(censor_slurs("<p>Manlets get out!</p>", None)).is_equal_to("<p>Little kings get out!</p>")
assert_that(censor_slurs('... "retard" ...', None)).is_equal_to('... "retard" ...')
assert_that(censor_slurs('... ReTaRd ...', None)).is_equal_to('... ReTaRd ...')
assert_that(censor_slurs('... xretardx ...', None)).is_equal_to('... xretardx ...')
assert_that(censor_slurs("LLM is a manlet hehe", None)).is_equal_to("LLM is a little king hehe")
assert_that(censor_slurs("LLM is :marseycapitalistmanlet: hehe", None)) \
.is_equal_to("LLM is :marseycapitalistmanlet: hehe")
assert_that(censor_slurs('... Nig ...', None)).is_equal_to('... πŸ€ ...')
assert_that(censor_slurs('<p>NIG</p>', None)).is_equal_to('<p>πŸ€</p>')
assert_that(censor_slurs('... nigeria ...', None)).is_equal_to('... nigeria ...')
assert_that(censor_slurs('... i hate Carp ...', None)).is_equal_to('... i love Carp ...')
assert_that(censor_slurs('... i hate carp ...', None)).is_equal_to('... i love Carp ...')
assert_that(censor_slurs('... I hate Carp ...', None)).is_equal_to('... I love Carp ...')
assert_that(censor_slurs('... I Hate Carp ...', None)).is_equal_to('... I Love Carp ...')
assert_that(censor_slurs('... I HATE CARP ...', None)).is_equal_to('... I LOVE CARP ...')
# Not covered:
assert_that(censor_slurs('... I Hate carp ...', None)).is_equal_to('... I Hate carp ...')
assert_that(censor_slurs('... i Hate Carp ...', None)).is_equal_to('... i Hate Carp ...')
assert_that(censor_slurs('... i Hate carp ...', None)).is_equal_to('... i Hate carp ...')
assert_that(censor_slurs('... i hate a carp ...', None)).is_equal_to('... i hate a carp ...')
assert_that(censor_slurs("<p>retarded SuperManlet NIG</p>", None)) \
.is_equal_to("<p>r-slured SuperLittle king πŸ€</p>")
assert_that(censor_slurs('... kike ...', None)) \
.is_equal_to('... https://sciencedirect.com/science/article/abs/pii/S016028960600033X ...')
assert_that(censor_slurs('... Kike ...', None)) \
.is_equal_to('... https://sciencedirect.com/science/article/abs/pii/S016028960600033X ...')
assert_that(censor_slurs('... KIKE ...', None)) \
.is_equal_to('... https://sciencedirect.com/science/article/abs/pii/S016028960600033X ...')
@patch("files.helpers.word_censor.SLURS", {'retard': 'r-slur', 'manlet': 'little king', ' nig ': 'πŸ€'})
def test_censor_slurs_does_not_error_out_on_exception():
word_censor.REPLACE_MAP = create_replace_map()
word_censor.REPLACE_MAP["Manlet"] = None
assert_that(censor_slurs(">retarded SuperManlet NIG<", None)).is_equal_to(">r-slured SuperManlet πŸ€<")
@patch("files.helpers.word_censor.SLURS", {'retard': 'r-slur', 'manlet': 'little king'})
def test_censor_slurs_does_not_censor_on_flag_disabled():
word_censor.REPLACE_MAP = create_replace_map()
class User:
def __init__(self, slurreplacer):
self.slurreplacer = slurreplacer
logger_user = User(slurreplacer=False)
assert_that(censor_slurs("<p>retard</p>", logger_user)).is_equal_to("<p>retard</p>")
logger_user = User(slurreplacer=True)
assert_that(censor_slurs("<p>retard</p>", logger_user)).is_equal_to("<p>r-slur</p>")