forked from MarseyWorld/MarseyWorld
refactor slurs and profanities and put them in their own file
parent fa8b7eff02
commit a9b078007e

@@ -12,6 +12,7 @@ from sqlalchemy.sql.sqltypes import *
from files.classes import Base
from files.helpers.config.const import *
+from files.helpers.slurs_and_profanities import *
from files.helpers.lazy import lazy
from files.helpers.regex import *
from files.helpers.sorting_and_time import *

@@ -368,7 +369,7 @@ class Comment(Base):
if body:
	if not (self.parent_post and self.post.sub == 'chudrama'):
-		body = censor_slurs(body, v)
+		body = censor_slurs_profanities(body, v)

	body = normalize_urls_runtime(body, v)

@@ -384,7 +385,7 @@ class Comment(Base):
if not body: return ""

if not (self.parent_post and self.post.sub == 'chudrama'):
-	body = censor_slurs(body, v)
+	body = censor_slurs_profanities(body, v)
	body = replace_train_html(body)

return body

@@ -7,7 +7,7 @@ from flask import g
from files.classes import Base
from files.helpers.lazy import lazy
-from files.helpers.regex import censor_slurs
+from files.helpers.slurs_and_profanities import censor_slurs_profanities

class HatDef(Base):
	__tablename__ = "hat_defs"

@@ -37,7 +37,7 @@ class HatDef(Base):
@lazy
def censored_description(self, v):
-	return censor_slurs(self.description, v)
+	return censor_slurs_profanities(self.description, v)

@property
@lazy

@@ -7,7 +7,7 @@ from sqlalchemy.sql.sqltypes import *
from files.classes import Base
from files.helpers.config.const import *
from files.helpers.lazy import lazy
-from files.helpers.regex import censor_slurs
+from files.helpers.slurs_and_profanities import censor_slurs_profanities
from files.helpers.sorting_and_time import make_age_string

class ModAction(Base):

@@ -63,7 +63,7 @@ class ModAction(Base):
if self.target_user_id:
	return f'<a href="{self.target_user.url}">@{self.target_user.username}</a>'
elif self.target_post_id:
-	return censor_slurs(f'<a href="{self.target_post.permalink}">{self.target_post.title_html}</a>', None)
+	return censor_slurs_profanities(f'<a href="{self.target_post.permalink}">{self.target_post.title_html}</a>', None)
elif self.target_comment_id:
	return f'<a href="{self.target_comment.permalink}">comment</a>'

@@ -9,6 +9,7 @@ from sqlalchemy.sql.sqltypes import *
from files.classes import Base
from files.helpers.config.const import *
+from files.helpers.slurs_and_profanities import *
from files.helpers.lazy import lazy
from files.helpers.regex import *
from files.helpers.sorting_and_time import make_age_string

@@ -300,7 +301,7 @@ class Post(Base):
body = add_options(self, body, v)

if self.sub != 'chudrama':
-	body = censor_slurs(body, v)
+	body = censor_slurs_profanities(body, v)

body = normalize_urls_runtime(body, v)

@@ -315,7 +316,7 @@ class Post(Base):
if not body: return ""

if self.sub != 'chudrama':
-	body = censor_slurs(body, v)
+	body = censor_slurs_profanities(body, v)
	body = replace_train_html(body)

body = normalize_urls_runtime(body, v)

@@ -327,7 +328,7 @@ class Post(Base):
title = self.title_html

if self.sub != 'chudrama':
-	title = censor_slurs(title, v)
+	title = censor_slurs_profanities(title, v)

return title

@@ -336,7 +337,7 @@ class Post(Base):
title = self.title

if self.sub != 'chudrama':
-	title = censor_slurs(title, v)
+	title = censor_slurs_profanities(title, v)
	title = replace_train_html(title)

return title

@@ -6,7 +6,7 @@ from sqlalchemy.sql.sqltypes import *
from files.classes import Base
from files.helpers.lazy import lazy
-from files.helpers.regex import censor_slurs
+from files.helpers.slurs_and_profanities import censor_slurs_profanities

class Report(Base):
	__tablename__ = "reports"

@@ -27,7 +27,7 @@ class Report(Base):
@lazy
def realreason(self, v):
-	return censor_slurs(self.reason, v)
+	return censor_slurs_profanities(self.reason, v)

#lazy hack to avoid having to rename the comment_id column and causing potential new bugs
@property

@@ -55,7 +55,7 @@ class CommentReport(Base):
@lazy
def realreason(self, v):
-	return censor_slurs(self.reason, v)
+	return censor_slurs_profanities(self.reason, v)

#lazy hack to avoid having to rename the comment_id column and causing potential new bugs
@property

@@ -7,7 +7,7 @@ from sqlalchemy.sql.sqltypes import *
from files.classes import Base
from files.helpers.config.const import *
from files.helpers.lazy import lazy
-from files.helpers.regex import censor_slurs
+from files.helpers.slurs_and_profanities import censor_slurs_profanities
from files.helpers.sorting_and_time import make_age_string

class SubAction(Base):

@@ -52,7 +52,7 @@ class SubAction(Base):
if self.target_user_id:
	return f'<a href="{self.target_user.url}">@{self.target_user.username}</a>'
elif self.target_post_id:
-	return censor_slurs(f'<a href="{self.target_post.permalink}">{self.target_post.title_html}</a>', None)
+	return censor_slurs_profanities(f'<a href="{self.target_post.permalink}">{self.target_post.title_html}</a>', None)
elif self.target_comment_id:
	return f'<a href="{self.target_comment.permalink}">comment</a>'

@@ -1,6 +1,5 @@
import random
from operator import *
import re

import pyotp
from sqlalchemy import Column, ForeignKey, FetchedValue

@@ -13,6 +13,7 @@ from files.classes import Comment, Notification, PushSubscription, Group
from .config.const import *
from .regex import *
from .sanitize import *
+from .slurs_and_profanities import censor_slurs_profanities

def create_comment(text_html):
	new_comment = Comment(author_id=AUTOJANNY_ID,

@@ -237,7 +238,7 @@ def push_notif(uids, title, body, url_or_comment):
if len(body) > PUSH_NOTIF_LIMIT:
	body = body[:PUSH_NOTIF_LIMIT] + "..."

-body = censor_slurs(body, None)
+body = censor_slurs_profanities(body, None)

subscriptions = g.db.query(PushSubscription.subscription_json).filter(PushSubscription.user_id.in_(uids)).all()
subscriptions = [x[0] for x in subscriptions]

@@ -117,40 +117,6 @@ GIRL_PHRASES = [
	"$ PERIODT"
]

-tranny = f'<img loading="lazy" data-bs-toggle="tooltip" alt=":marseytrain:" title=":marseytrain:" src="{SITE_FULL_IMAGES}/e/marseytrain.webp">'
-trannie = f'<img loading="lazy" data-bs-toggle="tooltip" alt=":!marseytrain:" title=":!marseytrain:" src="{SITE_FULL_IMAGES}/e/marseytrain.webp">'
-troon = f'<img loading="lazy" data-bs-toggle="tooltip" alt=":marseytrain2:" title=":marseytrain2:" src="{SITE_FULL_IMAGES}/e/marseytrain2.webp">'
-
-def replace_train_html(body):
-	body = body.replace(tranny, ':marseytrain:')
-	body = body.replace(trannie, ':!marseytrain:')
-	body = body.replace(troon, ':marseytrain2:')
-	return body
-
-SLURS = {
-	"tranny": tranny,
-	"trannie": trannie,
-	"troon": troon,
-	"(?<!\\bs)nigger": "BIPOC",
-	"negroid": "BIPOC",
-	"nignog": "BIPOC",
-	"nig nog": "BIPOC",
-	"niglet": 'BIPOClet',
-	"negress": "BIPOCette",
-	"faggot": "cute twink",
-	"fag": "strag",
-	"(?<!\w)spic(?!\w)": "hard-working American",
-	"(?<!\w)spics(?!\w)": "hard-working Americans",
-	"kike": "jewish chad",
-	"(?<!\w)heeb": "jewish chad",
-	"daisy's destruction": "Cars 2",
-	"daisys destruction": "Cars 2",
-	"daisy destruction": "Cars 2",
-	"pajeet": "sexy Indian dude",
-	"hunter2": "*******",
-	"dyke": "cute butch",
-}
-
patron = "Patron"

REDDIT_NOTIFS_SITE = set()

@@ -199,56 +165,6 @@ if SITE_NAME == 'rDrama':
		"watch Family Guy online",
	)

-	RDRAMA_SLURS = {
-		"retarded": "r-slurred",
-		"retard": "r-slur",
-		"pedophile": "p-dophile",
-		"kill youself": "keep yourself safe",
-		"kill yourself": "keep yourself safe",
-		"kill yourselves": "keep yourselves safe",
-		"latinos": "latinx",
-		"latino": "latinx",
-		"latinas": "latinx",
-		"latina": "latinx",
-		"hispanics": "latinx",
-		"hispanic": "latinx",
-		"autistic": "neurodivergent",
-		"gamer": "g*mer",
-		"journalist": "journ*list",
-		"journalism": "journ*lism",
-		"fake and gay": "fake and straight",
-		"(?<!\w)rapist": "male feminist",
-		"(?<!\w)pedo(?!\w)": "p-do",
-		"(?<!\w)kys": "keep yourself safe",
-		"kys(?!\w)": "keep yourself safe",
-		"republican": 'rethuglican',
-		"america": 'ameriKKKa',
-		"it's almost as if": "I'm an r-slur but",
-		"it's almost like": "I'm an r-slur but",
-		"its almost as if": "I'm an r-slur but",
-		"its almost like": "I'm an r-slur but",
-		"my brother in christ": "my brother in Allah (ﷻ)",
-		"(?<!\w)cool(?! (it|down|off))": "fetch",
-		"krayon(?! \()": "krayon (sister toucher)",
-		"discord": "groomercord",
-		"allah(?! \()": "Allah (ﷻ)",
-		"my wife(?! \()": "my wife (male)",
-		"(?<!cow )tools(?!\w)": "cow tools",
-		"explain": "mansplain",
-		'nigga': 'neighbor',
-		'(?<![\w.])cat(?!\w)': 'marsey',
-		'(?<!\w)cats(?!\w)': 'marseys',
-		'hello': 'hecko',
-		'ryan gosling': 'literally me',
-		'howdy': 'meowdy',
-		'corgi': 'klenny',
-		"right now": "right meow",
-		"(?<!\/)linux": "GNU/Linux",
-		'(?<!-)based': 'keyed',
-		'needful': 'sneedful',
-	}
-	SLURS.update(RDRAMA_SLURS)
-
	BOOSTED_HOLES = {
		'furry',
		'femboy',

@@ -276,28 +192,6 @@ elif SITE_NAME == 'WPD':
	REDDIT_NOTIFS_SITE.update({'watchpeopledie', 'makemycoffin'})


-PROFANITIES = {
-	'motherfucker': 'motherlover',
-	'fuck': 'frick',
-	'(?<!\w)ass(?!\w)': 'butt',
-	'shitting': 'pooping',
-	'damn': 'darn',
-	'bitch(?!\w)': 'b-word',
-	'toilet': 'potty',
-	'(?<!\w)asshole': 'butthole',
-	'(?<!\w)rape': 'r*pe',
-	'(?<!\w)hell(?!\w)': 'heck',
-	'(?<!\w)sex(?!\w)': 's*x',
-	'(?<!\w)cum(?!\w)': 'c*m',
-	'(?<!\w)dick': 'peepee',
-	'cock(?!\w)': 'peepee',
-	'cocks': 'peepees',
-	'penis': 'peepee',
-	'pussy': 'kitty',
-	'pussies': 'kitties',
-	'cunt': 'c*nt',
-}
-
LONGPOSTBOT_REPLIES = ('Wow, you must be a JP fan.', 'This is one of the worst posts I have EVER seen. Delete it.', "No, don't reply like this, please do another wall of unhinged rant please.", '# 😴😴😴', "Ma'am we've been over this before. You need to stop.", "I've known more coherent downies.", "Your pulitzer's in the mail", "That's great and all, but I asked for my burger without cheese.", 'That degree finally paying off', "That's nice sweaty. Why don't you have a seat in the time out corner with Pizzashill until you calm down, then you can have your Capri Sun.", "All them words won't bring your pa back.", "You had a chance to not be completely worthless, but it looks like you threw it away. At least you're consistent.", 'Some people are able to display their intelligence by going on at length on a subject and never actually saying anything. This ability is most common in trades such as politics, public relations, and law. You have impressed me by being able to best them all, while still coming off as an absolute idiot.', "You can type 10,000 characters and you decided that these were the one's that you wanted.", 'Have you owned the libs yet?', "I don't know what you said, because I've seen another human naked.", 'Impressive. Normally people with such severe developmental disabilities struggle to write much more than a sentence or two. He really has exceded our expectations for the writing portion. Sadly the coherency of his writing, along with his abilities in the social skills and reading portions, are far behind his peers with similar disabilities.', "This is a really long way of saying you don't fuck.", "Sorry ma'am, looks like his delusions have gotten worse. We'll have to admit him.", 'If only you could put that energy into your relationships', 'Posts like this is why I do Heroine.', 'still unemployed then?', 'K', 'look im gunna have 2 ask u 2 keep ur giant dumps in the toilet not in my replys 😷😷😷', "Mommy is soooo proud of you, sweaty. Let's put this sperg out up on the fridge with all your other failures.", "Good job bobby, here's a star", "That was a mistake. You're about to find out the hard way why.", 'You sat down and wrote all this shit. You could have done so many other things with your life. What happened to your life that made you decide writing novels of bullshit here was the best option?', "I don't have enough spoons to read this shit", "All those words won't bring daddy back.", 'OUT!', "Damn, you're really mad over this, but thanks for the effort you put into typing that all out! Sadly I won't read it all.", "Jesse what the fuck are you talking about??", "▼you're fucking bananas if you think I'm reading all that, take my downvote and shut up idiot", "Are you feeling okay bud?", ':#marseywoah:', 'At no point in your rambling, incoherent post were you even close to anything that could be considered a rational thought. Everyone on this site is now dumber for having read it. May God have mercy on your soul.', 'https://rdrama.net/videos/1671169024815045.mp4', 'https://i.rdrama.net/images/16766675896248007.webp', 'https://i.rdrama.net/images/1683531328305875.webp', 'https://i.rdrama.net/images/1691152552869678.webp', 'You could have done crack instead of this shit', 'Not one single person is gonna read all that', 'PlsRope', 'I hope you had chatgpt pen that one fam')

CHUD_MSGS = (

@@ -56,22 +56,6 @@ snappy_url_regex = re.compile('<a href="(https?:\/\/.+?)".*?>(.+?)<\/a>', flags=
email_regex = re.compile('[A-Za-z0-9._%+-]{1,64}@[A-Za-z0-9.-]{2,63}\.[A-Za-z]{2,63}', flags=re.A)

-slur_single_words = "|".join([slur.lower() for slur in SLURS.keys()])
-slur_single_words_title = slur_single_words.title().replace('\W','\w')
-slur_single_words_upper = slur_single_words.upper().replace('\W','\w')
-
-profanity_single_words = "|".join([profanity.lower() for profanity in PROFANITIES.keys()])
-profanity_single_words_title = profanity_single_words.title().replace('\W','\w')
-profanity_single_words_upper = profanity_single_words.upper().replace('\W','\w')
-
-slur_regex = re.compile(f"<[^>]*>|{slur_single_words}", flags=re.I|re.A)
-slur_regex_title = re.compile(f"<[^>]*>|{slur_single_words_title}", flags=re.A)
-slur_regex_upper = re.compile(f"<[^>]*>|{slur_single_words_upper}", flags=re.A)
-
-profanity_regex = re.compile(f"<[^>]*>|{profanity_single_words}", flags=re.I|re.A)
-profanity_regex_title = re.compile(f"<[^>]*>|{profanity_single_words_title}", flags=re.A)
-profanity_regex_upper = re.compile(f"<[^>]*>|{profanity_single_words_upper}", flags=re.A)
-
torture_regex = re.compile('(^|\s)(i|me)($|\s)', flags=re.I|re.A)
torture_regex2 = re.compile("(^|\s)(i'm)($|\s)", flags=re.I|re.A)
torture_regex3 = re.compile("(^|\s)(my|mine)($|\s)", flags=re.I|re.A)

@@ -158,81 +142,6 @@ pronouns_regex = re.compile("([a-z]{1,7})\/[a-z]{1,7}(\/[a-z]{1,7})?", flags=re.
html_title_regex = re.compile("<title>(.{1,200})</title>", flags=re.I)


-SLURS_FOR_REPLACING = {}
-for k, val in SLURS.items():
-	newkey = k.split('(?!')[0]
-	if ')' in newkey:
-		newkey = newkey.split(')')[1]
-	SLURS_FOR_REPLACING[newkey] = val
-
-PROFANITIES_FOR_REPLACING = {}
-for k, val in PROFANITIES.items():
-	newkey = k.split('(?!')[0]
-	if ')' in newkey:
-		newkey = newkey.split(')')[1]
-	PROFANITIES_FOR_REPLACING[newkey] = val
-
-def sub_matcher(match, upper=False, title=False, replace_with=SLURS_FOR_REPLACING):
-	group_num = 0
-	match_str = match.group(group_num)
-	if match_str.startswith('<'):
-		return match_str
-	else:
-		repl = replace_with[match_str.lower()]
-		if (not upper and not title) or "<img" in repl:
-			return repl
-		elif title:
-			return repl.title()
-		else:
-			return repl.upper()
-
-def sub_matcher_upper(match, replace_with=SLURS_FOR_REPLACING):
-	return sub_matcher(match, upper=True, replace_with=replace_with)
-
-
-# TODO: make censoring a bit better
-def sub_matcher_slurs(match, upper=False, title=False):
-	return sub_matcher(match, upper, title, replace_with=SLURS_FOR_REPLACING)
-
-def sub_matcher_slurs_title(match):
-	return sub_matcher_slurs(match, title=True)
-
-def sub_matcher_slurs_upper(match):
-	return sub_matcher_slurs(match, upper=True)
-
-
-def sub_matcher_profanities(match, upper=False, title=False):
-	return sub_matcher(match, upper, title, replace_with=PROFANITIES_FOR_REPLACING)
-
-def sub_matcher_profanities_title(match):
-	return sub_matcher_profanities(match, title=True)
-
-def sub_matcher_profanities_upper(match):
-	return sub_matcher_profanities(match, upper=True)
-
-
-def replace_re(body, regex, regex_title, regex_upper, sub_func, sub_func_title, sub_func_upper):
-	body = regex_upper.sub(sub_func_upper, body)
-	body = regex_title.sub(sub_func_title, body)
-	return regex.sub(sub_func, body)
-
-def censor_slurs(body, logged_user):
-	if not body: return ""
-
-	if '<pre>' in body or '<code>' in body:
-		return body
-
-	if not logged_user or logged_user == 'chat' or logged_user.slurreplacer:
-		body = replace_re(body, slur_regex, slur_regex_title, slur_regex_upper, sub_matcher_slurs, sub_matcher_slurs_title, sub_matcher_slurs_upper)
-
-	if SITE_NAME == 'rDrama':
-		if not logged_user or logged_user == 'chat' or logged_user.profanityreplacer:
-			body = replace_re(body, profanity_regex, profanity_regex_title, profanity_regex_upper, sub_matcher_profanities, sub_matcher_profanities_title, sub_matcher_profanities_upper)
-
-	return body
-
commands = {
	"fortune": FORTUNE_REPLIES,
	"factcheck": FACTCHECK_REPLIES,

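One detail of the mechanism being moved: both the censor_slurs() removed above and the censor_slurs_profanities() added further down build their substitution pattern as `<[^>]*>|word1|word2|...`, so whole HTML tags are consumed as matches and handed back unchanged, which keeps attributes such as title=":marseytrain:" from being rewritten. A minimal self-contained sketch of that idea, with a made-up replacement table rather than the site's real one:

import re

# Hypothetical table for illustration; the real tables live in the new
# slurs_and_profanities helper introduced by this commit.
REPLACEMENTS = {"cat": "marsey", "hello": "hecko"}

# "<[^>]*>" comes first in the alternation, so any HTML tag is matched
# whole and can be returned untouched by the substitution callback.
pattern = re.compile("<[^>]*>|" + "|".join(REPLACEMENTS), flags=re.I | re.A)

def censor(body):
	def sub(match):
		s = match.group(0)
		if s.startswith('<'):
			return s  # an HTML tag: pass it through unchanged
		return REPLACEMENTS.get(s.lower(), s)
	return pattern.sub(sub, body)

# censor('<a title="cat">hello cat</a>') -> '<a title="cat">hecko marsey</a>'
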
@@ -1,4 +1,3 @@
import re
from files.helpers.regex import *

def sharpen(string):

@@ -0,0 +1,167 @@
+import re
+from .config.const import *
+
+tranny = f'<img loading="lazy" data-bs-toggle="tooltip" alt=":marseytrain:" title=":marseytrain:" src="{SITE_FULL_IMAGES}/e/marseytrain.webp">'
+trannie = f'<img loading="lazy" data-bs-toggle="tooltip" alt=":!marseytrain:" title=":!marseytrain:" src="{SITE_FULL_IMAGES}/e/marseytrain.webp">'
+troon = f'<img loading="lazy" data-bs-toggle="tooltip" alt=":marseytrain2:" title=":marseytrain2:" src="{SITE_FULL_IMAGES}/e/marseytrain2.webp">'
+
+def replace_train_html(body):
+	body = body.replace(tranny, ':marseytrain:')
+	body = body.replace(trannie, ':!marseytrain:')
+	body = body.replace(troon, ':marseytrain2:')
+	return body
+
+SLURS = {
+	"tranny": tranny,
+	"trannie": trannie,
+	"troon": troon,
+	"(?<!\\bs)nigger": "BIPOC",
+	"negroid": "BIPOC",
+	"nignog": "BIPOC",
+	"nig nog": "BIPOC",
+	"niglet": 'BIPOClet',
+	"negress": "BIPOCette",
+	"faggot": "cute twink",
+	"fag": "strag",
+	"(?<!\w)spic(?!\w)": "hard-working American",
+	"(?<!\w)spics(?!\w)": "hard-working Americans",
+	"kike": "jewish chad",
+	"(?<!\w)heeb": "jewish chad",
+	"daisy's destruction": "Cars 2",
+	"daisys destruction": "Cars 2",
+	"daisy destruction": "Cars 2",
+	"pajeet": "sexy Indian dude",
+	"hunter2": "*******",
+	"dyke": "cute butch",
+}
+
+if SITE_NAME == 'rDrama':
+	SLURS |= {
+		"retarded": "r-slurred",
+		"retard": "r-slur",
+		"pedophile": "p-dophile",
+		"kill youself": "keep yourself safe",
+		"kill yourself": "keep yourself safe",
+		"kill yourselves": "keep yourselves safe",
+		"latinos": "latinx",
+		"latino": "latinx",
+		"latinas": "latinx",
+		"latina": "latinx",
+		"hispanics": "latinx",
+		"hispanic": "latinx",
+		"autistic": "neurodivergent",
+		"gamer": "g*mer",
+		"journalist": "journ*list",
+		"journalism": "journ*lism",
+		"fake and gay": "fake and straight",
+		"(?<!\w)rapist": "male feminist",
+		"(?<!\w)pedo(?!\w)": "p-do",
+		"(?<!\w)kys": "keep yourself safe",
+		"kys(?!\w)": "keep yourself safe",
+		"republican": 'rethuglican',
+		"america": 'ameriKKKa',
+		"it's almost as if": "I'm an r-slur but",
+		"it's almost like": "I'm an r-slur but",
+		"its almost as if": "I'm an r-slur but",
+		"its almost like": "I'm an r-slur but",
+		"my brother in christ": "my brother in Allah (ﷻ)",
+		"(?<!\w)cool(?! (it|down|off))": "fetch",
+		"krayon(?! \()": "krayon (sister toucher)",
+		"discord": "groomercord",
+		"allah(?! \()": "Allah (ﷻ)",
+		"my wife(?! \()": "my wife (male)",
+		"(?<!cow )tools(?!\w)": "cow tools",
+		"explain": "mansplain",
+		'nigga': 'neighbor',
+		'(?<![\w.])cat(?!\w)': 'marsey',
+		'(?<!\w)cats(?!\w)': 'marseys',
+		'hello': 'hecko',
+		'ryan gosling': 'literally me',
+		'howdy': 'meowdy',
+		'corgi': 'klenny',
+		"right now": "right meow",
+		"(?<!\/)linux": "GNU/Linux",
+		'(?<!-)based': 'keyed',
+		'needful': 'sneedful',
+	}
+
+PROFANITIES = {
+	'motherfucker': 'motherlover',
+	'fuck': 'frick',
+	'(?<!\w)ass(?!\w)': 'butt',
+	'shitting': 'pooping',
+	'damn': 'darn',
+	'bitch(?!\w)': 'b-word',
+	'toilet': 'potty',
+	'(?<!\w)asshole': 'butthole',
+	'(?<!\w)rape': 'r*pe',
+	'(?<!\w)hell(?!\w)': 'heck',
+	'(?<!\w)sex(?!\w)': 's*x',
+	'(?<!\w)cum(?!\w)': 'c*m',
+	'(?<!\w)dick': 'peepee',
+	'cock(?!\w)': 'peepee',
+	'cocks': 'peepees',
+	'penis': 'peepee',
+	'pussy': 'kitty',
+	'pussies': 'kitties',
+	'cunt': 'c*nt',
+}
+
+
+slur_single_words = "|".join([slur.lower() for slur in SLURS.keys()])
+profanity_single_words = "|".join([profanity.lower() for profanity in PROFANITIES.keys()])
+slur_regex = re.compile(f"<[^>]*>|{slur_single_words}", flags=re.I|re.A)
+profanity_regex = re.compile(f"<[^>]*>|{profanity_single_words}", flags=re.I|re.A)
+
+SLURS_FOR_REPLACING = {}
+for k, val in SLURS.items():
+	newkey = k.split('(?!')[0]
+	if ')' in newkey:
+		newkey = newkey.split(')')[1]
+	SLURS_FOR_REPLACING[newkey] = val
+	SLURS_FOR_REPLACING[newkey.title()] = val.title()
+	SLURS_FOR_REPLACING[newkey.upper()] = val.upper()
+
+PROFANITIES_FOR_REPLACING = {}
+for k, val in PROFANITIES.items():
+	newkey = k.split('(?!')[0]
+	if ')' in newkey:
+		newkey = newkey.split(')')[1]
+	PROFANITIES_FOR_REPLACING[newkey] = val
+	PROFANITIES_FOR_REPLACING[newkey.title()] = val.title()
+	PROFANITIES_FOR_REPLACING[newkey.upper()] = val.upper()
+
+
+def sub_matcher(match, X_FOR_REPLACING):
+	group_num = 0
+	match_str = match.group(group_num)
+	if match_str.startswith('<'):
+		return match_str
+	else:
+		repl = X_FOR_REPLACING[match_str]
+		return repl
+
+def sub_matcher_slurs(match):
+	return sub_matcher(match, SLURS_FOR_REPLACING)
+
+def sub_matcher_profanities(match):
+	return sub_matcher(match, PROFANITIES_FOR_REPLACING)
+
+
+def censor_slurs_profanities(body, logged_user):
+	if not body: return ""
+
+	if '<pre>' in body or '<code>' in body:
+		return body
+
+	if not logged_user or logged_user == 'chat' or logged_user.slurreplacer:
+		body = slur_regex.sub(sub_matcher_slurs, body)
+
+	if SITE_NAME == 'rDrama':
+		if not logged_user or logged_user == 'chat' or logged_user.profanityreplacer:
+			body = profanity_regex.sub(sub_matcher_profanities, body)
+
+	return body

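The *_FOR_REPLACING loops above strip the regex lookarounds off each key (everything from `(?!` onward, then anything before a lookbehind's closing parenthesis) and pre-register Title-case and UPPER-case variants, which is how a single case-insensitive regex pass replaces the old _title/_upper regex trio. A standalone sketch of that normalization, using two of the milder mappings from the table above purely for illustration:

# Minimal sketch of the key normalization performed in the new module.
SLURS = {
	r'(?<![\w.])cat(?!\w)': 'marsey',
	'hello': 'hecko',
}

SLURS_FOR_REPLACING = {}
for k, val in SLURS.items():
	newkey = k.split('(?!')[0]  # drop the negative lookahead
	if ')' in newkey:
		newkey = newkey.split(')')[1]  # drop the negative lookbehind
	SLURS_FOR_REPLACING[newkey] = val
	SLURS_FOR_REPLACING[newkey.title()] = val.title()
	SLURS_FOR_REPLACING[newkey.upper()] = val.upper()

# SLURS_FOR_REPLACING is now:
# {'cat': 'marsey', 'Cat': 'Marsey', 'CAT': 'MARSEY',
#  'hello': 'hecko', 'Hello': 'Hecko', 'HELLO': 'HECKO'}
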
@@ -11,6 +11,7 @@ from files.helpers.actions import *
from files.helpers.alerts import *
from files.helpers.cloudflare import *
from files.helpers.config.const import *
+from files.helpers.slurs_and_profanities import censor_slurs_profanities
from files.helpers.get import *
from files.helpers.media import *
from files.helpers.sanitize import *

@@ -909,7 +910,7 @@ def admin_title_change(user_id, v):
user.customtitleplain = new_name
new_name = filter_emojis_only(new_name)
-new_name = censor_slurs(new_name, None)
+new_name = censor_slurs_profanities(new_name, None)

user = get_account(user.id)
user.customtitle=new_name

@@ -9,6 +9,7 @@ from files.classes.userblock import UserBlock
from files.helpers.actions import *
from files.helpers.alerts import *
from files.helpers.config.const import *
+from files.helpers.slurs_and_profanities import censor_slurs_profanities
from files.helpers.config.awards import AWARDS_ENABLED, HOUSE_AWARDS, LOOTBOX_ITEM_COUNT, LOOTBOX_CONTENTS
from files.helpers.get import *
from files.helpers.marsify import marsify

@@ -394,7 +395,7 @@ def award_thing(v, thing_type, id):
else:
	author.customtitleplain = new_name
	new_name = filter_emojis_only(new_name)
-	new_name = censor_slurs(new_name, None)
+	new_name = censor_slurs_profanities(new_name, None)
	if len(new_name) > 1000: abort(403)
	author.customtitle = new_name
	author.flairchanged = int(time.time()) + 86400

@@ -9,6 +9,7 @@ from flask import request
from files.helpers.actions import *
from files.helpers.alerts import *
from files.helpers.config.const import *
+from files.helpers.slurs_and_profanities import censor_slurs_profanities
from files.helpers.regex import *
from files.helpers.media import *
from files.helpers.sanitize import *

@@ -167,9 +168,9 @@ def speak(data, v):
	"namecolor": v.name_color,
	"patron": v.patron,
	"text": text,
-	"text_censored": censor_slurs(text, 'chat'),
+	"text_censored": censor_slurs_profanities(text, 'chat'),
	"text_html": text_html,
-	"text_html_censored": censor_slurs(text_html, 'chat'),
+	"text_html_censored": censor_slurs_profanities(text_html, 'chat'),
	"time": int(time.time()),
}

@@ -12,6 +12,7 @@ from sqlalchemy.orm import load_only
from files.helpers.actions import *
from files.helpers.alerts import *
from files.helpers.config.const import *
+from files.helpers.slurs_and_profanities import censor_slurs_profanities
from files.helpers.get import *
from files.helpers.mail import *
from files.helpers.media import *

@@ -908,7 +909,7 @@ def settings_title_change(v):
if customtitleplain:
	customtitle = filter_emojis_only(customtitleplain)
-	customtitle = censor_slurs(customtitle, None)
+	customtitle = censor_slurs_profanities(customtitle, None)

	if len(customtitle) > 1000:
		abort(400, "Flair too long!")