Update MarseyFX

pull/207/head
transbitch 2023-09-24 02:02:53 -04:00
parent 897ee3e1cd
commit 22c9dd1908
8 changed files with 687 additions and 78 deletions

.gitignore (vendored): 4 additions
View File

@@ -7,3 +7,7 @@ __pycache__/
 emojis.zip
 emojis_original.zip
 includes/content-security-policy
+includes/headers
+nginx.conf
+.gitignore
+docker-compose.yml

View File

@@ -0,0 +1,57 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
width="265"
height="265"
fill-rule="evenodd"
version="1.1"
id="svg2"
sodipodi:docname="prohibition.svg"
inkscape:version="1.3 (0e150ed6c4, 2023-07-21)"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<defs
id="defs2" />
<sodipodi:namedview
id="namedview2"
pagecolor="#ffffff"
bordercolor="#000000"
borderopacity="0.25"
inkscape:showpageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
inkscape:deskcolor="#d1d1d1"
inkscape:zoom="2.1906969"
inkscape:cx="113.66246"
inkscape:cy="153.14762"
inkscape:window-width="1920"
inkscape:window-height="1017"
inkscape:window-x="-8"
inkscape:window-y="-8"
inkscape:window-maximized="1"
inkscape:current-layer="svg2"
showgrid="true">
<inkscape:grid
id="grid2"
units="px"
originx="0"
originy="0"
spacingx="1"
spacingy="1"
empcolor="#0099e5"
empopacity="0.30196078"
color="#0099e5"
opacity="0.14901961"
empspacing="5"
dotted="false"
gridanglex="30"
gridanglez="30"
visible="true" />
</sodipodi:namedview>
<path
d="m 230.37889,132.5 c 0,-54.05717 -43.82172,-97.878887 -97.87889,-97.878887 A 97.463774,97.463774 0 0 0 70.394811,56.846774 L 208.48791,194.18915 C 222.17277,177.35442 230.37889,155.88687 230.37889,132.5 Z M 56.512094,70.810849 C 42.828151,87.646509 34.621113,109.11313 34.621113,132.5 c 0,54.05717 43.821717,97.87889 97.878887,97.87889 23.57825,0 45.20851,-8.33832 62.10519,-22.22566 z M 255,132.5 C 255,64.844868 200.15513,10 132.5,10 64.844868,10 10,64.844868 10,132.5 10,200.15513 64.844868,255 132.5,255 200.15513,255 255,200.15513 255,132.5"
fill="#b71f2e"
id="path2"
style="stroke-width:8;stroke:#ffffff;stroke-opacity:1;stroke-dasharray:none;paint-order:stroke fill markers" />
</svg>

New image (2.1 KiB)

View File

@@ -0,0 +1,347 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg
width="256"
height="256"
viewBox="0 0 256 256"
version="1.1"
id="svg5"
inkscape:version="1.3 (0e150ed6c4, 2023-07-21)"
sodipodi:docname="scope.svg"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<sodipodi:namedview
id="namedview7"
pagecolor="#d8d8d8"
bordercolor="#000000"
borderopacity="0.25"
inkscape:showpageshadow="2"
inkscape:pageopacity="0"
inkscape:pagecheckerboard="0"
inkscape:deskcolor="#d1d1d1"
inkscape:document-units="px"
showgrid="true"
inkscape:zoom="1.4912879"
inkscape:cx="164.95809"
inkscape:cy="127.07137"
inkscape:window-width="1920"
inkscape:window-height="1017"
inkscape:window-x="-8"
inkscape:window-y="-8"
inkscape:window-maximized="1"
inkscape:current-layer="layer1">
<inkscape:grid
type="xygrid"
id="grid132"
empspacing="8"
originx="0"
originy="0"
spacingy="4"
spacingx="4"
units="px"
visible="true" />
</sodipodi:namedview>
<defs
id="defs2">
<inkscape:path-effect
effect="fillet_chamfer"
id="path-effect3097"
is_visible="true"
lpeversion="1"
nodesatellites_param="F,0,0,1,0,0.52916668,0,1 @ F,0,0,1,0,0.52916663,0,1 @ F,0,0,1,0,0.52916663,0,1 @ F,0,0,1,0,0.52916668,0,1"
unit="px"
method="auto"
mode="F"
radius="0"
chamfer_steps="1"
flexible="false"
use_knot_distance="true"
apply_no_radius="true"
apply_with_radius="true"
only_selected="false"
hide_knots="false" />
<inkscape:path-effect
effect="fillet_chamfer"
id="path-effect3089"
is_visible="true"
lpeversion="1"
nodesatellites_param="F,0,0,1,0,0.52916668,0,1 @ F,0,0,1,0,0.52916663,0,1 @ F,0,0,1,0,0.52916663,0,1 @ F,0,0,1,0,0.52916668,0,1"
unit="px"
method="auto"
mode="F"
radius="0"
chamfer_steps="1"
flexible="false"
use_knot_distance="true"
apply_no_radius="true"
apply_with_radius="true"
only_selected="false"
hide_knots="false" />
<inkscape:path-effect
effect="fillet_chamfer"
id="path-effect3081"
is_visible="true"
lpeversion="1"
nodesatellites_param="F,0,0,1,0,0.52916668,0,1 @ F,0,0,1,0,0.52916663,0,1 @ F,0,0,1,0,0.52916663,0,1 @ F,0,0,1,0,0.52916668,0,1"
unit="px"
method="auto"
mode="F"
radius="0"
chamfer_steps="1"
flexible="false"
use_knot_distance="true"
apply_no_radius="true"
apply_with_radius="true"
only_selected="false"
hide_knots="false" />
<inkscape:path-effect
effect="fillet_chamfer"
id="path-effect3073"
is_visible="true"
lpeversion="1"
nodesatellites_param="F,0,0,1,0,0.52916668,0,1 @ F,0,0,1,0,0.52916663,0,1 @ F,0,0,1,0,0.52916663,0,1 @ F,0,0,1,0,0.52916668,0,1"
unit="px"
method="auto"
mode="F"
radius="0"
chamfer_steps="1"
flexible="false"
use_knot_distance="true"
apply_no_radius="true"
apply_with_radius="true"
only_selected="false"
hide_knots="false" />
<inkscape:path-effect
effect="fillet_chamfer"
id="path-effect1613"
is_visible="true"
lpeversion="1"
nodesatellites_param="F,0,0,1,0,0.52916668,0,1 @ F,0,0,1,0,0.52916663,0,1 @ F,0,0,1,0,0.52916663,0,1 @ F,0,0,1,0,0.52916668,0,1"
unit="px"
method="auto"
mode="F"
radius="0"
chamfer_steps="1"
flexible="false"
use_knot_distance="true"
apply_no_radius="true"
apply_with_radius="true"
only_selected="false"
hide_knots="false" />
<inkscape:path-effect
effect="fillet_chamfer"
id="path-effect1038"
is_visible="true"
lpeversion="1"
nodesatellites_param="F,0,0,1,0,1,0,1 @ F,0,0,1,0,1,0,1 @ F,0,0,1,0,1,0,1 @ F,0,0,1,0,1,0,1"
unit="px"
method="auto"
mode="F"
radius="0"
chamfer_steps="1"
flexible="false"
use_knot_distance="true"
apply_no_radius="true"
apply_with_radius="true"
only_selected="false"
hide_knots="false" />
<linearGradient
id="linearGradient916"
inkscape:swatch="solid">
<stop
style="stop-color:#ffffff;stop-opacity:1;"
offset="0"
id="stop914" />
</linearGradient>
<inkscape:path-effect
effect="fillet_chamfer"
id="path-effect3224"
is_visible="true"
lpeversion="1"
nodesatellites_param="F,0,0,1,0,1.3229167,0,1 @ F,0,0,1,0,0,0,1 @ F,0,0,1,0,0,0,1 @ F,0,0,1,0,0,0,1 @ F,0,0,1,0,0,0,1 @ F,0,0,1,0,0,0,1 @ F,0,0,1,0,0,0,1"
unit="px"
method="auto"
mode="F"
radius="0"
chamfer_steps="1"
flexible="false"
use_knot_distance="true"
apply_no_radius="true"
apply_with_radius="true"
only_selected="false"
hide_knots="false" />
<linearGradient
id="linearGradient3219"
inkscape:swatch="solid">
<stop
style="stop-color:#ffffff;stop-opacity:1;"
offset="0"
id="stop3217" />
</linearGradient>
<linearGradient
id="Secondary"
inkscape:swatch="solid">
<stop
style="stop-color:#808080;stop-opacity:1;"
offset="0"
id="stop3104" />
</linearGradient>
<linearGradient
id="linearGradient3073"
inkscape:swatch="solid">
<stop
style="stop-color:#ffffff;stop-opacity:1;"
offset="0"
id="stop3071" />
</linearGradient>
<linearGradient
id="Primary"
inkscape:swatch="solid">
<stop
style="stop-color:#ffffff;stop-opacity:1;"
offset="0"
id="stop2068" />
</linearGradient>
</defs>
<g
inkscape:label="Layer 1"
inkscape:groupmode="layer"
id="layer1">
<g
id="path1"
style="stroke:#ffffff;stroke-width:4;stroke-dasharray:none;stroke-opacity:1;paint-order:stroke fill markers"
transform="matrix(1.2035109,0,0,1.2035109,-26.011018,-26.011018)">
<path
style="color:#000000;fill:#039600;fill-opacity:0.254202;stroke:#ffffff;stroke-width:4;stroke-miterlimit:3;stroke-dasharray:none;stroke-opacity:1;paint-order:stroke fill markers"
d="m 224,128 a 96,96 0 0 1 -96,96 96,96 0 0 1 -96,-96 96,96 0 0 1 96,-96 96,96 0 0 1 96,96 z"
id="path43" />
<path
style="color:#000000;fill:#000000;stroke:#ffffff;stroke-width:4;stroke-miterlimit:3;stroke-dasharray:none;stroke-opacity:1;paint-order:stroke fill markers"
d="M 128,27 C 72.278519,27 27,72.278519 27,128 27,183.72148 72.278519,229 128,229 183.72148,229 229,183.72148 229,128 229,72.278519 183.72148,27 128,27 Z m 0,10 c 50.31709,0 91,40.682915 91,91 0,50.31709 -40.68291,91 -91,91 -50.317085,0 -91,-40.68291 -91,-91 0,-50.317085 40.682915,-91 91,-91 z"
id="path44" />
</g>
<path
style="fill:#039600;fill-opacity:0.254202;stroke:#000000;stroke-width:3.8738;stroke-miterlimit:3;stroke-dasharray:none;stroke-opacity:1"
d="M 128.03838,243.57541 V 166.77639"
id="path11"
sodipodi:nodetypes="cc" />
<path
style="fill:#039600;fill-opacity:0.254202;stroke:#000000;stroke-width:3.8738;stroke-miterlimit:3;stroke-dasharray:none;stroke-opacity:1"
d="M 128.03838,12.50133 V 89.300371"
id="path12"
sodipodi:nodetypes="cc" />
<path
style="fill:#039600;fill-opacity:0.254202;stroke:#000000;stroke-width:3.8738;stroke-miterlimit:3;stroke-dasharray:none;stroke-opacity:1"
d="m 243.57541,128.03837 -76.79902,1e-5"
id="path13"
sodipodi:nodetypes="cc" />
<path
style="fill:#039600;fill-opacity:0.254202;stroke:#000000;stroke-width:3.8738;stroke-miterlimit:3;stroke-dasharray:none;stroke-opacity:1"
d="M 12.50133,128.03838 H 89.300371"
id="path14"
sodipodi:nodetypes="cc" />
<path
style="fill:#039600;fill-opacity:0.254202;stroke:#000000;stroke-width:1.9369;stroke-miterlimit:3;stroke-dasharray:none;stroke-opacity:1"
d="m 128.03838,131.91218 v 34.86421"
id="path15"
sodipodi:nodetypes="cc" />
<path
style="fill:#039600;fill-opacity:0.254202;stroke:#000000;stroke-width:1.9369;stroke-miterlimit:3;stroke-dasharray:none;stroke-opacity:1"
d="M 89.300371,128.03838 H 124.16458"
id="path16"
sodipodi:nodetypes="cc" />
<path
style="fill:#039600;fill-opacity:0.254202;stroke:#000000;stroke-width:1.9369;stroke-miterlimit:3;stroke-dasharray:none;stroke-opacity:1"
d="M 128.03838,89.300371 V 124.16458"
id="path17"
sodipodi:nodetypes="cc" />
<path
style="fill:#039600;fill-opacity:0.254202;stroke:#000000;stroke-width:1.9369;stroke-miterlimit:3;stroke-dasharray:none;stroke-opacity:1"
d="M 166.77639,128.03838 H 131.91218"
id="path18"
sodipodi:nodetypes="cc" />
<path
style="fill:#039600;fill-opacity:0.254202;stroke:#000000;stroke-width:1.9369;stroke-miterlimit:3;stroke-dasharray:none;stroke-opacity:1"
d="m 120.29077,124.16458 v 7.7476"
id="path19" />
<path
style="fill:#039600;fill-opacity:0.254202;stroke:#000000;stroke-width:1.9369;stroke-miterlimit:3;stroke-dasharray:none;stroke-opacity:1"
d="m 112.54317,124.16458 v 7.7476"
id="path20" />
<path
style="fill:#039600;fill-opacity:0.254202;stroke:#000000;stroke-width:1.9369;stroke-miterlimit:3;stroke-dasharray:none;stroke-opacity:1"
d="m 104.79557,124.16458 v 7.7476"
id="path21" />
<path
style="fill:#039600;fill-opacity:0.254202;stroke:#000000;stroke-width:1.9369;stroke-miterlimit:3;stroke-dasharray:none;stroke-opacity:1"
d="m 97.047972,124.16458 v 7.7476"
id="path22" />
<path
style="fill:#039600;fill-opacity:0.254202;stroke:#000000;stroke-width:1.9369;stroke-miterlimit:3;stroke-dasharray:none;stroke-opacity:1"
d="m 89.300371,124.16458 v 7.7476"
id="path23" />
<path
style="fill:#039600;fill-opacity:0.254202;stroke:#000000;stroke-width:1.9369;stroke-miterlimit:3;stroke-dasharray:none;stroke-opacity:1"
d="m 131.91218,120.29077 h -7.7476"
id="path24" />
<path
style="fill:#039600;fill-opacity:0.254202;stroke:#000000;stroke-width:1.9369;stroke-miterlimit:3;stroke-dasharray:none;stroke-opacity:1"
d="m 131.91218,112.54317 h -7.7476"
id="path25" />
<path
style="fill:#039600;fill-opacity:0.254202;stroke:#000000;stroke-width:1.9369;stroke-miterlimit:3;stroke-dasharray:none;stroke-opacity:1"
d="m 131.91218,104.79557 h -7.7476"
id="path26" />
<path
style="fill:#039600;fill-opacity:0.254202;stroke:#000000;stroke-width:1.9369;stroke-miterlimit:3;stroke-dasharray:none;stroke-opacity:1"
d="m 131.91218,97.047972 h -7.7476"
id="path27" />
<path
style="fill:#039600;fill-opacity:0.254202;stroke:#000000;stroke-width:1.9369;stroke-miterlimit:3;stroke-dasharray:none;stroke-opacity:1"
d="m 131.91218,89.300371 h -7.7476"
id="path28" />
<path
style="fill:#039600;fill-opacity:0.254202;stroke:#000000;stroke-width:1.9369;stroke-miterlimit:3;stroke-dasharray:none;stroke-opacity:1"
d="m 166.77639,124.16458 v 7.7476"
id="path33" />
<path
style="fill:#039600;fill-opacity:0.254202;stroke:#000000;stroke-width:1.9369;stroke-miterlimit:3;stroke-dasharray:none;stroke-opacity:1"
d="m 159.02878,124.16458 v 7.7476"
id="path34" />
<path
style="fill:#039600;fill-opacity:0.254202;stroke:#000000;stroke-width:1.9369;stroke-miterlimit:3;stroke-dasharray:none;stroke-opacity:1"
d="m 151.28118,124.16458 v 7.7476"
id="path35" />
<path
style="fill:#039600;fill-opacity:0.254202;stroke:#000000;stroke-width:1.9369;stroke-miterlimit:3;stroke-dasharray:none;stroke-opacity:1"
d="m 143.53358,124.16458 v 7.7476"
id="path36" />
<path
style="fill:#039600;fill-opacity:0.254202;stroke:#000000;stroke-width:1.9369;stroke-miterlimit:3;stroke-dasharray:none;stroke-opacity:1"
d="m 135.78597,124.16458 v 7.7476"
id="path37" />
<path
style="fill:#039600;fill-opacity:0.254202;stroke:#000000;stroke-width:1.9369;stroke-miterlimit:3;stroke-dasharray:none;stroke-opacity:1"
d="m 131.91218,166.77639 h -7.7476"
id="path38" />
<path
style="fill:#039600;fill-opacity:0.254202;stroke:#000000;stroke-width:1.9369;stroke-miterlimit:3;stroke-dasharray:none;stroke-opacity:1"
d="m 131.91218,159.02878 h -7.7476"
id="path39" />
<path
style="fill:#039600;fill-opacity:0.254202;stroke:#000000;stroke-width:1.9369;stroke-miterlimit:3;stroke-dasharray:none;stroke-opacity:1"
d="m 131.91218,151.28118 h -7.7476"
id="path40" />
<path
style="fill:#039600;fill-opacity:0.254202;stroke:#000000;stroke-width:1.9369;stroke-miterlimit:3;stroke-dasharray:none;stroke-opacity:1"
d="m 131.91218,143.53358 h -7.7476"
id="path41" />
<path
style="fill:#039600;fill-opacity:0.254202;stroke:#000000;stroke-width:1.9369;stroke-miterlimit:3;stroke-dasharray:none;stroke-opacity:1"
d="m 131.91218,135.78597 h -7.7476"
id="path42" />
</g>
</svg>

New image (13 KiB)

View File

@@ -1,4 +1,6 @@
+import copy
 import re
+from typing import Optional
 from bs4 import BeautifulSoup, Tag
 from files.helpers.config.const import SITE_FULL_IMAGES
 from files.helpers.marseyfx.tokenizer import GroupToken, NumberLiteralToken, StringLiteralToken, Token, Tokenizer
@@ -19,9 +21,14 @@ def modifier(fn):
     def wrapper(*args, **kwargs):
         slf = args[0]
+        ctx = ModifierContextFrame(fn.__name__)
+        slf.context_frames.insert(0, ctx)
         slf.child = slf.container
-        slf.container = slf.child.wrap(slf.soup.new_tag('div', attrs={'class': f'marseyfx-modifier marseyfx-modifier-{fn.__name__}'}))
-        return fn(*args, **kwargs)
+        slf.container = slf.child.wrap(slf.soup.new_tag('div', attrs={'class': f'marseyfx-modifier marseyfx-modifier-{ctx.name}'}))
+        slf.add_child_class(f'marseyfx-modifier-{ctx.name}-self')
+        res = fn(*args, **kwargs)
+        slf.context_frames.pop(0)
+        return res
     return wrapper

 def heavy(fn):
@@ -31,17 +38,27 @@ def heavy(fn):
         return fn(*args, **kwargs)
     return wrapper

+class ModifierContextFrame:
+    name: str
+
+    def __init__(self, name: str):
+        self.name = name
+
 class Modified:
     soup: BeautifulSoup
     container: Tag
     child: Tag
     tokenizer: Tokenizer
     heavy_count = 0
+    context_frames: list[ModifierContextFrame]

     def __init__(self, el, tokenizer):
         self.soup = BeautifulSoup()
         self.container = el
         self.tokenizer = tokenizer
+        self.context_frames = []
+
+    def ctx(self):
+        return self.context_frames[0] if len(self.context_frames) > 0 else None

     def add_class(self, class_: str):
         if not 'class' in self.container.attrs:
@@ -58,15 +75,21 @@ class Modified:
     def apply_modifiers(self, modifiers: list[Modifier]):
         for modifier in modifiers:
             if modifier.name in modifier_whitelist:
-                getattr(self, modifier.name)(*modifier.args)
+                getattr(self, modifier.name)(*map(GroupToken.unwrap, modifier.args))

     # Using this instead of throwing everything in a string and then parsing it helps
     # mitigate the risk of XSS attacks
     def image(self, name: str):
+        filename = name
+        if not '.' in filename:
+            filename += '.webp'
+
         image = self.soup.new_tag(
             'img',
             loading='lazy',
-            src=f'{SITE_FULL_IMAGES}/i/{name}.webp',
+            src=f'{SITE_FULL_IMAGES}/i/{filename}',
             attrs={'class': f'marseyfx-image marseyfx-image-{name}'}
         )
@@ -90,9 +113,39 @@ class Modified:
         self.container.attrs['style'] = style

+    def meme_text(self, text: str, class_: Optional[str] = None):
+        attrs = {}
+        if class_ is not None:
+            attrs = {'class': f'marseyfx-memetext-{class_}'}
+
+        tag = self.soup.new_tag(
+            'span',
+            attrs=attrs
+        )
+        tag.string = text
+        self.overlay(tag)
+
+    def create_other(self, other: GroupToken = None):
+        wrapper = self.soup.new_tag('div', attrs={'class': f'marseyfx-modifier-{self.ctx().name}-other'})
+        if other is None:
+            return wrapper
+        other = other.wrap()
+        other_emoji = parser.parse_from_token(self.tokenizer, other)
+        if other_emoji is None:
+            return wrapper
+        other_emoji.is_primary = False
+        return other_emoji.create_el(self.tokenizer).wrap(wrapper)
+
     @modifier
     def pat(self):
-        self.overlay(self.image('pat'))
+        self.overlay(self.image('hand'))

     @modifier
     def love(self):
@@ -107,18 +160,33 @@ class Modified:
     def genocide(self):
         pass

+    @modifier
+    def party(self):
+        pass
+
     @modifier
     def says(self, msg):
         if not isinstance(msg, StringLiteralToken):
             return
-        self.overlay(self.image('says'))
-        self.container.append(self.soup.new_tag(
-            'span',
-            string=msg.value,
-            attrs={'class': 'marseyfx-modifier-says-text'}
+        container = self.soup.new_tag(
+            'div',
+            attrs={'class': 'marseyfx-modifier-says-container'}
+        )
+        self.container.append(container)
+
+        container.append(self.soup.new_tag(
+            'div',
+            attrs={'class': 'marseyfx-modifier-says-nub'}
         ))
+
+        tag = self.soup.new_tag(
+            'span',
+            attrs={'class': 'marseyfx-modifier-says-text'}
+        )
+        tag.string = msg.value
+        container.append(tag)

     @modifier
     def fallover(self):
         self.container = self.container.wrap(self.soup.new_tag(
@@ -142,54 +210,106 @@ class Modified:
             attrs={'class': 'marseyfx-modifier-enraged-underlay'}
         ))

     @heavy
     @modifier
-    def highcontrast(self):
-        pass
-
-    @heavy
-    @modifier
-    def wavy(self):
-        self.container.wrap(self.soup.new_tag('svg'))
-
-    @modifier
-    def toptext(self, text: StringLiteralToken):
-        if not isinstance(text, StringLiteralToken):
-            return
-
-        self.overlay(self.soup.new_tag(
-            'span',
-            string=text.value,
-            attrs={'class': 'marseyfx-modifier-toptext-text'}
-        ))
+    def meme(self, toptext: Optional[StringLiteralToken] = None, bottomtext: Optional[StringLiteralToken] = None):
+        if isinstance(toptext, StringLiteralToken):
+            self.meme_text(toptext.value, 'toptext')
+
+        if isinstance(bottomtext, StringLiteralToken):
+            self.meme_text(bottomtext.value, 'bottomtext')

     @modifier
     def bottomtext(self, text: StringLiteralToken):
         if not isinstance(text, StringLiteralToken):
             return

-        self.overlay(self.soup.new_tag(
+        tag = self.soup.new_tag(
             'span',
-            string=text.value,
             attrs={'class': 'marseyfx-modifier-bottomtext-text'}
-        ))
+        )
+        tag.string = text.value
+        self.overlay(tag)

     @modifier
-    def spin(self, speed: NumberLiteralToken):
-        self.add_style('--marseyfx-spin-peroid-multiplier: ' + (1/speed.value) + ';')
+    def spin(self, speed=None):
+        if not isinstance(speed, NumberLiteralToken):
+            return
+        self.add_style(f'animation-duration: {1/speed.value}s;')

     @modifier
     def triumphs(self, other: GroupToken):
+        other = other.wrap()
         other_emoji = parser.parse_from_token(self.tokenizer, other)
-        print(f'Other emoji: {other_emoji} / Token: {other}')
         if other_emoji is None:
             return

         self.add_child_class('marseyfx-modifier-triumphs-self')
-
-        other = other_emoji.create_el().wrap(
+        other_emoji.is_primary = False
+        other = other_emoji.create_el(self.tokenizer).wrap(
             self.soup.new_tag('div', attrs={'class': 'marseyfx-modifier-triumphs-other'})
         )

         self.underlay(other)
+
+    @modifier
+    def nested(self, inside: GroupToken):
+        inside = inside.wrap()
+        inside_emoji = parser.parse_from_token(self.tokenizer, inside)
+        if inside_emoji is None:
+            return
+
+        inside_emoji.is_primary = False
+        inside = inside_emoji.create_el(self.tokenizer).wrap(
+            self.soup.new_tag('div', attrs={'class': 'marseyfx-modifier-nested-other'})
+        )
+
+        self.underlay(inside)
+        self.add_child_class('marseyfx-modifier-nested-side')
+
+        child = self.child
+        self.child = child.wrap(self.soup.new_tag('div', attrs={'class': 'marseyfx-modifier-nested-outer-container'}))
+        other_side = copy.copy(child)
+        self.child.append(other_side)
+
+    @modifier
+    def morph(self, other: GroupToken):
+        self.add_child_class('marseyfx-modifier-morph-self')
+        other = other.wrap()
+        other_emoji = parser.parse_from_token(self.tokenizer, other)
+        if other_emoji is None:
+            return
+
+        other_emoji.is_primary = False
+        other = other_emoji.create_el(self.tokenizer).wrap(
+            self.soup.new_tag('div', attrs={'class': 'marseyfx-modifier-morph-other'})
+        )
+
+        self.container.append(other)
+
+    @heavy
+    @modifier
+    def bulge(self, strength: NumberLiteralToken = None):
+        self.child = self.child.wrap(self.soup.new_tag('svg', attrs={'class': 'marseyfx-modifier-bulge-container'}))
+
+    @modifier
+    def prohibition(self):
+        self.overlay(self.image('prohibition.svg'))
+
+    @modifier
+    def snipe(self):
+        self.overlay(self.image('scope.svg'))
+        self.add_child_class('marseyfx-modifier-snipe-target')
+
+    @modifier
+    def fucks(self, other: GroupToken):
+        other = self.create_other(other)
+        self.container.append(other)

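For reference, this is roughly what the reworked @modifier decorator now builds around each modified emoji: a wrapper div carrying marseyfx-modifier-<name>, with the previous container tagged as that modifier's -self child. A standalone bs4 sketch, not repo code; the class names are taken from the diff above and the printed output is approximate:

from bs4 import BeautifulSoup

soup = BeautifulSoup('', 'html.parser')
child = soup.new_tag('img', attrs={'class': 'marseyfx-image'})
soup.append(child)

# roughly what one @modifier pass does for a modifier named 'spin':
container = child.wrap(soup.new_tag('div', attrs={'class': 'marseyfx-modifier marseyfx-modifier-spin'}))
child['class'] += ' marseyfx-modifier-spin-self'

print(soup.prettify())
# <div class="marseyfx-modifier marseyfx-modifier-spin">
#  <img class="marseyfx-image marseyfx-modifier-spin-self"/>
# </div>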
View File

@@ -2,8 +2,8 @@ from tokenize import Token
 from bs4 import BeautifulSoup

 from files.helpers.config.const import SITE_FULL_IMAGES
-from files.helpers.marseyfx.tokenizer import ArgsToken, DotToken, GroupToken, Tokenizer, WordToken
-from files.helpers.marseyfx.modifiers import Modified, Modifier
+from files.helpers.marseyfx.tokenizer import ArgsToken, DotToken, GroupToken, NumberLiteralToken, Tokenizer, WordToken
+from files.helpers.marseyfx.modifiers import Modified, Modifier, modifier_whitelist

 emoji_replacers = {
     '!': 'is_flipped',
@@ -18,8 +18,9 @@ class Emoji:
     is_flipped = False
     is_user = False
     modifiers: list[Modifier]
+    is_primary = True

-    def __init__(self, name: str, modifiers, token: Token):
+    def __init__(self, name: str, modifiers, token: Token, **args):
         for symbol, value in emoji_replacers.items():
             if symbol in name:
                 name = name.replace(symbol, '')
@@ -28,25 +29,42 @@ class Emoji:
         self.name = name
         self.modifiers = modifiers
         self.token = token
+        self.is_primary = args.get('is_primary', True)

-    def create_el(self):
+    def create_el(self, tokenizer: Tokenizer):
         soup = BeautifulSoup()
         el = soup.new_tag(
             'img',
             loading='lazy',
             src=f'{SITE_FULL_IMAGES}/e/{self.name}.webp',
-            attrs={'class': f'marseyfx-emoji marseyfx-image'}
+            attrs={
+                'class': f'marseyfx-emoji marseyfx-image',
+            }
         )
         soup.append(el)

         el = el.wrap(
-            soup.new_tag('div', attrs={'class': 'marseyfx-emoji-container'})
+            soup.new_tag('div', attrs={
+                'class': 'marseyfx-emoji-container'
+            })
         )

-        mod = Modified(el)
+        mod = Modified(el, tokenizer)
         mod.apply_modifiers(self.modifiers)

-        container = soup.new_tag('div', attrs={'class': 'marseyfx-container'})
+        container_attrs = {
+            'class': 'marseyfx-container',
+        }
+
+        if self.is_primary:
+            container_attrs |= {
+                'data-bs-toggle': 'tooltip',
+                'title': tokenizer.str
+            }
+
+        container = soup.new_tag('div', attrs=container_attrs)

         if (self.is_big):
             container['class'].append(' marseyfx-big')
@@ -55,8 +73,7 @@ class Emoji:

         return mod.container.wrap(container)

-def parse_emoji(str: str):
-    tokenizer = Tokenizer(str)
+def parse_emoji(tokenizer: Tokenizer):
     token = tokenizer.parse_next_tokens()

     if len(tokenizer.errors) > 0 or token is None:
@@ -77,8 +94,8 @@ def parse_from_token(tokenizer: Tokenizer, token: GroupToken):
     emoji = token.children[0]

-    if not isinstance(emoji, WordToken):
-        tokenizer.error('Malformed token -- Expected an emoji (word token)')
+    if not isinstance(emoji, WordToken) and not isinstance(emoji, NumberLiteralToken):
+        tokenizer.error('Malformed token -- Expected an emoji (word token) or number literal token')
         return

     modifiers = []
@@ -96,12 +113,16 @@ def parse_from_token(tokenizer: Tokenizer, token: GroupToken):
             tokenizer.error('Malformed token -- Expected a modifier name (word token)')
             return

+        if not modifier.value in modifier_whitelist:
+            tokenizer.error(f'Unknown modifier: {modifier.value}')
+            return
+
         if not i + 2 < len(token.children) or not isinstance(token.children[i + 2], ArgsToken):
             modifiers.append(Modifier(modifier.value, []))
             i += 2
         else:
             args = token.children[i + 2]
-            modifiers.append(Modifier(modifier.value, *args.children))
+            modifiers.append(Modifier(modifier.value, args.children))
             i += 3

-    return Emoji(emoji.value, modifiers, token)
+    return Emoji(tokenizer.str[emoji.span[0]:emoji.span[1]], modifiers, token)

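The parser entry points changed shape here: parse_emoji now takes an already-constructed Tokenizer instead of a raw string, and create_el needs that tokenizer so the primary emoji's container can carry the original source as its tooltip title. A hedged usage sketch; the emoji string is a made-up example and this assumes the repo's modules are importable:

from files.helpers.marseyfx.tokenizer import Tokenizer
from files.helpers.marseyfx.parser import parse_emoji

emoji_str = 'marsey.spin(2)'               # hypothetical MarseyFX body, outer colons already stripped
tokenizer = Tokenizer(emoji_str)
success, emoji, _ = parse_emoji(tokenizer)  # previously parse_emoji took the raw string
if success:
    html = emoji.create_el(tokenizer)       # tokenizer.str becomes the tooltip title on the container
elif tokenizer.errors:
    print('\n'.join(map(str, tokenizer.errors)))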
View File

@@ -5,10 +5,14 @@ class TokenizerError:
     index: int
     error: str

-    def __init__(self, index: int, error: str):
+    def __init__(self, tokenizer, index: int, error: str):
+        self.tokenizer = tokenizer
         self.index = index
         self.error = error

+    def __str__(self):
+        return f'{self.error}\n {self.tokenizer.str}\n {" " * self.index}^'
+
 class Tokenizer:
     str: str
     index: int
@@ -23,6 +27,9 @@ class Tokenizer:
         return self.index < len(self.str)

     def peek(self):
+        if not self.has_next():
+            self.error('Unexpected end of input')
+            return None
         return self.str[self.index]

     def eat(self):
@@ -34,7 +41,7 @@
         self.index -= 1

     def error(self, error: str):
-        self.errors.append(TokenizerError(self.index, error))
+        self.errors.append(TokenizerError(self, self.index, error))

     def token_to_string(self, token):
         return self.str[token.span[0]:token.span[1]]
@@ -43,7 +50,9 @@
         start = self.index
         tokens = []
         while self.has_next():
-            if NumberLiteralToken.can_parse(self):
+            if self.peek() == ' ':
+                self.eat()
+            elif NumberLiteralToken.can_parse(self):
                 tokens.append(NumberLiteralToken.parse(self))
             elif WordToken.can_parse(self):
                 tokens.append(WordToken.parse(self))
@@ -65,6 +74,12 @@
 class Token:
     span: tuple[int, int]

+    def wrap(self):
+        if isinstance(self, GroupToken):
+            return self
+        else:
+            return GroupToken(self.span, [self])
+
     @staticmethod
     @abstractmethod
     def can_parse(tokenizer: Tokenizer) -> bool:
@@ -138,6 +153,10 @@ class NumberLiteralToken(Token):
     @staticmethod
     def can_parse(tokenizer: Tokenizer):
+        return re.fullmatch(r'[-\d]', tokenizer.peek())
+
+    @staticmethod
+    def can_parse_next(tokenizer: Tokenizer):
         return re.fullmatch(r'[-\d\.]', tokenizer.peek())

     @staticmethod
@@ -145,7 +164,7 @@ class NumberLiteralToken(Token):
         start = tokenizer.index
         value = ''
         while tokenizer.has_next():
-            if NumberLiteralToken.can_parse(tokenizer):
+            if NumberLiteralToken.can_parse_next(tokenizer):
                 value += tokenizer.eat()
             else:
                 break
@@ -179,8 +198,16 @@ class GroupToken(Token):
     def __init__(self, span: tuple[int, int], children: list[Token]):
         self.children = children
+        # this span is probably wrong tbh but idc
         self.span = span

+    def unwrap(self):
+        if len(self.children) == 1:
+            return self.children[0]
+        else:
+            return self
+
 class ArgsToken(Token):
     children: list[GroupToken]

     def __init__(self, span: tuple[int, int], children: list[Token]):
@@ -196,13 +223,14 @@ class ArgsToken(Token):
         start = tokenizer.index
         tokens = []
         while tokenizer.has_next():
+            if tokenizer.peek() == '(':
+                tokenizer.eat()
             if tokenizer.peek() == ')':
                 tokenizer.eat()
                 break
             elif tokenizer.peek() == ',':
                 tokenizer.eat()
             else:
-                tokenizer.eat()
-                tokens.extend(tokenizer.parse_next_tokens())
+                tokens.append(tokenizer.parse_next_tokens())

         return ArgsToken((start, tokenizer.index), tokens)

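TokenizerError now keeps a reference to its Tokenizer so __str__ can point a caret at the offending position in the source string. A small standalone re-implementation of just that formatting, using the same f-string layout as the diff (class and variable names here are illustrative, and the printed alignment is approximate):

class CaretError:
    def __init__(self, src: str, index: int, error: str):
        self.src, self.index, self.error = src, index, error

    def __str__(self):
        # same layout as TokenizerError.__str__ above
        return f'{self.error}\n {self.src}\n {" " * self.index}^'

print(CaretError('marsey.spin(', 12, 'Unexpected end of input'))
# Unexpected end of input
#  marsey.spin(
#              ^   (caret lands one column past the end of the input)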
View File

@@ -52,7 +52,7 @@ mute_regex = re.compile("\/mute @?([\w-]{1,30}) ([0-9]+)", flags=re.A|re.I)
 emoji_regex = re.compile(f"<p>\s*(:[!#@\w\-]{{1,72}}:\s*)+<\/p>", flags=re.A)
 emoji_regex2 = re.compile(f'(?<!"):([!#@\w\-]{{1,72}}?):(?!([^<]*<\/(code|pre)>|[^`]*`))', flags=re.A)

-marseyfx_emoji_regex = re.compile(':[^\s].{0,98}?[^\\\\]:', flags=re.A)
+marseyfx_emoji_regex = re.compile(':[\w#!].{0,98}?[^\\\\]:', flags=re.A)

 snappy_url_regex = re.compile('<a href="(https?:\/\/.+?)".*?>(.+?)<\/a>', flags=re.A)

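The tightened marseyfx_emoji_regex requires the character right after the opening colon to be a word character, '#', or '!', so stray colons in ordinary prose (emoticons and the like) are less likely to be picked up as MarseyFX spans. A quick comparison of the two patterns, with illustrative strings only:

import re

old_pattern = re.compile(':[^\s].{0,98}?[^\\\\]:', flags=re.A)
new_pattern = re.compile(':[\w#!].{0,98}?[^\\\\]:', flags=re.A)

text = ':-) see you at ten :-('
print(bool(old_pattern.search(text)), bool(new_pattern.search(text)))   # True False
print(bool(new_pattern.search(':marsey.spin(2):')))                     # True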
View File

@@ -8,6 +8,7 @@ from typing_extensions import deprecated
 from urllib.parse import parse_qs, urlparse, unquote, ParseResult, urlencode, urlunparse
 import time
 from files.helpers.marseyfx.parser import parse_emoji
+from files.helpers.marseyfx.tokenizer import Tokenizer

 from sqlalchemy.sql import func
@@ -129,7 +130,7 @@ def build_url_re(tlds, protocols):
     """
     return re.compile(
         r"""\(*# Match any opening parentheses.
-        \b(?<![@.])(?:(?:{0}):/{{0,3}}(?:(?:\w+:)?\w+@)?)?# http://
+        \b(?<![@.#:])(?:(?:{0}):/{{0,3}}(?:(?:\w+:)?\w+@)?)?# http://
         ([\w-]+\.)+(?:{1})(?:\:[0-9]+)?(?!\.\w)\b# xx.yy.tld(:##)?
         (?:[/?][^#\s\{{\}}\|\\\^\[\]`<>"]*)?
         # /path/zz (excluding "unsafe" chars from RFC 1738,
@@ -274,18 +275,56 @@ def find_all_emote_endings(word):
     return endings, word

-def render_emojis(markup: str):
-    emojis_used = set()
+class RenderEmojisResult:
+    emojis_used: set[str]
+    heavy_count = 0
+    tags: list[str]
+
+    def __init__(self):
+        self.emojis_used = set()
+        self.tags = []
+
+    def update(self, other):
+        self.emojis_used |= other.emojis_used
+        self.heavy_count += other.heavy_count
+        self.tags.extend(other.tags)
+
+def render_emojis(markup: str, **kwargs):
+    result = RenderEmojisResult()
+    last_match_end = 0
+
+    golden = kwargs.get('golden', True)
+    permit_big = kwargs.get('permit_big', True)

     for emoji_match in marseyfx_emoji_regex.finditer(markup):
-        emoji_str = emoji_match.group()[1:-1] # Cut off colons
-        success, emoji, _ = parse_emoji(emoji_str)
-        if success:
-            emojis_used.add(emoji.name)
-            emoji_html = str(emoji.create_el())
-            markup = markup.replace(emoji_match.group(), emoji_html)
+        previous_text = markup[last_match_end:emoji_match.start()]
+        if previous_text != '':
+            result.tags.append(previous_text)
+        last_match_end = emoji_match.end()

-    return markup, emojis_used
+        emoji_str = emoji_match.group()[1:-1] # Cut off colons
+        tokenizer = Tokenizer(emoji_str)
+        success, emoji, _ = parse_emoji(tokenizer)
+
+        if success:
+            result.emojis_used.add(emoji.name)
+
+            if not permit_big:
+                emoji.is_big = False
+
+            emoji_html = emoji.create_el(tokenizer)
+            result.tags.append(emoji_html)
+
+        if len(tokenizer.errors) > 0:
+            soup = BeautifulSoup()
+            err_tag = soup.new_tag('pre', attrs={'class': 'marseyfx-error'})
+            nl = "\n "
+            err_tag.string = 'MarseyFX error:' + nl + nl.join(map(str,tokenizer.errors))
+            result.tags.append(err_tag)
+            #result.tags.append(f':{emoji_str}:')
+
+    result.tags.append(markup[last_match_end:])
+    return result

 @deprecated("Use the new one")
 def old_render_emoji(html, regexp, golden, emojis_used, b=False, is_title=False):
@@ -554,11 +593,6 @@ def sanitize(sanitized, golden=True, limit_pings=0, showmore=False, count_emojis
     sanitized = video_sub_regex.sub(r'<p class="resizable"><video controls preload="none" src="\1"></video></p>', sanitized)
     sanitized = audio_sub_regex.sub(r'<audio controls preload="none" src="\1"></audio>', sanitized)

-    if count_emojis:
-        for emoji in g.db.query(Emoji).filter(Emoji.submitter_id==None, Emoji.name.in_(emojis_used)):
-            emoji.count += 1
-            g.db.add(emoji)
-
     sanitized = sanitized.replace('<p></p>', '')

     allowed_css_properties = allowed_styles.copy()
@@ -574,9 +608,8 @@ def sanitize(sanitized, golden=True, limit_pings=0, showmore=False, count_emojis
             parse_email=False, url_re=url_re)]
     ).clean(sanitized)

-    sanitized, emojis_used = render_emojis(sanitized)
-
-    #doing this here cuz of the linkifyfilter right above it (therefore unifying all link processing logic)
+    #doing this here cuz of the linkifyfilter right above it (therefore unifying all link processing logic) <-- i have no clue what this means lol
     soup = BeautifulSoup(sanitized, 'lxml')

     has_transform = bool(soup.select('[style*=transform i]'))
@@ -660,9 +693,6 @@
         html = f'<p class="resizable yt">{html}</p>'
         sanitized = sanitized.replace(i.group(0), html)

-    if '<pre>' not in sanitized and blackjack != "rules":
-        sanitized = sanitized.replace('\n','')
-
     if showmore:
         # Insert a show more button if the text is too long or has too many paragraphs
         CHARLIMIT = 3000
@@ -708,7 +738,9 @@ def filter_emojis_only(title, golden=True, count_emojis=False):
     title = remove_cuniform(title)

-    title, emojis_used = render_emojis(title) #old_render_emoji(title, emoji_regex2, golden, emojis_used, is_title=True)
+    res = render_emojis(title) #old_render_emoji(title, emoji_regex2, golden, emojis_used, is_title=True)
+    title = ''.join(map(str, res.tags))

     if count_emojis:
         for emoji in g.db.query(Emoji).filter(Emoji.submitter_id==None, Emoji.name.in_(emojis_used)):