from files.helpers.wrappers import *
import re
import random
import time
from sqlalchemy import *
from sqlalchemy.orm import contains_eager, lazyload
from flask import *
from files.__main__ import app

query_regex = re.compile(r"(\w+):(\S+)")

valid_params = [
	'author',
	'domain',
	'over18'
]

def searchparse(text):
	# Takes text in "filter:term" format and returns a dict of search criteria.

	criteria = {x[0]: x[1] for x in query_regex.findall(text)}

	# Strip recognized filter tokens; whatever remains becomes the free-text 'q' term.
	for x in criteria:
		if x in valid_params:
			text = text.replace(f"{x}:{criteria[x]}", "")

	text = text.strip()

	if text:
		criteria['q'] = text

	return criteria

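# Illustrative example (hypothetical query string): searchparse("author:carp over18:yes space elevators")
# returns {'author': 'carp', 'over18': 'yes', 'q': 'space elevators'}; the recognized filter
# tokens are stripped and whatever is left over becomes the free-text 'q' term.
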

def searchlisting(criteria, v=None, page=1, t="None", sort="top", b=None):

	posts = g.db.query(Submission).options(
		lazyload('*')
	).join(
		Submission.submission_aux,
	).join(
		Submission.author
	)

	# Private posts are only visible to full admins.
	if not (v and v.admin_level == 6): posts = posts.filter(Submission.private == False)

	# Free-text query: every word must appear in the title (case-insensitive).
	if 'q' in criteria:
		words = criteria['q'].split()
		words = [SubmissionAux.title.ilike('%' + x + '%') for x in words]
		posts = posts.filter(*words)

	if 'over18' in criteria:
		posts = posts.filter(Submission.over_18 == True)

	if 'author' in criteria:
		# Only full admins may search posts by a private author.
		if v and v.admin_level == 6:
			posts = posts.filter(Submission.author_id == get_user(criteria['author']).id)
		else:
			posts = posts.filter(
				Submission.author_id == get_user(criteria['author']).id,
				User.is_private == False,
			)

	if 'domain' in criteria:
		domain = criteria['domain']
		# Match the bare domain and its common "www." and "old." prefixes, with or without a path.
		posts = posts.filter(
			or_(
				SubmissionAux.url.ilike("https://" + domain + '/%'),
				SubmissionAux.url.ilike("https://" + domain),
				SubmissionAux.url.ilike("https://www." + domain + '/%'),
				SubmissionAux.url.ilike("https://www." + domain),
				SubmissionAux.url.ilike("https://old." + domain + '/%'),
				SubmissionAux.url.ilike("https://old." + domain)
			)
		)

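	# Illustrative example (hypothetical domain): criteria['domain'] == "example.com" matches
	# https://example.com, https://example.com/page, and the www./old. variants above, but not
	# plain http:// links or other subdomains such as https://blog.example.com.
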
	# Deleted and removed posts are hidden from everyone below admin level 3.
	if not (v and v.admin_level >= 3):
		posts = posts.filter(
			Submission.deleted_utc == 0,
			Submission.is_banned == False,
		)

	if v and v.admin_level >= 4:
		pass
	elif v:
		# Hide posts by users the viewer has blocked and by users who have blocked the viewer.
		blocking = [x[0] for x in g.db.query(
			UserBlock.target_id).filter_by(
			user_id=v.id).all()]

		blocked = [x[0] for x in g.db.query(
			UserBlock.user_id).filter_by(
			target_id=v.id).all()]

		posts = posts.filter(
			Submission.author_id.notin_(blocking),
			Submission.author_id.notin_(blocked),
		)

	# Time window: restrict to posts created after the cutoff implied by t.
	if t:
		now = int(time.time())
		if t == 'hour':
			cutoff = now - 3600
		elif t == 'day':
			cutoff = now - 86400
		elif t == 'week':
			cutoff = now - 604800
		elif t == 'month':
			cutoff = now - 2592000
		elif t == 'year':
			cutoff = now - 31536000
		else:
			cutoff = 0
		posts = posts.filter(Submission.created_utc >= cutoff)

	posts = posts.options(
		contains_eager(Submission.submission_aux),
		contains_eager(Submission.author),
	)

	if sort == "new":
		posts = posts.order_by(Submission.created_utc.desc()).all()
	elif sort == "old":
		posts = posts.order_by(Submission.created_utc.asc()).all()
	elif sort == "controversial":
		posts = sorted(posts.all(), key=lambda x: x.score_disputed, reverse=True)
	elif sort == "top":
		posts = sorted(posts.all(), key=lambda x: x.score, reverse=True)
	elif sort == "bottom":
		posts = sorted(posts.all(), key=lambda x: x.score)
	elif sort == "comments":
		posts = posts.order_by(Submission.comment_count.desc()).all()
	elif sort == "random":
		posts = posts.all()
		posts = random.sample(posts, k=len(posts))
	else:
		abort(400)

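	# The score and score_disputed sorts presumably rely on Python-side properties rather than
	# database columns, which is why those branches call .all() and sort in Python instead of
	# using order_by like the other sorts.
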
	total = len(posts)

	# Slice out one page of 25 plus one extra id so callers can tell whether a next page exists.
	firstrange = 25 * (page - 1)
	secondrange = firstrange + 26
	posts = posts[firstrange:secondrange]

	return total, [x.id for x in posts]

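# Illustrative example (hypothetical values): searchlisting({'q': 'rust', 'over18': 'yes'}, v=None,
# page=1, t='week', sort='new') returns (total, ids), where total counts every match and ids holds
# at most 26 post ids for page 1; a 26th id is the caller's signal that another page exists.
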

def searchcommentlisting(criteria, v=None, page=1, t="None", sort="top"):

	comments = g.db.query(Comment).options(lazyload('*')).filter(Comment.parent_submission != None).join(Comment.comment_aux)

	# Free-text query: every word must appear in the comment body (case-insensitive).
	if 'q' in criteria:
		words = criteria['q'].split()
		words = [CommentAux.body.ilike('%' + x + '%') for x in words]
		comments = comments.filter(*words)

	# Deleted and removed comments are hidden from everyone below admin level 3.
	if not (v and v.admin_level >= 3):
		comments = comments.filter(
			Comment.deleted_utc == 0,
			Comment.is_banned == False)

	if t:
		now = int(time.time())
		if t == 'hour':
			cutoff = now - 3600
		elif t == 'day':
			cutoff = now - 86400
		elif t == 'week':
			cutoff = now - 604800
		elif t == 'month':
			cutoff = now - 2592000
		elif t == 'year':
			cutoff = now - 31536000
		else:
			cutoff = 0
		comments = comments.filter(Comment.created_utc >= cutoff)

	comments = comments.options(contains_eager(Comment.comment_aux))

	if sort == "new":
		comments = comments.order_by(Comment.created_utc.desc()).all()
	elif sort == "old":
		comments = comments.order_by(Comment.created_utc.asc()).all()
	elif sort == "controversial":
		comments = sorted(comments.all(), key=lambda x: x.score_disputed, reverse=True)
	elif sort == "top":
		comments = sorted(comments.all(), key=lambda x: x.score, reverse=True)
	elif sort == "bottom":
		comments = sorted(comments.all(), key=lambda x: x.score)

	total = len(list(comments))

	# Same pagination convention as searchlisting: 25 per page plus one lookahead row.
	firstrange = 25 * (page - 1)
	secondrange = firstrange + 26
	comments = comments[firstrange:secondrange]

	return total, [x.id for x in comments]


@app.get("/search/posts")
@auth_desired
def searchposts(v):

	query = request.values.get("q", '').strip()

	# Fall back to page 1 if the page parameter is missing or not a number.
	try: page = max(1, int(request.values.get("page", 1)))
	except: page = 1

	sort = request.values.get("sort", "top").lower()
	t = request.values.get('t', 'all').lower()

	criteria = searchparse(query)
	total, ids = searchlisting(criteria, v=v, page=page, t=t, sort=sort)

	# A 26th id means another page exists; only the first 25 are rendered.
	next_exists = (len(ids) > 25)
	ids = ids[:25]

	posts = get_posts(ids, v=v)

	# Admins searching by domain also get the matching domain record for the template.
	if v and v.admin_level > 3 and "domain" in criteria:
		domain = criteria['domain']
		domain_obj = get_domain(domain)
	else:
		domain = None
		domain_obj = None

	if request.headers.get("Authorization"): return {"data": [x.json for x in posts]}
	else: return render_template("search.html",
								v=v,
								query=query,
								total=total,
								page=page,
								listing=posts,
								sort=sort,
								t=t,
								next_exists=next_exists,
								domain=domain,
								domain_obj=domain_obj
								)


@app.get("/search/comments")
@auth_desired
def searchcomments(v):

	query = request.values.get("q", '').strip()

	try: page = max(1, int(request.values.get("page", 1)))
	except: page = 1

	sort = request.values.get("sort", "top").lower()
	t = request.values.get('t', 'all').lower()

	criteria = searchparse(query)
	total, ids = searchcommentlisting(criteria, v=v, page=page, t=t, sort=sort)

	next_exists = (len(ids) > 25)
	ids = ids[:25]

	comments = get_comments(ids, v=v)

	if request.headers.get("Authorization"): return [x.json for x in comments]
	else: return render_template("search_comments.html", v=v, query=query, total=total, page=page, comments=comments, sort=sort, t=t, next_exists=next_exists)


@app.get("/search/users")
@auth_desired
def searchusers(v):

	query = request.values.get("q", '').strip()

	# Fall back to page 1 if the page parameter is missing or not a number.
	try: page = max(1, int(request.values.get("page", 1)))
	except: page = 1

	sort = request.values.get("sort", "top").lower()
	t = request.values.get('t', 'all').lower()

	# Normalize the search term for a literal, case-insensitive username match.
	term = query.lstrip('@')
	term = term.replace('\\', '')
	term = term.replace('_', '\\_')

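	# Note: '_' is a single-character wildcard in SQL LIKE/ILIKE, so an unescaped "a_b"
	# would also match "axb"; escaping keeps underscores in usernames literal, and stripping
	# backslashes first prevents user-supplied escape sequences from reaching the pattern.
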
	users = g.db.query(User).options(lazyload('*')).filter(User.username.ilike(f'%{term}%'))

	# Exact (case-insensitive) username matches sort first, then by subscriber count.
	users = users.order_by(User.username.ilike(term).desc(), User.stored_subscriber_count.desc())

	total = users.count()

	# Fetch one extra row beyond the page of 25 to detect whether a next page exists.
	users = [x for x in users.offset(25 * (page - 1)).limit(26)]
	next_exists = (len(users) == 26)
	users = users[:25]

	if request.headers.get("Authorization"): return [x.json for x in users]
	else: return render_template("search_users.html", v=v, query=query, total=total, page=page, users=users, sort=sort, t=t, next_exists=next_exists)