forked from MarseyWorld/MarseyWorld
dont archive site urls
parent 9b7b3fe805
commit 5fa854afd6
@@ -33,14 +33,10 @@ def archiveorg(url):
    except: pass

def archive_url(url):
    if url.startswith(SITE_FULL): return

    gevent.spawn(archiveorg, url)

    if url.startswith('https://twitter.com/'):
        url = url.replace('https://twitter.com/', 'https://nitter.42l.fr/')
        gevent.spawn(archiveorg, url)

    if url.startswith('https://instagram.com/'):
        url = url.replace('https://instagram.com/', 'https://imginn.com/')
        gevent.spawn(archiveorg, url)
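For illustration, a minimal sketch of the guard this hunk introduces, assuming SITE_FULL is the instance's canonical base URL (its value is not shown in this diff); should_archive is a hypothetical helper, not a function from the repository:

# Hedged sketch: SITE_FULL's value and the helper name are assumptions, not repo code.
SITE_FULL = 'https://example-instance.net'  # assumed: canonical base URL of the site

def should_archive(url):
    # mirrors the early return added to archive_url above
    return not url.startswith(SITE_FULL)

assert should_archive('https://twitter.com/someone')           # external link: archived
assert not should_archive(SITE_FULL + '/post/1234/some-post')  # site link: skipped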
@@ -97,7 +93,7 @@ def execute_snappy(post, v):
    body += "\n\n"

-   if post.url:
+   if post.url and not post.url.startswith(SITE_FULL):
        if post.url.startswith('https://old.reddit.com/r/'):
            rev = post.url.replace('https://old.reddit.com/', '')
            rev = f"* [unddit.com](https://unddit.com/{rev})\n"
@@ -128,6 +124,8 @@ def execute_snappy(post, v):

    for href, title in captured:
        if href.startswith(SITE_FULL): continue

        if "Snapshots:\n\n" not in body: body += "Snapshots:\n\n"

        if f'**[{title}]({href})**:\n\n' not in body:
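A self-contained sketch of how the two startswith(SITE_FULL) checks shape the snapshot comment body; build_snapshot_body, its arguments, and the SITE_FULL value are assumptions standing in for the surrounding execute_snappy code, which this diff shows only in part:

# Hedged sketch only: this is not execute_snappy itself, just the filtering logic
# the two guards imply, under assumed names and an assumed SITE_FULL value.
SITE_FULL = 'https://example-instance.net'

def build_snapshot_body(post_url, captured):
    body = ""
    # the post's own link is only listed when it points off-site
    if post_url and not post_url.startswith(SITE_FULL):
        body += f"Snapshots:\n\n**[{post_url}]({post_url})**:\n\n"
    for href, title in captured:
        if href.startswith(SITE_FULL): continue  # never snapshot links back to the site
        if "Snapshots:\n\n" not in body: body += "Snapshots:\n\n"
        if f'**[{title}]({href})**:\n\n' not in body:
            body += f'**[{title}]({href})**:\n\n'
    return body

# A site-internal post URL is dropped; only the external capture is listed.
print(build_snapshot_body(SITE_FULL + '/post/1', [('https://example.org', 'example')]))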