archive_url: suppress errors on remote failure.
ghostarchive has been giving us 500 responses lately. They make an absolute mess of the log for a non-central, opportunistic feature, and we already eat exceptions for archive.org; we merely extend that behavior to ghostarchive.
parent
272e2ee936
commit
adeb7acb38
|
@ -16,12 +16,17 @@ from files.helpers.get import *
|
||||||
from files.helpers.sanitize import *
|
from files.helpers.sanitize import *
|
||||||
from files.helpers.slots import check_slots_command
|
from files.helpers.slots import check_slots_command
|
||||||
|
|
||||||
# Spoofed legacy-IE User-Agent sent with all archive requests —
# presumably to avoid bot filtering of the default python-requests UA; TODO confirm.
headers = {'User-Agent': 'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT)'}
|
|
||||||
|
|
||||||
def _archiveorg(url):
    """Submit *url* to web.archive.org and ghostarchive.org, best-effort.

    Both requests are fire-and-forget: any failure (timeout, connection
    error, etc.) is deliberately swallowed, because archiving is an
    opportunistic, non-central feature and a remote outage must neither
    abort the caller nor spam the logs.

    Relies on module-level ``headers`` and ``proxies`` and the imported
    ``requests`` library.
    """
    try:
        requests.get(f'https://web.archive.org/save/{url}',
                     headers=headers, timeout=10, proxies=proxies)
    # `except Exception` rather than a bare `except:` so KeyboardInterrupt
    # and SystemExit still propagate; everything else is best-effort.
    except Exception:
        pass

    try:
        requests.post('https://ghostarchive.org/archive2', data={"archive": url},
                      headers=headers, timeout=10, proxies=proxies)
    except Exception:
        pass
|
|
||||||
|
|
||||||
|
|
||||||
def archive_url(url):
|
def archive_url(url):
|
||||||
|
|
Loading…
Reference in New Issue