import aiohttp
import asyncio
import json

import sys
import os
import math
import random
import shelve
import io

from aiohttp_retry import RetryClient, ExponentialRetry
from collections import OrderedDict
from data import config, db

import logging

logging.basicConfig(level=logging.WARNING)
def copy_file_obj(file_obj):
    """Duplicate a readable file object into an independent BytesIO.

    The source object's position is rewound to the start afterwards so
    it can be read again by other consumers.
    """
    data = file_obj.read()

    # Rewind the source so later readers still see the full content.
    file_obj.seek(0)

    return io.BytesIO(data)
|
|
|
|
class DramaClient:
    """Minimal async client for the rdrama.net JSON API.

    All requests are authenticated with ``config["api_token"]`` and go
    through a shared aiohttp session.
    """

    BASE_URL = "https://rdrama.net"

    def __init__(self, client=None):
        # Caller may inject its own session (e.g. a RetryClient with
        # ExponentialRetry) for custom retry behavior; otherwise a plain
        # aiohttp session is created.
        self.client = client or aiohttp.ClientSession()
        self.max_retries = 5  # upper bound on POST attempts

    async def get(self, endpoint):
        """GET ``endpoint`` and return the decoded JSON body.

        Exits the process on any non-200 response, so callers can assume
        the returned payload came from a successful request.
        """
        print("GET", endpoint)
        print(config["api_token"])

        async with self.client.get(
            f"{self.BASE_URL}{endpoint}",
            headers={"Authorization": config["api_token"]},
        ) as r:
            if r.status != 200:
                print("Error!", r, r.status, await r.text())
                # Deliberately fatal: this client treats any GET failure
                # as unrecoverable for the calling bot.
                sys.exit(1)

            return await r.json()

    async def post(self, endpoint, data=None, images=None):
        """POST form ``data`` (and optional webp ``images``) to ``endpoint``.

        Sleeps 5 s before every attempt (including the first) to pace
        requests, retries up to ``self.max_retries`` times on any
        failure, and re-raises the last exception if all attempts fail.
        Returns the decoded JSON body on success.

        NOTE(review): on retry, file objects in ``images`` may already be
        consumed from the previous attempt — callers appear to pass fresh
        copies (see ``copy_file_obj``); confirm before relying on retries
        for image posts.
        """
        for attempt in range(self.max_retries):
            # Throttle: wait before every attempt, including the first.
            await asyncio.sleep(5)
            try:
                form_data = aiohttp.FormData()

                if data is not None:
                    for key, value in data.items():
                        form_data.add_field(key, str(value))

                if images is not None:
                    for file in images:
                        form_data.add_field(
                            'file',
                            file,
                            filename='image.webp',
                            content_type='image/webp',
                        )

                async with self.client.post(
                    f"{self.BASE_URL}{endpoint}",
                    data=form_data,
                    headers={"Authorization": config["api_token"]},
                ) as r:
                    if r.status != 200:
                        print("Error!", r, r.status, await r.text())
                        raise Exception("HTTP error")  # trigger a retry
                    return await r.json()
            except Exception as e:
                if attempt < self.max_retries - 1:
                    # Record the failure instead of swallowing it, then
                    # move on to the next attempt.
                    print(f"POST attempt {attempt + 1} failed, retrying")
                    continue
                # Final attempt failed: surface the error to the caller.
                print("Exception", data)
                print(e)
                raise e

    async def fetch_new_comments(self):
        """Return comments newer than ``db["last_processed_id"]``, oldest first.

        On first run (no checkpoint stored) the newest id from page 1 is
        recorded and ``[]`` is returned, so pre-existing comments are
        never processed.
        """
        comments = []

        earliest_id = math.inf
        page = 1

        if "last_processed_id" not in db:
            page_comments = await self.fetch_page(1)
            db["last_processed_id"] = max(c["id"] for c in page_comments)
            db.commit()
            return []

        # Fetch comments until we find the last one processed.
        while earliest_id > db["last_processed_id"]:
            page_comments = await self.fetch_page(page)

            if len(page_comments) == 0:
                break

            earliest_id = min(c["id"] for c in page_comments)
            comments += [
                c for c in page_comments if c["id"] > db["last_processed_id"]
            ]

            page += 1

        if not comments:
            return []

        db["last_processed_id"] = max(c["id"] for c in comments)
        db.commit()

        # New comments may have pushed others to page n+1 while fetching,
        # so the same comment can appear twice. Dedupe by id, keeping the
        # first occurrence (dicts preserve insertion order); the original
        # second OrderedDict pass was redundant.
        comments = list({c["id"]: c for c in comments}.values())

        # Oldest first.
        comments.reverse()

        return comments

    async def fetch_page(self, page):
        """Fetch one page of the sitewide comment listing."""
        return (await self.get(f"/comments?page={page}"))["data"]

    async def reply(self, comment, body, images=None):
        """Post ``body`` as a reply to ``comment``, with optional images.

        A random number in shrinking <sub> tags is appended to defeat
        duplicate-comment filtering.
        """
        data = {
            "parent_fullname": f"c_{comment['id']}",
            "body": f"{body}<sub><sub><sub><sub><sub>{random.randint(1, 1000000000)}",
        }

        return await self.post("/comment", data=data, images=images)
|