commit
40ff7514c8
|
@ -0,0 +1,7 @@
|
||||||
|
POSTGRES_URI="postgresql://postgres:ChangeMe@postgres.example.tld/overseer"
|
||||||
|
USE_SQLITE=0
|
||||||
|
OVERSEER_LEMMY_DOMAIN="overctrl.example.tld"
|
||||||
|
OVERSEER_LEMMY_USERNAME="overseer"
|
||||||
|
OVERSEER_LEMMY_PASSWORD="LemmyPassword"
|
||||||
|
ADMIN_API_KEY="Password"
|
||||||
|
secret_key="VerySecretKey"
|
|
@ -5,6 +5,9 @@ class Models:
|
||||||
self.response_model_error = api.model('RequestError', {
|
self.response_model_error = api.model('RequestError', {
|
||||||
'message': fields.String(description="The error message for this status code."),
|
'message': fields.String(description="The error message for this status code."),
|
||||||
})
|
})
|
||||||
|
self.response_model_simple_response = api.model('SimpleResponse', {
|
||||||
|
"message": fields.String(default='OK',required=True, description="The result of this operation."),
|
||||||
|
})
|
||||||
self.response_model_suspicious_instances = api.model('SuspiciousInstances', {
|
self.response_model_suspicious_instances = api.model('SuspiciousInstances', {
|
||||||
'domain': fields.String(description="The instance domain"),
|
'domain': fields.String(description="The instance domain"),
|
||||||
'uptime_alltime': fields.Float(description="The instance uptime pct. 100% and thousand of users is unlikely"),
|
'uptime_alltime': fields.Float(description="The instance uptime pct. 100% and thousand of users is unlikely"),
|
||||||
|
@ -14,9 +17,24 @@ class Models:
|
||||||
'active_users_monthly': fields.Integer(description="The amount of active users monthly."),
|
'active_users_monthly': fields.Integer(description="The amount of active users monthly."),
|
||||||
'signup': fields.Boolean(default=False,description="True when subscriptions are open, else False"),
|
'signup': fields.Boolean(default=False,description="True when subscriptions are open, else False"),
|
||||||
'activity_suspicion': fields.Float(description="Local Comments+Posts per User. Higher is worse"),
|
'activity_suspicion': fields.Float(description="Local Comments+Posts per User. Higher is worse"),
|
||||||
|
'activity_suspicion': fields.Float(description="Local Comments+Posts per User. Higher is worse"),
|
||||||
})
|
})
|
||||||
self.response_model_model_SusInstances_get = api.model('SuspiciousInstancesDomainList', {
|
self.response_model_model_Suspicions_get = api.model('SuspiciousInstances', {
|
||||||
'instances': fields.List(fields.Nested(self.response_model_suspicious_instances)),
|
'instances': fields.List(fields.Nested(self.response_model_suspicious_instances)),
|
||||||
'domains': fields.List(fields.String(description="The suspicious domains as a list.")),
|
'domains': fields.List(fields.String(description="The suspicious domains as a list.")),
|
||||||
'csv': fields.String(description="The suspicious domains as a csv."),
|
'csv': fields.String(description="The suspicious domains as a csv."),
|
||||||
})
|
})
|
||||||
|
self.response_model_instances = api.model('InstanceDetails', {
|
||||||
|
'id': fields.Integer(description="The instance id"),
|
||||||
|
'domain': fields.String(description="The instance domain"),
|
||||||
|
'open_registrations': fields.Boolean(description="The instance uptime pct. 100% and thousand of users is unlikely"),
|
||||||
|
'email_verify': fields.Boolean(description="The amount of local posts in that instance"),
|
||||||
|
'approvals': fields.Integer(description="The amount of endorsements this instance has given out"),
|
||||||
|
'endorsements': fields.Integer(description="The amount of endorsements this instance has received"),
|
||||||
|
'guarantor': fields.String(description="The domain of the instance which guaranteed this instance."),
|
||||||
|
})
|
||||||
|
self.response_model_model_Whitelist_get = api.model('WhitelistedInstances', {
|
||||||
|
'instances': fields.List(fields.Nested(self.response_model_instances)),
|
||||||
|
'domains': fields.List(fields.String(description="The instance domains as a list.")),
|
||||||
|
'csv': fields.String(description="The instance domains as a csv."),
|
||||||
|
})
|
|
@ -1,4 +1,13 @@
|
||||||
import overseer.apis.v1.base as base
|
import overseer.apis.v1.base as base
|
||||||
|
import overseer.apis.v1.whitelist as whitelist
|
||||||
|
import overseer.apis.v1.endorsements as endorsements
|
||||||
|
import overseer.apis.v1.guarantees as guarantees
|
||||||
from overseer.apis.v1.base import api
|
from overseer.apis.v1.base import api
|
||||||
|
|
||||||
api.add_resource(base.SusInstances, "/instances")
|
api.add_resource(base.Suspicions, "/instances")
|
||||||
|
api.add_resource(whitelist.Whitelist, "/whitelist")
|
||||||
|
api.add_resource(whitelist.WhitelistDomain, "/whitelist/<string:domain>")
|
||||||
|
api.add_resource(endorsements.Endorsements, "/endorsements/<string:domain>")
|
||||||
|
api.add_resource(endorsements.Approvals, "/approvals/<string:domain>")
|
||||||
|
api.add_resource(guarantees.Guarantors, "/guarantors/<string:domain>")
|
||||||
|
api.add_resource(guarantees.Guarantees, "/guarantees/<string:domain>")
|
||||||
|
|
|
@ -1,8 +1,15 @@
|
||||||
|
import os
|
||||||
from flask import request
|
from flask import request
|
||||||
from flask_restx import Namespace, Resource, reqparse
|
from flask_restx import Namespace, Resource, reqparse
|
||||||
from overseer.flask import cache
|
from overseer.flask import cache, db
|
||||||
from overseer.observer import retrieve_suspicious_instances
|
from overseer.observer import retrieve_suspicious_instances
|
||||||
from loguru import logger
|
from loguru import logger
|
||||||
|
from overseer.classes.instance import Instance
|
||||||
|
from overseer.database import functions as database
|
||||||
|
from overseer import exceptions as e
|
||||||
|
from overseer.utils import hash_api_key
|
||||||
|
from overseer.lemmy import pm_new_api_key, pm_instance
|
||||||
|
from pythorhead import Lemmy
|
||||||
|
|
||||||
api = Namespace('v1', 'API Version 1' )
|
api = Namespace('v1', 'API Version 1' )
|
||||||
|
|
||||||
|
@ -10,13 +17,18 @@ from overseer.apis.models.v1 import Models
|
||||||
|
|
||||||
models = Models(api)
|
models = Models(api)
|
||||||
|
|
||||||
|
handle_bad_request = api.errorhandler(e.BadRequest)(e.handle_bad_requests)
|
||||||
|
handle_forbidden = api.errorhandler(e.Forbidden)(e.handle_bad_requests)
|
||||||
|
handle_unauthorized = api.errorhandler(e.Unauthorized)(e.handle_bad_requests)
|
||||||
|
handle_not_found = api.errorhandler(e.NotFound)(e.handle_bad_requests)
|
||||||
|
|
||||||
# Used to for the flask limiter, to limit requests per url paths
|
# Used to for the flask limiter, to limit requests per url paths
|
||||||
def get_request_path():
|
def get_request_path():
|
||||||
# logger.info(dir(request))
|
# logger.info(dir(request))
|
||||||
return f"{request.remote_addr}@{request.method}@{request.path}"
|
return f"{request.remote_addr}@{request.method}@{request.path}"
|
||||||
|
|
||||||
|
|
||||||
class SusInstances(Resource):
|
class Suspicions(Resource):
|
||||||
get_parser = reqparse.RequestParser()
|
get_parser = reqparse.RequestParser()
|
||||||
get_parser.add_argument("Client-Agent", default="unknown:0:unknown", type=str, required=False, help="The client name and version.", location="headers")
|
get_parser.add_argument("Client-Agent", default="unknown:0:unknown", type=str, required=False, help="The client name and version.", location="headers")
|
||||||
get_parser.add_argument("activity_suspicion", required=False, default=20, type=int, help="How many users per local post+comment to consider suspicious", location="args")
|
get_parser.add_argument("activity_suspicion", required=False, default=20, type=int, help="How many users per local post+comment to consider suspicious", location="args")
|
||||||
|
@ -26,7 +38,7 @@ class SusInstances(Resource):
|
||||||
@api.expect(get_parser)
|
@api.expect(get_parser)
|
||||||
@logger.catch(reraise=True)
|
@logger.catch(reraise=True)
|
||||||
@cache.cached(timeout=10, query_string=True)
|
@cache.cached(timeout=10, query_string=True)
|
||||||
@api.marshal_with(models.response_model_model_SusInstances_get, code=200, description='Suspicious Instances', skip_none=True)
|
@api.marshal_with(models.response_model_model_Suspicions_get, code=200, description='Suspicious Instances', skip_none=True)
|
||||||
def get(self):
|
def get(self):
|
||||||
'''A List with the details of all suspicious instances
|
'''A List with the details of all suspicious instances
|
||||||
'''
|
'''
|
||||||
|
@ -37,3 +49,4 @@ class SusInstances(Resource):
|
||||||
if self.args.domains:
|
if self.args.domains:
|
||||||
return {"domains": [instance["domain"] for instance in sus_instances]},200
|
return {"domains": [instance["domain"] for instance in sus_instances]},200
|
||||||
return {"instances": sus_instances},200
|
return {"instances": sus_instances},200
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,129 @@
|
||||||
|
from overseer.apis.v1.base import *
|
||||||
|
from overseer.classes.instance import Endorsement
|
||||||
|
|
||||||
|
class Approvals(Resource):
|
||||||
|
get_parser = reqparse.RequestParser()
|
||||||
|
get_parser.add_argument("Client-Agent", default="unknown:0:unknown", type=str, required=False, help="The client name and version.", location="headers")
|
||||||
|
get_parser.add_argument("csv", required=False, type=bool, help="Set to true to return just the domains as a csv. Mutually exclusive with domains", location="args")
|
||||||
|
get_parser.add_argument("domains", required=False, type=bool, help="Set to true to return just the domains as a list. Mutually exclusive with csv", location="args")
|
||||||
|
|
||||||
|
@api.expect(get_parser)
|
||||||
|
@cache.cached(timeout=10, query_string=True)
|
||||||
|
@api.marshal_with(models.response_model_model_Whitelist_get, code=200, description='Instances', skip_none=True)
|
||||||
|
@api.response(404, 'Instance not registered', models.response_model_error)
|
||||||
|
def get(self, domain):
|
||||||
|
'''Display all endorsements given by a specific domain
|
||||||
|
'''
|
||||||
|
self.args = self.get_parser.parse_args()
|
||||||
|
instance = database.find_instance_by_domain(domain)
|
||||||
|
if not instance:
|
||||||
|
raise e.NotFound(f"No Instance found matching provided domain. Have you remembered to register it?")
|
||||||
|
instance_details = []
|
||||||
|
for instance in database.get_all_endorsed_instances_by_approving_id(instance.id):
|
||||||
|
instance_details.append(instance.get_details())
|
||||||
|
if self.args.csv:
|
||||||
|
return {"csv": ",".join([instance["domain"] for instance in instance_details])},200
|
||||||
|
if self.args.domains:
|
||||||
|
return {"domains": [instance["domain"] for instance in instance_details]},200
|
||||||
|
return {"instances": instance_details},200
|
||||||
|
|
||||||
|
class Endorsements(Resource):
|
||||||
|
get_parser = reqparse.RequestParser()
|
||||||
|
get_parser.add_argument("Client-Agent", default="unknown:0:unknown", type=str, required=False, help="The client name and version.", location="headers")
|
||||||
|
get_parser.add_argument("csv", required=False, type=bool, help="Set to true to return just the domains as a csv. Mutually exclusive with domains", location="args")
|
||||||
|
get_parser.add_argument("domains", required=False, type=bool, help="Set to true to return just the domains as a list. Mutually exclusive with csv", location="args")
|
||||||
|
|
||||||
|
@api.expect(get_parser)
|
||||||
|
@cache.cached(timeout=10, query_string=True)
|
||||||
|
@api.marshal_with(models.response_model_model_Whitelist_get, code=200, description='Instances', skip_none=True)
|
||||||
|
@api.response(404, 'Instance not registered', models.response_model_error)
|
||||||
|
def get(self, domain):
|
||||||
|
'''Display all endorsements received by a specific domain
|
||||||
|
'''
|
||||||
|
self.args = self.get_parser.parse_args()
|
||||||
|
instance = database.find_instance_by_domain(domain)
|
||||||
|
if not instance:
|
||||||
|
raise e.NotFound(f"No Instance found matching provided domain. Have you remembered to register it?")
|
||||||
|
instance_details = []
|
||||||
|
for instance in database.get_all_approving_instances_by_endorsed_id(instance.id):
|
||||||
|
instance_details.append(instance.get_details())
|
||||||
|
if self.args.csv:
|
||||||
|
return {"csv": ",".join([instance["domain"] for instance in instance_details])},200
|
||||||
|
if self.args.domains:
|
||||||
|
return {"domains": [instance["domain"] for instance in instance_details]},200
|
||||||
|
return {"instances": instance_details},200
|
||||||
|
|
||||||
|
put_parser = reqparse.RequestParser()
|
||||||
|
put_parser.add_argument("apikey", type=str, required=True, help="The sending instance's API key.", location='headers')
|
||||||
|
put_parser.add_argument("Client-Agent", default="unknown:0:unknown", type=str, required=False, help="The client name and version.", location="headers")
|
||||||
|
|
||||||
|
|
||||||
|
@api.expect(put_parser)
|
||||||
|
@api.marshal_with(models.response_model_simple_response, code=200, description='Endorse Instance')
|
||||||
|
@api.response(400, 'Bad Request', models.response_model_error)
|
||||||
|
@api.response(401, 'Invalid API Key', models.response_model_error)
|
||||||
|
@api.response(403, 'Not Guaranteed', models.response_model_error)
|
||||||
|
@api.response(404, 'Instance not registered', models.response_model_error)
|
||||||
|
def put(self, domain):
|
||||||
|
'''Endorse an instance
|
||||||
|
'''
|
||||||
|
self.args = self.put_parser.parse_args()
|
||||||
|
if not self.args.apikey:
|
||||||
|
raise e.Unauthorized("You must provide the API key that was PM'd to your overctrl.dbzer0.com account")
|
||||||
|
instance = database.find_instance_by_api_key(self.args.apikey)
|
||||||
|
if not instance:
|
||||||
|
raise e.NotFound(f"No Instance found matching provided API key and domain. Have you remembered to register it?")
|
||||||
|
if len(instance.guarantors) == 0:
|
||||||
|
raise e.Forbidden("Only guaranteed instances can endorse others.")
|
||||||
|
if instance.domain == domain:
|
||||||
|
raise e.BadRequest("Nice try, but you can't endorse yourself.")
|
||||||
|
unbroken_chain, chainbreaker = database.has_unbroken_chain(instance.id)
|
||||||
|
if not unbroken_chain:
|
||||||
|
raise e.Forbidden(f"Guarantee chain for this instance has been broken. Chain ends at {chainbreaker.domain}!")
|
||||||
|
target_instance = database.find_instance_by_domain(domain=domain)
|
||||||
|
if len(target_instance.guarantors) == 0:
|
||||||
|
raise e.Forbidden("Not Guaranteed instances can be endorsed. Please guarantee for them, or find someone who will.")
|
||||||
|
if not target_instance:
|
||||||
|
raise e.BadRequest("Instance to endorse not found")
|
||||||
|
if database.get_endorsement(target_instance.id,instance.id):
|
||||||
|
return {"message":'OK'}, 200
|
||||||
|
new_endorsement = Endorsement(
|
||||||
|
approving_id=instance.id,
|
||||||
|
endorsed_id=target_instance.id,
|
||||||
|
)
|
||||||
|
db.session.add(new_endorsement)
|
||||||
|
db.session.commit()
|
||||||
|
pm_instance(target_instance.domain, f"Your instance has just been endorsed by {instance.domain}")
|
||||||
|
logger.info(f"{instance.domain} Endorsed {domain}")
|
||||||
|
return {"message":'Changed'}, 200
|
||||||
|
|
||||||
|
|
||||||
|
delete_parser = reqparse.RequestParser()
|
||||||
|
delete_parser.add_argument("apikey", type=str, required=True, help="The sending instance's API key.", location='headers')
|
||||||
|
delete_parser.add_argument("Client-Agent", default="unknown:0:unknown", type=str, required=False, help="The client name and version.", location="headers")
|
||||||
|
|
||||||
|
@api.expect(delete_parser)
|
||||||
|
@api.marshal_with(models.response_model_simple_response, code=200, description='Withdraw Instance Endorsement')
|
||||||
|
@api.response(400, 'Bad Request', models.response_model_error)
|
||||||
|
@api.response(401, 'Invalid API Key', models.response_model_error)
|
||||||
|
@api.response(404, 'Instance not registered', models.response_model_error)
|
||||||
|
def delete(self,domain):
|
||||||
|
'''Withdraw an instance endorsement
|
||||||
|
'''
|
||||||
|
self.args = self.delete_parser.parse_args()
|
||||||
|
if not self.args.apikey:
|
||||||
|
raise e.Unauthorized("You must provide the API key that was PM'd to your overctrl.dbzer0.com account")
|
||||||
|
instance = database.find_instance_by_api_key(self.args.apikey)
|
||||||
|
if not instance:
|
||||||
|
raise e.NotFound(f"No Instance found matching provided API key and domain. Have you remembered to register it?")
|
||||||
|
target_instance = database.find_instance_by_domain(domain=domain)
|
||||||
|
if not target_instance:
|
||||||
|
raise e.BadRequest("Instance from which to withdraw endorsement not found")
|
||||||
|
endorsement = database.get_endorsement(target_instance.id,instance.id)
|
||||||
|
if not endorsement:
|
||||||
|
return {"message":'OK'}, 200
|
||||||
|
db.session.delete(endorsement)
|
||||||
|
db.session.commit()
|
||||||
|
pm_instance(target_instance.domain, f"Oh now. {instance.domain} has just withdrawn the endorsement of your instance")
|
||||||
|
logger.info(f"{instance.domain} Withdrew endorsement from {domain}")
|
||||||
|
return {"message":'Changed'}, 200
|
|
@ -0,0 +1,158 @@
|
||||||
|
from overseer.apis.v1.base import *
|
||||||
|
from overseer.classes.instance import Guarantee, Endorsement
|
||||||
|
|
||||||
|
class Guarantors(Resource):
|
||||||
|
get_parser = reqparse.RequestParser()
|
||||||
|
get_parser.add_argument("Client-Agent", default="unknown:0:unknown", type=str, required=False, help="The client name and version.", location="headers")
|
||||||
|
get_parser.add_argument("csv", required=False, type=bool, help="Set to true to return just the domains as a csv. Mutually exclusive with domains", location="args")
|
||||||
|
get_parser.add_argument("domains", required=False, type=bool, help="Set to true to return just the domains as a list. Mutually exclusive with csv", location="args")
|
||||||
|
|
||||||
|
@api.expect(get_parser)
|
||||||
|
@cache.cached(timeout=10, query_string=True)
|
||||||
|
@api.marshal_with(models.response_model_model_Whitelist_get, code=200, description='Instances', skip_none=True)
|
||||||
|
@api.response(404, 'Instance not registered', models.response_model_error)
|
||||||
|
def get(self, domain):
|
||||||
|
'''Display all guarantees given by a specific domain
|
||||||
|
'''
|
||||||
|
self.args = self.get_parser.parse_args()
|
||||||
|
instance = database.find_instance_by_domain(domain)
|
||||||
|
if not instance:
|
||||||
|
raise e.NotFound(f"No Instance found matching provided domain. Have you remembered to register it?")
|
||||||
|
instance_details = []
|
||||||
|
for guaranteed in database.get_all_guaranteed_instances_by_guarantor_id(instance.id):
|
||||||
|
instance_details.append(guaranteed.get_details())
|
||||||
|
if self.args.csv:
|
||||||
|
return {"csv": ",".join([guaranteed["domain"] for guaranteed in instance_details])},200
|
||||||
|
if self.args.domains:
|
||||||
|
return {"domains": [guaranteed["domain"] for guaranteed in instance_details]},200
|
||||||
|
return {"instances": instance_details},200
|
||||||
|
|
||||||
|
class Guarantees(Resource):
|
||||||
|
get_parser = reqparse.RequestParser()
|
||||||
|
get_parser.add_argument("Client-Agent", default="unknown:0:unknown", type=str, required=False, help="The client name and version.", location="headers")
|
||||||
|
get_parser.add_argument("csv", required=False, type=bool, help="Set to true to return just the domains as a csv. Mutually exclusive with domains", location="args")
|
||||||
|
get_parser.add_argument("domains", required=False, type=bool, help="Set to true to return just the domains as a list. Mutually exclusive with csv", location="args")
|
||||||
|
|
||||||
|
@api.expect(get_parser)
|
||||||
|
@cache.cached(timeout=10, query_string=True)
|
||||||
|
@api.marshal_with(models.response_model_model_Whitelist_get, code=200, description='Instances', skip_none=True)
|
||||||
|
@api.response(404, 'Instance not registered', models.response_model_error)
|
||||||
|
def get(self, domain):
|
||||||
|
'''Display all instances guaranteeing for this domain
|
||||||
|
'''
|
||||||
|
self.args = self.get_parser.parse_args()
|
||||||
|
instance = database.find_instance_by_domain(domain)
|
||||||
|
if not instance:
|
||||||
|
raise e.NotFound(f"No Instance found matching provided domain. Have you remembered to register it?")
|
||||||
|
instance_details = []
|
||||||
|
for guarantor in database.get_all_guarantor_instances_by_guaranteed_id(instance.id):
|
||||||
|
instance_details.append(guarantor.get_details())
|
||||||
|
if self.args.csv:
|
||||||
|
return {"csv": ",".join([guarantor["domain"] for guarantor in instance_details])},200
|
||||||
|
if self.args.domains:
|
||||||
|
return {"domains": [guarantor["domain"] for guarantor in instance_details]},200
|
||||||
|
logger.debug(database.get_guarantor_chain(instance.id))
|
||||||
|
return {"instances": instance_details},200
|
||||||
|
|
||||||
|
put_parser = reqparse.RequestParser()
|
||||||
|
put_parser.add_argument("apikey", type=str, required=True, help="The sending instance's API key.", location='headers')
|
||||||
|
put_parser.add_argument("Client-Agent", default="unknown:0:unknown", type=str, required=False, help="The client name and version.", location="headers")
|
||||||
|
|
||||||
|
|
||||||
|
@api.expect(put_parser)
|
||||||
|
@api.marshal_with(models.response_model_simple_response, code=200, description='Endorse Instance')
|
||||||
|
@api.response(400, 'Bad Request', models.response_model_error)
|
||||||
|
@api.response(401, 'Invalid API Key', models.response_model_error)
|
||||||
|
@api.response(403, 'Instance Not Guaranteed or Tartget instance Guaranteed by others', models.response_model_error)
|
||||||
|
@api.response(404, 'Instance not registered', models.response_model_error)
|
||||||
|
def put(self, domain):
|
||||||
|
'''Endorse an instance
|
||||||
|
'''
|
||||||
|
self.args = self.put_parser.parse_args()
|
||||||
|
if not self.args.apikey:
|
||||||
|
raise e.Unauthorized("You must provide the API key that was PM'd to your overctrl.dbzer0.com account")
|
||||||
|
instance = database.find_instance_by_api_key(self.args.apikey)
|
||||||
|
if not instance:
|
||||||
|
raise e.NotFound(f"No Instance found matching provided API key and domain. Have you remembered to register it?")
|
||||||
|
if len(instance.guarantors) == 0:
|
||||||
|
raise e.Forbidden("Only guaranteed instances can guarantee others.")
|
||||||
|
if len(instance.guarantors) >= 20 and instance.id != 0:
|
||||||
|
raise e.Forbidden("You cannot guarantee for more than 20 instances")
|
||||||
|
unbroken_chain, chainbreaker = database.has_unbroken_chain(instance.id)
|
||||||
|
if not unbroken_chain:
|
||||||
|
raise e.Forbidden(f"Guarantee chain for this instance has been broken. Chain ends at {chainbreaker.domain}!")
|
||||||
|
target_instance = database.find_instance_by_domain(domain=domain)
|
||||||
|
if not target_instance:
|
||||||
|
raise e.BadRequest("Instance to endorse not found")
|
||||||
|
if database.get_guarantee(target_instance.id,instance.id):
|
||||||
|
return {"message":'OK'}, 200
|
||||||
|
gdomain = target_instance.get_guarantor_domain()
|
||||||
|
if gdomain:
|
||||||
|
raise e.Forbidden("Target instance already guaranteed by {gdomain}")
|
||||||
|
new_guarantee = Guarantee(
|
||||||
|
guaranteed_id=target_instance.id,
|
||||||
|
guarantor_id=instance.id,
|
||||||
|
)
|
||||||
|
db.session.add(new_guarantee)
|
||||||
|
# Guaranteed instances get their automatic first endorsement
|
||||||
|
new_endorsement = Endorsement(
|
||||||
|
approving_id=instance.id,
|
||||||
|
endorsed_id=target_instance.id,
|
||||||
|
)
|
||||||
|
db.session.add(new_endorsement)
|
||||||
|
db.session.commit()
|
||||||
|
pm_instance(target_instance.domain, f"Congratulations! Your instance has just been guaranteed by {instance.domain}. This also comes with your first endorsement.")
|
||||||
|
orphan_ids = database.get_guarantee_chain(target_instance.id)
|
||||||
|
for orphan in database.get_instances_by_ids(orphan_ids):
|
||||||
|
pm_instance(orphan.domain, f"Phew! You guarantor chain has been repaired as {instance.domain} has guaranteed for {domain}.")
|
||||||
|
orphan.unset_as_orphan()
|
||||||
|
logger.info(f"{instance.domain} Guaranteed for {domain}")
|
||||||
|
return {"message":'Changed'}, 200
|
||||||
|
|
||||||
|
|
||||||
|
delete_parser = reqparse.RequestParser()
|
||||||
|
delete_parser.add_argument("apikey", type=str, required=True, help="The sending instance's API key.", location='headers')
|
||||||
|
delete_parser.add_argument("Client-Agent", default="unknown:0:unknown", type=str, required=False, help="The client name and version.", location="headers")
|
||||||
|
|
||||||
|
@api.expect(delete_parser)
|
||||||
|
@api.marshal_with(models.response_model_simple_response, code=200, description='Withdraw Instance Endorsement')
|
||||||
|
@api.response(400, 'Bad Request', models.response_model_error)
|
||||||
|
@api.response(401, 'Invalid API Key', models.response_model_error)
|
||||||
|
@api.response(404, 'Instance not registered', models.response_model_error)
|
||||||
|
def delete(self,domain):
|
||||||
|
'''Withdraw an instance guarantee
|
||||||
|
'''
|
||||||
|
self.args = self.delete_parser.parse_args()
|
||||||
|
if not self.args.apikey:
|
||||||
|
raise e.Unauthorized("You must provide the API key that was PM'd to your overctrl.dbzer0.com account")
|
||||||
|
instance = database.find_instance_by_api_key(self.args.apikey)
|
||||||
|
if not instance:
|
||||||
|
raise e.NotFound(f"No Instance found matching provided API key and domain. Have you remembered to register it?")
|
||||||
|
target_instance = database.find_instance_by_domain(domain=domain)
|
||||||
|
if not target_instance:
|
||||||
|
raise e.BadRequest("Instance from which to withdraw endorsement not found")
|
||||||
|
# If API key matches the target domain, we assume they want to remove the guarantee added to them to allow another domain to guarantee them
|
||||||
|
if instance.id == target_instance.id:
|
||||||
|
guarantee = instance.get_guarantee()
|
||||||
|
else:
|
||||||
|
guarantee = database.get_guarantee(target_instance.id,instance.id)
|
||||||
|
if not guarantee:
|
||||||
|
return {"message":'OK'}, 200
|
||||||
|
# Removing a guarantee removes the endorsement
|
||||||
|
endorsement = database.get_endorsement(target_instance.id,instance.id)
|
||||||
|
if endorsement:
|
||||||
|
db.session.delete(endorsement)
|
||||||
|
db.session.delete(guarantee)
|
||||||
|
db.session.commit()
|
||||||
|
pm_instance(target_instance.domain,
|
||||||
|
f"Attention! You guarantor instance {instance.domain} has withdrawn their backing.\n\n"
|
||||||
|
"IMPORTANT: You are still considered guaranteed for the next 24hours, but you cannot further endorse or guarantee others."
|
||||||
|
"If you find a new guarantor then your guarantees will be reactivated!.\n\n"
|
||||||
|
"Note that if you do not find a guarantor within 7 days, all your endorsements will be removed."
|
||||||
|
)
|
||||||
|
orphan_ids = database.get_guarantee_chain(target_instance.id)
|
||||||
|
for orphan in database.get_instances_by_ids(orphan_ids):
|
||||||
|
pm_instance(orphan.domain, f"Attention! You guarantor chain has been broken because {instance.domain} has withdrawn their backing from {domain}.\n\nIMPORTANT: All your guarantees will be deleted unless the chain is repaired or you find a new guarantor within 24hours!")
|
||||||
|
orphan.set_as_oprhan()
|
||||||
|
logger.info(f"{instance.domain} Withdrew guarantee from {domain}")
|
||||||
|
return {"message":'Changed'}, 200
|
|
@ -0,0 +1,142 @@
|
||||||
|
from overseer.apis.v1.base import *
|
||||||
|
|
||||||
|
class Whitelist(Resource):
|
||||||
|
get_parser = reqparse.RequestParser()
|
||||||
|
get_parser.add_argument("Client-Agent", default="unknown:0:unknown", type=str, required=False, help="The client name and version.", location="headers")
|
||||||
|
get_parser.add_argument("endorsements", required=False, default=0, type=int, help="Limit to this amount of endorsements of more", location="args")
|
||||||
|
get_parser.add_argument("guarantors", required=False, default=1, type=int, help="Limit to this amount of guarantors of more", location="args")
|
||||||
|
get_parser.add_argument("csv", required=False, type=bool, help="Set to true to return just the domains as a csv. Mutually exclusive with domains", location="args")
|
||||||
|
get_parser.add_argument("domains", required=False, type=bool, help="Set to true to return just the domains as a list. Mutually exclusive with csv", location="args")
|
||||||
|
|
||||||
|
@api.expect(get_parser)
|
||||||
|
@cache.cached(timeout=10, query_string=True)
|
||||||
|
@api.marshal_with(models.response_model_model_Whitelist_get, code=200, description='Instances', skip_none=True)
|
||||||
|
def get(self):
|
||||||
|
'''A List with the details of all instances and their endorsements
|
||||||
|
'''
|
||||||
|
self.args = self.get_parser.parse_args()
|
||||||
|
instance_details = []
|
||||||
|
for instance in database.get_all_instances(self.args.endorsements,self.args.guarantors):
|
||||||
|
instance_details.append(instance.get_details())
|
||||||
|
if self.args.csv:
|
||||||
|
return {"csv": ",".join([instance["domain"] for instance in instance_details])},200
|
||||||
|
if self.args.domains:
|
||||||
|
return {"domains": [instance["domain"] for instance in instance_details]},200
|
||||||
|
return {"instances": instance_details},200
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
class WhitelistDomain(Resource):
    """REST resource for a single registered instance, addressed by domain.

    GET    -> look up a registered instance's details.
    PUT    -> register a new instance (API key is PM'd to its admin account).
    PATCH  -> regenerate the instance's API key.
    DELETE -> remove the instance from the overseer.
    """

    get_parser = reqparse.RequestParser()
    get_parser.add_argument("Client-Agent", default="unknown:0:unknown", type=str, required=False, help="The client name and version.", location="headers")

    @api.expect(get_parser)
    @cache.cached(timeout=10, query_string=True)
    @api.marshal_with(models.response_model_instances, code=200, description='Instances')
    def get(self, domain):
        '''Display info about a specific instance
        '''
        self.args = self.get_parser.parse_args()
        instance = database.find_instance_by_domain(domain)
        if not instance:
            raise e.NotFound("No Instance found matching provided domain. Have you remembered to register it?")
        return instance.get_details(), 200

    put_parser = reqparse.RequestParser()
    put_parser.add_argument("Client-Agent", default="unknown:0:unknown", type=str, required=False, help="The client name and version.", location="headers")
    put_parser.add_argument("guarantor", required=False, type=str, help="(Optional) The domain of the guaranteeing instance. They will receive a PM to validate you", location="json")

    @api.expect(put_parser)
    @api.marshal_with(models.response_model_instances, code=200, description='Instances')
    @api.response(400, 'Bad Request', models.response_model_error)
    def put(self, domain):
        '''Register a new instance to the overseer

        An instance account has to exist in the overseer lemmy instance.
        That account will receive the new API key via PM.
        '''
        self.args = self.put_parser.parse_args()
        # Registration is idempotent: an already-registered domain returns its details.
        existing_instance = database.find_instance_by_domain(domain)
        if existing_instance:
            return existing_instance.get_details(), 200
        guarantor_instance = None
        if self.args.guarantor:
            guarantor_instance = database.find_instance_by_domain(self.args.guarantor)
            if not guarantor_instance:
                raise e.BadRequest(f"Requested guarantor domain {self.args.guarantor} is not registered with the Overseer yet!")
        if domain.endswith("test.dbzer0.com"):
            # Test domains are not actually polled: fake a minimal nodeinfo/site payload.
            requested_lemmy = Lemmy(f"https://{domain}")
            requested_lemmy._requestor.nodeinfo = {"software":{"name":"lemmy"}}
            site = {"site_view":{"local_site":{"require_email_verification": True,"registration_mode":"open"}}}
        else:
            requested_lemmy = Lemmy(f"https://{domain}")
            site = requested_lemmy.site.get()
            if not site:
                raise e.BadRequest(f"Error encountered while polling domain {domain}. Please check it's running correctly")
        # PMs a freshly generated plaintext key to the instance's admin account.
        api_key = pm_new_api_key(domain)
        if not api_key:
            raise e.BadRequest("Failed to generate API Key")
        new_instance = Instance(
            domain=domain,
            # Only the hash is stored; the plaintext key lives solely in the PM.
            api_key=hash_api_key(api_key),
            open_registrations=site["site_view"]["local_site"]["registration_mode"] == "open",
            email_verify=site["site_view"]["local_site"]["require_email_verification"],
            software=requested_lemmy.nodeinfo['software']['name'],
        )
        new_instance.create()
        if guarantor_instance:
            pm_instance(guarantor_instance.domain, f"New instance {domain} was just registered with the Overseer and have asked you to guarantee for them!")
        return new_instance.get_details(), 200

    patch_parser = reqparse.RequestParser()
    patch_parser.add_argument("apikey", type=str, required=True, help="The sending instance's API key.", location='headers')
    patch_parser.add_argument("Client-Agent", default="unknown:0:unknown", type=str, required=False, help="The client name and version.", location="headers")
    patch_parser.add_argument("regenerate_key", required=False, type=bool, help="If True, will PM a new api key to this instance", location="json")

    @api.expect(patch_parser)
    @api.marshal_with(models.response_model_instances, code=200, description='Instances', skip_none=True)
    @api.response(401, 'Invalid API Key', models.response_model_error)
    @api.response(403, 'Instance Not Registered', models.response_model_error)
    def patch(self, domain):
        '''Regenerate API key for instance
        '''
        self.args = self.patch_parser.parse_args()
        if not self.args.apikey:
            raise e.Unauthorized("You must provide the API key that was PM'd to your overctrl.dbzer0.com account")
        # NOTE(review): lookup is by API key only; the domain path segment is not
        # cross-checked here (unlike delete) -- confirm this asymmetry is intended.
        instance = database.find_instance_by_api_key(self.args.apikey)
        if not instance:
            raise e.Forbidden("No Instance found matching provided API key and domain. Have you remembered to register it?")
        if self.args.regenerate_key:
            new_key = pm_new_api_key(domain)
            instance.api_key = hash_api_key(new_key)
            db.session.commit()
        return instance.get_details(), 200

    delete_parser = reqparse.RequestParser()
    delete_parser.add_argument("apikey", type=str, required=True, help="The sending instance's API key.", location='headers')
    delete_parser.add_argument("Client-Agent", default="unknown:0:unknown", type=str, required=False, help="The client name and version.", location="headers")

    @api.expect(delete_parser)
    @api.marshal_with(models.response_model_simple_response, code=200, description='Instances', skip_none=True)
    @api.response(400, 'Bad Request', models.response_model_error)
    @api.response(401, 'Invalid API Key', models.response_model_error)
    @api.response(403, 'Forbidden', models.response_model_error)
    def delete(self, domain):
        '''Delete instance from overseer
        '''
        # BUGFIX: was parsing with self.patch_parser, ignoring delete_parser's spec.
        self.args = self.delete_parser.parse_args()
        if not self.args.apikey:
            raise e.Unauthorized("You must provide the API key that was PM'd to your overctrl.dbzer0.com account")
        # Requires BOTH the domain and a matching API key before deleting.
        instance = database.find_authenticated_instance(domain, self.args.apikey)
        if not instance:
            raise e.BadRequest("No Instance found matching provided API key and domain. Have you remembered to register it?")
        if domain == os.getenv('OVERSEER_LEMMY_DOMAIN'):
            raise e.Forbidden("Cannot delete overseer control instance")
        db.session.delete(instance)
        db.session.commit()
        logger.warning(f"{domain} deleted")
        return {"message":'Changed'}, 200
|
||||||
|
|
|
@ -0,0 +1,32 @@
|
||||||
|
import os
|
||||||
|
from loguru import logger
|
||||||
|
from overseer.argparser import args
|
||||||
|
from importlib import import_module
|
||||||
|
from overseer.flask import db, OVERSEER
|
||||||
|
from overseer.utils import hash_api_key
|
||||||
|
|
||||||
|
# Importing for DB creation
|
||||||
|
from overseer.classes.instance import Instance, Guarantee
|
||||||
|
|
||||||
|
# Bootstrap: create the DB schema and ensure the overseer's own control
# instance exists, guaranteed by itself.
with OVERSEER.app_context():

    db.create_all()

    admin_domain = os.getenv("OVERSEER_LEMMY_DOMAIN")
    admin = db.session.query(Instance).filter_by(domain=admin_domain).first()
    # Only seed the control instance on first run; subsequent runs are no-ops.
    if not admin:
        admin = Instance(
            # Explicit id 0 for the control instance -- presumably reserved; TODO confirm
            id=0,
            domain=admin_domain,
            # Stored hashed, same as keys PM'd to regular instances.
            api_key=hash_api_key(os.getenv("ADMIN_API_KEY")),
            open_registrations=False,
            email_verify=False,
            software="lemmy",
        )
        admin.create()
        # The control instance guarantees itself (guarantor == guaranteed).
        guarantee = Guarantee(
            guarantor_id = admin.id,
            guaranteed_id = admin.id,
        )
        db.session.add(guarantee)
        db.session.commit()
|
|
@ -0,0 +1,95 @@
|
||||||
|
import uuid
|
||||||
|
import os
|
||||||
|
|
||||||
|
import dateutil.relativedelta
|
||||||
|
from datetime import datetime
|
||||||
|
from sqlalchemy import Enum, UniqueConstraint
|
||||||
|
from sqlalchemy.dialects.postgresql import UUID
|
||||||
|
|
||||||
|
from loguru import logger
|
||||||
|
from overseer.flask import db, SQLITE_MODE
|
||||||
|
|
||||||
|
def uuid_column_type():
    """Return the UUID column type appropriate for the active backend.

    PostgreSQL gets a native UUID column; SQLite (which has no UUID type)
    falls back to a 36-char string (canonical hyphenated UUID length).
    """
    # PEP 8: a named `def` instead of an assigned lambda; same zero-arg interface.
    if SQLITE_MODE:
        return db.String(36)
    return UUID(as_uuid=True)
|
||||||
|
|
||||||
|
|
||||||
|
class Guarantee(db.Model):
    """Association row: one instance (guarantor) vouches for another (guaranteed).

    An instance can be guaranteed at most once (unique guaranteed_id), but can
    guarantee any number of others. Rows are removed when either side's
    instance is deleted (ondelete CASCADE).
    """
    __tablename__ = "guarantees"
    id = db.Column(db.Integer, primary_key=True)
    guarantor_id = db.Column(db.Integer, db.ForeignKey("instances.id", ondelete="CASCADE"), nullable=False)
    guarantor_instance = db.relationship("Instance", back_populates="guarantees", foreign_keys=[guarantor_id])
    # unique=True: each instance can have only a single guarantor.
    guaranteed_id = db.Column(db.Integer, db.ForeignKey("instances.id", ondelete="CASCADE"), unique=True, nullable=False)
    guaranteed_instance = db.relationship("Instance", back_populates="guarantors", foreign_keys=[guaranteed_id])
    created = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
|
||||||
|
|
||||||
|
|
||||||
|
class Endorsement(db.Model):
    """Association row: one instance (approving) endorses another (endorsed).

    A given pair may only endorse once (composite unique constraint); rows
    cascade away when either instance is deleted.
    """
    __tablename__ = "endorsements"
    # NOTE(review): "endoresements" is a typo, but it names the live DB
    # constraint -- renaming it requires a migration, so it is left as-is.
    __table_args__ = (UniqueConstraint('approving_id', 'endorsed_id', name='endoresements_approving_id_endorsed_id'),)
    id = db.Column(db.Integer, primary_key=True)
    approving_id = db.Column(db.Integer, db.ForeignKey("instances.id", ondelete="CASCADE"), nullable=False)
    approving_instance = db.relationship("Instance", back_populates="approvals", foreign_keys=[approving_id])
    endorsed_id = db.Column(db.Integer, db.ForeignKey("instances.id", ondelete="CASCADE"), nullable=False)
    endorsed_instance = db.relationship("Instance", back_populates="endorsements", foreign_keys=[endorsed_id])
    created = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
|
||||||
|
|
||||||
|
|
||||||
|
class Instance(db.Model):
    """A federated instance registered with the overseer.

    Holds the instance's polled metadata (registration mode, email
    verification, software), its hashed API key, and its endorsement and
    guarantee relationships.
    """
    __tablename__ = "instances"

    id = db.Column(db.Integer, primary_key=True)
    domain = db.Column(db.String(255), unique=True, nullable=False, index=True)
    # Only the hash of the API key is ever stored.
    api_key = db.Column(db.String(100), unique=True, nullable=False, index=True)
    created = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
    updated = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
    # NOTE(review): "oprhan" is a typo for "orphan", but it is the live column
    # name -- renaming requires a DB migration, so it is kept.
    oprhan_since = db.Column(db.DateTime, nullable=True)

    open_registrations = db.Column(db.Boolean, unique=False, nullable=False, index=True)
    email_verify = db.Column(db.Boolean, unique=False, nullable=False, index=True)
    software = db.Column(db.String(50), unique=False, nullable=False, index=True)

    # Endorsements this instance has given / received.
    approvals = db.relationship("Endorsement", back_populates="approving_instance", cascade="all, delete-orphan", foreign_keys=[Endorsement.approving_id])
    endorsements = db.relationship("Endorsement", back_populates="endorsed_instance", cascade="all, delete-orphan", foreign_keys=[Endorsement.endorsed_id])
    # Guarantees this instance has given / received (an instance has at most one guarantor).
    guarantees = db.relationship("Guarantee", back_populates="guarantor_instance", cascade="all, delete-orphan", foreign_keys=[Guarantee.guarantor_id])
    guarantors = db.relationship("Guarantee", back_populates="guaranteed_instance", cascade="all, delete-orphan", foreign_keys=[Guarantee.guaranteed_id])

    def create(self):
        """Persist this instance to the database immediately."""
        db.session.add(self)
        db.session.commit()

    def get_details(self):
        """Return a serializable summary dict of this instance for API responses."""
        ret_dict = {
            "id": self.id,
            "domain": self.domain,
            "open_registrations": self.open_registrations,
            "email_verify": self.email_verify,
            "endorsements": len(self.endorsements),
            "approvals": len(self.approvals),
            "guarantor": self.get_guarantor_domain(),
        }
        return ret_dict

    def get_guarantee(self):
        """Return the Guarantee row backing this instance, or None if unguaranteed."""
        # guaranteed_id is unique, so at most one row can exist.
        if len(self.guarantors) == 0:
            return None
        return self.guarantors[0]

    def get_guarantor(self):
        """Return the Instance guaranteeing this one, or None."""
        guarantee = self.get_guarantee()
        if not guarantee:
            return None
        # BUGFIX: removed an unreachable second `return Instance.query...`
        # statement that followed this return (dead code).
        return guarantee.guarantor_instance

    def get_guarantor_domain(self):
        """Return the guarantor's domain string, or None when unguaranteed."""
        guarantor = self.get_guarantor()
        return guarantor.domain if guarantor else None

    def set_as_oprhan(self):
        """Mark this instance as orphaned (guarantor lost) as of now.

        NOTE(review): method name keeps the historical "oprhan" typo because
        external callers use it.
        """
        self.oprhan_since = datetime.utcnow()
        db.session.commit()

    def unset_as_orphan(self):
        """Clear the orphan marker for this instance."""
        self.oprhan_since = None
        db.session.commit()
|
||||||
|
|
|
@ -0,0 +1,449 @@
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
class News:
|
||||||
|
|
||||||
|
HORDE_NEWS = [
|
||||||
|
{
|
||||||
|
"date_published": "2023-06-01",
|
||||||
|
"newspiece":
|
||||||
|
"LoRas support has now been merged into the main worker branch! "
|
||||||
|
"All kudos to [Jug](https://github.com/jug-dev/) and [Tazlin](https://github.com/tazlin/) for their invaluable efforts! "
|
||||||
|
"Read the [devlog](https://dbzer0.com/blog/the-ai-horde-now-seamlessly-provides-all-civitai-loras/)",
|
||||||
|
"tags": ["devlog", "lora", "text2img"],
|
||||||
|
"importance": "Workers",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2023-05-30",
|
||||||
|
"newspiece":
|
||||||
|
"Early support for LoRa has been added to the AI Horde with a few workers providing it. "
|
||||||
|
"UIs are still adding it, with [Lucid Creations](https://dbzer0.itch.io/lucid-creations/devlog/537949/1170-loras), ArtBot and the Krita plugin already supporting it."
|
||||||
|
"Try it out and let us know how it works for you.",
|
||||||
|
"tags": ["UI", "lora", "text2img"],
|
||||||
|
"importance": "Information",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2023-05-25",
|
||||||
|
"newspiece":
|
||||||
|
"I wanted to point out a very cool voice-2-text-2-voice AI Horde integration: [ProtoReplicant](https://github.com/OpenReplicant/ProtoReplicant). "
|
||||||
|
"It converts your voice into text which it then sends to an LLM model, and finally converts the resulting text into voice and plays it back."
|
||||||
|
"Here's the new [Discord integration channel](https://discordapp.com/channels/781145214752129095/1111189841120596008)",
|
||||||
|
"tags": ["UI", "voice", "llm"],
|
||||||
|
"importance": "Integration",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2023-05-22",
|
||||||
|
"newspiece":
|
||||||
|
"A new AI Horde integration has been created. A Telegram bot by the name of [Imaginarium](https://t.me/ImaginariumAIbot). "
|
||||||
|
"Here's the new [Discord integration channel](https://discordapp.com/channels/781145214752129095/1109825249933000714)",
|
||||||
|
"tags": ["bot", "telegram"],
|
||||||
|
"importance": "Integration",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2023-05-14",
|
||||||
|
"newspiece":
|
||||||
|
"The AI Horde has finally moved to the [hordelib](https://pypi.org/project/hordelib/) library. "
|
||||||
|
"Which is powered by the [ComfyUI](https://github.com/comfyanonymous/ComfyUI) inference backend. "
|
||||||
|
"[Read the Devlog](https://dbzer0.com/blog/the-ai-horde-worker-moves-to-a-completely-new-inference-backend/)!",
|
||||||
|
"tags": ["devlog", "backend", "Jug", "Tazlin", "dreamer", "alchemist"],
|
||||||
|
"importance": "Information",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2023-05-11",
|
||||||
|
"newspiece":
|
||||||
|
"With the upcoming deployment of the [hordelib](https://pypi.org/project/hordelib/)-based worker. "
|
||||||
|
"[Jug](https://github.com/jug-dev/) has looked into creating a more efficient model to determine generation kudos "
|
||||||
|
"instead of reusing the numbers I hallucinated one day. "
|
||||||
|
"He used what we know best and we trained an explicit model to calculate kudos, based on the performance of his own GPU on the comfy branch "
|
||||||
|
"This new calculation should be much more accurate in terms of things like controlnet and resolution impact. "
|
||||||
|
"The good news is that the new comfy branch this seems to reduce kudos costs for high resolutions accross the board. "
|
||||||
|
"Note: Due to the current worker (based on nataili) being slightly lower quality at the benefit of speed, and thus getting a boost due to the new kudos model, "
|
||||||
|
"we have implemented a 25% reduction for its rewards to bring it up to line with its actual performance.",
|
||||||
|
"tags": ["kudos", "dreamer", "Jug"],
|
||||||
|
"importance": "Workers",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2023-05-09",
|
||||||
|
"newspiece":
|
||||||
|
"A new feature appeared on the Horde. "
|
||||||
|
"You can now create [API keys you can share with others](https://dbzer0.com/blog/key-sharing/) to use your own priority.",
|
||||||
|
"tags": ["apikey", "shared key"],
|
||||||
|
"importance": "Information",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2023-05-05",
|
||||||
|
"newspiece":
|
||||||
|
"You can now run an AI Horde worker inside a docker container. "
|
||||||
|
"http://ghcr.io/db0/ai-horde-worker:main "
|
||||||
|
"Our README [contains information on how to configure it](https://github.com/db0/AI-Horde-Worker/blob/main/README.md#docker) "
|
||||||
|
"All kudos to [Gus Puffy#8887](https://github.com/guspuffygit)",
|
||||||
|
"tags": ["docker", "dreamer"],
|
||||||
|
"importance": "Workers",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2023-04-23",
|
||||||
|
"newspiece":
|
||||||
|
"The Command Line Interface for the AI Horde has now been extended to support Image Generation, Text Generation and Image Alchemy. "
|
||||||
|
"It has been split into three files and is now available in its own repository: "
|
||||||
|
"https://github.com/db0/AI-Horde-CLI",
|
||||||
|
"tags": ["cli"],
|
||||||
|
"importance": "Information",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2023-04-16",
|
||||||
|
"newspiece":
|
||||||
|
"The AI Horde has received its first patreon sponsorship "
|
||||||
|
"Many thanks to [pawkygame VR](https://discord.gg/Zbe63QTU9X) for their support!",
|
||||||
|
"tags": ["sponsor", "patreon"],
|
||||||
|
"importance": "Information",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2023-03-23",
|
||||||
|
"newspiece":
|
||||||
|
"Inpainting is re-enabled that to the work of [ResidentChief](https://github.com/ResidentChief)! "
|
||||||
|
"Now also have support for multiple inpainting models.",
|
||||||
|
"tags": ["inpainting", "ResidentChief"],
|
||||||
|
"importance": "Information",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2023-03-19",
|
||||||
|
"newspiece":
|
||||||
|
"The AI Horde Interrogator Worker has now been renamed to 'Alchemist' "
|
||||||
|
"The Horde alchemist can now run all the post-processors, along with all the interrogation forms. "
|
||||||
|
"This means that if you have an existing image you wish to face-fix or upscale, you can just do that "
|
||||||
|
"by requesting it via alchemy. "
|
||||||
|
"For now, the alchemist does not support extracting ControlNet intermediate images, but this will be coming soon. "
|
||||||
|
"The endpoints remain as `api/v2interrogation/` for now but I plan to rename them in v3.",
|
||||||
|
"tags": ["upscale", "post-processing", "alchemy"],
|
||||||
|
"importance": "Information",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2023-03-15",
|
||||||
|
"newspiece":
|
||||||
|
"the AI Horde now supports the DDIM sampler and the RealESRGAN_x4plus_anime_6B upscaler! "
|
||||||
|
"Keep in mind that you cannot use two upscalers at the same time. "
|
||||||
|
"All kudos to [ResidentChief](https://github.com/ResidentChief)!",
|
||||||
|
"tags": ["upscale", "post-processing", "ResidentChief", "samplers"],
|
||||||
|
"importance": "Information",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2023-03-13",
|
||||||
|
"newspiece":
|
||||||
|
"A new option `replacement_filter` is available for image generations. "
|
||||||
|
"When set to True and a potential CSAM prompt is detected, "
|
||||||
|
"all underage context will be transparently replaced or removed "
|
||||||
|
"and some extra negative prompts will be added to the negative prompt."
|
||||||
|
"When set to False (default) or the prompt size is over 500 chars "
|
||||||
|
"The previous behaviour will be used, where the prompt is rejected and an IP timeout will be put in place. "
|
||||||
|
"This feature should make sending text generations to be turned into images a less frustrating experience.",
|
||||||
|
"tags": ["csam", "text2text", "text2img"],
|
||||||
|
"importance": "Information",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2023-03-10",
|
||||||
|
"newspiece": "We now have an AI-driven anti-CSAM filter as well. Read about it on [the main developer's blog](https://dbzer0.com/blog/ai-powered-anti-csam-filter-for-stable-diffusion/).",
|
||||||
|
"tags": ["csam"],
|
||||||
|
"importance": "Information",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2023-03-03",
|
||||||
|
"newspiece": "The Horde Ratings are back in action. Go to your typical UI and rate away!",
|
||||||
|
"tags": ["ratings"],
|
||||||
|
"importance": "Information",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2023-02-23",
|
||||||
|
"newspiece": "KoboldAI Horde has been merged into Stable Horde as a unified AI Horde!",
|
||||||
|
"tags": ["text2text", "ai horde"],
|
||||||
|
"importance": "Information",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2023-02-21",
|
||||||
|
"newspiece": (
|
||||||
|
'The Horde now supports ControlNet on all models! All kudos go to [hlky](https://github.com/hlky) who again weaved the dark magic!'
|
||||||
|
),
|
||||||
|
"tags": ["controlnet", "img2img", "hlky"],
|
||||||
|
"importance": "Information"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2023-02-14",
|
||||||
|
"newspiece": (
|
||||||
|
'You can now use an almost unlimited prompt size thanks to the work of ResidentChief!'
|
||||||
|
),
|
||||||
|
"tags": ["text2img", "img2img", "ResidentChief"],
|
||||||
|
"importance": "Information"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2023-02-09",
|
||||||
|
"newspiece": (
|
||||||
|
'You can now select to generate a higher-sized image using hires_fix, which uses the composition of stable diffusion at 512x512 which tends to be more consistent.'
|
||||||
|
),
|
||||||
|
"tags": ["text2img", "img2img", "ResidentChief"],
|
||||||
|
"importance": "Information"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2023-02-03",
|
||||||
|
"newspiece": (
|
||||||
|
'The horde now supports pix2pix. All you have to do is use img2img as normal and select the pix2pix model!'
|
||||||
|
),
|
||||||
|
"tags": ["img2img", "ResidentChief"],
|
||||||
|
"importance": "Information"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2023-01-24",
|
||||||
|
"newspiece": (
|
||||||
|
'We now support sending tiling requests! Send `"tiling":true` into your payload params to request an image that seamlessly tiles.'
|
||||||
|
),
|
||||||
|
"tags": ["text2img", "img2img", "ResidentChief"],
|
||||||
|
"importance": "Information"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2023-01-23",
|
||||||
|
"newspiece": (
|
||||||
|
"I have tightened the rules around NSFW models. As they seem to be straying into 'unethical' territory even when not explicitly prompted, "
|
||||||
|
"I am forced to tighten the safety controls around them. From now on, otherwise generic terms for young people like `girl` ,`boy` etc "
|
||||||
|
"Cannot be used on those models. Please either use terms like `woman` or `man` or switch to a non-NSFW model instead."
|
||||||
|
),
|
||||||
|
"tags": ["countermeasures", "nsfw"],
|
||||||
|
"importance": "Information"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2023-01-23",
|
||||||
|
"newspiece": (
|
||||||
|
"The horde now has a [Blender Plugin](https://github.com/benrugg/AI-Render)!"
|
||||||
|
),
|
||||||
|
"tags": ["plugin", "blender"],
|
||||||
|
"importance": "Information"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2023-01-18",
|
||||||
|
"newspiece": (
|
||||||
|
"We now have a [New Discord Bot](https://github.com/ZeldaFan0225/Stable_Horde_Discord), courtesy of Zelda_Fan#0225. Check out [their other bot](https://slashbot.de/) as well! "
|
||||||
|
"Only downside is that if you were already logged in to the old bot, you will need to /login again."
|
||||||
|
),
|
||||||
|
"importance": "Information"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2023-01-18",
|
||||||
|
"newspiece": (
|
||||||
|
"The prompts now support weights! Use them like so `(sub prompt:1.1)` where 1.1 corresponds to +10% weight "
|
||||||
|
"You can tweak upwards more like `1.25` or downwards like `0.7`, but don't go above +=30%"
|
||||||
|
),
|
||||||
|
"importance": "Information"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2023-01-12",
|
||||||
|
"newspiece": (
|
||||||
|
"We plan to be replacing our official discord bot with [new a new codebase](https://github.com/ZeldaFan0225/Stable_Horde_Discord) based on the work of Zelda_Fan#0225. "
|
||||||
|
"Once we do, be aware that the controls will be slightly different and you will have to log-in again with your API key."
|
||||||
|
),
|
||||||
|
"importance": "Upcoming"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2023-01-11",
|
||||||
|
"newspiece": (
|
||||||
|
"The Stable Horde has its first browser extension! "
|
||||||
|
"[GenAlt](https://chrome.google.com/webstore/detail/genalt-generated-alt-text/ekbmkapnmnhhgfmjdnchgmcfggibebnn) is an accessibility plugin to help people with bad eyesight always find alt text for images."
|
||||||
|
"The extension relies on the Stable Horde's newly added image interrogation capabilities to generate captions which are then serves as the image's alt text."
|
||||||
|
),
|
||||||
|
"importance": "Information"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2023-01-04",
|
||||||
|
"newspiece": "We are proud to announce that we have [initiated a collaboration with LAION](https://dbzer0.com/blog/a-collaboration-begins-between-stable-horde-and-laion/) to help them improve their dataset!",
|
||||||
|
"importance": "Information"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2023-01-06",
|
||||||
|
"newspiece": (
|
||||||
|
"The amount of kudos consumed when generating images [has been slightly adjusted](https://dbzer0.com/blog/sharing-is-caring/). "
|
||||||
|
"To simulate the resource costs of the horde, each image generation request will now burn +3 kudos. Those will not go to the generating worker! "
|
||||||
|
"However we also have a new opt-in feature: You can choose to share your text2img generations with [LAION](https://laion.ai/). "
|
||||||
|
"If you do, this added cost will be just +1 kudos. "
|
||||||
|
"We have also updated our Terms of Service to make this more obvious."
|
||||||
|
),
|
||||||
|
"importance": "Information"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2023-01-05",
|
||||||
|
"newspiece": "[Worker now have a WebUI](https://dbzer0.com/blog/the-ai-horde-worker-has-a-control-ui/) which they can use to configure themselves. Use it by running `worker-webui.sh/cmd`",
|
||||||
|
"importance": "Workers"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2023-01-04",
|
||||||
|
"newspiece": "[You can now interrogate images](https://dbzer0.com/blog/image-interrogations-are-now-available-on-the-stable-horde/) (AKA img2txt) to retrieve information about them such as captions and whether they are NSFW. Check the api/v2/interrogate endpoint documentation.",
|
||||||
|
"importance": "Information"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2023-01-01",
|
||||||
|
"newspiece": "Stable Horde can now be used on the automatic1111 Web UI via [an external script](https://github.com/natanjunges/stable-diffusion-webui-stable-horde)",
|
||||||
|
"importance": "Information"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2022-12-30",
|
||||||
|
"newspiece": "Stable Horde now supports depth2img! To use it you need to send a source image and select the `Stable Difffusion 2 Depth` model",
|
||||||
|
"importance": "Information"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2022-12-28",
|
||||||
|
"newspiece": "Stable Horde workers can now opt-in to loading post-processors. Check your bridge_data.py for options. This should help workers who started being more unstable due to the PP requirements.",
|
||||||
|
"importance": "Workers"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2022-12-24",
|
||||||
|
"newspiece": "Stable Horde has now support for [CodeFormer](https://shangchenzhou.com/projects/CodeFormer/). Simply use 'CodeFormers' for your postprocessor (case sensitive). This will fix any faces in the image. Be aware that due to the processing cost of this model, the kudos requirement will be 50% higher! Note: The inbuilt upscaler has been disabled",
|
||||||
|
"importance": "Information"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2022-12-08",
|
||||||
|
"newspiece": "The Stable Horde workers now support dynamically swapping models. This means that models will always switch to support the most in demand models every minute, allowing us to support demand much better!",
|
||||||
|
"importance": "Information"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2022-11-28",
|
||||||
|
"newspiece": "The Horde has undertaken a massive code refactoring to allow me to move to a proper SQL DB. This will finally allow me to scale the frontend systems horizontally and allow for way more capacity!",
|
||||||
|
"importance": "Information"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2022-11-24",
|
||||||
|
"newspiece": "Due to the massive increase in demand from the Horde, we have to limit the amount of concurrent anonymous requests we can serve. We will revert this once our infrastructure can scale better.",
|
||||||
|
"importance": "Crisis"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2022-11-24",
|
||||||
|
"newspiece": "Stable Diffusion 2.0 has been released and now it is available on the Horde as well.",
|
||||||
|
"importance": "Information"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2022-11-22",
|
||||||
|
"newspiece": "A new Stable Horde Bot has been deployed, this time for Mastodon. You can find [the stablehorde_generator}(https://sigmoid.social/@stablehorde_generator) as well as our [official Stable Horde account](https://sigmoid.social/@stablehorde) on sigmoid.social",
|
||||||
|
"importance": "Information"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2022-11-22",
|
||||||
|
"newspiece": "We now have [support for the Unreal Engine](https://github.com/Mystfit/Unreal-StableDiffusionTools/releases/tag/v0.5.0) via a community-provided plugin",
|
||||||
|
"importance": "Information"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2022-11-18",
|
||||||
|
"newspiece": "The stable horde [now supports post-processing](https://www.patreon.com/posts/post-processing-74815675) on images automatically",
|
||||||
|
"importance": "Information"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2022-11-05",
|
||||||
|
"newspiece": "Due to suddenly increased demand, we have adjusted how much requests accounts can request before needing to have the kudos upfront. More than 50 steps will require kudos and the max resolution will be adjusted based on the current horde demand.",
|
||||||
|
"importance": "Information"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2022-11-05",
|
||||||
|
"newspiece": "Workers can now [join teams](https://www.patreon.com/posts/teams-74247978) to get aggregated stats.",
|
||||||
|
"importance": "Information"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2022-11-02",
|
||||||
|
"newspiece": "The horde can now generate images up to 3072x3072 and 500 steps! However you need to already have the kudos to burn to do so!",
|
||||||
|
"importance": "Information"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2022-10-29",
|
||||||
|
"newspiece": "Inpainting is now available on the stable horde! Many kudos to [blueturtle](https://github.com/blueturtleai) for the support!",
|
||||||
|
"importance": "Information"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2022-10-25",
|
||||||
|
"newspiece": "Another [Discord Bot for Stable Horde integration](https://github.com/ZeldaFan0225/Stable_Horde_Discord) has appeared!",
|
||||||
|
"importance": "Information"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2022-10-24",
|
||||||
|
"newspiece": "The Stable Horde Client has been renamed to [Lucid Creations](https://dbzer0.itch.io/lucid-creations) and has a new version and UI out which supports multiple models and img2img!",
|
||||||
|
"importance": "Information"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2022-10-22",
|
||||||
|
"newspiece": "We have [a new npm SDK](https://github.com/ZeldaFan0225/stable_horde) for integrating into the Stable Horde.",
|
||||||
|
"importance": "Information"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2022-10-22",
|
||||||
|
"newspiece": "Krita and GIMP plugins now support img2img",
|
||||||
|
"importance": "Information"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2022-10-21",
|
||||||
|
"newspiece": "Image 2 Image is now available for everyone!",
|
||||||
|
"importance": "Information"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2022-10-20",
|
||||||
|
"newspiece": "Stable Diffusion 1.5 is now available!",
|
||||||
|
"importance": "Information"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2022-10-17",
|
||||||
|
"newspiece": "We now have [a Krita plugin](https://github.com/blueturtleai/krita-stable-diffusion).",
|
||||||
|
"importance": "Information"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2022-10-17",
|
||||||
|
"newspiece": "Img2img on the horde is now on pilot for trusted users.",
|
||||||
|
"importance": "Information"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2022-10-16",
|
||||||
|
"newspiece": "Yet [another Web UI](https://tinybots.net/artbot) has appeared.",
|
||||||
|
"importance": "Information"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2022-10-11",
|
||||||
|
"newspiece": "A [new dedicated Web UI](https://aqualxx.github.io/stable-ui/) has entered the scene!",
|
||||||
|
"importance": "Information"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2022-10-10",
|
||||||
|
"newspiece": "You can now contribute a worker to the horde [via google colab](https://colab.research.google.com/github/harrisonvanderbyl/ravenbot-ai/blob/master/Horde.ipynb). Just fill-in your API key and run!",
|
||||||
|
"importance": "Information"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2022-10-06",
|
||||||
|
"newspiece": "We have a [new installation video](https://youtu.be/wJrp5lpByCc) for both the Stable Horde Client and the Stable horde worker.",
|
||||||
|
"importance": "Information"
|
||||||
|
}, {
|
||||||
|
"date_published": "2023-01-23",
|
||||||
|
"newspiece": "All workers must start sending the `bridge_agent` key in their job pop payloads. See API documentation.",
|
||||||
|
"importance": "Workers"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2022-10-10",
|
||||||
|
"newspiece": "The [discord rewards bot](https://www.patreon.com/posts/new-kind-of-73097166) has been unleashed. Reward good contributions to the horde directly from the chat!",
|
||||||
|
"importance": "Information"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2022-10-13",
|
||||||
|
"newspiece": "KoboldAI Has been upgraded to the new countermeasures",
|
||||||
|
"tags": ["countermeasures", "ai horde"],
|
||||||
|
"importance": "Information",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date_published": "2022-10-09",
|
||||||
|
"newspiece": "The horde now includes News functionality. Also [In the API!](/api/v2/status/news)",
|
||||||
|
"importance": "Information"
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
def get_news(self):
    """Return the raw news list; extending classes can override this to add their own entries."""
    return self.HORDE_NEWS
|
||||||
|
|
||||||
|
def sort_news(self, raw_news):
    """Return *raw_news* sorted by their 'date_published' field (YYYY-MM-DD), newest first.

    :param raw_news: list of news dicts, each carrying a 'date_published' key.
    :return: a new list, sorted descending by publication date.
    """
    # Removed a stale commented-out draft of this sort (it used the wrong
    # '%y-%m-%d' format and keys that no longer exist).
    return sorted(
        raw_news,
        key=lambda piece: datetime.strptime(piece["date_published"], '%Y-%m-%d'),
        reverse=True,
    )
|
||||||
|
|
||||||
|
def sorted_news(self):
    """Convenience accessor: the full news feed, newest entries first."""
    return self.sort_news(self.get_news())
|
|
@ -0,0 +1,157 @@
|
||||||
|
import time
|
||||||
|
import uuid
|
||||||
|
import json
|
||||||
|
from loguru import logger
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
from sqlalchemy import func, or_, and_, not_, Boolean
|
||||||
|
from sqlalchemy.orm import noload
|
||||||
|
from overseer.flask import db, SQLITE_MODE
|
||||||
|
from overseer.utils import hash_api_key
|
||||||
|
from sqlalchemy.orm import joinedload
|
||||||
|
from overseer.classes.instance import Instance, Endorsement, Guarantee
|
||||||
|
|
||||||
|
def get_all_instances(min_endorsements = 0, min_guarantors = 1):
    """Return every Instance that is not long-orphaned and meets the minimum
    endorsement/guarantor counts.

    :param min_endorsements: keep instances with at least this many endorsements.
    :param min_guarantors: keep instances with at least this many guarantors.
    :return: list of Instance rows with endorsements/guarantors eager-loaded.
    """
    query = db.session.query(
        Instance
    ).outerjoin(
        Instance.endorsements,
        Instance.guarantors,
    ).options(
        # Eager-load both relationships so callers can iterate them without
        # firing an extra query per row.
        joinedload(Instance.guarantors),
        joinedload(Instance.endorsements),
    ).group_by(
        # Collapse the duplicate rows produced by the two outer joins.
        Instance.id
    ).filter(
        or_(
            # NOTE(review): the model attribute is spelled `oprhan_since`
            # (likely a typo for `orphan_since`); renaming needs a model +
            # migration change, so it is kept as-is here.
            Instance.oprhan_since == None,
            # Orphaned instances get a 24-hour grace period before they
            # stop being listed.
            Instance.oprhan_since > datetime.utcnow() - timedelta(hours=24)
        )
    ).having(
        db.func.count(Instance.endorsements) >= min_endorsements,
    ).having(
        db.func.count(Instance.guarantors) >= min_guarantors,
    )
    return query.all()
|
||||||
|
|
||||||
|
|
||||||
|
def get_all_endorsed_instances_by_approving_id(approving_id):
    """Return all Instances endorsed by the instance with id *approving_id*."""
    query = db.session.query(
        Instance
    ).outerjoin(
        Instance.endorsements,
    ).options(
        # Eager-load so callers can read .endorsements without extra queries.
        joinedload(Instance.endorsements),
    ).filter(
        Endorsement.approving_id == approving_id
    ).group_by(
        # Collapse duplicate rows produced by the join.
        Instance.id
    )
    return query.all()
|
||||||
|
|
||||||
|
def get_all_approving_instances_by_endorsed_id(endorsed_id):
    """Return all Instances that have endorsed the instance with id *endorsed_id*."""
    query = db.session.query(
        Instance
    ).outerjoin(
        # NOTE(review): assumes Instance.approvals maps to the Endorsement
        # table with this instance on the approving side — confirm against
        # the Instance model.
        Instance.approvals,
    ).options(
        # Eager-load so callers can read .approvals without extra queries.
        joinedload(Instance.approvals),
    ).filter(
        Endorsement.endorsed_id == endorsed_id
    ).group_by(
        # Collapse duplicate rows produced by the join.
        Instance.id
    )
    return query.all()
|
||||||
|
|
||||||
|
def get_all_guaranteed_instances_by_guarantor_id(guarantor_id):
    """Return all Instances guaranteed by the instance with id *guarantor_id*."""
    query = db.session.query(
        Instance
    ).outerjoin(
        Instance.guarantors,
    ).options(
        # Eager-load so callers can read .guarantors without extra queries.
        joinedload(Instance.guarantors),
    ).filter(
        Guarantee.guarantor_id == guarantor_id
    ).group_by(
        # Collapse duplicate rows produced by the join.
        Instance.id
    )
    return query.all()
|
||||||
|
|
||||||
|
def get_all_guarantor_instances_by_guaranteed_id(guaranteed_id):
    """Return all Instances acting as guarantor for the instance with id *guaranteed_id*."""
    query = db.session.query(
        Instance
    ).outerjoin(
        Instance.guarantees,
    ).options(
        # Eager-load so callers can read .guarantees without extra queries.
        joinedload(Instance.guarantees),
    ).filter(
        Guarantee.guaranteed_id == guaranteed_id
    ).group_by(
        # Collapse duplicate rows produced by the join.
        Instance.id
    )
    return query.all()
|
||||||
|
|
||||||
|
|
||||||
|
def find_instance_by_api_key(api_key):
    """Return the Instance whose stored (hashed) API key matches *api_key*, or None."""
    return Instance.query.filter_by(api_key=hash_api_key(api_key)).first()
|
||||||
|
|
||||||
|
def find_instance_by_domain(domain):
    """Return the Instance registered for *domain*, or None when unknown."""
    return Instance.query.filter_by(domain=domain).first()
|
||||||
|
|
||||||
|
def find_authenticated_instance(domain,api_key):
    """Return the Instance matching both *domain* and the hashed *api_key*, or None."""
    return Instance.query.filter_by(domain=domain, api_key=hash_api_key(api_key)).first()
|
||||||
|
|
||||||
|
def get_endorsement(instance_id, endorsing_instance_id):
    """Return the Endorsement of *instance_id* by *endorsing_instance_id*, or None."""
    return Endorsement.query.filter_by(
        endorsed_id=instance_id,
        approving_id=endorsing_instance_id,
    ).first()
|
||||||
|
|
||||||
|
def get_guarantee(instance_id, guarantor_id):
    """Return the Guarantee of *instance_id* by *guarantor_id*, or None."""
    return Guarantee.query.filter_by(
        guaranteed_id=instance_id,
        guarantor_id=guarantor_id,
    ).first()
|
||||||
|
|
||||||
|
def get_guarantor_chain(instance_id, _visited=None):
    """Walk the guarantee chain upwards from *instance_id*.

    :param instance_id: the instance whose chain of guarantors to collect.
    :param _visited: internal set of ids already expanded; guards against
        infinite recursion should the guarantee graph ever contain a cycle.
    :return: (guarantor_ids, chainbreaker) — the set of guarantor ids seen,
        and the id of the instance at which the chain broke (no guarantor,
        or a cycle), or None when the chain terminates normally at id 0.
    """
    if _visited is None:
        _visited = set()
    if instance_id in _visited:
        # Cycle detected: this chain can never reach the root guarantor (id 0),
        # so report the repeated instance as the chainbreaker. The original
        # code recursed forever here.
        return set(), instance_id
    _visited.add(instance_id)
    guarantor = Guarantee.query.filter_by(
        guaranteed_id=instance_id,
    ).first()
    if not guarantor:
        # No guarantee at all: this instance breaks the chain.
        return set(), instance_id
    guarantors = {guarantor.guarantor_id}
    chainbreaker = None
    if guarantor.guarantor_id != 0:
        # Not at the root yet — keep climbing.
        higher_guarantors, chainbreaker = get_guarantor_chain(guarantor.guarantor_id, _visited)
        guarantors = higher_guarantors | guarantors
    return guarantors, chainbreaker
|
||||||
|
|
||||||
|
def has_unbroken_chain(instance_id):
    """Report whether *instance_id*'s guarantee chain reaches the root guarantor (id 0).

    Returns (chain_ok, chainbreaker): chainbreaker is the Instance row where
    the chain stopped, or a falsy value when nothing broke it.
    """
    guarantors, breaker_id = get_guarantor_chain(instance_id)
    breaker = Instance.query.filter_by(id=breaker_id).first() if breaker_id else breaker_id
    return 0 in guarantors, breaker
|
||||||
|
|
||||||
|
def get_guarantee_chain(instance_id, _visited=None):
    """Return the ids of all instances transitively guaranteed by *instance_id*.

    :param instance_id: the instance acting as (root) guarantor.
    :param _visited: internal set of ids already expanded; guards against
        infinite recursion should the guarantee graph ever contain a cycle.
    :return: set of guaranteed instance ids (empty when none).
    """
    if _visited is None:
        _visited = {instance_id}
    guarantees = Guarantee.query.filter_by(
        guarantor_id=instance_id,
    ).all()
    if not guarantees:
        return set()
    guaranteed_ids = {g.guaranteed_id for g in guarantees}
    for gid in guaranteed_ids:
        if gid in _visited:
            # Already expanded (or it's the root) — skip to avoid infinite
            # recursion; the original code looped forever on cyclic data.
            continue
        _visited.add(gid)
        guaranteed_ids = guaranteed_ids | get_guarantee_chain(gid, _visited)
    return guaranteed_ids
|
||||||
|
|
||||||
|
def get_instances_by_ids(instance_ids):
    """Build (but do not execute) a query for all Instances whose id is in *instance_ids*.

    Note: returns the query object itself, not the result rows.
    """
    return Instance.query.filter(Instance.id.in_(instance_ids))
|
|
@ -0,0 +1,33 @@
|
||||||
|
from werkzeug import exceptions as wze
|
||||||
|
from loguru import logger
|
||||||
|
|
||||||
|
class BadRequest(wze.BadRequest):
    """HTTP 400 carrying a client-facing message and an optional server-side log line."""

    def __init__(self, message, log=None):
        # Initialize the base werkzeug exception so `description`/`response`
        # are set properly if this ever escapes the custom error handler.
        super().__init__(message)
        self.specific = message  # message surfaced to the API client
        self.log = log  # optional text for logger.warning (see handle_bad_requests)
|
||||||
|
|
||||||
|
class Forbidden(wze.Forbidden):
    """HTTP 403 carrying a client-facing message and an optional server-side log line."""

    def __init__(self, message, log=None):
        # Initialize the base werkzeug exception so `description`/`response`
        # are set properly if this ever escapes the custom error handler.
        super().__init__(message)
        self.specific = message  # message surfaced to the API client
        self.log = log  # optional text for logger.warning (see handle_bad_requests)
|
||||||
|
|
||||||
|
class Unauthorized(wze.Unauthorized):
    """HTTP 401 carrying a client-facing message and an optional server-side log line."""

    def __init__(self, message, log=None):
        # Initialize the base werkzeug exception so `description`/`response`
        # are set properly if this ever escapes the custom error handler.
        super().__init__(message)
        self.specific = message  # message surfaced to the API client
        self.log = log  # optional text for logger.warning (see handle_bad_requests)
|
||||||
|
|
||||||
|
class NotFound(wze.NotFound):
    """HTTP 404 carrying a client-facing message and an optional server-side log line."""

    def __init__(self, message, log=None):
        # Initialize the base werkzeug exception so `description`/`response`
        # are set properly if this ever escapes the custom error handler.
        super().__init__(message)
        self.specific = message  # message surfaced to the API client
        self.log = log  # optional text for logger.warning (see handle_bad_requests)
|
||||||
|
|
||||||
|
class Locked(wze.Locked):
    """HTTP 423 carrying a client-facing message and an optional server-side log line."""

    def __init__(self, message, log=None):
        # Initialize the base werkzeug exception so `description`/`response`
        # are set properly if this ever escapes the custom error handler.
        super().__init__(message)
        self.specific = message  # message surfaced to the API client
        self.log = log  # optional text for logger.warning (see handle_bad_requests)
|
||||||
|
|
||||||
|
def handle_bad_requests(error):
    """Namespace error handler: optionally log, then emit the client payload.

    Returns the (body, status) pair the framework turns into a response.
    """
    if error.log:
        logger.warning(error.log)
    return {'message': error.specific}, error.code
|
|
@ -13,9 +13,9 @@ SQLITE_MODE = os.getenv("USE_SQLITE", "0") == "1"
|
||||||
|
|
||||||
if SQLITE_MODE:
|
if SQLITE_MODE:
|
||||||
logger.warning("Using SQLite for database")
|
logger.warning("Using SQLite for database")
|
||||||
OVERSEER.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///horde.db"
|
OVERSEER.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///overseer.db"
|
||||||
else:
|
else:
|
||||||
OVERSEER.config["SQLALCHEMY_DATABASE_URI"] = f"postgresql://postgres:{os.getenv('POSTGRES_PASS')}@{os.getenv('POSTGRES_URL')}"
|
OVERSEER.config["SQLALCHEMY_DATABASE_URI"] = os.getenv('POSTGRES_URI')
|
||||||
OVERSEER.config['SQLALCHEMY_ENGINE_OPTIONS'] = {
|
OVERSEER.config['SQLALCHEMY_ENGINE_OPTIONS'] = {
|
||||||
"pool_size": 50,
|
"pool_size": 50,
|
||||||
"max_overflow": -1,
|
"max_overflow": -1,
|
||||||
|
|
|
@ -0,0 +1,26 @@
|
||||||
|
from pythorhead import Lemmy
|
||||||
|
from loguru import logger
|
||||||
|
import os
|
||||||
|
import secrets
|
||||||
|
import overseer.exceptions as e
|
||||||
|
|
||||||
|
# Import-time side effect: authenticate against the overctrl Lemmy instance.
# NOTE(review): this runs on module import and raises when login fails, so the
# app will not start without a reachable Lemmy — confirm that is intended.
overctrl_lemmy = Lemmy(f"https://{os.getenv('OVERSEER_LEMMY_DOMAIN')}")
_login = overctrl_lemmy.log_in(os.getenv('OVERSEER_LEMMY_USERNAME'),os.getenv('OVERSEER_LEMMY_PASSWORD'))
if not _login:
    raise Exception("Failed to login to overctrl")
# The overseer's own Lemmy account record, fetched once at startup.
overseer_lemmy_user = overctrl_lemmy.user.get(username=os.getenv('OVERSEER_LEMMY_USERNAME'))
|
||||||
|
|
||||||
|
def pm_instance(domain: str, message: str):
    """Send a Lemmy private message to the account registered for *domain*.

    The account name is the domain with dots replaced by underscores.
    Raises BadRequest when no such account exists.
    """
    recipient_name = domain.replace(".", "_")
    recipient = overctrl_lemmy.user.get(username=recipient_name)
    if not recipient:
        raise e.BadRequest(f"Could not find domain user '{recipient_name}'")
    return overctrl_lemmy.private_message(message, recipient["person_view"]["person"]["id"])
|
||||||
|
|
||||||
|
def pm_new_api_key(domain: str):
    """Generate a fresh API key for *domain*, PM it to the domain's account, and return it.

    Raises BadRequest when the private message cannot be delivered.
    """
    new_key = secrets.token_urlsafe(16)
    pm_content = f"The API Key for domain {domain} is\n\n{new_key}\n\nUse this to perform operations on the overseer."
    if not pm_instance(domain, pm_content):
        raise e.BadRequest("API Key PM failed")
    return new_key
|
|
@ -0,0 +1,103 @@
|
||||||
|
import uuid
|
||||||
|
import bleach
|
||||||
|
import secrets
|
||||||
|
import hashlib
|
||||||
|
import os
|
||||||
|
import random
|
||||||
|
import regex as re
|
||||||
|
import json
|
||||||
|
from datetime import datetime
|
||||||
|
import dateutil.relativedelta
|
||||||
|
from loguru import logger
|
||||||
|
from overseer.flask import SQLITE_MODE
|
||||||
|
|
||||||
|
|
||||||
|
# Re-seed the module-level PRNG from the OS CSPRNG so each process gets an
# independent, unpredictable seed at import time.
random.seed(random.SystemRandom().randint(0, 2**32 - 1))
|
||||||
|
|
||||||
|
|
||||||
|
def count_digits(number):
    """Return the number of digits in the integer part of a positive *number*.

    Examples: 5 -> 1, 42 -> 2, 10 -> 2, 1000 -> 4.
    """
    digits = 1
    # Use >= so exact powers of ten are counted correctly: the original
    # `while number > 10` under-counted them by one (10 -> 1, 1000 -> 3).
    while number >= 10:
        number = number / 10
        digits += 1
    return digits
|
||||||
|
|
||||||
|
class ConvertAmount:
    """Scale a raw amount into a human-readable metric unit (K/M/G/T).

    Exposes: .amount (scaled value), .prefix ('kilo', 'mega', ...),
    .char ('K', 'M', ...), .digits, .decimals.
    """

    def __init__(self, amount, decimals=1):
        # The digit count of the raw amount picks the metric scale.
        self.digits = count_digits(amount)
        self.decimals = decimals
        if self.digits < 4:
            scaled, prefix, char = round(amount, self.decimals), '', ''
        elif self.digits < 7:
            scaled, prefix, char = round(amount / 1000, self.decimals), 'kilo', 'K'
        elif self.digits < 10:
            scaled, prefix, char = round(amount / 1000000, self.decimals), 'mega', 'M'
        elif self.digits < 13:
            scaled, prefix, char = round(amount / 1000000000, self.decimals), 'giga', 'G'
        else:
            scaled, prefix, char = round(amount / 1000000000000, self.decimals), 'tera', 'T'
        self.amount = scaled
        self.prefix = prefix
        self.char = char
|
||||||
|
|
||||||
|
def get_db_uuid():
    """Return a fresh UUID4 — as a str under SQLite (no native UUID column type),
    otherwise as a uuid.UUID object."""
    new_id = uuid.uuid4()
    return str(new_id) if SQLITE_MODE else new_id
|
||||||
|
|
||||||
|
def generate_client_id():
    """Return a new URL-safe random client identifier (16 bytes of entropy)."""
    return secrets.token_urlsafe(16)
|
||||||
|
|
||||||
|
def sanitize_string(text):
    """Return *text* with HTML stripped/escaped by bleach and surrounding whitespace removed."""
    return bleach.clean(text).strip()
|
||||||
|
|
||||||
|
def hash_api_key(unhashed_api_key):
    """Return the salted SHA-256 hex digest of an API key.

    The salt comes from the `secret_key` env var; the hard-coded fallback keeps
    development setups working without an env file.
    """
    salt = os.getenv("secret_key", "s0m3s3cr3t") # Note default here, just so it can run without env file
    # Removed a commented-out debug logger call that would have written raw
    # API keys to the log if ever re-enabled.
    return hashlib.sha256(salt.encode() + unhashed_api_key.encode()).hexdigest()
|
||||||
|
|
||||||
|
|
||||||
|
def hash_dictionary(dictionary):
    """Return a stable SHA-256 hex digest for *dictionary*.

    Keys are sorted before serialization, so the digest is independent of
    insertion order.
    """
    canonical = json.dumps(dictionary, sort_keys=True)
    return hashlib.sha256(canonical.encode()).hexdigest()
|
||||||
|
|
||||||
|
def get_expiry_date():
    """Return the UTC datetime 20 minutes from now (used as a record expiry)."""
    # A plain stdlib timedelta suffices for a fixed minute offset; no need
    # for dateutil's relativedelta here.
    from datetime import timedelta
    return datetime.utcnow() + timedelta(minutes=20)
|
||||||
|
|
||||||
|
def get_random_seed(start_point=0):
    """Return a random seed in [start_point, 2**32 - 1], drawn from this node's PRNG."""
    upper_bound = 2**32 - 1
    return random.randint(start_point, upper_bound)
|
||||||
|
|
||||||
|
def count_parentheses(s):
    """Count closed '(...)' pairs scanning *s* left to right (nesting is not tracked)."""
    pairs = 0
    awaiting_close = False
    for ch in s:
        if ch == "(":
            awaiting_close = True
        elif ch == ")" and awaiting_close:
            awaiting_close = False
            pairs += 1
    return pairs
|
||||||
|
|
||||||
|
def validate_regex(regex_string):
    """Return True when *regex_string* compiles as a case-insensitive regex, else False."""
    try:
        re.compile(regex_string, re.IGNORECASE)
    except re.error:
        # Narrowed from a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit and hid real bugs.
        return False
    return True
|
|
@ -6,13 +6,11 @@ Flask-Caching
|
||||||
waitress~=2.1.2
|
waitress~=2.1.2
|
||||||
requests >= 2.27
|
requests >= 2.27
|
||||||
Markdown~=3.4.1
|
Markdown~=3.4.1
|
||||||
flask-dance[sqla]
|
|
||||||
blinker
|
|
||||||
python-dotenv
|
python-dotenv
|
||||||
loguru
|
loguru
|
||||||
python-dateutil~=2.8.2
|
python-dateutil~=2.8.2
|
||||||
redis~=4.3.5
|
|
||||||
flask_sqlalchemy==3.0.2
|
flask_sqlalchemy==3.0.2
|
||||||
SQLAlchemy~=1.4.44
|
SQLAlchemy~=1.4.44
|
||||||
psycopg2-binary
|
psycopg2-binary
|
||||||
regex
|
regex
|
||||||
|
pythorhead>=0.6.0
|
Loading…
Reference in New Issue