diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..dfbe880
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,139 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+pip-wheel-metadata/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+.python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+
+db/*
+test_commands.txt
+SQL_statements.txt
+horde.log
+horde*.bz2
+horde.db
+/.idea
+/boto3oeo.py
\ No newline at end of file
diff --git a/overseer/__init__.py b/overseer/__init__.py
new file mode 100644
index 0000000..17c2a12
--- /dev/null
+++ b/overseer/__init__.py
@@ -0,0 +1,22 @@
+import os
+import socket
+from uuid import uuid4
+
+from overseer.logger import logger
+from overseer.flask import OVERSEER
+from overseer.routes import *
+from overseer.apis import apiv1
+from overseer.argparser import args
+from overseer.consts import OVERSEER_VERSION
+
+
+OVERSEER.register_blueprint(apiv1)
+
+
+@OVERSEER.after_request
+def after_request(response):
+ response.headers["Access-Control-Allow-Origin"] = "*"
+ response.headers["Access-Control-Allow-Methods"] = "POST, GET, OPTIONS, PUT, DELETE, PATCH"
+ response.headers["Access-Control-Allow-Headers"] = "Accept, Content-Type, Content-Length, Accept-Encoding, X-CSRF-Token, apikey, Client-Agent, X-Fields"
+ response.headers["Horde-Node"] = f"{socket.gethostname()}:{args.port}:{OVERSEER_VERSION}"
+ return response
diff --git a/overseer/apis/__init__.py b/overseer/apis/__init__.py
new file mode 100644
index 0000000..133c811
--- /dev/null
+++ b/overseer/apis/__init__.py
@@ -0,0 +1 @@
+from .apiv1 import blueprint as apiv1
diff --git a/overseer/apis/apiv1.py b/overseer/apis/apiv1.py
new file mode 100644
index 0000000..6ccbcf0
--- /dev/null
+++ b/overseer/apis/apiv1.py
@@ -0,0 +1,18 @@
+from flask import Blueprint
+from flask_restx import Api
+from importlib import import_module
+
+from overseer.apis.v1 import api as v1
+
+blueprint = Blueprint('apiv1', __name__, url_prefix='/api')
+api = Api(blueprint,
+ version='1.0',
+ title=f'Lemmy Overseer',
+ description=f'The API documentation for the Lemmy Overseer',
+ contact_email="mail@dbzer0.com",
+ default="v1",
+ default_label="Latest Version",
+ ordered=True,
+)
+
+api.add_namespace(v1)
diff --git a/overseer/apis/models/__init__.py b/overseer/apis/models/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/overseer/apis/models/v1.py b/overseer/apis/models/v1.py
new file mode 100644
index 0000000..5be2b19
--- /dev/null
+++ b/overseer/apis/models/v1.py
@@ -0,0 +1,13 @@
+from flask_restx import fields
+
+class Models:
+ def __init__(self,api):
+ self.response_model_suspicious_instances = api.model('SuspiciousInstances', {
+ 'domain': fields.String(description="The instance domain"),
+ 'uptime_alltime': fields.Float(description="The instance uptime pct. 100% and thousand of users is unlikely"),
+ 'local_posts': fields.Integer(description="The amount of local posts in that instance"),
+ 'total_users': fields.Integer(description="The total amount of users registered in that instance"),
+ 'active_users_monthly': fields.Integer(description="The amount of active users monthly."),
+ 'signup': fields.Boolean(default=False,description="True when subscriptions are open, else False"),
+ 'user_post_ratio': fields.Float(description="Users to Post Ratio"),
+ })
diff --git a/overseer/apis/v1/__init__.py b/overseer/apis/v1/__init__.py
new file mode 100644
index 0000000..02c3678
--- /dev/null
+++ b/overseer/apis/v1/__init__.py
@@ -0,0 +1,4 @@
+import overseer.apis.v1.base as base
+from overseer.apis.v1.base import api
+
+api.add_resource(base.SusInstances, "/instances")
diff --git a/overseer/apis/v1/base.py b/overseer/apis/v1/base.py
new file mode 100644
index 0000000..b746f4f
--- /dev/null
+++ b/overseer/apis/v1/base.py
@@ -0,0 +1,32 @@
+from flask import request
+from flask_restx import Namespace, Resource, reqparse
+from overseer.flask import cache
+from overseer.observer import retrieve_suspicious_instances
+from loguru import logger
+
+api = Namespace('v1', 'API Version 1' )
+
+from overseer.apis.models.v1 import Models
+
+models = Models(api)
+
+# Used by the flask limiter to limit requests per URL path
+def get_request_path():
+ # logger.info(dir(request))
+ return f"{request.remote_addr}@{request.method}@{request.path}"
+
+
+class SusInstances(Resource):
+ get_parser = reqparse.RequestParser()
+ get_parser.add_argument("Client-Agent", default="unknown:0:unknown", type=str, required=False, help="The client name and version.", location="headers")
+ get_parser.add_argument("user_to_post_ratio", required=False, default=20, type=int, help="The amount of local users / amount of local posts to consider suspicious", location="args")
+
+ @api.expect(get_parser)
+ @logger.catch(reraise=True)
+ @cache.cached(timeout=10, query_string=True)
+ @api.marshal_with(models.response_model_suspicious_instances, code=200, description='Suspicious Instances', as_list=True, skip_none=True)
+ def get(self):
+ '''A List with the details of all suspicious instances
+ '''
+ self.args = self.get_parser.parse_args()
+ return retrieve_suspicious_instances(self.args.user_to_post_ratio),200
\ No newline at end of file
diff --git a/overseer/argparser.py b/overseer/argparser.py
new file mode 100644
index 0000000..3974db5
--- /dev/null
+++ b/overseer/argparser.py
@@ -0,0 +1,10 @@
+import argparse
+
+arg_parser = argparse.ArgumentParser()
+arg_parser.add_argument('-i', '--insecure', action="store_true", help="If set, will use http instead of https (useful for testing)")
+arg_parser.add_argument('-v', '--verbosity', action='count', default=0, help="The default logging level is ERROR or higher. This value increases the amount of logging seen in your screen")
+arg_parser.add_argument('-q', '--quiet', action='count', default=0, help="The default logging level is ERROR or higher. This value decreases the amount of logging seen in your screen")
+arg_parser.add_argument('-p', '--port', action='store', default=10001, required=False, type=int, help="Provide a different port to start with")
+arg_parser.add_argument('--test', action="store_true", help="Test")
+arg_parser.add_argument('--color', default=False, action="store_true", help="Enabled colorized logs")
+args = arg_parser.parse_args()
diff --git a/overseer/consts.py b/overseer/consts.py
new file mode 100644
index 0000000..47b796e
--- /dev/null
+++ b/overseer/consts.py
@@ -0,0 +1 @@
+OVERSEER_VERSION = "0.0.1"
diff --git a/overseer/flask.py b/overseer/flask.py
new file mode 100644
index 0000000..c41644b
--- /dev/null
+++ b/overseer/flask.py
@@ -0,0 +1,40 @@
+import os
+from flask import Flask
+from flask_caching import Cache
+from werkzeug.middleware.proxy_fix import ProxyFix
+from flask_sqlalchemy import SQLAlchemy
+from loguru import logger
+
+cache = None
+OVERSEER = Flask(__name__)
+OVERSEER.wsgi_app = ProxyFix(OVERSEER.wsgi_app, x_for=1)
+
+SQLITE_MODE = os.getenv("USE_SQLITE", "0") == "1"
+
+if SQLITE_MODE:
+ logger.warning("Using SQLite for database")
+ OVERSEER.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///horde.db"
+else:
+ OVERSEER.config["SQLALCHEMY_DATABASE_URI"] = f"postgresql://postgres:{os.getenv('POSTGRES_PASS')}@{os.getenv('POSTGRES_URL')}"
+ OVERSEER.config['SQLALCHEMY_ENGINE_OPTIONS'] = {
+ "pool_size": 50,
+ "max_overflow": -1,
+ }
+OVERSEER.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
+db = SQLAlchemy(OVERSEER)
+db.init_app(OVERSEER)
+
+if not SQLITE_MODE:
+ with OVERSEER.app_context():
+ logger.debug("pool size = {}".format(db.engine.pool.size()))
+logger.init_ok("Horde Database", status="Started")
+
+# Allow local workstation run
+if cache is None:
+ cache_config = {
+ "CACHE_TYPE": "SimpleCache",
+ "CACHE_DEFAULT_TIMEOUT": 300
+ }
+ cache = Cache(config=cache_config)
+ cache.init_app(OVERSEER)
+ logger.init_warn("Flask Cache", status="SimpleCache")
diff --git a/overseer/limiter.py b/overseer/limiter.py
new file mode 100644
index 0000000..6164ef4
--- /dev/null
+++ b/overseer/limiter.py
@@ -0,0 +1,18 @@
+from flask_limiter import Limiter
+from flask_limiter.util import get_remote_address
+from overseer.flask import OVERSEER
+from loguru import logger
+
+limiter = None
+# Very basic DOS prevention
+logger.init("Limiter Cache", status="Connecting")
+
+# Allow local workstation run
+if limiter is None:
+ limiter = Limiter(
+ OVERSEER,
+ key_func=get_remote_address,
+ default_limits=["90 per minute"],
+ headers_enabled=True
+ )
+ logger.init_warn("Limiter Cache", status="Memory Only")
diff --git a/overseer/logger.py b/overseer/logger.py
new file mode 100644
index 0000000..7e004c5
--- /dev/null
+++ b/overseer/logger.py
@@ -0,0 +1,111 @@
+import sys
+from functools import partialmethod
+from loguru import logger
+from overseer.argparser import args
+
+STDOUT_LEVELS = ["GENERATION", "PROMPT"]
+INIT_LEVELS = ["INIT", "INIT_OK", "INIT_WARN", "INIT_ERR"]
+MESSAGE_LEVELS = ["MESSAGE"]
+# By default we're at error level or higher
+verbosity = 40
+quiet = 0
+
+def set_logger_verbosity(count):
+ global verbosity
+ # The count comes reversed. So count = 0 means minimum verbosity
+ # While count 5 means maximum verbosity
+    # So the higher the count, the lower we drop the verbosity threshold
+ verbosity = 40 - (count * 10)
+
+def quiesce_logger(count):
+ global quiet
+ # The bigger the count, the more silent we want our logger
+ quiet = count * 10
+
+def is_stdout_log(record):
+ if record["level"].name not in STDOUT_LEVELS:
+ return(False)
+ if record["level"].no < verbosity + quiet:
+ return(False)
+ return(True)
+
+def is_init_log(record):
+ if record["level"].name not in INIT_LEVELS:
+ return(False)
+ if record["level"].no < verbosity + quiet:
+ return(False)
+ return(True)
+
+def is_msg_log(record):
+ if record["level"].name not in MESSAGE_LEVELS:
+ return(False)
+ if record["level"].no < verbosity + quiet:
+ return(False)
+ return(True)
+
+def is_stderr_log(record):
+ if record["level"].name in STDOUT_LEVELS + INIT_LEVELS + MESSAGE_LEVELS:
+ return(False)
+ if record["level"].no < verbosity + quiet:
+ return(False)
+ return(True)
+
+def test_logger():
+ logger.generation("This is a generation message\nIt is typically multiline\nThee Lines".encode("unicode_escape").decode("utf-8"))
+ logger.prompt("This is a prompt message")
+ logger.debug("Debug Message")
+ logger.info("Info Message")
+ logger.warning("Info Warning")
+ logger.error("Error Message")
+ logger.critical("Critical Message")
+ logger.init("This is an init message", status="Starting")
+ logger.init_ok("This is an init message", status="OK")
+ logger.init_warn("This is an init message", status="Warning")
+ logger.init_err("This is an init message", status="Error")
+ logger.message("This is user message")
+ sys.exit()
+
+
+logfmt = "{level: <10} | {time:YYYY-MM-DD HH:mm:ss} | {name}:{function}:{line} - {message}"
+genfmt = "{level: <10} @ {time:YYYY-MM-DD HH:mm:ss} | {message}"
+initfmt = "INIT | {extra[status]: <14} | {message}"
+msgfmt = "{level: <10} | {message}"
+
+logger.level("GENERATION", no=24, color="")
+logger.level("PROMPT", no=23, color="")
+logger.level("INIT", no=31, color="")
+logger.level("INIT_OK", no=31, color="")
+logger.level("INIT_WARN", no=31, color="")
+logger.level("INIT_ERR", no=31, color="")
+logger.level("AUDIT", no=19, color="")
+# Messages contain important information without which this application might not be able to be used
+# As such, they have the highest priority
+logger.level("MESSAGE", no=61, color="")
+
+logger.__class__.generation = partialmethod(logger.__class__.log, "GENERATION")
+logger.__class__.prompt = partialmethod(logger.__class__.log, "PROMPT")
+logger.__class__.init = partialmethod(logger.__class__.log, "INIT")
+logger.__class__.init_ok = partialmethod(logger.__class__.log, "INIT_OK")
+logger.__class__.init_warn = partialmethod(logger.__class__.log, "INIT_WARN")
+logger.__class__.init_err = partialmethod(logger.__class__.log, "INIT_ERR")
+logger.__class__.message = partialmethod(logger.__class__.log, "MESSAGE")
+logger.__class__.audit = partialmethod(logger.__class__.log, "AUDIT")
+
+config = {
+ "handlers": [
+ {"sink": sys.stderr, "format": logfmt, "colorize":args.color, "filter": is_stderr_log},
+ {"sink": sys.stdout, "format": genfmt, "level": "PROMPT", "colorize":args.color, "filter": is_stdout_log},
+ {"sink": sys.stdout, "format": initfmt, "level": "INIT", "colorize":args.color, "filter": is_init_log},
+ {"sink": sys.stdout, "format": msgfmt, "level": "MESSAGE", "colorize":args.color, "filter": is_msg_log}
+ ],
+}
+logger.configure(**config)
+logger.add("horde.log", retention="7 days", rotation="1d", compression="bz2", level=19)
+logger.disable("__main__")
+logger.warning("disabled")
+logger.enable("")
+logger.enable(None)
+
+
+set_logger_verbosity(args.verbosity)
+quiesce_logger(args.quiet)
diff --git a/overseer/observer.py b/overseer/observer.py
new file mode 100644
index 0000000..dcc9540
--- /dev/null
+++ b/overseer/observer.py
@@ -0,0 +1,84 @@
+import requests
+from loguru import logger
+
+
+def retrieve_suspicious_instances(users_to_posts_ratio = 20):
+ # GraphQL query
+ query = '''
+ {
+ nodes(softwarename: "lemmy") {
+ domain
+ name
+ metatitle
+ metadescription
+ metaimage
+ date_created
+ uptime_alltime
+ total_users
+ active_users_monthly
+ active_users_halfyear
+ signup
+ local_posts
+ }
+ }
+ '''
+
+ # GraphQL endpoint URL
+ url = 'https://api.fediverse.observer/'
+
+ # Request headers
+ headers = {
+ 'User-Agent': 'Lemmy Overseer / mail@dbzer0.com',
+ 'Accept': '*/*',
+ 'Accept-Language': 'en-US,en;q=0.5',
+ 'Accept-Encoding': 'gzip, deflate, br',
+ 'Referer': 'https://api.fediverse.observer/',
+ 'Content-Type': 'application/json',
+ 'Origin': 'https://api.fediverse.observer',
+ 'DNT': '1',
+ 'Connection': 'keep-alive',
+ 'Sec-Fetch-Dest': 'empty',
+ 'Sec-Fetch-Mode': 'cors',
+ 'Sec-Fetch-Site': 'same-origin',
+ 'TE': 'trailers'
+ }
+
+ # Create the request payload
+ payload = {
+ 'query': query
+ }
+
+ # Send the POST request to the GraphQL endpoint
+ response = requests.post(url, headers=headers, json=payload)
+
+ # Check if the request was successful (HTTP 200 status code)
+ if response.ok:
+ # Extract the JSON response
+ data = response.json()
+ bad_nodes = []
+ for node in data["data"]["nodes"]:
+ is_bad = False
+ local_posts = node["local_posts"]
+ if node["total_users"] < 300:
+ continue
+ if local_posts == 0:
+ local_posts= 1
+ if node["total_users"] / local_posts > users_to_posts_ratio:
+ is_bad = True
+ # print(node)
+ if is_bad:
+ bad_node = {
+ "domain": node["domain"],
+ "uptime_alltime": node["uptime_alltime"],
+ "local_posts": node["local_posts"],
+ "total_users": node["total_users"],
+ "active_users_monthly": node["active_users_monthly"],
+ "signup": node["signup"],
+ "user_post_ratio": node["total_users"] / local_posts,
+ }
+ bad_nodes.append(bad_node)
+ return bad_nodes
+ else:
+ # Print the error message if the request failed
+ logger.error(f'Observer failed with status code {response.status_code}: {response.text}')
+ return None
diff --git a/overseer/routes.py b/overseer/routes.py
new file mode 100644
index 0000000..53145ef
--- /dev/null
+++ b/overseer/routes.py
@@ -0,0 +1,32 @@
+from flask import render_template, redirect, url_for, request
+from markdown import markdown
+from loguru import logger
+from overseer.flask import OVERSEER
+
+@logger.catch(reraise=True)
+@OVERSEER.route('/')
+# @cache.cached(timeout=300)
+def index():
+ with open(f'overseer/templates/index.md') as index_file:
+ index = index_file.read()
+ findex = index.format()
+
+ style = """
+ """
+
+ head = f"""
+ Horde Overseer
+
+ {style}
+
+ """
+ return(head + markdown(findex))
\ No newline at end of file
diff --git a/overseer/templates/index.md b/overseer/templates/index.md
new file mode 100644
index 0000000..bd01202
--- /dev/null
+++ b/overseer/templates/index.md
@@ -0,0 +1,13 @@
+# Lemmy Overseer
+
+This is a service to help Lemmy instances detect and avoid suspicious instances
+
+## REST API
+
+[Full Documentation](/api)
+
+## Credits
+
+These are the people who made this software possible.
+
+* [Db0](https://dbzer0.com) - Development and Maintenance
diff --git a/quickrun.py b/quickrun.py
index f9eb8d9..b7769f7 100644
--- a/quickrun.py
+++ b/quickrun.py
@@ -1,82 +1,6 @@
-import requests
import json
+from overseer.observer import retrieve_suspicious_instances
-# GraphQL query
-query = '''
-{
- nodes(softwarename: "lemmy") {
- domain
- name
- metatitle
- metadescription
- metaimage
- date_created
- uptime_alltime
- total_users
- active_users_monthly
- active_users_halfyear
- signup
- local_posts
- }
-}
-'''
-
-# GraphQL endpoint URL
-url = 'https://api.fediverse.observer/'
-
-# Request headers
-headers = {
- 'User-Agent': 'Lemmy Overseer / mail@dbzer0.com',
- 'Accept': '*/*',
- 'Accept-Language': 'en-US,en;q=0.5',
- 'Accept-Encoding': 'gzip, deflate, br',
- 'Referer': 'https://api.fediverse.observer/',
- 'Content-Type': 'application/json',
- 'Origin': 'https://api.fediverse.observer',
- 'DNT': '1',
- 'Connection': 'keep-alive',
- 'Sec-Fetch-Dest': 'empty',
- 'Sec-Fetch-Mode': 'cors',
- 'Sec-Fetch-Site': 'same-origin',
- 'TE': 'trailers'
-}
-
-# Create the request payload
-payload = {
- 'query': query
-}
-
-# Send the POST request to the GraphQL endpoint
-response = requests.post(url, headers=headers, json=payload)
-
-# Check if the request was successful (HTTP 200 status code)
-if response.ok:
- # Extract the JSON response
- data = response.json()
- bad_nodes = []
- for node in data["data"]["nodes"]:
- is_bad = False
- local_posts = node["local_posts"]
- if node["total_users"] < 300:
- continue
- if local_posts == 0:
- local_posts= 1
- if node["total_users"] / local_posts > 20:
- is_bad = True
- # print(node)
- if is_bad:
- bad_node = {
- "domain": node["domain"],
- "uptime_alltime": node["uptime_alltime"],
- "local_posts": node["local_posts"],
- "total_users": node["total_users"],
- "active_users_monthly": node["active_users_monthly"],
- "signup": node["signup"],
- "local_posts": node["local_posts"],
- "user_post_ratio": node["total_users"] / local_posts,
- }
- bad_nodes.append(bad_node)
- print(json.dumps([bn["domain"] for bn in bad_nodes], indent=4))
-else:
- # Print the error message if the request failed
- print(f'Request failed with status code {response.status_code}: {response.text}')
+sus = retrieve_suspicious_instances(20)
+if sus:
+ print(json.dumps([bn["domain"] for bn in sus], indent=4))
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
index 663bd1f..51a6a9f 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1 +1,18 @@
-requests
\ No newline at end of file
+werkzeug~=2.2.2
+Flask~=2.2.2
+flask-restx
+flask_limiter~=2.8.1
+Flask-Caching
+waitress~=2.1.2
+requests >= 2.27
+Markdown~=3.4.1
+flask-dance[sqla]
+blinker
+python-dotenv
+loguru
+python-dateutil~=2.8.2
+redis~=4.3.5
+flask_sqlalchemy==3.0.2
+SQLAlchemy~=1.4.44
+psycopg2-binary
+regex
diff --git a/server.py b/server.py
new file mode 100644
index 0000000..c6553d9
--- /dev/null
+++ b/server.py
@@ -0,0 +1,26 @@
+from dotenv import load_dotenv
+import os
+import logging
+
+load_dotenv()
+
+from overseer.argparser import args
+from overseer.flask import OVERSEER
+from loguru import logger
+
+if __name__ == "__main__":
+ # Only setting this for the WSGI logs
+ logging.basicConfig(format='%(asctime)s - %(levelname)s - %(module)s:%(lineno)d - %(message)s', level=logging.WARNING)
+ from waitress import serve
+
+ logger.init("WSGI Server", status="Starting")
+ url_scheme = 'https'
+ if args.insecure:
+ os.environ['OAUTHLIB_INSECURE_TRANSPORT'] = '1' # Disable this on prod
+ url_scheme = 'http'
+ allowed_host = "overseer.dbzer0.net"
+ if args.insecure:
+ allowed_host = "0.0.0.0"
+ logger.init_warn("WSGI Mode", status="Insecure")
+ serve(OVERSEER, port=args.port, url_scheme=url_scheme, threads=45, connection_limit=1024, asyncore_use_poll=True)
+ logger.init("WSGI Server", status="Stopped")