diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000..96c4cff --- /dev/null +++ b/.editorconfig @@ -0,0 +1,6 @@ +root = true +[*] +indent_style = tab +indent_size = 4 +trim_trailing_whitespace = true +insert_final_newline = true \ No newline at end of file diff --git a/.gitignore b/.gitignore index b07889e..f5b4577 100644 --- a/.gitignore +++ b/.gitignore @@ -1,7 +1,11 @@ *~ -node_modules -__pycache__ -static/list.json -static/servers.js -config.py *.mmdb +*.sqlite +node_modules/ +__pycache__/ +/server_list/static/list.json +/server_list/static/servers.js +/config.py +/celerybeat-schedule +/package-lock.json +/Pipfile.lock diff --git a/Pipfile b/Pipfile new file mode 100644 index 0000000..b718db0 --- /dev/null +++ b/Pipfile @@ -0,0 +1,18 @@ +[[source]] +url = "https://pypi.org/simple" +verify_ssl = true +name = "pypi" + +[packages] +maxminddb = ">=2.0.0" +Flask = "~=2.0" +flask-sqlalchemy = "~=3.0" +flask-migrate = "~=4.0" +celery = "~=5.0" + +[dev-packages] +pylint = "*" +rope = "*" + +[requires] +python_version = "3.11" diff --git a/README.md b/README.md index 48d3308..ddc47d2 100644 --- a/README.md +++ b/README.md @@ -1,131 +1,203 @@ -Minetest server list +Minetest Server List ==================== -Setting up the webpage ----------------------- +Webpage Setup +--- -You will have to install node.js, doT.js and their dependencies to compile -the server list webpage template. +You will have to install node.js, doT.js and their dependencies to compile the server list webpage template. First install node.js, e.g.: - # apt-get install nodejs - # # OR: - # pacman -S nodejs +```sh +sudo pacman -S nodejs +# OR: +sudo apt-get install nodejs +``` Then install doT.js and its dependencies: - $ npm install dot commander mkdirp +```sh +npm install +``` And finally compile the template: - $ cd static - $ ../node_modules/dot/bin/dot-packer -s . +```sh +cd server_list/static +../../node_modules/dot/bin/dot-packer -s . 
+``` -You can now serve the webpage by copying the files in static/ to your web root, or by [starting the server list](#setting-up-the-server). +You can now serve the webpage by copying the files in `server_list/static/` to your web root, or by [starting the server list](#server-setup). +Embedding in a Webpage +--- -Embedding the server list in a page ------------------------------------ +```html + + ... + + ... + + + ... +
+ ... + + +``` - - ... - - ... - - - ... -
- ... - - +Server Setup +--- + 1. Install Python 3 and Pipenv: -Setting up the server ---------------------- + ```sh + sudo pacman -S python python-pipenv + # OR: + sudo apt-get install python3 python3-pip && pip install pipenv + ``` - 1. Install Python 3 and pip: + 2. Install required Python packages: - pacman -S python python-pip - # OR: - apt-get install python3 python3-pip + ```sh + pipenv sync + ``` - 2. Install required Python packages: + 3. Set up Celery message broker. Pick a Celery backend (Redis or RabbitMQ are recommended), and install and enable the required packages. For example: - # You might have to use pip3 if your system defaults to Python 2 - pip install -r requirements.txt + ```sh + # Redis support requires an additional package + pipenv run pip install redis + sudo pacman -S redis # or sudo apt-get install redis + sudo systemctl enable --now redis + ``` - 3. If using in production, install uwsgi and it's python plugin: + 4. Configure the server by adding options to `config.py`. + See `server_list/config.py` for defaults. - pacman -S uwsgi uwsgi-plugin-python - # OR: - apt-get install uwsgi uwsgi-plugin-python - # OR: - pip install uwsgi + 5. Start the server for development: - 4. Configure the server by adding options to `config.py`. - See `config-example.py` for defaults. + ```sh + pipenv run flask run + ``` - 5. Start the server: + 6. Start the celery background worker: - $ ./server.py - $ # Or for production: - $ uwsgi -s /tmp/minetest-master.sock --plugin python -w server:app --enable-threads - $ # Then configure according to http://flask.pocoo.org/docs/deploying/uwsgi/ + ```sh + pipenv run celery --app server_list:celery worker --beat + ``` - 7. (optional) Configure the proxy server, if any. You should make the server - load static files directly from the static directory. Also, `/list` - should be served from `list.json`. 
Example for nginx: +Running in Production +--- - root /path/to/server/static; - rewrite ^/list$ /list.json; - try_files $uri @uwsgi; - location @uwsgi { - uwsgi_pass ...; - } +When running in production you should set up a proxy server that calls the server list through WSGI. -Setting up the server (Apache version) ---------------------- +These examples assume that the server list is installed to `/srv/http/serverlist`. -If you wish to use Apache to host the server list, do steps 1-2, 4, above. Additionally install/enable mod_wsgi and an Apache site config like the following: +### Nginx - # This config assumes you have the server list at DocumentRoot. - # Visitors to the server list in this config would visit http://local.server/ and - # apache would serve up the output from server.py. Static resources would be served - # from http://local.server/static. +First [set up uWSGI](#uwsgi), then update the Nginx configuration to proxy to uWSGI. You should make the server load static files directly from the static directory. Also, `/list` should be aliased to `list.json`. - # Where are the minetest-server files located? - DocumentRoot /var/games/minetest/serverlist +Here's an example configuration: - # Serve up server.py at the root of the URL. - WSGIScriptAlias / /var/games/minetest/serverlist/server.py +```nginx +root /srv/http/serverlist/server_list/static; +rewrite ^/list$ /list.json; +try_files $uri @uwsgi; +location @uwsgi { + uwsgi_pass unix:/run/uwsgi/server_list.sock; +} +``` - # The name of the function that we call when we invoke server.py - WSGICallableObject app +Also see [the Flask uwsgi documentation](https://flask.palletsprojects.com/en/2.0.x/deploying/uwsgi/). - # These options are necessary to enable Daemon mode. Without this, you'll have strange behavior - # with servers dropping off your list! You can tweak threads as needed. See mod_wsgi documentation. 
- WSGIProcessGroup minetest-serverlist - WSGIDaemonProcess minetest-serverlist threads=2 +### Apache +There are two options for Apache, you can use either `mod_wsgi` or `mod_proxy_uwsgi`. - - Require all granted - +Note: both of these example configurations serve static through WSGI, instead of bypassing WSGI for performance. - +#### mod_wsgi + +First install/enable `mod_wsgi`. + +Then create `wsgi.py` in the directory containing `server_list` with the following contents: + +```py +import os, sys +sys.path.append(os.path.dirname(__file__)) +from server_list import app +``` + +Then configure the Apache VirtualHost like the following: + +```apache +WSGIDaemonProcess server_list python-home= + +WSGIProcessGroup server_list +WSGIApplicationGroup %{GLOBAL} + +WSGIScriptAlias / /srv/http/serverlist/wsgi.py +WSGICallableObject app + + + + Require all granted + + +``` + +#### mod_proxy_uwsgi + +First [set up uWSGI](#uwsgi), then install/enable `mod_proxy` and `mod_proxy_uwsgi` and add the following to your VirtualHost: + +```apache +ProxyPass / unix:/run/uwsgi/server_list.sock|uwsgi://localhost/ +``` + +Note: this requires at least Apache 2.4.7 for the unix socket syntax. If you have an older version of Apache you'll have to use IP sockets. + +### uWSGI + +First, install uWSGI and its python plugin. + +```sh +pacman -S uwsgi uwsgi-plugin-python +# OR: +apt-get install uwsgi uwsgi-plugin-python +# OR: +pip install uwsgi +``` + +Then create a uWSGI config file. For example: + +```ini +[uwsgi] +socket = /run/uwsgi/server_list.sock +plugin = python +virtualenv = +python-path = /srv/http/serverlist +module = server_list +callable = app +``` + +You can put the config file in `/etc/uwsgi/server_list.ini`. Make sure that uWSGI is configured to start as the appropriate user and group for your distro (e.g. http:http) and then start and enable uWSGI. 
+ +```sh +systemctl enable --now uwsgi@server_list.service +``` License -------- +--- The Minetest server list code is licensed under the GNU Lesser General Public License version 2.1 or later (LGPLv2.1+). A LICENSE.txt file should have been diff --git a/config-example.py b/config-example.py deleted file mode 100644 index a5e5bd5..0000000 --- a/config-example.py +++ /dev/null @@ -1,28 +0,0 @@ - -# Enables detailed tracebacks and an interactive Python console on errors. -# Never use in production! -DEBUG = False - -# Address for development server to listen on -HOST = "127.0.0.1" -# Port for development server to listen on -PORT = 5000 - -# Amount of time, is seconds, after which servers are removed from the list -# if they haven't updated their listings. Note: By default Minetest servers -# only announce once every 5 minutes, so this should be more than 300. -PURGE_TIME = 350 - -# List of banned IP addresses for announce -# e.g. ['2620:101::44'] -BANNED_IPS = [] - -# List of banned servers as host/port pairs -# e.g. ['1.2.3.4/30000', 'lowercase.hostname', 'lowercase.hostname/30001'] -BANNED_SERVERS = [] - -# Creates server entries if a server sends an 'update' and there is no entry yet. -# This should only be used to populate the server list after list.json was deleted. 
-# This WILL cause problems such as mapgen, mods and privilege information missing from the list -ALLOW_UPDATE_WITHOUT_OLD = False - diff --git a/migrations/alembic.ini b/migrations/alembic.ini new file mode 100644 index 0000000..47e9980 --- /dev/null +++ b/migrations/alembic.ini @@ -0,0 +1,40 @@ +[alembic] + +[loggers] +keys = root,sqlalchemy,alembic,flask_migrate + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[logger_flask_migrate] +level = INFO +handlers = +qualname = flask_migrate + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/migrations/env.py b/migrations/env.py new file mode 100644 index 0000000..a74b9f0 --- /dev/null +++ b/migrations/env.py @@ -0,0 +1,76 @@ +from __future__ import with_statement + +import logging +from logging.config import fileConfig + +from flask import current_app + +from alembic import context + + +config = context.config + +fileConfig(config.config_file_name) +logger = logging.getLogger('alembic.env') + +config.set_main_option( + 'sqlalchemy.url', + str(current_app.extensions['migrate'].db.get_engine().url).replace('%', '%%')) +target_metadata = current_app.extensions['migrate'].db.metadata + + +def run_migrations_offline(): + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. 
+ """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, target_metadata=target_metadata, literal_binds=True + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online(): + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. + """ + + # this callback is used to prevent an auto-migration from being generated + # when there are no changes to the schema + # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html + def process_revision_directives(context, revision, directives): + if getattr(config.cmd_opts, 'autogenerate', False): + script = directives[0] + if script.upgrade_ops.is_empty(): + directives[:] = [] + logger.info('No changes in schema detected.') + + connectable = current_app.extensions['migrate'].db.get_engine() + + with connectable.connect() as connection: + context.configure( + connection=connection, + target_metadata=target_metadata, + process_revision_directives=process_revision_directives, + **current_app.extensions['migrate'].configure_args + ) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/migrations/script.py.mako b/migrations/script.py.mako new file mode 100644 index 0000000..3dc9606 --- /dev/null +++ b/migrations/script.py.mako @@ -0,0 +1,22 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date.date()} +""" +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade(): + ${upgrades if upgrades else "pass"} + + +def downgrade(): + ${downgrades if downgrades else "pass"} diff --git 
a/migrations/versions/00ac5d537063_initial_migration.py b/migrations/versions/00ac5d537063_initial_migration.py new file mode 100644 index 0000000..823046d --- /dev/null +++ b/migrations/versions/00ac5d537063_initial_migration.py @@ -0,0 +1,73 @@ +"""Initial migration + +Revision ID: 00ac5d537063 +Create Date: 2021-06-12 +""" +from alembic import op +import sqlalchemy as sa + + +revision = '00ac5d537063' +down_revision = None +branch_labels = None +depends_on = None + + +def upgrade(): + op.create_table('server', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('world_uuid', sa.String(length=36), nullable=True), + sa.Column('online', sa.Boolean(), nullable=False), + sa.Column('address', sa.String(), nullable=False), + sa.Column('port', sa.Integer(), nullable=False), + sa.Column('announce_ip', sa.String(), nullable=False), + sa.Column('server_id', sa.String(), nullable=True), + sa.Column('clients', sa.String(), nullable=True), + sa.Column('clients_top', sa.Integer(), nullable=False), + sa.Column('clients_max', sa.Integer(), nullable=False), + sa.Column('first_seen', sa.DateTime(), nullable=False), + sa.Column('start_time', sa.DateTime(), nullable=False), + sa.Column('last_update', sa.DateTime(), nullable=False), + sa.Column('total_uptime', sa.Float(), nullable=False), + sa.Column('down_time', sa.DateTime(), nullable=True), + sa.Column('game_time', sa.Integer(), nullable=False), + sa.Column('lag', sa.Float(), nullable=True), + sa.Column('ping', sa.Float(), nullable=False), + sa.Column('mods', sa.String(), nullable=True), + sa.Column('version', sa.String(), nullable=False), + sa.Column('proto_min', sa.Integer(), nullable=False), + sa.Column('proto_max', sa.Integer(), nullable=False), + sa.Column('game_id', sa.String(), nullable=False), + sa.Column('mapgen', sa.String(), nullable=True), + sa.Column('url', sa.String(), nullable=True), + sa.Column('default_privs', sa.String(), nullable=True), + sa.Column('name', sa.String(), nullable=False), + 
sa.Column('description', sa.String(), nullable=False), + sa.Column('popularity', sa.Float(), nullable=False), + sa.Column('geo_continent', sa.String(length=2), nullable=True), + sa.Column('creative', sa.Boolean(), nullable=False), + sa.Column('is_dedicated', sa.Boolean(), nullable=False), + sa.Column('damage_enabled', sa.Boolean(), nullable=False), + sa.Column('pvp_enabled', sa.Boolean(), nullable=False), + sa.Column('password_required', sa.Boolean(), nullable=False), + sa.Column('rollback_enabled', sa.Boolean(), nullable=False), + sa.Column('can_see_far_names', sa.Boolean(), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('ix_server_address_port', 'server', ['address', 'port'], unique=True) + op.create_index(op.f('ix_server_online'), 'server', ['online'], unique=False) + op.create_index(op.f('ix_server_world_uuid'), 'server', ['world_uuid'], unique=True) + op.create_table('stats', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('max_servers', sa.Integer(), nullable=False), + sa.Column('max_clients', sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + + +def downgrade(): + op.drop_table('stats') + op.drop_index(op.f('ix_server_world_uuid'), table_name='server') + op.drop_index(op.f('ix_server_online'), table_name='server') + op.drop_index('ix_server_address_port', table_name='server') + op.drop_table('server') diff --git a/package.json b/package.json new file mode 100644 index 0000000..a8ad7db --- /dev/null +++ b/package.json @@ -0,0 +1,6 @@ +{ + "dependencies": { + "commander": "^7.2.0", + "dot": "^1.1.3" + } +} diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 914288c..0000000 --- a/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -Flask>=2.0.0 -maxminddb>=2.0.0 diff --git a/server.py b/server.py deleted file mode 100755 index 5c30d50..0000000 --- a/server.py +++ /dev/null @@ -1,515 +0,0 @@ -#!/usr/bin/env python3 -import os, sys, json, time, socket -from threading import Thread, 
RLock -from glob import glob - -import maxminddb -from flask import Flask, request, send_from_directory, make_response - - -app = Flask(__name__, static_url_path = "") - -# Load configuration -app.config.from_pyfile("config-example.py") # Use example for defaults -if os.path.isfile(os.path.join(app.root_path, "config.py")): - app.config.from_pyfile("config.py") - -tmp = glob(os.path.join(app.root_path, "dbip-country-lite-*.mmdb")) -if tmp: - reader = maxminddb.open_database(tmp[0], maxminddb.MODE_AUTO) -else: - app.logger.warning( - "For working GeoIP download the database from "+ - "https://db-ip.com/db/download/ip-to-country-lite and place the "+ - ".mmdb file in the app root folder." - ) - reader = None - -# Helpers - -def geoip_lookup_continent(ip): - if ip.startswith("::ffff:"): - ip = ip[7:] - - if not reader: - return - try: - geo = reader.get(ip) - except geoip2.errors.GeoIP2Error: - return - - if geo and "continent" in geo: - return geo["continent"]["code"] - else: - app.logger.warning("Unable to get GeoIP continent data for %s.", ip) - -# Views - -@app.route("/") -def index(): - return app.send_static_file("index.html") - - -@app.route("/list") -def list(): - # We have to make sure that the list isn't cached, - # since the list isn't really static. 
- return send_from_directory(app.static_folder, "list.json", max_age=0) - - -@app.route("/geoip") -def geoip(): - continent = geoip_lookup_continent(request.remote_addr) - - resp = make_response({ - "continent": continent, # null on error - }) - resp.cache_control.max_age = 7 * 86400 - resp.cache_control.private = True - - return resp - - -@app.route("/announce", methods=["GET", "POST"]) -def announce(): - ip = request.remote_addr - if ip.startswith("::ffff:"): - ip = ip[7:] - - if ip in app.config["BANNED_IPS"]: - return "Banned (IP).", 403 - - data = request.values["json"] - - if len(data) > 8192: - return "JSON data is too big.", 413 - - try: - server = json.loads(data) - except: - return "Unable to process JSON data.", 400 - - if type(server) != dict: - return "JSON data is not an object.", 400 - - if not "action" in server: - return "Missing action field.", 400 - - action = server["action"] - if action not in ("start", "update", "delete"): - return "Invalid action field.", 400 - - if action == "start": - server["uptime"] = 0 - - server["ip"] = ip - - if not "port" in server: - server["port"] = 30000 - #### Compatability code #### - # port was sent as a string instead of an integer - elif type(server["port"]) == str: - server["port"] = int(server["port"]) - #### End compatability code #### - - if "%s/%d" % (server["ip"], server["port"]) in app.config["BANNED_SERVERS"]: - return "Banned (Server).", 403 - elif "address" in server and "%s/%d" % (server["address"].lower(), server["port"]) in app.config["BANNED_SERVERS"]: - return "Banned (Server).", 403 - elif "address" in server and server["address"].lower() in app.config["BANNED_SERVERS"]: - return "Banned (Server).", 403 - - old = serverList.get(ip, server["port"]) - - if action == "delete": - if not old: - return "Server not found." - serverList.remove(old) - serverList.save() - return "Removed from server list." 
- elif not checkRequest(server): - return "Invalid JSON data.", 400 - - if action == "update" and not old: - if app.config["ALLOW_UPDATE_WITHOUT_OLD"]: - action = "start" - else: - return "Server to update not found." - - server["update_time"] = int(time.time()) - - if action == "start": - server["start"] = int(time.time()) - tracker.push("%s:%d" % (server["ip"], server["port"]), server["start"]) - else: - server["start"] = old["start"] - - if "clients_list" in server: - server["clients"] = len(server["clients_list"]) - - server["clients_top"] = max(server["clients"], old["clients_top"]) if old else server["clients"] - - if "url" in server: - url = server["url"] - if not any(url.startswith(p) for p in ["http://", "https://", "//"]): - del server["url"] - - # Make sure that startup options are saved - if action == "update": - for field in ("dedicated", "rollback", "mapgen", "privs", - "can_see_far_names", "mods"): - if field in old: - server[field] = old[field] - - # Popularity - if old: - server["updates"] = old["updates"] + 1 - # This is actually a count of all the client numbers we've received, - # it includes clients that were on in the previous update. - server["total_clients"] = old["total_clients"] + server["clients"] - else: - server["updates"] = 1 - server["total_clients"] = server["clients"] - server["pop_v"] = server["total_clients"] / server["updates"] - - finishRequestAsync(server) - - return "Request has been filed.", 202 - -# Utilities - -# Returns ping time in seconds (up), False (down), or None (error). 
-def serverUp(info): - try: - sock = socket.socket(info[0], info[1], info[2]) - sock.settimeout(3) - sock.connect(info[4]) - # send packet of type ORIGINAL, with no data - # this should prompt the server to assign us a peer id - # [0] u32 protocol_id (PROTOCOL_ID) - # [4] session_t sender_peer_id (PEER_ID_INEXISTENT) - # [6] u8 channel - # [7] u8 type (PACKET_TYPE_ORIGINAL) - buf = b"\x4f\x45\x74\x03\x00\x00\x00\x01" - sock.send(buf) - start = time.time() - # receive reliable packet of type CONTROL, subtype SET_PEER_ID, - # with our assigned peer id as data - # [0] u32 protocol_id (PROTOCOL_ID) - # [4] session_t sender_peer_id - # [6] u8 channel - # [7] u8 type (PACKET_TYPE_RELIABLE) - # [8] u16 seqnum - # [10] u8 type (PACKET_TYPE_CONTROL) - # [11] u8 controltype (CONTROLTYPE_SET_PEER_ID) - # [12] session_t peer_id_new - data = sock.recv(1024) - end = time.time() - if not data: - return False - peer_id = data[12:14] - # send packet of type CONTROL, subtype DISCO, - # to cleanly close our server connection - # [0] u32 protocol_id (PROTOCOL_ID) - # [4] session_t sender_peer_id - # [6] u8 channel - # [7] u8 type (PACKET_TYPE_CONTROL) - # [8] u8 controltype (CONTROLTYPE_DISCO) - buf = b"\x4f\x45\x74\x03" + peer_id + b"\x00\x00\x03" - sock.send(buf) - sock.close() - return end - start - except socket.timeout: - return False - except: - return None - - -# fieldName: (Required, Type, SubType) -fields = { - "action": (True, "str"), - - "address": (False, "str"), - "port": (False, "int"), - - "clients": (True, "int"), - "clients_max": (True, "int"), - "uptime": (True, "int"), - "game_time": (True, "int"), - "lag": (False, "float"), - - "clients_list": (False, "list", "str"), - "mods": (False, "list", "str"), - - "version": (True, "str"), - "proto_min": (True, "int"), - "proto_max": (True, "int"), - - "gameid": (True, "str"), - "mapgen": (False, "str"), - "url": (False, "str"), - "privs": (False, "str"), - "name": (True, "str"), - "description": (True, "str"), - - # Flags - 
"creative": (False, "bool"), - "dedicated": (False, "bool"), - "damage": (False, "bool"), - "liquid_finite": (False, "bool"), - "pvp": (False, "bool"), - "password": (False, "bool"), - "rollback": (False, "bool"), - "can_see_far_names": (False, "bool"), -} - -def checkRequest(server): - for name, data in fields.items(): - if not name in server: - if data[0]: return False - else: continue - #### Compatibility code #### - # Accept strings in boolean fields but convert it to a - # boolean, because old servers sent some booleans as strings. - if data[1] == "bool" and type(server[name]).__name__ == "str": - server[name] = True if server[name].lower() in ("true", "1") else False - continue - # Accept strings in integer fields but convert it to an - # integer, for interoperability with e.g. minetest.write_json. - if data[1] == "int" and type(server[name]).__name__ == "str": - server[name] = int(server[name]) - continue - #### End compatibility code #### - if type(server[name]).__name__ != data[1]: - return False - if len(data) >= 3: - for item in server[name]: - if type(item).__name__ != data[2]: - return False - return True - - -def finishRequestAsync(server): - th = Thread(name = "ServerListThread", - target = asyncFinishThread, - args = (server,)) - th.start() - - -def asyncFinishThread(server): - checkAddress = False - if not "address" in server or not server["address"]: - server["address"] = server["ip"] - else: - checkAddress = True - - try: - info = socket.getaddrinfo(server["address"], - server["port"], - type=socket.SOCK_DGRAM, - proto=socket.SOL_UDP) - except socket.gaierror: - app.logger.warning("Unable to get address info for %s." % (server["address"],)) - return - - if checkAddress: - addresses = set(data[4][0] for data in info) - if not server["ip"] in addresses: - app.logger.warning("Invalid IP %s for address %s (address valid for %s)." 
- % (server["ip"], server["address"], addresses)) - return - - geo = geoip_lookup_continent(info[-1][4][0]) - if geo: - server["geo_continent"] = geo - - server["ping"] = serverUp(info[0]) - if not server["ping"]: - app.logger.warning("Server %s:%d has no ping." - % (server["address"], server["port"])) - return - - del server["action"] - - serverList.update(server) - - -class UptimeTracker: - def __init__(self): - self.d = {} - self.cleanTime = 0 - self.lock = RLock() - def push(self, id, ts): - with self.lock: - if time.time() >= self.cleanTime: # clear once in a while - self.d.clear() - self.cleanTime = time.time() + 48*60*60 - - if id in self.d: - self.d[id] = self.d[id][-1:] + [ts] - else: - self.d[id] = [0, ts] - # returns the before-last start time, in bulk - def getStartTimes(self): - ret = {} - with self.lock: - for k, v in self.d.items(): - ret[k] = v[0] - return ret - -class ServerList: - def __init__(self): - self.list = [] - self.maxServers = 0 - self.maxClients = 0 - self.lock = RLock() - self.load() - self.purgeOld() - - def getWithIndex(self, ip, port): - with self.lock: - for i, server in enumerate(self.list): - if server["ip"] == ip and server["port"] == port: - return (i, server) - return (None, None) - - def get(self, ip, port): - i, server = self.getWithIndex(ip, port) - return server - - def remove(self, server): - with self.lock: - try: - self.list.remove(server) - except: - pass - - def sort(self): - start_times = tracker.getStartTimes() - - def server_points(server): - points = 0 - - # 1 per client - if "clients_list" in server: - points += len(server["clients_list"]) - else: - # Old server (1/4 per client) - points = server["clients"] / 4 - - # Penalize highly loaded servers to improve player distribution. - # Note: This doesn't just make more than 80% of max players stop - # increasing your points, it can actually reduce your points - # if you have guests. 
- cap = int(server["clients_max"] * 0.80) - if server["clients"] > cap: - points -= server["clients"] - cap - - # 1 per month of age, limited to 8 - points += min(8, server["game_time"] / (60*60*24*30)) - - # 1/2 per average client, limited to 4 - points += min(4, server["pop_v"] / 2) - - # -8 for unrealistic max_clients - if server["clients_max"] > 200: - points -= 8 - - # -8 per second of ping over 0.4s - if server["ping"] > 0.4: - points -= (server["ping"] - 0.4) * 8 - - # Up to -8 for less than an hour of uptime (penalty linearly decreasing) - # only if the server has restarted before within the last 2 hours - HOUR_SECS = 60 * 60 - uptime = server["uptime"] - if uptime < HOUR_SECS: - start_time = start_times.get("%s:%d" % (server["ip"], server["port"]), 0) - if start_time >= time.time() - 2 * HOUR_SECS: - points -= ((HOUR_SECS - uptime) / HOUR_SECS) * 8 - - # reduction to 40% for servers that support both legacy (v4) and v5 clients - if server["proto_min"] <= 32 and server["proto_max"] > 36: - points *= 0.4 - - return points - - with self.lock: - self.list.sort(key=server_points, reverse=True) - - def purgeOld(self): - cutoff = int(time.time()) - app.config["PURGE_TIME"] - with self.lock: - count = len(self.list) - self.list = [server for server in self.list if cutoff <= server["update_time"]] - if len(self.list) < count: - self.save() - - def load(self): - with self.lock: - try: - with open(os.path.join(app.static_folder, "list.json"), "r") as fd: - data = json.load(fd) - except FileNotFoundError: - return - - if not data: - return - - self.list = data["list"] - self.maxServers = data["total_max"]["servers"] - self.maxClients = data["total_max"]["clients"] - - def save(self): - with self.lock: - servers = len(self.list) - clients = 0 - for server in self.list: - clients += server["clients"] - - self.maxServers = max(servers, self.maxServers) - self.maxClients = max(clients, self.maxClients) - - list_path = os.path.join(app.static_folder, "list.json") - with 
open(list_path + "~", "w") as fd: - json.dump({ - "total": {"servers": servers, "clients": clients}, - "total_max": {"servers": self.maxServers, "clients": self.maxClients}, - "list": self.list - }, - fd, - indent = "\t" if app.config["DEBUG"] else None, - separators = (', ', ': ') if app.config["DEBUG"] else (',', ':') - ) - os.replace(list_path + "~", list_path) - - def update(self, server): - with self.lock: - i, old = self.getWithIndex(server["ip"], server["port"]) - if i is not None: - self.list[i] = server - else: - self.list.append(server) - - self.sort() - self.save() - -class PurgeThread(Thread): - def __init__(self): - Thread.__init__(self) - self.daemon = True - def run(self): - while True: - time.sleep(60) - serverList.purgeOld() - -# Globals / Startup - -tracker = UptimeTracker() - -serverList = ServerList() - -PurgeThread().start() - -if __name__ == "__main__": - app.run(host = app.config["HOST"], port = app.config["PORT"]) diff --git a/server_list/__init__.py b/server_list/__init__.py new file mode 100644 index 0000000..8bcd3dd --- /dev/null +++ b/server_list/__init__.py @@ -0,0 +1,2 @@ +from .app import app, celery +from . 
# --- server_list/__init__.py ---
from .app import app, celery
from . import commands, tasks, views


# --- server_list/app.py ---
import os

from celery import Celery

from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate


app = Flask(__name__, static_url_path="")

# Load packaged defaults first, then let an instance-level config.py
# (one directory above the package) override them.
app.config.from_pyfile("config.py")

if os.path.isfile(os.path.join(app.root_path, "..", "config.py")):
	app.config.from_pyfile("../config.py")

app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False

db = SQLAlchemy(app)
migrate = Migrate(app, db)

celery = Celery(
	app.import_name,
	broker=app.config['CELERY_BROKER_URL']
)
celery.conf.update(app.config)


class ContextTask(celery.Task):
	"""Task base class that runs every Celery task inside a Flask app
	context, so tasks can use app.config, the DB session, etc."""

	def __call__(self, *args, **kwargs):
		with app.app_context():
			return self.run(*args, **kwargs)


celery.Task = ContextTask


# --- server_list/commands.py ---
import json

import click

from .app import app, db
from .models import Server, Stats


@app.cli.command("load-json")
@click.argument("filename")
# FIX: "--update" was declared as a value option, so a bare `--update`
# raised a usage error; it is used as a boolean switch.
@click.option("--update", is_flag=True)
def load_json(filename, update):
	"""Load the SQL database with servers from a JSON server list.

	With --update, existing rows (matched by world_uuid or address/port)
	are updated in place; otherwise every entry is inserted as a new row.
	"""
	with open(filename, "r") as fd:
		data = json.load(fd)
	assert data

	for obj in data["list"]:
		# FIX: old list.json entries may predate the "address" key; the
		# fallback applied only in the --update branch, so a plain import
		# raised KeyError in Server.update().
		obj.setdefault("address", obj["ip"])
		if update:
			Server.create_or_update(obj)
		else:
			server = Server()
			server.update(obj, True)
			db.session.add(server)

	stats = Stats.get()
	stats.max_servers = data["total_max"]["servers"]
	stats.max_clients = data["total_max"]["clients"]
	db.session.add(stats)

	db.session.commit()

	click.echo(click.style(f'Loaded {len(data["list"])} servers', fg="green"))


# --- server_list/config.py (defaults, part 1) ---
from datetime import timedelta
from glob import glob

# Enables detailed tracebacks and an interactive Python console on errors.
# Never use in production!
DEBUG = False

# Amount of time after which servers are removed from the list if they
# haven't updated their listings. Note: by default Minetest servers only
# announce once every 5 minutes, so this should be more than 300 seconds.
# (NOTE: this is a timedelta now, not a plain number of seconds.)
PURGE_TIME = timedelta(minutes=6)

# List of banned IP addresses for announce, e.g. ['2620:101::44']
BANNED_IPS = []

# List of banned servers as host/port pairs,
# e.g. ['1.2.3.4/30000', 'lowercase.hostname', 'lowercase.hostname/30001']
BANNED_SERVERS = []

# Creates server entries if a server sends an 'update' and there is no entry yet.
# This should only be used to populate the server list after list.json was deleted.
# This WILL cause problems such as mapgen, mods and privilege information
# missing from the list.
ALLOW_UPDATE_WITHOUT_OLD = False

# Database to use to store persistent server information
SQLALCHEMY_DATABASE_URI = "sqlite:///server_list.sqlite"
# --- server_list/config.py (defaults, part 2) ---

# EMA weight: how strongly past player counts are weighted into the
# popularity over the current player count.
POPULARITY_FACTOR = 0.9

# Message broker to forward messages from web server to worker threads.
# Redis and RabbitMQ are good options.
#CELERY_BROKER_URL = "redis://localhost/0"

# Maximum number of clients before a server will be considered heavily loaded
# and down-weighted to improve player distribution.
CLIENT_LIMIT = 32

# MaxMind GeoIP database.
# You can download a copy from https://db-ip.com/db/download/ip-to-country-lite
mmdbs = glob("dbip-country-lite-*.mmdb")
if mmdbs:
	MAXMIND_DB = mmdbs[0]


# --- server_list/models.py ---
from datetime import datetime

from sqlalchemy.orm.exc import NoResultFound

from .app import app, db


class Server(db.Model):
	__table_args__ = (db.Index("ix_server_address_port", "address", "port", unique=True),)

	id = db.Column(db.Integer, primary_key=True)

	# World-specific UUID used to identify the server.
	# This is kept secret to prevent anyone from spoofing the server.
	world_uuid = db.Column(db.String(36), nullable=True, index=True, unique=True)

	# Whether the server is currently online
	online = db.Column(db.Boolean, index=True, nullable=False, default=True)

	# Server sent connection address
	address = db.Column(db.String, nullable=False)
	port = db.Column(db.Integer, nullable=False, default=30000)

	# IP address announcement was received from
	announce_ip = db.Column(db.String, nullable=False)

	# Name of server software, e.g. "minetest"
	server_id = db.Column(db.String, nullable=True)

	# List of player names, one per line
	clients = db.Column(db.String, nullable=True)

	# Highest number of clients ever seen
	clients_top = db.Column(db.Integer, nullable=False)

	# Maximum number of allowed clients
	clients_max = db.Column(db.Integer, nullable=False)

	# First time that we received an announcement from this server
	first_seen = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)

	# Time that server sent "start" announcement.
	# This can be used to calculate the current uptime.
	start_time = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)

	# Time of most recent update request
	last_update = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)

	# Amount of time that we've seen the server up for, in seconds
	total_uptime = db.Column(db.Float, nullable=False)

	# Most recent time that the server went down
	down_time = db.Column(db.DateTime, nullable=True)

	# Server sent value for age of world.
	# Should nearly match uptime on a server that always announces.
	game_time = db.Column(db.Integer, nullable=False)

	# Server sent value based on server loop timing
	lag = db.Column(db.Float, nullable=True)

	# Ping time in seconds
	ping = db.Column(db.Float, nullable=False)

	# List of enabled mods, one per line
	mods = db.Column(db.String, nullable=True)

	# Server release version
	version = db.Column(db.String, nullable=False)

	# Supported protocol versions
	proto_min = db.Column(db.Integer, nullable=False)
	proto_max = db.Column(db.Integer, nullable=False)

	game_id = db.Column(db.String, nullable=False)

	# Mapgen name
	mapgen = db.Column(db.String, nullable=True)

	# Server landing page URL
	url = db.Column(db.String, nullable=True)

	# Privileges granted to new players by default
	default_privs = db.Column(db.String, nullable=True)

	name = db.Column(db.String, nullable=False)

	description = db.Column(db.String, nullable=False)

	# Roughly the average number of players on the server
	popularity = db.Column(db.Float, nullable=False)

	# Continent determined from IP
	geo_continent = db.Column(db.String(2), nullable=True)

	# Flags
	creative = db.Column(db.Boolean, nullable=False)
	is_dedicated = db.Column(db.Boolean, nullable=False)
	damage_enabled = db.Column(db.Boolean, nullable=False)
	pvp_enabled = db.Column(db.Boolean, nullable=False)
	password_required = db.Column(db.Boolean, nullable=False)
	rollback_enabled = db.Column(db.Boolean, nullable=False)
	can_see_far_names = db.Column(db.Boolean, nullable=False)

	@staticmethod
	def find_from_json(obj):
		"""Look up an existing row by world_uuid, else by address/port."""
		try:
			if "world_uuid" in obj:
				return Server.query.filter_by(world_uuid=obj["world_uuid"]).one()
			return Server.query.filter_by(address=obj["address"], port=obj["port"]).one()
		except NoResultFound:
			return None

	@staticmethod
	def create_or_update(obj):
		"""Upsert a row from a decoded announce object; returns the row."""
		server = Server.find_from_json(obj)
		if server is not None:
			server.update(obj)
		else:
			server = Server()
			server.update(obj, True)
			db.session.add(server)
		return server

	def update(self, obj, initial=False):
		"""Apply a validated announce JSON object to this row.

		initial=True is used for freshly created rows and requires a
		"start" announce (only "start" carries mods/mapgen/privs/etc.).
		"""
		# FIX: was datetime.now() (naive local time) while every other
		# timestamp in this model and the purge cutoff use naive UTC;
		# mixing the two skewed uptime and purge calculations.
		now = datetime.utcnow()
		action = obj.get("action", "start")

		assert action != "delete"

		if "clients_list" in obj:
			num_clients = len(obj["clients_list"])
		else:
			num_clients = obj["clients"]

		if initial:
			# Values set only when the server is first created
			assert action == "start"
			self.world_uuid = obj.get("world_uuid")
			self.clients_top = num_clients
			self.total_uptime = 0
		else:
			self.clients_top = max(self.clients_top, num_clients)

		if action == "start":
			# Fields sent only on startup
			self.start_time = now
			self.mods = "\n".join(obj.get("mods", []))
			self.mapgen = obj.get("mapgen")
			self.default_privs = obj.get("privs")
			self.is_dedicated = obj.get("dedicated", False)
			self.rollback_enabled = obj.get("rollback", False)
			self.can_see_far_names = obj.get("can_see_far_names", False)

		self.online = True

		self.address = obj["address"]
		self.port = obj.get("port", 30000)

		self.announce_ip = obj["ip"]

		self.server_id = obj.get("server_id")

		# FIX: "clients_list" is optional (see num_clients fallback above)
		# but was indexed unconditionally here, raising KeyError for
		# announces/imports that carry only a numeric "clients" count.
		if "clients_list" in obj:
			self.clients = "\n".join(obj["clients_list"])
		else:
			self.clients = None
		self.clients_max = obj["clients_max"]

		self.game_time = obj["game_time"]

		self.lag = obj.get("lag")
		self.ping = obj["ping"]

		self.version = obj["version"]
		self.proto_min = obj["proto_min"]
		self.proto_max = obj["proto_max"]

		self.game_id = obj["gameid"]
		self.url = obj.get("url")
		self.name = obj["name"]
		self.description = obj["description"]

		if initial:
			self.popularity = num_clients
		else:
			# Exponential moving average of the player count.
			pop_factor = app.config["POPULARITY_FACTOR"]
			self.popularity = self.popularity * pop_factor + \
				num_clients * (1 - pop_factor)

		self.geo_continent = obj.get("geo_continent")

		self.creative = obj.get("creative", False)
		self.damage_enabled = obj.get("damage", False)
		self.pvp_enabled = obj.get("pvp", False)
		self.password_required = obj.get("password", False)

		self.last_update = now

	def as_json(self):
		"""Serialize this row into the public list.json entry format."""
		obj = {
			"address": self.address,
			"can_see_far_names": self.can_see_far_names,
			"clients_max": self.clients_max,
			"clients_top": self.clients_top,
			"creative": self.creative,
			"damage": self.damage_enabled,
			"dedicated": self.is_dedicated,
			"description": self.description,
			"game_time": self.game_time,
			"gameid": self.game_id,
			"name": self.name,
			"password": self.password_required,
			"ping": self.ping,
			"pop_v": self.popularity,
			"port": self.port,
			"proto_max": self.proto_max,
			"proto_min": self.proto_min,
			"pvp": self.pvp_enabled,
			"rollback": self.rollback_enabled,
			"uptime": (datetime.utcnow() - self.start_time).total_seconds(),
			"version": self.version,
		}

		# Optional fields
		if self.clients is not None:
			obj["clients_list"] = self.clients.split("\n") if self.clients else []
		if self.geo_continent is not None:
			obj["geo_continent"] = self.geo_continent
		if self.lag is not None:
			obj["lag"] = self.lag
		if self.mapgen is not None:
			obj["mapgen"] = self.mapgen
		if self.mods is not None:
			obj["mods"] = self.mods.split("\n") if self.mods else []
		if self.default_privs is not None:
			obj["privs"] = self.default_privs
		if self.server_id is not None:
			obj["server_id"] = self.server_id
		if self.url is not None:
			obj["url"] = self.url

		return obj

	def set_offline(self):
		"""Mark the server down now and bank its uptime."""
		now = datetime.utcnow()
		self.online = False
		self.total_uptime += (now - self.start_time).total_seconds()
		self.down_time = now
# --- server_list/models.py: Stats ---
class Stats(db.Model):
	"""Single-row table storing all-time global statistics."""

	id = db.Column(db.Integer, primary_key=True)

	max_servers = db.Column(db.Integer, nullable=False, default=0)
	max_clients = db.Column(db.Integer, nullable=False, default=0)

	@staticmethod
	def get():
		"""Return the singleton row, creating (and session-adding) it if absent."""
		try:
			return Stats.query.filter_by(id=1).one()
		except NoResultFound:
			stats = Stats()
			stats.id = 1
			db.session.add(stats)
			return stats


# --- server_list/ping.py ---
import asyncio
import time
import random
import socket

from .app import app
from .util import get_addr_info


class MinetestProtocol:
	"""Minimal asyncio datagram protocol for a Minetest handshake.

	Sends an ORIGINAL packet, waits for the server to assign a peer id,
	then disconnects cleanly. The measured round-trip time (or None) is
	delivered through self.future, which the caller sets per attempt.
	"""

	def connection_made(self, transport):
		self.transport = transport

	def send_original(self):
		# Send packet of type ORIGINAL, with no data.
		# This should prompt the server to assign us a peer id.
		# [0] u32 protocol_id (PROTOCOL_ID)
		# [4] session_t sender_peer_id (PEER_ID_INEXISTENT)
		# [6] u8 channel
		# [7] u8 type (PACKET_TYPE_ORIGINAL)
		self.transport.sendto(b"\x4f\x45\x74\x03\x00\x00\x00\x01")

		self.start = time.time()

	def datagram_received(self, data, addr):
		end = time.time()

		# Reply layout:
		# [0] u32 protocol_id (PROTOCOL_ID)
		# [4] session_t sender_peer_id
		# [6] u8 channel
		# [7] u8 type (PACKET_TYPE_RELIABLE)
		# [8] u16 sequence number
		# [10] u8 type (PACKET_TYPE_CONTROL)
		# [11] u8 controltype (CONTROLTYPE_SET_PEER_ID)
		# [12] session_t peer_id_new
		peer_id = data[12:14]

		# Send packet of type CONTROL, subtype DISCO,
		# to cleanly close our server connection.
		# [0] u32 protocol_id (PROTOCOL_ID)
		# [4] session_t sender_peer_id
		# [6] u8 channel
		# [7] u8 type (PACKET_TYPE_CONTROL)
		# [8] u8 controltype (CONTROLTYPE_DISCO)
		self.transport.sendto(b"\x4f\x45\x74\x03" + peer_id + b"\x00\x00\x03", addr)

		self.future.set_result(end - self.start)
		self.transport.close()

	def connection_lost(self, exc):
		if not self.future.done():
			self.future.set_result(None)

	def error_received(self, exc):
		self.future.set_result(None)


async def ping_server_async(address, sock=None):
	"""Ping one server; returns the best of up to 3 RTTs in seconds, or None."""
	# FIX: get_running_loop() — get_event_loop() is deprecated inside a
	# coroutine and this is always awaited from a running loop.
	loop = asyncio.get_running_loop()
	future = loop.create_future()
	transport, protocol = await loop.create_datagram_endpoint(
		MinetestProtocol,
		remote_addr=address,
		sock=sock)
	attempts = 0
	pings = []
	# Stop after 3 successes or 3 failures, whichever comes first.
	while len(pings) < 3 and attempts - len(pings) < 3:
		attempts += 1
		protocol.future = future
		try:
			# Sleep a bit to spread requests out
			await asyncio.sleep(random.random())
			protocol.send_original()
			ping = await asyncio.wait_for(asyncio.shield(future), 2)
			if ping is not None:
				pings.append(ping)
			future = loop.create_future()
		except asyncio.TimeoutError:
			pass

	# FIX: close the transport on the all-attempts-failed path too
	# (close() is idempotent, so the success path is unaffected).
	transport.close()

	if pings:
		return min(pings)

	return None


async def ping_servers_async(addresses):
	"""Ping many servers concurrently; result order matches `addresses`."""
	# FIX: this called the synchronous, socket-based ping_server(), which
	# takes a connected socket (not an address tuple) and is not awaitable.
	return await asyncio.gather(*[ping_server_async(a) for a in addresses])


def ping_server_addresses(address, port):
	"""Ping every resolved address of a host; returns list of RTTs or None
	if resolution fails or any resolved address is unreachable."""
	pings = []
	addr_info = get_addr_info(address, port)
	for record in addr_info:
		ping = server_up(record)
		if not ping:
			app.logger.warning("Could not connect to %s:%d using resolved info %r.",
				address, port, record)
			return None
		pings.append(ping)
	return pings


def ping_server(sock):
	"""One synchronous handshake round-trip on a connected UDP socket.

	Returns the RTT in seconds, or None if the server sent no data.
	"""
	# Send packet of type ORIGINAL, with no data.
	# This should prompt the server to assign us a peer id.
	# [0] u32 protocol_id (PROTOCOL_ID)
	# [4] session_t sender_peer_id (PEER_ID_INEXISTENT)
	# [6] u8 channel
	# [7] u8 type (PACKET_TYPE_ORIGINAL)
	sock.send(b"\x4f\x45\x74\x03\x00\x00\x00\x01")

	# Receive reliable packet of type CONTROL, subtype SET_PEER_ID,
	# with our assigned peer id as data.
	start = time.time()
	data = sock.recv(1024)
	end = time.time()

	if not data:
		return None

	# [0] u32 protocol_id (PROTOCOL_ID)
	# [4] session_t sender_peer_id
	# [6] u8 channel
	# [7] u8 type (PACKET_TYPE_RELIABLE)
	# [8] u16 sequence number
	# [10] u8 type (PACKET_TYPE_CONTROL)
	# [11] u8 controltype (CONTROLTYPE_SET_PEER_ID)
	# [12] session_t peer_id_new
	peer_id = data[12:14]

	# Send packet of type CONTROL, subtype DISCO,
	# to cleanly close our server connection.
	# [0] u32 protocol_id (PROTOCOL_ID)
	# [4] session_t sender_peer_id
	# [6] u8 channel
	# [7] u8 type (PACKET_TYPE_CONTROL)
	# [8] u8 controltype (CONTROLTYPE_DISCO)
	sock.send(b"\x4f\x45\x74\x03" + peer_id + b"\x00\x00\x03")

	return end - start


# Returns ping time in seconds (up) or None (down).
def server_up(info):
	"""Pings a Minetest server to check if it is online.

	`info` is one socket.getaddrinfo() record.
	"""
	try:
		sock = socket.socket(info[0], info[1], info[2])
		sock.settimeout(2)
		sock.connect(info[4])
	except OSError:
		return None

	attempts = 0
	pings = []
	while len(pings) < 3 and attempts - len(pings) < 3:
		attempts += 1
		try:
			ping = ping_server(sock)
			if ping is not None:
				pings.append(ping)
		except socket.timeout:
			pass
		except ConnectionRefusedError:
			# FIX: close the socket on the early-return paths too.
			sock.close()
			return None
		except OSError:
			sock.close()
			return None

	sock.close()

	if pings:
		return min(pings)

	return None


# (static/index.html, list.js, style.css are renamed into server_list/static/
#  with no content changes; the servers.jst template change — table cells now
#  read server.clients_list.length instead of the removed numeric "clients"
#  field — continues in the next chunk.)
!master.no_avgtop}} {{=constantWidth(Math.floor(server.pop_v) + '/' + server.clients_top, 3.4)}}{{?}} {{=hoverList("Clients", server.clients_list)}} {{?}} diff --git a/static/style.css b/server_list/static/style.css similarity index 100% rename from static/style.css rename to server_list/static/style.css diff --git a/server_list/tasks.py b/server_list/tasks.py new file mode 100644 index 0000000..afd52ed --- /dev/null +++ b/server_list/tasks.py @@ -0,0 +1,100 @@ +import asyncio +import json +import os +from datetime import datetime + +from .app import app, celery, db +from .models import Server, Stats +from .ping import ping_servers_async, ping_server_addresses +from .util import get_geo_continent, server_ranking + + +@celery.task +def update_server(obj): + geo_continent = get_geo_continent(obj["addr_info"][-1][4][0]) + if geo_continent is not None: + obj["geo_continent"] = geo_continent + + # Ensure that a Minetest server is actually reachable on all addresses + pings = ping_server_addresses(obj["address"], obj["port"]) + + if pings is None: + return + + # Use average ping + obj["ping"] = sum(pings) / len(pings) + + Server.create_or_update(obj) + + db.session.commit() + + +def update_ping(): + servers = Server.query.filter_by(online=True).all() + + addresses = [(s.address, s.port) for s in servers] + pings = [] + + async def do_ping(): + pings.extend(await ping_servers_async(addresses)) + asyncio.run(do_ping()) + + for i, server in enumerate(servers): + if pings[i] is None: + server.set_offline() + else: + server.ping = pings[i] + + +def update_list_json(): + online_servers = Server.query.filter_by(online=True).all() + online_servers.sort(key=server_ranking, reverse=True) + server_list = [s.as_json() for s in online_servers] + + num_clients = 0 + for server in server_list: + num_clients += len(server["clients_list"]) + + stats = Stats.get() + stats.max_servers = max(len(server_list), stats.max_servers) + stats.max_clients = max(num_clients, stats.max_clients) + + 
list_path = os.path.join(app.static_folder, "list.json") + # Write to temporary file, then do an atomic replace so that clients don't + # see a truncated file if they load the list just as it's being updated. + with open(list_path + "~", "w") as fd: + debug = app.config["DEBUG"] + json.dump({ + "total": {"servers": len(server_list), "clients": num_clients}, + "total_max": {"servers": stats.max_servers, "clients": stats.max_clients}, + "list": server_list, + }, + fd, + indent="\t" if debug else None, + separators=(',', ': ') if debug else (',', ':') + ) + os.replace(list_path + "~", list_path) + + +@celery.task +def maintenance(): + cutoff = datetime.utcnow() - app.config["PURGE_TIME"] + Server.query.filter( + Server.online == True, + Server.last_update < cutoff + ).update({ + "online": False, + "total_uptime": Server.total_uptime + (Server.last_update - Server.start_time).total_seconds(), + "down_time": datetime.utcnow(), + }) + + update_ping() + + update_list_json() + + db.session.commit() + + +@celery.on_after_configure.connect +def setup_periodic_tasks(sender, **kwargs): + sender.add_periodic_task(60, maintenance.s(), name='Server list maintenance') diff --git a/server_list/templates/address_verification_failed.txt b/server_list/templates/address_verification_failed.txt new file mode 100644 index 0000000..e1e1a04 --- /dev/null +++ b/server_list/templates/address_verification_failed.txt @@ -0,0 +1,10 @@ +Server address does not include a DNS record for the IP that the announcement was sent from. +Announce IP: {{ announce_ip }} +Address records: {{ valid_addresses | join ", " }} + +Help: This is usually because your server is only listening on IPv4 but your announcement is being sent over IPv6. +There are two ways to fix this: + 1. (preferred) Set bind_address = :: in your server config to listen on IPv6 and add your IPv6 address to DNS as an AAAA record. 
+ This allows both IPv4 and IPv6 because the linux kernel will forward IPv4 requests to the IPv6 socket by default using an IPv4-mapped IPv6 address (look up IPV6_V6ONLY for more information). + On other operating systems this option may not work and you'll have to use the second option. + 2. Set bind_address = 0.0.0.0 in your server config to force IPv4 only, the announce will then be sent from the IPv4 address. diff --git a/server_list/util.py b/server_list/util.py new file mode 100755 index 0000000..67aa08c --- /dev/null +++ b/server_list/util.py @@ -0,0 +1,199 @@ +import re +import socket +import time + +from datetime import datetime, timedelta + +from .app import app + +try: + import maxminddb + MAXMIND_DB = app.config.get("MAXMIND_DB", None) + if MAXMIND_DB is not None: + geoip_reader = maxminddb.open_database(MAXMIND_DB, maxminddb.MODE_AUTO) + else: + app.logger.warning( + "For working GeoIP download the database from " + "https://db-ip.com/db/download/ip-to-country-lite and point " + "the MAXMIND_DB setting to the .mmdb file." 
+ ) + geoip_reader = None +except ImportError: + app.logger.warning("maxminddb not available, GeoIP will not work.") + + +UUID_RE = re.compile('^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$') + + +def check_ban(announce_ip, address, port): + if "%s/%d" % (announce_ip, port) in app.config["BANNED_SERVERS"]: + return True + + if address != announce_ip: + # Normalize address for ban checks + address = address.lower().rstrip(".") + if f"{address}/{port}" in app.config["BANNED_SERVERS"] or \ + address in app.config["BANNED_SERVERS"]: + return True + + return False + + +def get_addr_info(address, port): + try: + return socket.getaddrinfo( + address, + port, + type=socket.SOCK_DGRAM, + proto=socket.SOL_UDP) + except socket.gaierror: + app.logger.warning("Unable to get address info for [%s]:%d.", + address, port) + return None + + +def verify_announce(addr_info, address, announce_ip): + if address == announce_ip: + return True + + addresses = set(data[4][0] for data in addr_info) + if not announce_ip in addresses: + app.logger.warning( + "Server address %r does not resolve to announce IP %r (address valid for %r).", + address, announce_ip, addresses) + return False + + return True + + +def get_geo_continent(ip): + if ip.startswith("::ffff:"): + ip = ip[7:] + + if reader is None: + return + + try: + geo = geoip_reader.get(ip) + except ValueError: + return + + if geo and "continent" in geo: + return geo["continent"]["code"] + else: + app.logger.warning("Unable to get GeoIP Continent data for %s.", ip) + return None + + +# fieldName: (Required, Type, SubType) +fields = { + "action": (True, "str"), + + "world_uuid": (False, "str"), + + "address": (False, "str"), + "port": (False, "int"), + + "clients_max": (True, "int"), + "uptime": (True, "int"), + "game_time": (True, "int"), + "lag": (False, "float"), + + "clients_list": (True, "list", "str"), + "mods": (False, "list", "str"), + + "version": (True, "str"), + "proto_min": (True, "int"), + "proto_max": (True, 
"int"), + + "gameid": (True, "str"), + "mapgen": (False, "str"), + "url": (False, "str"), + "privs": (False, "str"), + "name": (True, "str"), + "description": (True, "str"), + + # Flags + "creative": (False, "bool"), + "dedicated": (False, "bool"), + "damage": (False, "bool"), + "pvp": (False, "bool"), + "password": (False, "bool"), + "rollback": (False, "bool"), + "can_see_far_names": (False, "bool"), +} + +def check_request_json(obj): + """Checks the types and values of fields in the request. + + Returns error string or None. + """ + for name, data in fields.items(): + # Delete optional string fields sent as empty strings + if not data[0] and data[1] == "str" and obj.get(name) == "": + del obj[name] + + if not name in obj: + if data[0]: + return f"Required field '{name}' is missing." + continue + + type_str = type(obj[name]).__name__ + if type_str != data[1]: + return f"Field '{name}'' has incorrect type (expected {data[1]} found {type_str})." + + + if len(data) >= 3: + for item in obj[name]: + subtype_str = type(item).__name__ + if subtype_str != data[2]: + return f"Entry in field '{name}' has incorrect type (expected {data[2]} found {subtype_str})." + + if "url" in obj: + url = obj["url"] + if not any(url.startswith(p) for p in ["http://", "https://", "//"]): + return "Field 'url' does not match expected format." + + if "world_uuid" in obj and not UUID_RE.match(obj["world_uuid"]): + return "Field 'world_uuid' does not match expected format." 
+ + return None + + +def server_ranking(server): + now = datetime.utcnow() + points = 0 + + clients = server.clients.split('\n') + + # 1 per client, capped to CLIENT_LIMIT or clients_max * 0.9 + cap = min(server.clients_max * 0.9, app.config["CLIENT_LIMIT"]) + points += min(len(clients), cap) + + # 1/2 per week of age, limited to 8 + points += min(8, (now - server.first_seen) / timedelta(weeks=2)) + + # 1/2 per average client, limited to 4 + points += min(4, server.popularity / 2) + + # -8 per second of ping over 0.3s + if server.ping > 0.3: + points -= (server.ping - 0.3) * 8 + + # Up to -4 for less than an hour of uptime (penalty linearly decreasing) + ONE_HOUR = timedelta(hours=1) + uptime = now - server.start_time + if uptime < ONE_HOUR: + # Only apply penalty if the server was down for more than an hour + down_too_long = True + if server.down_time is not None: + down_too_long = (server.start_time - server.down_time) > ONE_HOUR + + if down_too_long: + points -= ((ONE_HOUR - uptime) / ONE_HOUR) * 4 + + # Reduction to 40% for servers that support both legacy (v4) and v5 clients + if server.proto_min <= 32 and server.proto_max > 36: + points *= 0.4 + + return points diff --git a/server_list/views.py b/server_list/views.py new file mode 100755 index 0000000..2d38d85 --- /dev/null +++ b/server_list/views.py @@ -0,0 +1,107 @@ +import json + +from flask import render_template, request, send_from_directory, make_response + +from .app import app, db +from .models import Server +from .tasks import update_server +from .util import check_ban, check_request_json, get_addr_info, get_geo_continent, verify_announce + + +@app.route("/") +def index(): + return app.send_static_file("index.html") + + +@app.route("/list") +def server_list(): + # We have to make sure that the list isn't cached, + # since the list isn't really static. 
+ return send_from_directory(app.static_folder, "list.json", max_age=0) + + +@app.route("/geoip") +def geoip(): + continent = get_geo_continent(request.remote_addr) + + resp = make_response({ + "continent": continent, # null on error + }) + resp.cache_control.max_age = 7 * 86400 + resp.cache_control.private = True + + return resp + + +@app.route("/announce", methods=["GET", "POST"]) +def announce(): + announce_ip = request.remote_addr + if announce_ip.startswith("::ffff:"): + announce_ip = announce_ip[7:] + + if announce_ip in app.config["BANNED_IPS"]: + return "Banned.", 403 + + data = request.values["json"] + + if len(data) > 8192: + return "JSON data is too big.", 413 + + try: + obj = json.loads(data) + except json.JSONDecodeError as e: + return "Failed to decode JSON: " + e.msg, 400 + + if not isinstance(obj, dict): + return "JSON data is not an object.", 400 + + action = obj.get("action") + if action not in ("start", "update", "delete"): + return "Action field is invalid or missing.", 400 + + obj["ip"] = announce_ip + if not obj.get("address"): + obj["address"] = announce_ip + obj.setdefault("port", 30000) + + if check_ban(announce_ip, obj["address"], obj["port"]): + return "Banned", 403 + + server = Server.find_from_json(obj) + + if action == "delete": + if not server: + return "Server not found." + server.set_offline() + db.session.commit() + return "Removed from server list." 
+ + # Delete message does not require most fields + error_str = check_request_json(obj) + if error_str is not None: + return "Invalid JSON data: " + error_str, 400 + + if action == "update" and not server: + if app.config["ALLOW_UPDATE_WITHOUT_OLD"]: + action = "start" + else: + return "Server to update not found.", 404 + + addr_info = get_addr_info(obj["address"], obj["port"]) + + if addr_info is None: + return f"Failed to resolve server address {obj['address']!r}.", 400 + + if "world_uuid" not in obj: + valid = verify_announce(addr_info, obj["address"], obj["ip"]) + + if not valid: + return render_template("address_verification_failed.txt", + announce_ip=announce_ip, + valid_addresses=[data[4][0] for data in addr_info]), 400 + + obj["addr_info"] = addr_info + + update_server.delay(obj) + + return "Done.", 202