WebHost: Add robots.txt to WebHost (#3157)

* Add a `robots.txt` file to prevent crawlers from scraping the site

* Add an `ASSET_RIGHTS` entry to config.yaml to control whether `/robots.txt` is served

* Always import robots.py and check the config in the route function itself (see the sketch after this list)

* Finish writing a comment

* Remove a redundant import and config
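
For context, here is a minimal, hypothetical sketch of what the `/robots.txt` route could look like, assuming the Flask `app` object exposed by `WebHostLib` and the `ASSET_RIGHTS` config key described above; the actual contents of robots.py may differ:

```python
# Hypothetical sketch of WebHostLib/robots.py. The route reads the config
# at request time, matching "check the config in the route function" above.
from flask import abort

from WebHostLib import app


@app.route('/robots.txt')
def robots():
    # Assumption: a host without asset rights serves a restrictive
    # robots.txt (e.g. "User-agent: *" / "Disallow: /") to keep
    # crawlers from scraping the site.
    if not app.config.get("ASSET_RIGHTS"):
        return app.send_static_file('robots.txt')
    # Otherwise return 404, leaving crawler behavior at its default.
    abort(404)
```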
Author: Chris Wilson
Date: 2024-04-20 20:58:56 -04:00 (committed by GitHub)
Parent: 915ad61ecf
Commit: ad4451276d
5 changed files with 44 additions and 5 deletions

WebHost.py

@@ -23,7 +23,6 @@ def get_app():
     from WebHostLib import register, cache, app as raw_app
     from WebHostLib.models import db
 
-    register()
     app = raw_app
     if os.path.exists(configpath) and not app.config["TESTING"]:
         import yaml
@@ -34,6 +33,7 @@ def get_app():
         app.config["HOST_ADDRESS"] = Utils.get_public_ipv4()
         logging.info(f"HOST_ADDRESS was set to {app.config['HOST_ADDRESS']}")
 
+    register()
     cache.init_app(app)
     db.bind(**app.config["PONY"])
     db.generate_mapping(create_tables=True)
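
The net effect of this diff is that `register()` now runs once, after config.yaml has been applied, so routes are registered against the final configuration. A quick, hypothetical way to exercise the new route with Flask's built-in test client (names follow the diff above; the `ASSET_RIGHTS` behavior is assumed from the commit message):

```python
# Hypothetical smoke test for the /robots.txt route.
from WebHostLib import app, register

register()  # registers all routes, including /robots.txt
app.config["TESTING"] = True
app.config["ASSET_RIGHTS"] = False  # assumption: non-official host serves robots.txt

with app.test_client() as client:
    response = client.get("/robots.txt")
    assert response.status_code == 200
    assert b"Disallow" in response.data  # assumption about the file's contents
```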