🔍 robots and other optimizations
app.py | 17 +++++++++++++++++
1 changed file with 17 additions and 0 deletions
@@ -239,6 +239,23 @@ def sitemap():
     return send_from_directory('static', 'sitemap.xml', mimetype='application/xml')
 
 
+@app.route('/robots.txt')
+def robots():
+    """Generate robots.txt dynamically with correct SITE_URL"""
+    from flask import Response
+    robots_txt = f"""# Sunday Comics - Robots.txt
+User-agent: *
+Allow: /
+
+# Sitemap location
+Sitemap: {SITE_URL}/sitemap.xml
+
+# Disallow API endpoints from indexing
+Disallow: /api/
+"""
+    return Response(robots_txt, mimetype='text/plain')
+
+
 @app.errorhandler(404)
 def page_not_found(e):
     """404 error handler"""
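A quick way to verify the new endpoint is Flask's built-in test client. Below is a minimal smoke-test sketch, assuming the Flask application object is named app and SITE_URL is defined at module level in app.py; both names are implied by the diff but their definitions are not shown here.

# test_robots.py -- minimal smoke test for the new /robots.txt route.
# Assumes `app` (the Flask instance) and SITE_URL are importable from app.py.
from app import app, SITE_URL

def test_robots_txt():
    client = app.test_client()
    resp = client.get('/robots.txt')
    assert resp.status_code == 200
    assert resp.mimetype == 'text/plain'
    body = resp.get_data(as_text=True)
    # The sitemap line should carry the configured SITE_URL, not a hardcoded host.
    assert f'Sitemap: {SITE_URL}/sitemap.xml' in body
    assert 'Disallow: /api/' in body

if __name__ == '__main__':
    test_robots_txt()
    print('/robots.txt OK')

Serving robots.txt from a route rather than a static file keeps the Sitemap URL in sync with SITE_URL across environments, which is presumably why the commit generates it dynamically.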
||||