Add robots.txt disallowing all scrapers

commit 1df2ecbebf
parent 4aa6ac97fd
Date: 2025-10-16 10:35:48 +02:00

2 changed files with 8 additions and 1 deletion


@@ -7,7 +7,7 @@ from typing import Annotated
 import starlette.status as status
 from fastapi import Depends, FastAPI, HTTPException, Request, Response
-from fastapi.responses import RedirectResponse
+from fastapi.responses import RedirectResponse, FileResponse
 from fastapi.staticfiles import StaticFiles
 from fastapi.templating import Jinja2Templates
 from sqlalchemy import create_engine, select
@@ -109,6 +109,11 @@ async def index(request: Request, session: SessionDep, user: UserDep):
     )
 
 
+@app.get("/robots.txt")
+async def robots_txt():
+    return FileResponse("src/meal_manager/static/robots.txt", media_type="text/plain")
+
+
 @app.get("/past_events")
 async def past_events(request: Request, session: SessionDep):
     now = datetime.now()
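
One thing worth noting: FileResponse resolves the relative path "src/meal_manager/static/robots.txt" against the process working directory, so the route only works when the server is started from the repository root. A minimal sketch of an alternative that inlines the two rules with PlainTextResponse (the names here are illustrative, not part of this commit):

    from fastapi import FastAPI
    from fastapi.responses import PlainTextResponse

    app = FastAPI()

    # Keeping the rules in the module avoids a file read on every request
    # and removes the dependency on the working directory.
    ROBOTS_TXT = "User-agent: *\nDisallow: /\n"


    @app.get("/robots.txt", response_class=PlainTextResponse)
    async def robots_txt() -> str:
        return ROBOTS_TXT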

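The new route is easy to cover with FastAPI's TestClient (which requires httpx to be installed). A sketch, assuming the application object is importable as meal_manager.main.app; the actual module path is not shown in this diff:

    from fastapi.testclient import TestClient

    from meal_manager.main import app  # hypothetical import path

    client = TestClient(app)


    def test_robots_txt():
        response = client.get("/robots.txt")
        assert response.status_code == 200
        assert response.headers["content-type"].startswith("text/plain")
        assert "Disallow: /" in response.text
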
src/meal_manager/static/robots.txt (new file)

@@ -0,0 +1,2 @@
+User-agent: *
+Disallow: /
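
These two lines ask every crawler (User-agent: *) to stay away from the entire site (Disallow: /). robots.txt is advisory, so only well-behaved bots honor it. The rules can be sanity-checked with Python's standard-library parser:

    import urllib.robotparser

    parser = urllib.robotparser.RobotFileParser()
    parser.parse(["User-agent: *", "Disallow: /"])

    # Under a blanket Disallow, no user agent may fetch any path.
    assert not parser.can_fetch("GPTBot", "/")
    assert not parser.can_fetch("Googlebot", "/past_events")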