Add robots.txt disallowing all scrapers
@@ -7,7 +7,7 @@ from typing import Annotated
 import starlette.status as status
 from fastapi import Depends, FastAPI, HTTPException, Request, Response
-from fastapi.responses import RedirectResponse
+from fastapi.responses import RedirectResponse, FileResponse
 from fastapi.staticfiles import StaticFiles
 from fastapi.templating import Jinja2Templates
 from sqlalchemy import create_engine, select
@@ -109,6 +109,11 @@ async def index(request: Request, session: SessionDep, user: UserDep):
     )
 
 
+@app.get("/robots.txt")
+async def robots_txt():
+    return FileResponse("src/meal_manager/static/robots.txt", media_type="text/plain")
+
+
 @app.get("/past_events")
 async def past_events(request: Request, session: SessionDep):
     now = datetime.now()
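
The robots.txt file served by the new route is added as a static asset elsewhere in this commit, so its contents don't appear in this hunk. Given the commit title, a minimal disallow-all file would presumably read:

    User-agent: *
    Disallow: /

Note this only deters crawlers that honor the protocol; scrapers that ignore robots.txt will still fetch pages.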
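A quick smoke test for the new route, sketched with FastAPI's TestClient. The meal_manager.main import path is an assumption; the diff doesn't show which module defines app.

    # Smoke test for the /robots.txt route. Run from the repository root so the
    # relative path passed to FileResponse resolves correctly.
    from fastapi.testclient import TestClient

    from meal_manager.main import app  # assumed module path; not shown in the diff

    client = TestClient(app)

    response = client.get("/robots.txt")
    assert response.status_code == 200
    assert response.headers["content-type"].startswith("text/plain")
    assert "Disallow" in response.text  # assumes the disallow-all file sketched above

Because FileResponse is given a path relative to the working directory, the route only works when the server is started from the repository root; resolving the path against __file__ would make it location-independent.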