Add robots.txt disallowing all scrapers
@@ -7,7 +7,7 @@ from typing import Annotated
 
 import starlette.status as status
 from fastapi import Depends, FastAPI, HTTPException, Request, Response
-from fastapi.responses import RedirectResponse
+from fastapi.responses import RedirectResponse, FileResponse
 from fastapi.staticfiles import StaticFiles
 from fastapi.templating import Jinja2Templates
 from sqlalchemy import create_engine, select
@@ -109,6 +109,11 @@ async def index(request: Request, session: SessionDep, user: UserDep):
     )
 
 
+@app.get("/robots.txt")
+async def robots_txt():
+    return FileResponse("src/meal_manager/static/robots.txt", media_type="text/plain")
+
+
 @app.get("/past_events")
 async def past_events(request: Request, session: SessionDep):
     now = datetime.now()
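To smoke-test the new route, FastAPI's TestClient can request /robots.txt directly. The import path for the application object below is an assumption (this diff does not show the Python module's file name), and the check has to run from the repository root because FileResponse is given a path relative to the working directory.

# Hypothetical check of the new endpoint; meal_manager.main is an assumed module path.
from fastapi.testclient import TestClient
from meal_manager.main import app  # assumption: the app's module is not shown in this diff

client = TestClient(app)

def test_robots_txt_is_served():
    # Run from the repo root so "src/meal_manager/static/robots.txt" resolves.
    response = client.get("/robots.txt")
    assert response.status_code == 200
    assert response.headers["content-type"].startswith("text/plain")
    assert "Disallow: /" in response.text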
src/meal_manager/static/robots.txt (new file, +2)
@@ -0,0 +1,2 @@
+User-agent: *
+Disallow: /
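The two rules above ask every crawler that honors the Robots Exclusion Protocol to skip the whole site. The standard library's urllib.robotparser can verify that reading; the file path matches the one added by this commit, and the crawler names are only examples.

# Sanity-check the robots.txt rules with Python's standard library parser.
from urllib.robotparser import RobotFileParser

with open("src/meal_manager/static/robots.txt") as f:
    parser = RobotFileParser()
    parser.parse(f.read().splitlines())

# "User-agent: *" plus "Disallow: /" means no compliant crawler may fetch any path.
for agent in ("Googlebot", "GPTBot", "SomeOtherBot"):
    assert not parser.can_fetch(agent, "https://example.com/")
    assert not parser.can_fetch(agent, "https://example.com/past_events")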