Added permissive robots.txt route

This commit is contained in:
Louis King
2026-02-06 12:09:36 +00:00
parent 927fcd6efb
commit 75d7e5bdfa

View File

@@ -7,7 +7,7 @@ from typing import AsyncGenerator
import httpx
from fastapi import FastAPI, Request
from fastapi.responses import HTMLResponse
from fastapi.responses import HTMLResponse, PlainTextResponse
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates
from starlette.exceptions import HTTPException as StarletteHTTPException
@@ -152,6 +152,11 @@ def create_app(
except Exception as e:
return {"status": "not_ready", "api": str(e)}
@app.get("/robots.txt", response_class=PlainTextResponse)
async def robots_txt() -> str:
    """Return a permissive robots.txt that lets every crawler index all paths."""
    # Served as text/plain via PlainTextResponse so crawlers parse it correctly.
    body = "User-agent: *\nAllow: /\n"
    return body
@app.exception_handler(StarletteHTTPException)
async def http_exception_handler(
request: Request, exc: StarletteHTTPException