fix: disallow page search param properly in robots.txt

trafficlunar 2025-06-11 21:49:24 +01:00
parent 737f808fec
commit a2fd114f7d


@@ -5,7 +5,7 @@ export default function robots(): MetadataRoute.Robots {
 		rules: {
 			userAgent: "*",
 			allow: "/",
-			disallow: ["/*?page", "/create-username", "/edit/*", "/profile/settings", "/random", "/submit", "/report/mii/*", "/report/user/*", "/admin"],
+			disallow: ["/*?*page=", "/create-username", "/edit/*", "/profile/settings", "/random", "/submit", "/report/mii/*", "/report/user/*", "/admin"],
 		},
 		sitemap: `${process.env.NEXT_PUBLIC_BASE_URL}/sitemap.xml`,
 	};
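
In robots.txt pattern matching as implemented by Google and most major crawlers, `*` matches any sequence of characters and rules are prefix matches, so the old pattern `/*?page` only matched URLs whose query string began with `page` (e.g. `/foo?page=2`) and missed URLs where `page` appears later (e.g. `/foo?sort=new&page=2`). The new pattern `/*?*page=` matches a `page=` parameter anywhere in the query string. For reference, a minimal sketch of what the full route file might look like after this change; the file path, import, and inline comments are assumptions, only the rule values come from the diff above:

// app/robots.ts — hypothetical reconstruction for illustration; only the
// rules/sitemap values are taken from the diff, the rest is assumed.
import type { MetadataRoute } from "next";

export default function robots(): MetadataRoute.Robots {
	return {
		rules: {
			userAgent: "*",
			allow: "/",
			// "/*?*page=" blocks any URL carrying a page= query parameter,
			// wherever it sits in the query string; the old "/*?page" only
			// matched when the query string started with "page".
			disallow: [
				"/*?*page=",
				"/create-username",
				"/edit/*",
				"/profile/settings",
				"/random",
				"/submit",
				"/report/mii/*",
				"/report/user/*",
				"/admin",
			],
		},
		sitemap: `${process.env.NEXT_PUBLIC_BASE_URL}/sitemap.xml`,
	};
}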