fix: disallow page search param properly in robots.txt
This commit is contained in:
parent
737f808fec
commit
a2fd114f7d
1 changed file with 1 addition and 1 deletion
|
|
@@ -5,7 +5,7 @@ export default function robots(): MetadataRoute.Robots {
     rules: {
       userAgent: "*",
       allow: "/",
-      disallow: ["/*?page", "/create-username", "/edit/*", "/profile/settings", "/random", "/submit", "/report/mii/*", "/report/user/*", "/admin"],
+      disallow: ["/*?*page=", "/create-username", "/edit/*", "/profile/settings", "/random", "/submit", "/report/mii/*", "/report/user/*", "/admin"],
     },
     sitemap: `${process.env.NEXT_PUBLIC_BASE_URL}/sitemap.xml`,
   };
Loading…
Reference in a new issue