Skip to content

Commit

Permalink
feat(client): Env var to restrict crawlers access
Browse files Browse the repository at this point in the history
  • Loading branch information
clementprdhomme committed Oct 24, 2024
1 parent 2e3fb17 commit 97b4ea1
Show file tree
Hide file tree
Showing 2 changed files with 30 additions and 1 deletion.
21 changes: 21 additions & 0 deletions client/src/app/robots.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
import { env } from "@/env";

import type { MetadataRoute } from "next";

/**
 * Generates the site's robots.txt via Next.js metadata routes.
 *
 * When `NEXT_USE_RESTRICTIVE_ROBOTS_TXT` is enabled, every crawler is
 * disallowed from the whole site; otherwise everything is allowed.
 */
export default function robots(): MetadataRoute.Robots {
  const restrictive = env.NEXT_USE_RESTRICTIVE_ROBOTS_TXT;

  return {
    rules: restrictive
      ? { userAgent: "*", disallow: "/" }
      : { userAgent: "*", allow: "/" },
  };
}
10 changes: 9 additions & 1 deletion client/src/env.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,14 @@ export const env = createEnv({
* Serverside Environment variables, not available on the client.
* Will throw if you access these variables on the client.
*/
server: {},
server: {
// If `true` or left empty, crawlers (including search engines) are not allowed to index the
// website
NEXT_USE_RESTRICTIVE_ROBOTS_TXT: z.preprocess(
(value) => (!value || value === "true" ? true : false),
z.boolean(),
),
},
/*
* Environment variables available on the client (and server).
*
Expand All @@ -27,5 +34,6 @@ export const env = createEnv({
runtimeEnv: {
NEXT_PUBLIC_MAPBOX_TOKEN: process.env.NEXT_PUBLIC_MAPBOX_TOKEN,
NEXT_PUBLIC_MAPBOX_STYLE: process.env.NEXT_PUBLIC_MAPBOX_STYLE,
NEXT_USE_RESTRICTIVE_ROBOTS_TXT: process.env.NEXT_USE_RESTRICTIVE_ROBOTS_TXT,
},
});

0 comments on commit 97b4ea1

Please sign in to comment.