From 9adebb4097db2d76a7cd37a06c60e25b3dcca8d0 Mon Sep 17 00:00:00 2001
From: Emmanuel Raviart <emmanuel@raviart.com>
Date: Sun, 26 Sep 2021 19:39:03 +0200
Subject: [PATCH] Auto-generate robots.txt based on ALLOW_ROBOTS config.

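When ALLOW_ROBOTS is unset or false (the default), the new /robots.txt
endpoint responds with:

    # https://www.robotstxt.org/robotstxt.html
    User-agent: *
    Disallow: /

which asks crawlers not to index the site. With ALLOW_ROBOTS=true, the
last line becomes "Disallow:", allowing indexing. The static
robots.txt file is replaced by a SvelteKit endpoint so that the
response can depend on runtime configuration.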
---
 example.env                |  3 +++
 package-lock.json          | 13 +++++++++++++
 package.json               |  1 +
 src/lib/auditors/config.ts |  2 +-
 src/lib/server/config.ts   |  2 ++
 src/routes/robots.txt.ts   | 17 +++++++++++++++++
 static/robots.txt          |  3 ---
 7 files changed, 37 insertions(+), 4 deletions(-)
 create mode 100644 src/routes/robots.txt.ts
 delete mode 100644 static/robots.txt

diff --git a/example.env b/example.env
index 0bbb1f7dd..aca7de232 100644
--- a/example.env
+++ b/example.env
@@ -3,6 +3,9 @@
 # Are advanced & experimental features enabled?
 # ADVANCED=false
 
+# Are search robots allowed to index this site?
+# ALLOW_ROBOTS=false
+
 # Public HTTP(S) URLs of LexImpact Socio-Fiscal API server
 API_BASE_URLS="https://simulateur-socio-fiscal.leximpact.dev/api/"
 # API_BASE_URLS="http://localhost:8000"
diff --git a/package-lock.json b/package-lock.json
index 177429df3..f202405aa 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -28,6 +28,7 @@
         "autoprefixer": "^10.2.5",
         "cssnano": "^5.0.5",
         "d3-scale": "^4.0.0",
+        "dedent-js": "^1.0.1",
         "dotenv": "^10.0.0",
         "eslint": "^7.22.0",
         "eslint-config-prettier": "^8.1.0",
@@ -3171,6 +3172,12 @@
         }
       }
     },
+    "node_modules/dedent-js": {
+      "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/dedent-js/-/dedent-js-1.0.1.tgz",
+      "integrity": "sha1-vuX7fJ5yfYXf+iRZDRDsGrElUwU=",
+      "dev": true
+    },
     "node_modules/deep-is": {
       "version": "0.1.4",
       "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz",
@@ -9323,6 +9330,12 @@
         "ms": "2.1.2"
       }
     },
+    "dedent-js": {
+      "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/dedent-js/-/dedent-js-1.0.1.tgz",
+      "integrity": "sha1-vuX7fJ5yfYXf+iRZDRDsGrElUwU=",
+      "dev": true
+    },
     "deep-is": {
       "version": "0.1.4",
       "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz",
diff --git a/package.json b/package.json
index 4944814fd..33c12f430 100644
--- a/package.json
+++ b/package.json
@@ -31,6 +31,7 @@
     "autoprefixer": "^10.2.5",
     "cssnano": "^5.0.5",
     "d3-scale": "^4.0.0",
+    "dedent-js": "^1.0.1",
     "dotenv": "^10.0.0",
     "eslint": "^7.22.0",
     "eslint-config-prettier": "^8.1.0",
diff --git a/src/lib/auditors/config.ts b/src/lib/auditors/config.ts
index 0568bf67b..b2cb2811d 100644
--- a/src/lib/auditors/config.ts
+++ b/src/lib/auditors/config.ts
@@ -31,7 +31,7 @@ export function auditConfig(
   const errors: { [key: string]: unknown } = {}
   const remainingKeys = new Set(Object.keys(data))
 
-  for (const key of ["advanced", "proxy"]) {
+  for (const key of ["advanced", "allowRobots", "proxy"]) {
     audit.attribute(
       data,
       key,
diff --git a/src/lib/server/config.ts b/src/lib/server/config.ts
index 323a8ac1c..3a2b3d3dc 100644
--- a/src/lib/server/config.ts
+++ b/src/lib/server/config.ts
@@ -4,6 +4,7 @@ import { validateConfig } from "$lib/auditors/config"
 
 export interface Config {
   advanced: boolean
+  allowRobots: boolean
   apiBaseUrls: string[]
   apiWebSocketBaseUrls: string[]
   baseUrl: string
@@ -42,6 +43,7 @@ export interface Config {
 
 const [validConfig, error] = validateConfig({
   advanced: process.env["ADVANCED"],
+  allowRobots: process.env["ALLOW_ROBOTS"],
   apiBaseUrls: process.env["API_BASE_URLS"],
   baseUrl: process.env["BASE_URL"],
   childrenKey: process.env["CHILDREN_KEY"],
diff --git a/src/routes/robots.txt.ts b/src/routes/robots.txt.ts
new file mode 100644
index 000000000..b83b51bd2
--- /dev/null
+++ b/src/routes/robots.txt.ts
@@ -0,0 +1,17 @@
+import type { RequestHandler } from "@sveltejs/kit"
+import dedent from "dedent-js"
+
+import config from "$lib/server/config"
+
+const { allowRobots } = config
+
+export const get: RequestHandler = async () => {
+  return {
+    body: dedent`
+        # https://www.robotstxt.org/robotstxt.html
+        User-agent: *
+        Disallow:${allowRobots ? "" : " /"}
+    `,
+    headers: { "Content-Type": "text/plain; charset=utf-8" },
+  }
+}
diff --git a/static/robots.txt b/static/robots.txt
deleted file mode 100644
index e9e57dc4d..000000000
--- a/static/robots.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-# https://www.robotstxt.org/robotstxt.html
-User-agent: *
-Disallow:
-- 
GitLab