Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

TG-1126 Maintenance - Generate a robot.txt per environment at build time #41

Open
wants to merge 18 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from 6 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions {{cookiecutter.project_dirname}}/.gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -49,3 +49,6 @@ terraform.tfvars
# typescript
*.tsbuildinfo
next-env.d.ts

# robots.txt
*/public/robots.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
/**
 * @jest-environment node
 */

// Import every Jest global we use, not just `expect`, so the file does not
// silently depend on injected globals for `test`/`afterAll`.
import { afterAll, expect, test } from "@jest/globals"

const generateRobotsTxt = require('../../generate-robot-txt')

// Expected robots.txt bodies (must stay in sync with generate-robot-txt.js).
const crawlableRobotsTxt = `User-agent: *\nDisallow: /admin\nAllow: /`;
const uncrawlableRobotsTxt = `User-agent: *\nDisallow: /`;

// These tests mutate REACT_ENVIRONMENT; restore the original value afterwards
// so the mutation does not leak into other test files run in the same process.
const originalReactEnvironment = process.env.REACT_ENVIRONMENT;

afterAll(() => {
  if (originalReactEnvironment === undefined) {
    delete process.env.REACT_ENVIRONMENT;
  } else {
    process.env.REACT_ENVIRONMENT = originalReactEnvironment;
  }
})

// NOTE(review): generateRobotsTxt() also writes public/robots.txt as a side
// effect — these tests assume public/ exists relative to the jest cwd.

test('robot.txt is valid for dev environment', () => {
  process.env.REACT_ENVIRONMENT = 'development'
  expect(generateRobotsTxt()).toBe(uncrawlableRobotsTxt)
})

test('robot.txt is valid for staging environment', () => {
  process.env.REACT_ENVIRONMENT = 'staging'
  expect(generateRobotsTxt()).toBe(uncrawlableRobotsTxt)
})

test('robot.txt is valid for production environment', () => {
  process.env.REACT_ENVIRONMENT = 'production'
  expect(generateRobotsTxt()).toBe(crawlableRobotsTxt)
})
3 changes: 2 additions & 1 deletion {{cookiecutter.project_dirname}}/docker/remote.Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ ENV NEXT_TELEMETRY_DISABLED=1 \
SENTRY_ORG=$SENTRY_ORG \
SENTRY_PROJECT_NAME=$SENTRY_PROJECT_NAME \
SENTRY_URL=$SENTRY_URL
RUN yarn prebuild
RUN yarn build
LABEL company="20tab" project="{{ cookiecutter.project_slug }}" service="frontend" stage="build"

Expand All @@ -33,7 +34,7 @@ WORKDIR /app
RUN addgroup --system --gid 1001 nodejs
RUN adduser --system --uid 1001 nextjs
USER nextjs
COPY ["next.config.js", "package.json", "sentry.client.config.js", "sentry.server.config.js", "server.js", "yarn.lock", "./"]
COPY ["next.config.js", "package.json", "generate-robot-txt.js", "sentry.client.config.js", "sentry.server.config.js", "yarn.lock", "./"]
COPY ["public/", "public/"]
COPY --from=build --chown=nextjs:nodejs /app/.next/standalone ./
COPY --from=build --chown=nextjs:nodejs /app/.next/static ./.next/static
Expand Down
20 changes: 20 additions & 0 deletions {{cookiecutter.project_dirname}}/generate-robot-txt.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
const fs = require("fs");

// robots.txt body served in production: everything is crawlable except /admin.
const crawlable = `User-agent: *\nDisallow: /admin\nAllow: /`;

// robots.txt body for every other environment: block all crawlers entirely.
const uncrawlable = `User-agent: *\nDisallow: /`;

/**
 * Write an environment-appropriate robots.txt to public/robots.txt.
 *
 * The environment is read from process.env.REACT_ENVIRONMENT; only the exact
 * value "production" yields the crawlable variant.
 *
 * @returns {string} The robots.txt body that was written to disk.
 */
function generateRobotsTxt() {
  const isProduction = process.env.REACT_ENVIRONMENT === "production";
  const body = isProduction ? crawlable : uncrawlable;

  // Persist where Next.js serves static assets from (path is relative to the
  // process working directory, i.e. the project root at build time).
  fs.writeFileSync("public/robots.txt", body);

  return body;
}

module.exports = generateRobotsTxt;
9 changes: 4 additions & 5 deletions {{cookiecutter.project_dirname}}/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -4,20 +4,20 @@
"private": true,
"scripts": {
"audit:fix": "npm_config_yes=true npx yarn-audit-fix",
"build": "next build",
"prebuild": "node prebuild",
"build": "yarn next build",
"ci:contract-test": "npx jest pact/contracts --testEnvironment=node --color --detectOpenHandles --passWithNoTests",
"ci:unit-test": "npx jest --color --silent --detectOpenHandles --testPathIgnorePatterns=pact/contracts --coverage --coverageDirectory ./coverage/tests --ci --reporters=default --reporters=jest-junit",
"e2e": "CYPRESS_BASE_URL=http://localhost:3000 cypress open",
"e2e": "CYPRESS_BASE_URL=http://localhost:3000 cypress open",
"lint": "next lint",
"dev": "next dev",
"pact": "npx jest pact/contracts --testEnvironment=node --detectOpenHandles",
"start": "node server.js",
"test": "npx jest --testPathIgnorePatterns=pact/contracts --detectOpenHandles"
},
"dependencies": {
"@next/font": "^13.4.19",
"@sentry/nextjs": "^7.64.0",
"basic-auth": "^2.0.1",
"express": "^4.18.2",
"next": "^13.4.19",
"react": "^18.2.0",
"react-dom": "^18.2.0",
Expand All @@ -29,7 +29,6 @@
"@testing-library/jest-dom": "^6.1.3",
"@testing-library/react": "^14.0.0",
"@testing-library/user-event": "^14.4.3",
"@types/express": "^4.17.17",
"@types/jest": "^29.2.5",
"@types/node": "^20.5.3",
"@types/react": "^18.2.21",
Expand Down
9 changes: 9 additions & 0 deletions {{cookiecutter.project_dirname}}/prebuild.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
const generateRobotsTxt = require("./generate-robot-txt");

// Build-time hook (run via the `prebuild` npm script): materialise
// public/robots.txt before `next build` picks up static assets.
generateRobotsTxt();

// Report which variant was produced for this environment.
const variant =
  process.env.REACT_ENVIRONMENT === "production" ? "crawlable" : "non-crawlable";
console.log(`Generated a ${variant} public/robots.txt`);

This file was deleted.

4 changes: 0 additions & 4 deletions {{cookiecutter.project_dirname}}/public/robots/production.txt

This file was deleted.

3 changes: 0 additions & 3 deletions {{cookiecutter.project_dirname}}/public/robots/staging.txt

This file was deleted.

31 changes: 0 additions & 31 deletions {{cookiecutter.project_dirname}}/server.js

This file was deleted.

4 changes: 2 additions & 2 deletions {{cookiecutter.project_dirname}}/yarn.lock
Original file line number Diff line number Diff line change
Expand Up @@ -1105,7 +1105,7 @@
"@types/range-parser" "*"
"@types/send" "*"

"@types/express@^4.17.11", "@types/express@^4.17.17":
"@types/express@^4.17.11":
version "4.17.21"
resolved "https://registry.yarnpkg.com/@types/express/-/express-4.17.21.tgz#c26d4a151e60efe0084b23dc3369ebc631ed192d"
integrity sha512-ejlPM315qwLpaQlQDTjPdsUFSc6ZsP4AN6AlWnogPjQ7CVi7PYF3YVz+CY3jE2pwYf7E/7HlDAN0rV2GxTG0HQ==
Expand Down Expand Up @@ -3213,7 +3213,7 @@ expect@^29.0.0, expect@^29.7.0:
jest-message-util "^29.7.0"
jest-util "^29.7.0"

express@^4.18.1, express@^4.18.2:
express@^4.18.1:
version "4.18.2"
resolved "https://registry.yarnpkg.com/express/-/express-4.18.2.tgz#3fabe08296e930c796c19e3c516979386ba9fd59"
integrity sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ==
Expand Down