From c09046abbf0bd24c45a8da6febdbf0f90a67bc86 Mon Sep 17 00:00:00 2001
From: "David B. Kinder"
Date: Thu, 4 Apr 2019 15:37:49 -0700
Subject: [PATCH] doc: add robots.txt

use a robots.txt file to prevent search engines from indexing old
content.

Signed-off-by: David B. Kinder
---
 doc/Makefile                   | 1 +
 doc/scripts/publish-robots.txt | 7 +++++++
 2 files changed, 8 insertions(+)
 create mode 100644 doc/scripts/publish-robots.txt

diff --git a/doc/Makefile b/doc/Makefile
index cf5d169ea..8adaf5b6e 100644
--- a/doc/Makefile
+++ b/doc/Makefile
@@ -77,6 +77,7 @@ publish:
 	cp -r $(BUILDDIR)/html/* $(PUBLISHDIR)
 	cp scripts/publish-README.md $(PUBLISHDIR)/../README.md
 	cp scripts/publish-index.html $(PUBLISHDIR)/../index.html
+	cp scripts/publish-robots.txt $(PUBLISHDIR)/../robots.txt
 	cd $(PUBLISHDIR)/..; git add -A; git commit -s -m "publish $(RELEASE)"; git push origin master;

diff --git a/doc/scripts/publish-robots.txt b/doc/scripts/publish-robots.txt
new file mode 100644
index 000000000..c39c851c4
--- /dev/null
+++ b/doc/scripts/publish-robots.txt
@@ -0,0 +1,7 @@
+User-agent: *
+Allow: /
+Disallow: /0.1/
+Disallow: /0.2/
+Disallow: /0.3/
+Disallow: /0.4/
+Disallow: /0.5/