author | Ivan Fraixedes <[email protected]> | 2015-12-08 21:13:09 +0000
committer | Bjørn Erik Pedersen <[email protected]> | 2016-01-05 23:36:16 +0100
commit | 9a6dc6c791f47e1d410956cb16cc42a0117f7246 (patch)
tree | 83262271b13d4d0af9b117ac017b99168b6c9709 /hugolib/robotstxt_test.go
parent | 7c5a1fd16bc76f010d2321371e3c2ef95b53556d (diff)
download | hugo-9a6dc6c791f47e1d410956cb16cc42a0117f7246.tar.gz hugo-9a6dc6c791f47e1d410956cb16cc42a0117f7246.zip
Add embedded template for robots.txt
Diffstat (limited to 'hugolib/robotstxt_test.go')
-rw-r--r-- | hugolib/robotstxt_test.go | 67
1 file changed, 67 insertions, 0 deletions
diff --git a/hugolib/robotstxt_test.go b/hugolib/robotstxt_test.go
new file mode 100644
index 000000000..c964c0231
--- /dev/null
+++ b/hugolib/robotstxt_test.go
@@ -0,0 +1,67 @@
+package hugolib
+
+import (
+	"bytes"
+	"testing"
+
+	"github.com/spf13/afero"
+	"github.com/spf13/hugo/helpers"
+	"github.com/spf13/hugo/hugofs"
+	"github.com/spf13/hugo/source"
+	"github.com/spf13/viper"
+)
+
+const ROBOTSTXT_TEMPLATE = `User-agent: Googlebot
+  {{ range .Data.Pages }}
+  Disallow: {{.RelPermalink}}
+  {{ end }}
+`
+
+func TestRobotsTXTOutput(t *testing.T) {
+	viper.Reset()
+	defer viper.Reset()
+
+	hugofs.DestinationFS = new(afero.MemMapFs)
+
+	viper.Set("baseurl", "http://auth/bub/")
+
+	s := &Site{
+		Source: &source.InMemorySource{ByteSource: WEIGHTED_SOURCES},
+	}
+
+	s.initializeSiteInfo()
+
+	s.prepTemplates()
+	s.addTemplate("robots.txt", ROBOTSTXT_TEMPLATE)
+
+	if err := s.CreatePages(); err != nil {
+		t.Fatalf("Unable to create pages: %s", err)
+	}
+
+	if err := s.BuildSiteMeta(); err != nil {
+		t.Fatalf("Unable to build site metadata: %s", err)
+	}
+
+	if err := s.RenderHomePage(); err != nil {
+		t.Fatalf("Unable to RenderHomePage: %s", err)
+	}
+
+	if err := s.RenderSitemap(); err != nil {
+		t.Fatalf("Unable to RenderSitemap: %s", err)
+	}
+
+	if err := s.RenderRobotsTXT(); err != nil {
+		t.Fatalf("Unable to RenderRobotsTXT: %s", err)
+	}
+
+	robotsFile, err := hugofs.DestinationFS.Open("robots.txt")
+
+	if err != nil {
+		t.Fatalf("Unable to locate: robots.txt")
+	}
+
+	robots := helpers.ReaderToBytes(robotsFile)
+	if !bytes.HasPrefix(robots, []byte("User-agent: Googlebot")) {
+		t.Errorf("Robots file should start with 'User-agent: Googlebot'. %s", robots)
+	}
+}
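
For context on what the template under test produces: `ROBOTSTXT_TEMPLATE` ranges over `.Data.Pages` and emits one `Disallow` line per page, so the test only needs to assert that the rendered file begins with `User-agent: Googlebot`. A site can supply its own robots.txt template in the same Go-template style; the sketch below is a hypothetical site-level override (the override path, e.g. `layouts/robots.txt`, and the crawl rules shown are assumptions for illustration, not part of this commit):

```
# Hypothetical layouts/robots.txt override (assumption; not part of this commit)
User-agent: *
{{ range .Data.Pages }}
Disallow: {{ .RelPermalink }}
{{ end }}
```

In later Hugo releases, rendering robots.txt from such a template is switched on via the `enableRobotsTXT` site configuration option; the test above bypasses configuration and calls `s.RenderRobotsTXT()` directly.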