author     Bjørn Erik Pedersen <[email protected]>   2017-07-24 09:00:23 +0200
committer  Bjørn Erik Pedersen <[email protected]>   2017-12-27 18:44:47 +0100
commit     3cdf19e9b7e46c57a9bb43ff02199177feb55768 (patch)
tree       d05e3dc15824c8eeef3e5455193d2d6328621f47 /hugolib
parent     02f2735f68e1bb2e2c412698755d52c4d396f237 (diff)
download   hugo-3cdf19e9b7e46c57a9bb43ff02199177feb55768.tar.gz
           hugo-3cdf19e9b7e46c57a9bb43ff02199177feb55768.zip
:sparkles: Implement Page bundling and image handling
This commit is not the smallest in Hugo's history. Some highlights include:

* Page bundles (for complete articles, keeping images and content together, etc.).
* Bundled images can be processed in as many versions/sizes as you need with the three methods `Resize`, `Fill` and `Fit` (see the template sketch below).
* Processed images are cached inside `resources/_gen/images` (default) in your project.
* Symbolic links (both files and dirs) are now allowed anywhere inside /content.
* A new table-based build summary.
* The "Total in nn ms" now reports the total including the handling of the files inside /static. So if it now reports more than you're used to, it is just **more real** and probably faster than before (see below).

A site building benchmark run compared to `v0.31.1` shows that this should be slightly faster and use less memory:

```bash
▶ ./benchSite.sh "TOML,num_langs=.*,num_root_sections=5,num_pages=(500|1000),tags_per_page=5,shortcodes,render"

benchmark                                                                                                         old ns/op     new ns/op     delta
BenchmarkSiteBuilding/TOML,num_langs=1,num_root_sections=5,num_pages=500,tags_per_page=5,shortcodes,render-4      101785785     78067944      -23.30%
BenchmarkSiteBuilding/TOML,num_langs=1,num_root_sections=5,num_pages=1000,tags_per_page=5,shortcodes,render-4     185481057     149159919     -19.58%
BenchmarkSiteBuilding/TOML,num_langs=3,num_root_sections=5,num_pages=500,tags_per_page=5,shortcodes,render-4      103149918     85679409      -16.94%
BenchmarkSiteBuilding/TOML,num_langs=3,num_root_sections=5,num_pages=1000,tags_per_page=5,shortcodes,render-4     203515478     169208775     -16.86%

benchmark                                                                                                         old allocs    new allocs    delta
BenchmarkSiteBuilding/TOML,num_langs=1,num_root_sections=5,num_pages=500,tags_per_page=5,shortcodes,render-4      532464        391539        -26.47%
BenchmarkSiteBuilding/TOML,num_langs=1,num_root_sections=5,num_pages=1000,tags_per_page=5,shortcodes,render-4     1056549       772702        -26.87%
BenchmarkSiteBuilding/TOML,num_langs=3,num_root_sections=5,num_pages=500,tags_per_page=5,shortcodes,render-4      555974        406630        -26.86%
BenchmarkSiteBuilding/TOML,num_langs=3,num_root_sections=5,num_pages=1000,tags_per_page=5,shortcodes,render-4     1086545       789922        -27.30%

benchmark                                                                                                         old bytes     new bytes     delta
BenchmarkSiteBuilding/TOML,num_langs=1,num_root_sections=5,num_pages=500,tags_per_page=5,shortcodes,render-4      53243246      43598155      -18.12%
BenchmarkSiteBuilding/TOML,num_langs=1,num_root_sections=5,num_pages=1000,tags_per_page=5,shortcodes,render-4     105811617     86087116      -18.64%
BenchmarkSiteBuilding/TOML,num_langs=3,num_root_sections=5,num_pages=500,tags_per_page=5,shortcodes,render-4      54558852      44545097      -18.35%
BenchmarkSiteBuilding/TOML,num_langs=3,num_root_sections=5,num_pages=1000,tags_per_page=5,shortcodes,render-4     106903858     86978413      -18.64%
```

Fixes #3651
Closes #3158
Fixes #1014
Closes #2021
Fixes #1240
Updates #3757
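To make the image handling concrete, here is a minimal template sketch of the three methods named above. Only `Resize`, `Fill` and `Fit` are named by this commit; the bundle path, image name, and the `GetByPrefix` lookup are illustrative assumptions, not part of this diff:

```go-html-template
{{/* Minimal sketch, assuming a leaf bundle content/post/my-post/index.md
     with a bundled image sunset.jpg (both hypothetical).
     .Resources holds the files bundled with the page. */}}
{{ with .Resources.GetByPrefix "sunset" }}
  {{ $small := .Resize "300x" }}   {{/* scale to 300px wide, keep aspect ratio */}}
  {{ $fill  := .Fill "600x400" }}  {{/* scale and crop to exactly 600x400 */}}
  {{ $fit   := .Fit "400x300" }}   {{/* scale to fit inside 400x300 */}}
  <img src="{{ $small.RelPermalink }}" alt="Sunset">
{{ end }}
```

Each processed version would be cached under `resources/_gen/images`, so repeated builds reuse it instead of re-encoding.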
Diffstat (limited to 'hugolib')
-rw-r--r--  hugolib/alias.go                       |    2
-rw-r--r--  hugolib/alias_test.go                  |    4
-rw-r--r--  hugolib/config.go                      |   12
-rw-r--r--  hugolib/datafiles_test.go              |   31
-rw-r--r--  hugolib/fileInfo.go                    |  109
-rw-r--r--  hugolib/fileInfo_test.go               |   61
-rw-r--r--  hugolib/handler_base.go                |   65
-rw-r--r--  hugolib/handler_file.go                |   59
-rw-r--r--  hugolib/handler_meta.go                |  128
-rw-r--r--  hugolib/handler_page.go                |  157
-rw-r--r--  hugolib/handler_test.go                |   77
-rw-r--r--  hugolib/hugo_sites.go                  |  337
-rw-r--r--  hugolib/hugo_sites_build.go            |   34
-rw-r--r--  hugolib/hugo_sites_build_test.go       |  100
-rw-r--r--  hugolib/hugo_sites_multihost_test.go   |    5
-rw-r--r--  hugolib/hugolib.debug                  |  bin 0 -> 2464730 bytes
-rw-r--r--  hugolib/menu_old_test.go               |  654
-rw-r--r--  hugolib/page.go                        |  156
-rw-r--r--  hugolib/page_bundler.go                |  188
-rw-r--r--  hugolib/page_bundler_capture.go        |  683
-rw-r--r--  hugolib/page_bundler_capture_test.go   |  255
-rw-r--r--  hugolib/page_bundler_handlers.go       |  346
-rw-r--r--  hugolib/page_bundler_test.go           |  379
-rw-r--r--  hugolib/page_collections.go            |   13
-rw-r--r--  hugolib/page_collections_test.go       |    2
-rw-r--r--  hugolib/page_output.go                 |   74
-rw-r--r--  hugolib/page_paths.go                  |   41
-rw-r--r--  hugolib/page_paths_test.go             |    1
-rw-r--r--  hugolib/page_resource.go               |   23
-rw-r--r--  hugolib/page_test.go                   |    4
-rw-r--r--  hugolib/pagination.go                  |    6
-rw-r--r--  hugolib/permalinks.go                  |   11
-rw-r--r--  hugolib/prune_resources.go             |   84
-rw-r--r--  hugolib/rss_test.go                    |    2
-rw-r--r--  hugolib/shortcode_test.go              |    5
-rw-r--r--  hugolib/site.go                        |  729
-rw-r--r--  hugolib/siteJSONEncode_test.go         |   10
-rw-r--r--  hugolib/site_render.go                 |   31
-rw-r--r--  hugolib/site_sections_test.go          |    5
-rw-r--r--  hugolib/site_stats_test.go             |  101
-rw-r--r--  hugolib/site_test.go                   |  203
-rw-r--r--  hugolib/site_url_test.go               |    7
-rw-r--r--  hugolib/testhelpers_test.go            |   18
43 files changed, 3206 insertions, 2006 deletions
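For orientation before the diff: a leaf bundle keeps a page and its resources together in one directory headed by `index.*`, while `_index.*` marks a branch bundle (this matches `classifyBundledFile` in the new `hugolib/fileInfo.go` below). An illustrative /content layout, with hypothetical paths:

```
content/
├── post/
│   └── my-post/        <- leaf bundle: index.md plus its resources
│       ├── index.md
│       └── sunset.jpg
└── blog/
    ├── _index.md       <- branch bundle: section page with resources alongside
    └── logo.png
```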
diff --git a/hugolib/alias.go b/hugolib/alias.go
index a3fe5c24a..dbb864384 100644
--- a/hugolib/alias.go
+++ b/hugolib/alias.go
@@ -109,7 +109,7 @@ func (s *Site) publishDestAlias(allowRoot bool, path, permalink string, p *Page)
return err
}
- return s.publish(targetPath, aliasContent)
+ return s.publish(&s.PathSpec.ProcessingStats.Aliases, targetPath, aliasContent)
}
diff --git a/hugolib/alias_test.go b/hugolib/alias_test.go
index 1d6824dba..abbda5f35 100644
--- a/hugolib/alias_test.go
+++ b/hugolib/alias_test.go
@@ -51,7 +51,9 @@ func TestAlias(t *testing.T) {
writeSource(t, fs, filepath.Join("content", "page.md"), pageWithAlias)
writeSource(t, fs, filepath.Join("layouts", "_default", "single.html"), basicTemplate)
- buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
+ s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
+
+ require.Len(t, s.rawAllPages, 1)
// the real page
th.assertFileContent(filepath.Join("public", "page", "index.html"), "For some moments the old man")
diff --git a/hugolib/config.go b/hugolib/config.go
index 62098aeea..c83f38cce 100644
--- a/hugolib/config.go
+++ b/hugolib/config.go
@@ -64,22 +64,21 @@ func LoadConfig(fs afero.Fs, relativeSourcePath, configFilename string) (*viper.
v.RegisterAlias("indexes", "taxonomies")
- // Remove these in Hugo 0.23.
+ // Remove these in Hugo 0.33.
if v.IsSet("disable404") {
- helpers.Deprecated("site config", "disable404", "Use disableKinds=[\"404\"]", false)
+ helpers.Deprecated("site config", "disable404", "Use disableKinds=[\"404\"]", true)
}
if v.IsSet("disableRSS") {
- helpers.Deprecated("site config", "disableRSS", "Use disableKinds=[\"RSS\"]", false)
+ helpers.Deprecated("site config", "disableRSS", "Use disableKinds=[\"RSS\"]", true)
}
if v.IsSet("disableSitemap") {
- // NOTE: Do not remove this until Hugo 0.24, ERROR in 0.23.
- helpers.Deprecated("site config", "disableSitemap", "Use disableKinds= [\"sitemap\"]", false)
+ helpers.Deprecated("site config", "disableSitemap", "Use disableKinds= [\"sitemap\"]", true)
}
if v.IsSet("disableRobotsTXT") {
- helpers.Deprecated("site config", "disableRobotsTXT", "Use disableKinds= [\"robotsTXT\"]", false)
+ helpers.Deprecated("site config", "disableRobotsTXT", "Use disableKinds= [\"robotsTXT\"]", true)
}
if err := loadDefaultSettingsFor(v); err != nil {
@@ -176,6 +175,7 @@ func loadDefaultSettingsFor(v *viper.Viper) error {
v.SetDefault("contentDir", "content")
v.SetDefault("layoutDir", "layouts")
v.SetDefault("staticDir", "static")
+ v.SetDefault("resourceDir", "resources")
v.SetDefault("archetypeDir", "archetypes")
v.SetDefault("publishDir", "public")
v.SetDefault("dataDir", "data")
diff --git a/hugolib/datafiles_test.go b/hugolib/datafiles_test.go
index b62fb197d..cdc5e0684 100644
--- a/hugolib/datafiles_test.go
+++ b/hugolib/datafiles_test.go
@@ -27,16 +27,15 @@ import (
jww "github.com/spf13/jwalterweatherman"
"github.com/gohugoio/hugo/parser"
- "github.com/gohugoio/hugo/source"
"github.com/stretchr/testify/require"
)
func TestDataDirJSON(t *testing.T) {
t.Parallel()
- sources := []source.ByteSource{
- {Name: filepath.FromSlash("data/test/foo.json"), Content: []byte(`{ "bar": "foofoo" }`)},
- {Name: filepath.FromSlash("data/test.json"), Content: []byte(`{ "hello": [ { "world": "foo" } ] }`)},
+ sources := [][2]string{
+ {filepath.FromSlash("data/test/foo.json"), `{ "bar": "foofoo" }`},
+ {filepath.FromSlash("data/test.json"), `{ "hello": [ { "world": "foo" } ] }`},
}
expected, err := parser.HandleJSONMetaData([]byte(`{ "test": { "hello": [{ "world": "foo" }] , "foo": { "bar":"foofoo" } } }`))
@@ -51,8 +50,8 @@ func TestDataDirJSON(t *testing.T) {
func TestDataDirToml(t *testing.T) {
t.Parallel()
- sources := []source.ByteSource{
- {Name: filepath.FromSlash("data/test/kung.toml"), Content: []byte("[foo]\nbar = 1")},
+ sources := [][2]string{
+ {"data/test/kung.toml", "[foo]\nbar = 1"},
}
expected, err := parser.HandleTOMLMetaData([]byte("[test]\n[test.kung]\n[test.kung.foo]\nbar = 1"))
@@ -67,12 +66,12 @@ func TestDataDirToml(t *testing.T) {
func TestDataDirYAMLWithOverridenValue(t *testing.T) {
t.Parallel()
- sources := []source.ByteSource{
+ sources := [][2]string{
// filepath.Walk walks the files in lexical order, '/' comes before '.'. Simulate this:
- {Name: filepath.FromSlash("data/a.yaml"), Content: []byte("a: 1")},
- {Name: filepath.FromSlash("data/test/v1.yaml"), Content: []byte("v1-2: 2")},
- {Name: filepath.FromSlash("data/test/v2.yaml"), Content: []byte("v2:\n- 2\n- 3")},
- {Name: filepath.FromSlash("data/test.yaml"), Content: []byte("v1: 1")},
+ {filepath.FromSlash("data/a.yaml"), "a: 1"},
+ {filepath.FromSlash("data/test/v1.yaml"), "v1-2: 2"},
+ {filepath.FromSlash("data/test/v2.yaml"), "v2:\n- 2\n- 3"},
+ {filepath.FromSlash("data/test.yaml"), "v1: 1"},
}
expected := map[string]interface{}{"a": map[string]interface{}{"a": 1},
@@ -85,10 +84,10 @@ func TestDataDirYAMLWithOverridenValue(t *testing.T) {
func TestDataDirMultipleSources(t *testing.T) {
t.Parallel()
- sources := []source.ByteSource{
- {Name: filepath.FromSlash("data/test/first.toml"), Content: []byte("bar = 1")},
- {Name: filepath.FromSlash("themes/mytheme/data/test/first.toml"), Content: []byte("bar = 2")},
- {Name: filepath.FromSlash("data/test/second.toml"), Content: []byte("tender = 2")},
+ sources := [][2]string{
+ {filepath.FromSlash("data/test/first.toml"), "bar = 1"},
+ {filepath.FromSlash("themes/mytheme/data/test/first.toml"), "bar = 2"},
+ {filepath.FromSlash("data/test/second.toml"), "tender = 2"},
}
expected, _ := parser.HandleTOMLMetaData([]byte("[test.first]\nbar = 1\n[test.second]\ntender=2"))
@@ -98,7 +97,7 @@ func TestDataDirMultipleSources(t *testing.T) {
}
-func doTestDataDir(t *testing.T, expected interface{}, sources []source.ByteSource, configKeyValues ...interface{}) {
+func doTestDataDir(t *testing.T, expected interface{}, sources [][2]string, configKeyValues ...interface{}) {
var (
cfg, fs = newTestCfg()
)
diff --git a/hugolib/fileInfo.go b/hugolib/fileInfo.go
new file mode 100644
index 000000000..14dd8dbf9
--- /dev/null
+++ b/hugolib/fileInfo.go
@@ -0,0 +1,109 @@
+// Copyright 2017-present The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "os"
+ "strings"
+
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/source"
+)
+
+// fileInfo implements the File and ReadableFile interface.
+var (
+ _ source.File = (*fileInfo)(nil)
+ _ source.ReadableFile = (*fileInfo)(nil)
+)
+
+type fileInfo struct {
+ bundleTp bundleDirType
+ source.ReadableFile
+ overriddenLang string
+}
+
+func (fi *fileInfo) Lang() string {
+ if fi.overriddenLang != "" {
+ return fi.overriddenLang
+ }
+ return fi.ReadableFile.Lang()
+}
+
+func (fi *fileInfo) isOwner() bool {
+ return fi.bundleTp > bundleNot
+}
+
+func isContentFile(filename string) bool {
+ return contentFileExtensionsSet[strings.TrimPrefix(helpers.Ext(filename), ".")]
+}
+
+func (fi *fileInfo) isContentFile() bool {
+ return contentFileExtensionsSet[fi.Ext()]
+}
+
+func newFileInfo(sp *source.SourceSpec, baseDir, filename string, fi os.FileInfo, tp bundleDirType) *fileInfo {
+
+ baseFi := sp.NewFileInfo(baseDir, filename, fi)
+ f := &fileInfo{
+ bundleTp: tp,
+ ReadableFile: baseFi,
+ }
+
+ return f
+
+}
+
+type bundleDirType int
+
+const (
+ bundleNot bundleDirType = iota
+
+ // All from here are bundles in one form or another.
+ bundleLeaf
+ bundleBranch
+)
+
+// Returns the given file's name's bundle type and whether it is a content
+// file or not.
+func classifyBundledFile(name string) (bundleDirType, bool) {
+ if !isContentFile(name) {
+ return bundleNot, false
+ }
+ if strings.HasPrefix(name, "_index.") {
+ return bundleBranch, true
+ }
+
+ if strings.HasPrefix(name, "index.") {
+ return bundleLeaf, true
+ }
+
+ return bundleNot, true
+}
+
+func (b bundleDirType) String() string {
+ switch b {
+ case bundleNot:
+ return "Not a bundle"
+ case bundleLeaf:
+ return "Regular bundle"
+ case bundleBranch:
+ return "Branch bundle"
+ }
+
+ return ""
+}
+
+func (b bundleDirType) isBundle() bool {
+ return b > bundleNot
+}
diff --git a/hugolib/fileInfo_test.go b/hugolib/fileInfo_test.go
new file mode 100644
index 000000000..18579c078
--- /dev/null
+++ b/hugolib/fileInfo_test.go
@@ -0,0 +1,61 @@
+// Copyright 2017-present The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "testing"
+
+ "path/filepath"
+
+ "github.com/gohugoio/hugo/source"
+ "github.com/stretchr/testify/require"
+)
+
+func TestBundleFileInfo(t *testing.T) {
+ t.Parallel()
+
+ assert := require.New(t)
+ cfg, fs := newTestBundleSourcesMultilingual(t)
+ sourceSpec := source.NewSourceSpec(cfg, fs)
+
+ for _, this := range []struct {
+ filename string
+ check func(f *fileInfo)
+ }{
+ {"/path/to/file.md", func(fi *fileInfo) {
+ assert.Equal("md", fi.Ext())
+ assert.Equal("en", fi.Lang())
+ assert.False(fi.isOwner())
+ assert.True(fi.isContentFile())
+ }},
+ {"/path/to/file.JPG", func(fi *fileInfo) {
+ assert.Equal("jpg", fi.Ext())
+ assert.False(fi.isContentFile())
+ }},
+ {"/path/to/file.nn.png", func(fi *fileInfo) {
+ assert.Equal("png", fi.Ext())
+ assert.Equal("nn", fi.Lang())
+ assert.Equal("file", fi.TranslationBaseName())
+ assert.False(fi.isContentFile())
+ }},
+ } {
+ fi := newFileInfo(
+ sourceSpec,
+ filepath.FromSlash("/work/base"),
+ filepath.FromSlash(this.filename),
+ nil, bundleNot)
+ this.check(fi)
+ }
+
+}
diff --git a/hugolib/handler_base.go b/hugolib/handler_base.go
deleted file mode 100644
index d7e4a63a3..000000000
--- a/hugolib/handler_base.go
+++ /dev/null
@@ -1,65 +0,0 @@
-// Copyright 2015 The Hugo Authors. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package hugolib
-
-import (
- "github.com/gohugoio/hugo/source"
-)
-
-// Handler is used for processing files of a specific type.
-type Handler interface {
- FileConvert(*source.File, *Site) HandledResult
- PageConvert(*Page) HandledResult
- Read(*source.File, *Site) HandledResult
- Extensions() []string
-}
-
-// Handle identifies functionality associated with certain file extensions.
-type Handle struct {
- extensions []string
-}
-
-// Extensions returns a list of extensions.
-func (h Handle) Extensions() []string {
- return h.extensions
-}
-
-// HandledResult describes the results of a file handling operation.
-type HandledResult struct {
- page *Page
- file *source.File
- err error
-}
-
-// HandledResult is an error
-func (h HandledResult) Error() string {
- if h.err != nil {
- if h.page != nil {
- return "Error: " + h.err.Error() + " for " + h.page.File.LogicalName()
- }
- if h.file != nil {
- return "Error: " + h.err.Error() + " for " + h.file.LogicalName()
- }
- }
- return h.err.Error()
-}
-
-func (h HandledResult) String() string {
- return h.Error()
-}
-
-// Page returns the affected page.
-func (h HandledResult) Page() *Page {
- return h.page
-}
diff --git a/hugolib/handler_file.go b/hugolib/handler_file.go
deleted file mode 100644
index 82ea85fb2..000000000
--- a/hugolib/handler_file.go
+++ /dev/null
@@ -1,59 +0,0 @@
-// Copyright 2015 The Hugo Authors. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package hugolib
-
-import (
- "bytes"
-
- "github.com/dchest/cssmin"
- "github.com/gohugoio/hugo/source"
-)
-
-func init() {
- RegisterHandler(new(cssHandler))
- RegisterHandler(new(defaultHandler))
-}
-
-type basicFileHandler Handle
-
-func (h basicFileHandler) Read(f *source.File, s *Site) HandledResult {
- return HandledResult{file: f}
-}
-
-func (h basicFileHandler) PageConvert(*Page) HandledResult {
- return HandledResult{}
-}
-
-type defaultHandler struct{ basicFileHandler }
-
-func (h defaultHandler) Extensions() []string { return []string{"*"} }
-func (h defaultHandler) FileConvert(f *source.File, s *Site) HandledResult {
- err := s.publish(f.Path(), f.Contents)
- if err != nil {
- return HandledResult{err: err}
- }
- return HandledResult{file: f}
-}
-
-type cssHandler struct{ basicFileHandler }
-
-func (h cssHandler) Extensions() []string { return []string{"css"} }
-func (h cssHandler) FileConvert(f *source.File, s *Site) HandledResult {
- x := cssmin.Minify(f.Bytes())
- err := s.publish(f.Path(), bytes.NewReader(x))
- if err != nil {
- return HandledResult{err: err}
- }
- return HandledResult{file: f}
-}
diff --git a/hugolib/handler_meta.go b/hugolib/handler_meta.go
deleted file mode 100644
index c1aaf5f8c..000000000
--- a/hugolib/handler_meta.go
+++ /dev/null
@@ -1,128 +0,0 @@
-// Copyright 2015 The Hugo Authors. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package hugolib
-
-import (
- "errors"
-
- "fmt"
-
- "github.com/gohugoio/hugo/source"
-)
-
-var handlers []Handler
-
-// MetaHandler abstracts reading and converting functionality of a Handler.
-type MetaHandler interface {
- // Read the Files in and register
- Read(*source.File, *Site, HandleResults)
-
- // Generic Convert Function with coordination
- Convert(interface{}, *Site, HandleResults)
-
- Handle() Handler
-}
-
-// HandleResults is a channel for HandledResult.
-type HandleResults chan<- HandledResult
-
-// NewMetaHandler creates a MetaHandle for a given extensions.
-func NewMetaHandler(in string) *MetaHandle {
- x := &MetaHandle{ext: in}
- x.Handler()
- return x
-}
-
-// MetaHandle is a generic MetaHandler that internally uses
-// the globally registered handlers for handling specific file types.
-type MetaHandle struct {
- handler Handler
- ext string
-}
-
-func (mh *MetaHandle) Read(f *source.File, s *Site, results HandleResults) {
- if h := mh.Handler(); h != nil {
- results <- h.Read(f, s)
- return
- }
-
- results <- HandledResult{err: errors.New("No handler found"), file: f}
-}
-
-// Convert handles the conversion of files and pages.
-func (mh *MetaHandle) Convert(i interface{}, s *Site, results HandleResults) {
- h := mh.Handler()
-
- if f, ok := i.(*source.File); ok {
- results <- h.FileConvert(f, s)
- return
- }
-
- if p, ok := i.(*Page); ok {
- if p == nil {
- results <- HandledResult{err: errors.New("file resulted in a nil page")}
- return
- }
-
- if h == nil {
- results <- HandledResult{err: fmt.Errorf("No handler found for page '%s'. Verify the markup is supported by Hugo.", p.FullFilePath())}
- return
- }
-
- results <- h.PageConvert(p)
- }
-}
-
-// Handler finds the registered handler for the used extensions.
-func (mh *MetaHandle) Handler() Handler {
- if mh.handler == nil {
- mh.handler = FindHandler(mh.ext)
-
- // if no handler found, use default handler
- if mh.handler == nil {
- mh.handler = FindHandler("*")
- }
- }
- return mh.handler
-}
-
-// FindHandler finds a Handler in the globally registered handlers.
-func FindHandler(ext string) Handler {
- for _, h := range Handlers() {
- if HandlerMatch(h, ext) {
- return h
- }
- }
- return nil
-}
-
-// HandlerMatch checks if the given extensions matches.
-func HandlerMatch(h Handler, ext string) bool {
- for _, x := range h.Extensions() {
- if ext == x {
- return true
- }
- }
- return false
-}
-
-// RegisterHandler adds a handler to the globally registered ones.
-func RegisterHandler(h Handler) {
- handlers = append(handlers, h)
-}
-
-// Handlers returns the globally registered handlers.
-func Handlers() []Handler {
- return handlers
-}
diff --git a/hugolib/handler_page.go b/hugolib/handler_page.go
deleted file mode 100644
index c9e22fe76..000000000
--- a/hugolib/handler_page.go
+++ /dev/null
@@ -1,157 +0,0 @@
-// Copyright 2016 The Hugo Authors. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package hugolib
-
-import (
- "fmt"
-
- "github.com/gohugoio/hugo/helpers"
- "github.com/gohugoio/hugo/source"
-)
-
-func init() {
- RegisterHandler(new(markdownHandler))
- RegisterHandler(new(htmlHandler))
- RegisterHandler(new(asciidocHandler))
- RegisterHandler(new(rstHandler))
- RegisterHandler(new(pandocHandler))
- RegisterHandler(new(mmarkHandler))
- RegisterHandler(new(orgHandler))
-}
-
-type basicPageHandler Handle
-
-func (b basicPageHandler) Read(f *source.File, s *Site) HandledResult {
- page, err := s.NewPage(f.Path())
-
- if err != nil {
- return HandledResult{file: f, err: err}
- }
-
- if _, err := page.ReadFrom(f.Contents); err != nil {
- return HandledResult{file: f, err: err}
- }
-
- // In a multilanguage setup, we use the first site to
- // do the initial processing.
- // That site may be different than where the page will end up,
- // so we do the assignment here.
- // We should clean up this, but that will have to wait.
- s.assignSiteByLanguage(page)
-
- return HandledResult{file: f, page: page, err: err}
-}
-
-func (b basicPageHandler) FileConvert(*source.File, *Site) HandledResult {
- return HandledResult{}
-}
-
-type markdownHandler struct {
- basicPageHandler
-}
-
-func (h markdownHandler) Extensions() []string { return []string{"mdown", "markdown", "md"} }
-func (h markdownHandler) PageConvert(p *Page) HandledResult {
- return commonConvert(p)
-}
-
-type htmlHandler struct {
- basicPageHandler
-}
-
-func (h htmlHandler) Extensions() []string { return []string{"html", "htm"} }
-
-func (h htmlHandler) PageConvert(p *Page) HandledResult {
- if p.rendered {
- panic(fmt.Sprintf("Page %q already rendered, does not need conversion", p.BaseFileName()))
- }
-
- // Work on a copy of the raw content from now on.
- p.createWorkContentCopy()
-
- if err := p.processShortcodes(); err != nil {
- p.s.Log.ERROR.Println(err)
- }
-
- return HandledResult{err: nil}
-}
-
-type asciidocHandler struct {
- basicPageHandler
-}
-
-func (h asciidocHandler) Extensions() []string { return []string{"asciidoc", "adoc", "ad"} }
-func (h asciidocHandler) PageConvert(p *Page) HandledResult {
- return commonConvert(p)
-}
-
-type rstHandler struct {
- basicPageHandler
-}
-
-func (h rstHandler) Extensions() []string { return []string{"rest", "rst"} }
-func (h rstHandler) PageConvert(p *Page) HandledResult {
- return commonConvert(p)
-}
-
-type pandocHandler struct {
- basicPageHandler
-}
-
-func (h pandocHandler) Extensions() []string { return []string{"pandoc", "pdc"} }
-func (h pandocHandler) PageConvert(p *Page) HandledResult {
- return commonConvert(p)
-}
-
-type mmarkHandler struct {
- basicPageHandler
-}
-
-func (h mmarkHandler) Extensions() []string { return []string{"mmark"} }
-func (h mmarkHandler) PageConvert(p *Page) HandledResult {
- return commonConvert(p)
-}
-
-type orgHandler struct {
- basicPageHandler
-}
-
-func (h orgHandler) Extensions() []string { return []string{"org"} }
-func (h orgHandler) PageConvert(p *Page) HandledResult {
- return commonConvert(p)
-}
-
-func commonConvert(p *Page) HandledResult {
- if p.rendered {
- panic(fmt.Sprintf("Page %q already rendered, does not need conversion", p.BaseFileName()))
- }
-
- // Work on a copy of the raw content from now on.
- p.createWorkContentCopy()
-
- if err := p.processShortcodes(); err != nil {
- p.s.Log.ERROR.Println(err)
- }
-
- // TODO(bep) these page handlers need to be re-evaluated, as it is hard to
- // process a page in isolation. See the new preRender func.
- if p.s.Cfg.GetBool("enableEmoji") {
- p.workContent = helpers.Emojify(p.workContent)
- }
-
- p.workContent = p.replaceDivider(p.workContent)
- p.workContent = p.renderContent(p.workContent)
-
- return HandledResult{err: nil}
-}
diff --git a/hugolib/handler_test.go b/hugolib/handler_test.go
deleted file mode 100644
index aa58d1c43..000000000
--- a/hugolib/handler_test.go
+++ /dev/null
@@ -1,77 +0,0 @@
-// Copyright 2015 The Hugo Authors. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package hugolib
-
-import (
- "path/filepath"
- "testing"
-
- "github.com/gohugoio/hugo/deps"
- "github.com/gohugoio/hugo/helpers"
-)
-
-func TestDefaultHandler(t *testing.T) {
- t.Parallel()
-
- var (
- cfg, fs = newTestCfg()
- )
-
- cfg.Set("verbose", true)
- cfg.Set("uglyURLs", true)
-
- writeSource(t, fs, filepath.FromSlash("content/sect/doc1.html"), "---\nmarkup: markdown\n---\n# title\nsome *content*")
- writeSource(t, fs, filepath.FromSlash("content/sect/doc2.html"), "<!doctype html><html><body>more content</body></html>")
- writeSource(t, fs, filepath.FromSlash("content/sect/doc3.md"), "# doc3\n*some* content")
- writeSource(t, fs, filepath.FromSlash("content/sect/doc4.md"), "---\ntitle: doc4\n---\n# doc4\n*some content*")
- writeSource(t, fs, filepath.FromSlash("content/sect/doc3/img1.png"), "‰PNG  ��� IHDR����������:~›U��� IDATWcø��ZMoñ����IEND®B`‚")
- writeSource(t, fs, filepath.FromSlash("content/sect/img2.gif"), "GIF89a��€��ÿÿÿ���,�������D�;")
- writeSource(t, fs, filepath.FromSlash("content/sect/img2.spf"), "****FAKE-FILETYPE****")
- writeSource(t, fs, filepath.FromSlash("content/doc7.html"), "<html><body>doc7 content</body></html>")
- writeSource(t, fs, filepath.FromSlash("content/sect/doc8.html"), "---\nmarkup: md\n---\n# title\nsome *content*")
-
- writeSource(t, fs, filepath.FromSlash("layouts/_default/single.html"), "{{.Content}}")
- writeSource(t, fs, filepath.FromSlash("head"), "<head><script src=\"script.js\"></script></head>")
- writeSource(t, fs, filepath.FromSlash("head_abs"), "<head><script src=\"/script.js\"></script></head")
-
- buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
-
- tests := []struct {
- doc string
- expected string
- }{
- {filepath.FromSlash("public/sect/doc1.html"), "\n\n<h1 id=\"title\">title</h1>\n\n<p>some <em>content</em></p>\n"},
- {filepath.FromSlash("public/sect/doc2.html"), "<!doctype html><html><body>more content</body></html>"},
- {filepath.FromSlash("public/sect/doc3.html"), "\n\n<h1 id=\"doc3\">doc3</h1>\n\n<p><em>some</em> content</p>\n"},
- {filepath.FromSlash("public/sect/doc3/img1.png"), string([]byte("‰PNG  ��� IHDR����������:~›U��� IDATWcø��ZMoñ����IEND®B`‚"))},
- {filepath.FromSlash("public/sect/img2.gif"), string([]byte("GIF89a��€��ÿÿÿ���,�������D�;"))},
- {filepath.FromSlash("public/sect/img2.spf"), string([]byte("****FAKE-FILETYPE****"))},
- {filepath.FromSlash("public/doc7.html"), "<html><body>doc7 content</body></html>"},
- {filepath.FromSlash("public/sect/doc8.html"), "\n\n<h1 id=\"title\">title</h1>\n\n<p>some <em>content</em></p>\n"},
- }
-
- for _, test := range tests {
- file, err := fs.Destination.Open(test.doc)
- if err != nil {
- t.Fatalf("Did not find %s in target.", test.doc)
- }
-
- content := helpers.ReaderToString(file)
-
- if content != test.expected {
- t.Errorf("%s content expected:\n%q\ngot:\n%q", test.doc, test.expected, content)
- }
- }
-
-}
diff --git a/hugolib/hugo_sites.go b/hugolib/hugo_sites.go
index bf488b9be..4211e91f6 100644
--- a/hugolib/hugo_sites.go
+++ b/hugolib/hugo_sites.go
@@ -15,10 +15,13 @@ package hugolib
import (
"errors"
+ "io"
+ "path/filepath"
+ "sort"
"strings"
"sync"
- "path/filepath"
+ "github.com/gohugoio/hugo/resource"
"github.com/gohugoio/hugo/deps"
"github.com/gohugoio/hugo/helpers"
@@ -32,20 +35,40 @@ import (
type HugoSites struct {
Sites []*Site
- runMode runmode
-
multilingual *Multilingual
// Multihost is set if multilingual and baseURL set on the language level.
multihost bool
+ // If this is running in the dev server.
+ running bool
+
*deps.Deps
+
+ // Keeps track of bundle directories and symlinks to enable partial rebuilding.
+ ContentChanges *contentChangeMap
}
func (h *HugoSites) IsMultihost() bool {
return h != nil && h.multihost
}
+func (h *HugoSites) PrintProcessingStats(w io.Writer) {
+ stats := make([]*helpers.ProcessingStats, len(h.Sites))
+ for i := 0; i < len(h.Sites); i++ {
+ stats[i] = h.Sites[i].PathSpec.ProcessingStats
+ }
+ helpers.ProcessingStatsTable(w, stats...)
+}
+
+func (h *HugoSites) langSite() map[string]*Site {
+ m := make(map[string]*Site)
+ for _, s := range h.Sites {
+ m[s.Language.Lang] = s
+ }
+ return m
+}
+
// GetContentPage finds a Page with content given the absolute filename.
// Returns nil if none found.
func (h *HugoSites) GetContentPage(filename string) *Page {
@@ -58,12 +81,29 @@ func (h *HugoSites) GetContentPage(filename string) *Page {
rel := strings.TrimPrefix(filename, contendDir)
rel = strings.TrimPrefix(rel, helpers.FilePathSeparator)
- pos := s.rawAllPages.findPagePosByFilePath(rel)
+ for _, s := range h.Sites {
- if pos == -1 {
- return nil
+ pos := s.rawAllPages.findPagePosByFilePath(rel)
+
+ if pos == -1 {
+ continue
+ }
+ return s.rawAllPages[pos]
+ }
+
+ // If not found already, this may be bundled in another content file.
+ rel = filepath.Dir(rel)
+ for _, s := range h.Sites {
+
+ pos := s.rawAllPages.findFirstPagePosByFilePathPrefix(rel)
+
+ if pos == -1 {
+ continue
+ }
+ return s.rawAllPages[pos]
}
- return s.rawAllPages[pos]
+
+ return nil
}
@@ -81,10 +121,20 @@ func newHugoSites(cfg deps.DepsCfg, sites ...*Site) (*HugoSites, error) {
return nil, err
}
+ var contentChangeTracker *contentChangeMap
+
+ // Only needed in server mode.
+ // TODO(bep) clean up the running vs watching terms
+ if cfg.Running {
+ contentChangeTracker = &contentChangeMap{symContent: make(map[string]map[string]bool)}
+ }
+
h := &HugoSites{
- multilingual: langConfig,
- multihost: cfg.Cfg.GetBool("multihost"),
- Sites: sites}
+ running: cfg.Running,
+ multilingual: langConfig,
+ multihost: cfg.Cfg.GetBool("multihost"),
+ ContentChanges: contentChangeTracker,
+ Sites: sites}
for _, s := range sites {
s.owner = h
@@ -143,6 +193,10 @@ func applyDepsIfNeeded(cfg deps.DepsCfg, sites ...*Site) error {
d.OutputFormatsConfig = s.outputFormatsConfig
s.Deps = d
}
+ s.resourceSpec, err = resource.NewSpec(s.Deps.PathSpec, s.mediaTypesConfig)
+ if err != nil {
+ return err
+ }
}
@@ -258,10 +312,6 @@ func (h *HugoSites) toSiteInfos() []*SiteInfo {
// BuildCfg holds build options used to, as an example, skip the render step.
type BuildCfg struct {
- // Whether we are in watch (server) mode
- Watching bool
- // Print build stats at the end of a build
- PrintStats bool
// Reset site state before build. Use to force full rebuilds.
ResetState bool
// Re-creates the sites from configuration before a build.
@@ -304,11 +354,12 @@ func (h *HugoSites) renderCrossSitesArtifacts() error {
smLayouts := []string{"sitemapindex.xml", "_default/sitemapindex.xml", "_internal/_default/sitemapindex.xml"}
- return s.renderAndWriteXML("sitemapindex",
+ return s.renderAndWriteXML(&s.PathSpec.ProcessingStats.Sitemaps, "sitemapindex",
sitemapDefault.Filename, h.toSiteInfos(), s.appendThemeTemplates(smLayouts)...)
}
func (h *HugoSites) assignMissingTranslations() error {
+
// This looks heavy, but it should be a small number of nodes by now.
allPages := h.findAllPagesByKindNotIn(KindPage)
for _, nodeType := range []string{KindHome, KindSection, KindTaxonomy, KindTaxonomyTerm} {
@@ -427,73 +478,57 @@ func (h *HugoSites) createMissingPages() error {
return nil
}
-func (s *Site) assignSiteByLanguage(p *Page) {
-
- pageLang := p.Lang()
-
- if pageLang == "" {
- panic("Page language missing: " + p.Title)
+func (h *HugoSites) removePageByPathPrefix(path string) {
+ for _, s := range h.Sites {
+ s.removePageByPathPrefix(path)
}
+}
- for _, site := range s.owner.Sites {
- if strings.HasPrefix(site.Language.Lang, pageLang) {
- p.s = site
- p.Site = &site.Info
- return
- }
+func (h *HugoSites) removePageByPath(path string) {
+ for _, s := range h.Sites {
+ s.removePageByPath(path)
}
-
}
func (h *HugoSites) setupTranslations() {
-
- master := h.Sites[0]
-
- for _, p := range master.rawAllPages {
- if p.Lang() == "" {
- panic("Page language missing: " + p.Title)
- }
-
- if p.Kind == kindUnknown {
- p.Kind = p.s.kindFromSections(p.sections)
- }
-
- if !p.s.isEnabled(p.Kind) {
- continue
- }
-
- shouldBuild := p.shouldBuild()
-
- for i, site := range h.Sites {
- // The site is assigned by language when read.
- if site == p.s {
- site.updateBuildStats(p)
- if shouldBuild {
- site.Pages = append(site.Pages, p)
- }
+ for _, s := range h.Sites {
+ for _, p := range s.rawAllPages {
+ if p.Kind == kindUnknown {
+ p.Kind = p.s.kindFromSections(p.sections)
}
- if !shouldBuild {
+ if !p.s.isEnabled(p.Kind) {
continue
}
- if i == 0 {
- site.AllPages = append(site.AllPages, p)
+ shouldBuild := p.shouldBuild()
+ s.updateBuildStats(p)
+ if shouldBuild {
+ s.Pages = append(s.Pages, p)
}
}
+ }
+
+ allPages := make(Pages, 0)
+
+ for _, s := range h.Sites {
+ allPages = append(allPages, s.Pages...)
+ }
+ allPages.Sort()
+
+ for _, s := range h.Sites {
+ s.AllPages = allPages
}
// Pull over the collections from the master site
for i := 1; i < len(h.Sites); i++ {
- h.Sites[i].AllPages = h.Sites[0].AllPages
h.Sites[i].Data = h.Sites[0].Data
}
if len(h.Sites) > 1 {
- pages := h.Sites[0].AllPages
- allTranslations := pagesToTranslationsMap(pages)
- assignTranslationsToPages(allTranslations, pages)
+ allTranslations := pagesToTranslationsMap(allPages)
+ assignTranslationsToPages(allTranslations, allPages)
}
}
@@ -501,6 +536,7 @@ func (s *Site) preparePagesForRender(cfg *BuildCfg) {
pageChan := make(chan *Page)
wg := &sync.WaitGroup{}
+
numWorkers := getGoMaxProcs() * 4
for i := 0; i < numWorkers; i++ {
@@ -508,77 +544,10 @@ func (s *Site) preparePagesForRender(cfg *BuildCfg) {
go func(pages <-chan *Page, wg *sync.WaitGroup) {
defer wg.Done()
for p := range pages {
- if !p.shouldRenderTo(s.rc.Format) {
- // No need to prepare
- continue
- }
- var shortcodeUpdate bool
- if p.shortcodeState != nil {
- shortcodeUpdate = p.shortcodeState.updateDelta()
- }
-
- if !shortcodeUpdate && !cfg.whatChanged.other && p.rendered {
- // No need to process it again.
- continue
- }
-
- // If we got this far it means that this is either a new Page pointer
- // or a template or similar has changed so wee need to do a rerendering
- // of the shortcodes etc.
-
- // Mark it as rendered
- p.rendered = true
-
- // If in watch mode or if we have multiple output formats,
- // we need to keep the original so we can
- // potentially repeat this process on rebuild.
- needsACopy := cfg.Watching || len(p.outputFormats) > 1
- var workContentCopy []byte
- if needsACopy {
- workContentCopy = make([]byte, len(p.workContent))
- copy(workContentCopy, p.workContent)
- } else {
- // Just reuse the same slice.
- workContentCopy = p.workContent
- }
-
- if p.Markup == "markdown" {
- tmpContent, tmpTableOfContents := helpers.ExtractTOC(workContentCopy)
- p.TableOfContents = helpers.BytesToHTML(tmpTableOfContents)
- workContentCopy = tmpContent
- }
-
- var err error
- if workContentCopy, err = handleShortcodes(p, workContentCopy); err != nil {
- s.Log.ERROR.Printf("Failed to handle shortcodes for page %s: %s", p.BaseFileName(), err)
- }
-
- if p.Markup != "html" {
+ if err := p.prepareForRender(cfg); err != nil {
+ s.Log.ERROR.Printf("Failed to prepare page %q for render: %s", p.BaseFileName(), err)
- // Now we know enough to create a summary of the page and count some words
- summaryContent, err := p.setUserDefinedSummaryIfProvided(workContentCopy)
-
- if err != nil {
- s.Log.ERROR.Printf("Failed to set user defined summary for page %q: %s", p.Path(), err)
- } else if summaryContent != nil {
- workContentCopy = summaryContent.content
- }
-
- p.Content = helpers.BytesToHTML(workContentCopy)
-
- if summaryContent == nil {
- if err := p.setAutoSummary(); err != nil {
- s.Log.ERROR.Printf("Failed to set user auto summary for page %q: %s", p.pathOrTitle(), err)
- }
- }
-
- } else {
- p.Content = helpers.BytesToHTML(workContentCopy)
}
-
- //analyze for raw stats
- p.analyzePage()
-
}
}(pageChan, wg)
}
@@ -646,3 +615,113 @@ func (h *HugoSites) findAllPagesByKind(kind string) Pages {
func (h *HugoSites) findAllPagesByKindNotIn(kind string) Pages {
return h.findPagesByKindNotIn(kind, h.Sites[0].AllPages)
}
+
+func (h *HugoSites) findPagesByShortcode(shortcode string) Pages {
+ var pages Pages
+ for _, s := range h.Sites {
+ pages = append(pages, s.findPagesByShortcode(shortcode)...)
+ }
+ return pages
+}
+
+// Used in partial reloading to determine if the change is in a bundle.
+type contentChangeMap struct {
+ mu sync.RWMutex
+ branches []string
+ leafs []string
+
+ // Hugo supports symlinked content (both directories and files). This
+ // can lead to situations where the same file can be referenced from several
+ // locations in /content -- which is really cool, but also means we have to
+ // go an extra mile to handle changes.
+ // This map is only used in watch mode.
+ // It maps either file to files or the real dir to a set of content directories where it is in use.
+ symContent map[string]map[string]bool
+ symContentMu sync.Mutex
+}
+
+func (m *contentChangeMap) add(filename string, tp bundleDirType) {
+ m.mu.Lock()
+ dir := filepath.Dir(filename)
+ switch tp {
+ case bundleBranch:
+ m.branches = append(m.branches, dir)
+ case bundleLeaf:
+ m.leafs = append(m.leafs, dir)
+ default:
+ panic("invalid bundle type")
+ }
+ m.mu.Unlock()
+}
+
+// Track the addition of bundle dirs.
+func (m *contentChangeMap) handleBundles(b *bundleDirs) {
+ for _, bd := range b.bundles {
+ m.add(bd.fi.Filename(), bd.tp)
+ }
+}
+
+// resolveAndRemove resolves the given filename to the root folder of a bundle, if relevant.
+// It also removes the entry from the map. It will be re-added again by the partial
+// build if it still is a bundle.
+func (m *contentChangeMap) resolveAndRemove(filename string) (string, string, bundleDirType) {
+ m.mu.RLock()
+ defer m.mu.RUnlock()
+
+ dir, name := filepath.Split(filename)
+ dir = strings.TrimSuffix(dir, helpers.FilePathSeparator)
+ fileTp, isContent := classifyBundledFile(name)
+
+ // If the file itself is a bundle, no need to look further:
+ if fileTp > bundleNot {
+ return dir, dir, fileTp
+ }
+
+ // This may be a member of a bundle. Start with branch bundles, the most specific.
+ if !isContent {
+ for i, b := range m.branches {
+ if b == dir {
+ m.branches = append(m.branches[:i], m.branches[i+1:]...)
+ return dir, dir, bundleBranch
+ }
+ }
+ }
+
+ // And finally the leaf bundles, which can contain anything.
+ for i, l := range m.leafs {
+ if strings.HasPrefix(dir, l) {
+ m.leafs = append(m.leafs[:i], m.leafs[i+1:]...)
+ return dir, dir, bundleLeaf
+ }
+ }
+
+ // Not part of any bundle
+ return dir, filename, bundleNot
+}
+
+func (m *contentChangeMap) addSymbolicLinkMapping(from, to string) {
+ m.symContentMu.Lock()
+ mm, found := m.symContent[from]
+ if !found {
+ mm = make(map[string]bool)
+ m.symContent[from] = mm
+ }
+ mm[to] = true
+ m.symContentMu.Unlock()
+}
+
+func (m *contentChangeMap) GetSymbolicLinkMappings(dir string) []string {
+ mm, found := m.symContent[dir]
+ if !found {
+ return nil
+ }
+ dirs := make([]string, len(mm))
+ i := 0
+ for dir, _ := range mm {
+ dirs[i] = dir
+ i++
+ }
+
+ sort.Strings(dirs)
+ return dirs
+}
diff --git a/hugolib/hugo_sites_build.go b/hugolib/hugo_sites_build.go
index c0749e388..b2b394eb5 100644
--- a/hugolib/hugo_sites_build.go
+++ b/hugolib/hugo_sites_build.go
@@ -15,7 +15,6 @@ package hugolib
import (
"bytes"
- "time"
"errors"
@@ -30,7 +29,7 @@ func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error {
h.Metrics.Reset()
}
- t0 := time.Now()
+ //t0 := time.Now()
// Need a pointer as this may be modified.
conf := &config
@@ -63,10 +62,6 @@ func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error {
return err
}
- if config.PrintStats {
- h.Log.FEEDBACK.Printf("total in %v ms\n", int(1000*time.Since(t0).Seconds()))
- }
-
if h.Metrics != nil {
var b bytes.Buffer
h.Metrics.WriteMetrics(&b)
@@ -101,8 +96,6 @@ func (h *HugoSites) init(config *BuildCfg) error {
}
}
- h.runMode.Watching = config.Watching
-
return nil
}
@@ -115,12 +108,10 @@ func (h *HugoSites) initRebuild(config *BuildCfg) error {
return errors.New("Rebuild does not support 'ResetState'.")
}
- if !config.Watching {
+ if !h.running {
return errors.New("Rebuild called when not in watch mode")
}
- h.runMode.Watching = config.Watching
-
if config.whatChanged.source {
// This is for the non-renderable content pages (rarely used, I guess).
// We could maybe detect if this is really needed, but it should be
@@ -147,7 +138,7 @@ func (h *HugoSites) process(config *BuildCfg, events ...fsnotify.Event) error {
if len(events) > 0 {
// This is a rebuild
- changed, err := firstSite.reProcess(events)
+ changed, err := firstSite.processPartial(events)
config.whatChanged = &changed
return err
}
@@ -188,25 +179,19 @@ func (h *HugoSites) assemble(config *BuildCfg) error {
}
for _, s := range h.Sites {
- s.siteStats = &siteStats{}
for _, p := range s.Pages {
// May have been set in front matter
if len(p.outputFormats) == 0 {
p.outputFormats = s.outputFormats[p.Kind]
}
-
- cnt := len(p.outputFormats)
- if p.Kind == KindPage {
- s.siteStats.pageCountRegular += cnt
+ for _, r := range p.Resources.ByType(pageResourceType) {
+ r.(*Page).outputFormats = p.outputFormats
}
- s.siteStats.pageCount += cnt
- if err := p.initTargetPathDescriptor(); err != nil {
- return err
- }
- if err := p.initURLs(); err != nil {
+ if err := p.initPaths(); err != nil {
return err
}
+
}
s.assembleMenus()
s.refreshPageCaches()
@@ -222,7 +207,6 @@ func (h *HugoSites) assemble(config *BuildCfg) error {
}
func (h *HugoSites) render(config *BuildCfg) error {
-
for _, s := range h.Sites {
s.initRenderFormats()
for i, rf := range s.renderFormats {
@@ -235,10 +219,6 @@ func (h *HugoSites) render(config *BuildCfg) error {
}
}
}
-
- if !config.SkipRender && config.PrintStats {
- s.Stats()
- }
}
if !config.SkipRender {
diff --git a/hugolib/hugo_sites_build_test.go b/hugolib/hugo_sites_build_test.go
index 6b3522d6f..3d66f7fe6 100644
--- a/hugolib/hugo_sites_build_test.go
+++ b/hugolib/hugo_sites_build_test.go
@@ -16,7 +16,6 @@ import (
"github.com/gohugoio/hugo/deps"
"github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/hugofs"
- "github.com/gohugoio/hugo/source"
"github.com/spf13/afero"
"github.com/spf13/viper"
"github.com/stretchr/testify/require"
@@ -26,6 +25,7 @@ type testSiteConfig struct {
DefaultContentLanguage string
DefaultContentLanguageInSubdir bool
Fs afero.Fs
+ Running bool
}
func TestMultiSitesMainLangInRoot(t *testing.T) {
@@ -226,7 +226,7 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) {
gp1 := sites.GetContentPage(filepath.FromSlash("content/sect/doc1.en.md"))
require.NotNil(t, gp1)
require.Equal(t, "doc1", gp1.Title)
- gp2 := sites.GetContentPage(filepath.FromSlash("content/sect/notfound.md"))
+ gp2 := sites.GetContentPage(filepath.FromSlash("content/dummysect/notfound.md"))
require.Nil(t, gp2)
enSite := sites.Sites[0]
@@ -238,7 +238,6 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) {
if len(enSite.RegularPages) != 4 {
t.Fatal("Expected 4 english pages")
}
- require.Len(t, enSite.Source.Files(), 14, "should have 13 source files")
require.Len(t, enSite.AllPages, 28, "should have 28 total pages (including translations and index types)")
doc1en := enSite.RegularPages[0]
@@ -401,12 +400,11 @@ func TestMultiSitesRebuild(t *testing.T) {
if !isCI() {
defer leaktest.CheckTimeout(t, 30*time.Second)()
}
- siteConfig := testSiteConfig{Fs: afero.NewMemMapFs(), DefaultContentLanguage: "fr", DefaultContentLanguageInSubdir: true}
+ siteConfig := testSiteConfig{Running: true, Fs: afero.NewMemMapFs(), DefaultContentLanguage: "fr", DefaultContentLanguageInSubdir: true}
sites := createMultiTestSites(t, siteConfig, multiSiteTOMLConfigTemplate)
fs := sites.Fs
- cfg := BuildCfg{Watching: true}
th := testHelper{sites.Cfg, fs, t}
-
+ cfg := BuildCfg{}
err := sites.Build(cfg)
if err != nil {
@@ -446,8 +444,10 @@ func TestMultiSitesRebuild(t *testing.T) {
// * Change a template
// * Change language file
{
- nil,
- []fsnotify.Event{{Name: "content/sect/doc2.en.md", Op: fsnotify.Remove}},
+ func(t *testing.T) {
+ fs.Source.Remove("content/sect/doc2.en.md")
+ },
+ []fsnotify.Event{{Name: filepath.FromSlash("content/sect/doc2.en.md"), Op: fsnotify.Remove}},
func(t *testing.T) {
require.Len(t, enSite.RegularPages, 3, "1 en removed")
@@ -467,9 +467,9 @@ func TestMultiSitesRebuild(t *testing.T) {
writeNewContentFile(t, fs, "new_fr_1", "2016-07-30", "content/new1.fr.md", 10)
},
[]fsnotify.Event{
- {Name: "content/new1.en.md", Op: fsnotify.Create},
- {Name: "content/new2.en.md", Op: fsnotify.Create},
- {Name: "content/new1.fr.md", Op: fsnotify.Create},
+ {Name: filepath.FromSlash("content/new1.en.md"), Op: fsnotify.Create},
+ {Name: filepath.FromSlash("content/new2.en.md"), Op: fsnotify.Create},
+ {Name: filepath.FromSlash("content/new1.fr.md"), Op: fsnotify.Create},
},
func(t *testing.T) {
require.Len(t, enSite.RegularPages, 5)
@@ -490,7 +490,7 @@ func TestMultiSitesRebuild(t *testing.T) {
doc1 += "CHANGED"
writeSource(t, fs, p, doc1)
},
- []fsnotify.Event{{Name: "content/sect/doc1.en.md", Op: fsnotify.Write}},
+ []fsnotify.Event{{Name: filepath.FromSlash("content/sect/doc1.en.md"), Op: fsnotify.Write}},
func(t *testing.T) {
require.Len(t, enSite.RegularPages, 5)
doc1 := readDestination(t, fs, "public/en/sect/doc1-slug/index.html")
@@ -506,8 +506,8 @@ func TestMultiSitesRebuild(t *testing.T) {
}
},
[]fsnotify.Event{
- {Name: "content/new1renamed.en.md", Op: fsnotify.Rename},
- {Name: "content/new1.en.md", Op: fsnotify.Rename},
+ {Name: filepath.FromSlash("content/new1renamed.en.md"), Op: fsnotify.Rename},
+ {Name: filepath.FromSlash("content/new1.en.md"), Op: fsnotify.Rename},
},
func(t *testing.T) {
require.Len(t, enSite.RegularPages, 5, "Rename")
@@ -523,7 +523,7 @@ func TestMultiSitesRebuild(t *testing.T) {
templateContent += "{{ print \"Template Changed\"}}"
writeSource(t, fs, template, templateContent)
},
- []fsnotify.Event{{Name: "layouts/_default/single.html", Op: fsnotify.Write}},
+ []fsnotify.Event{{Name: filepath.FromSlash("layouts/_default/single.html"), Op: fsnotify.Write}},
func(t *testing.T) {
require.Len(t, enSite.RegularPages, 5)
require.Len(t, enSite.AllPages, 30)
@@ -540,7 +540,7 @@ func TestMultiSitesRebuild(t *testing.T) {
langContent = strings.Replace(langContent, "Bonjour", "Salut", 1)
writeSource(t, fs, languageFile, langContent)
},
- []fsnotify.Event{{Name: "i18n/fr.yaml", Op: fsnotify.Write}},
+ []fsnotify.Event{{Name: filepath.FromSlash("i18n/fr.yaml"), Op: fsnotify.Write}},
func(t *testing.T) {
require.Len(t, enSite.RegularPages, 5)
require.Len(t, enSite.AllPages, 30)
@@ -563,7 +563,7 @@ func TestMultiSitesRebuild(t *testing.T) {
writeSource(t, fs, "layouts/shortcodes/shortcode.html", "Modified Shortcode: {{ i18n \"hello\" }}")
},
[]fsnotify.Event{
- {Name: "layouts/shortcodes/shortcode.html", Op: fsnotify.Write},
+ {Name: filepath.FromSlash("layouts/shortcodes/shortcode.html"), Op: fsnotify.Write},
},
func(t *testing.T) {
require.Len(t, enSite.RegularPages, 5)
@@ -1097,16 +1097,16 @@ hello:
}
// Sources
- sources := []source.ByteSource{
- {Name: filepath.FromSlash("root.en.md"), Content: []byte(`---
+ sources := [][2]string{
+ {filepath.FromSlash("root.en.md"), `---
title: root
weight: 10000
slug: root
publishdate: "2000-01-01"
---
# root
-`)},
- {Name: filepath.FromSlash("sect/doc1.en.md"), Content: []byte(`---
+`},
+ {filepath.FromSlash("sect/doc1.en.md"), `---
title: doc1
weight: 1
slug: doc1-slug
@@ -1122,8 +1122,8 @@ publishdate: "2000-01-01"
{{< lingo >}}
NOTE: slug should be used as URL
-`)},
- {Name: filepath.FromSlash("sect/doc1.fr.md"), Content: []byte(`---
+`},
+ {filepath.FromSlash("sect/doc1.fr.md"), `---
title: doc1
weight: 1
plaques:
@@ -1140,8 +1140,8 @@ publishdate: "2000-01-04"
NOTE: should be in the 'en' Page's 'Translations' field.
NOTE: date is after "doc3"
-`)},
- {Name: filepath.FromSlash("sect/doc2.en.md"), Content: []byte(`---
+`},
+ {filepath.FromSlash("sect/doc2.en.md"), `---
title: doc2
weight: 2
publishdate: "2000-01-02"
@@ -1149,8 +1149,8 @@ publishdate: "2000-01-02"
# doc2
*some content*
NOTE: without slug, "doc2" should be used, without ".en" as URL
-`)},
- {Name: filepath.FromSlash("sect/doc3.en.md"), Content: []byte(`---
+`},
+ {filepath.FromSlash("sect/doc3.en.md"), `---
title: doc3
weight: 3
publishdate: "2000-01-03"
@@ -1163,8 +1163,8 @@ url: /superbob
# doc3
*some content*
NOTE: third 'en' doc, should trigger pagination on home page.
-`)},
- {Name: filepath.FromSlash("sect/doc4.md"), Content: []byte(`---
+`},
+ {filepath.FromSlash("sect/doc4.md"), `---
title: doc4
weight: 4
plaques:
@@ -1175,8 +1175,8 @@ publishdate: "2000-01-05"
*du contenu francophone*
NOTE: should use the defaultContentLanguage and mark this doc as 'fr'.
NOTE: doesn't have any corresponding translation in 'en'
-`)},
- {Name: filepath.FromSlash("other/doc5.fr.md"), Content: []byte(`---
+`},
+ {filepath.FromSlash("other/doc5.fr.md"), `---
title: doc5
weight: 5
publishdate: "2000-01-06"
@@ -1184,45 +1184,45 @@ publishdate: "2000-01-06"
# doc5
*autre contenu francophone*
NOTE: should use the "permalinks" configuration with :filename
-`)},
+`},
// Add some for the stats
- {Name: filepath.FromSlash("stats/expired.fr.md"), Content: []byte(`---
+ {filepath.FromSlash("stats/expired.fr.md"), `---
title: expired
publishdate: "2000-01-06"
expiryDate: "2001-01-06"
---
# Expired
-`)},
- {Name: filepath.FromSlash("stats/future.fr.md"), Content: []byte(`---
+`},
+ {filepath.FromSlash("stats/future.fr.md"), `---
title: future
weight: 6
publishdate: "2100-01-06"
---
# Future
-`)},
- {Name: filepath.FromSlash("stats/expired.en.md"), Content: []byte(`---
+`},
+ {filepath.FromSlash("stats/expired.en.md"), `---
title: expired
weight: 7
publishdate: "2000-01-06"
expiryDate: "2001-01-06"
---
# Expired
-`)},
- {Name: filepath.FromSlash("stats/future.en.md"), Content: []byte(`---
+`},
+ {filepath.FromSlash("stats/future.en.md"), `---
title: future
weight: 6
publishdate: "2100-01-06"
---
# Future
-`)},
- {Name: filepath.FromSlash("stats/draft.en.md"), Content: []byte(`---
+`},
+ {filepath.FromSlash("stats/draft.en.md"), `---
title: expired
publishdate: "2000-01-06"
draft: true
---
# Draft
-`)},
- {Name: filepath.FromSlash("stats/tax.nn.md"), Content: []byte(`---
+`},
+ {filepath.FromSlash("stats/tax.nn.md"), `---
title: Tax NN
weight: 8
publishdate: "2000-01-06"
@@ -1231,8 +1231,8 @@ lag:
- Sogndal
---
# Tax NN
-`)},
- {Name: filepath.FromSlash("stats/tax.nb.md"), Content: []byte(`---
+`},
+ {filepath.FromSlash("stats/tax.nb.md"), `---
title: Tax NB
weight: 8
publishdate: "2000-01-06"
@@ -1241,7 +1241,7 @@ lag:
- Sogndal
---
# Tax NB
-`)},
+`},
}
configFile := "multilangconfig." + configSuffix
@@ -1252,10 +1252,8 @@ lag:
fs := hugofs.NewFrom(mf, cfg)
- // Hugo support using ByteSource's directly (for testing),
- // but to make it more real, we write them to the mem file system.
for _, s := range sources {
- if err := afero.WriteFile(mf, filepath.Join("content", s.Name), s.Content, 0755); err != nil {
+ if err := afero.WriteFile(mf, filepath.Join("content", s[0]), []byte(s[1]), 0755); err != nil {
t.Fatalf("Failed to write file: %s", err)
}
}
@@ -1263,7 +1261,7 @@ lag:
// Add some data
writeSource(t, fs, "data/hugo.toml", "slogan = \"Hugo Rocks!\"")
- sites, err := NewHugoSites(deps.DepsCfg{Fs: fs, Cfg: cfg}) //, Logger: newDebugLogger()})
+ sites, err := NewHugoSites(deps.DepsCfg{Fs: fs, Cfg: cfg, Running: siteConfig.Running}) //, Logger: newDebugLogger()})
if err != nil {
t.Fatalf("Failed to create sites: %s", err)
@@ -1311,7 +1309,7 @@ func readFileFromFs(t testing.TB, fs afero.Fs, filename string) string {
b, err := afero.ReadFile(fs, filename)
if err != nil {
// Print some debug info
- root := strings.Split(filename, helpers.FilePathSeparator)[0]
+ root := "/" //strings.Split(filename, helpers.FilePathSeparator)[0]
afero.Walk(fs, root, func(path string, info os.FileInfo, err error) error {
if info != nil && !info.IsDir() {
fmt.Println(" ", path)
diff --git a/hugolib/hugo_sites_multihost_test.go b/hugolib/hugo_sites_multihost_test.go
index af96c766d..0729f7d9d 100644
--- a/hugolib/hugo_sites_multihost_test.go
+++ b/hugolib/hugo_sites_multihost_test.go
@@ -47,13 +47,12 @@ languageName = "Nynorsk"
`
- siteConfig := testSiteConfig{Fs: afero.NewMemMapFs(), DefaultContentLanguage: "fr", DefaultContentLanguageInSubdir: false}
+ siteConfig := testSiteConfig{Running: true, Fs: afero.NewMemMapFs(), DefaultContentLanguage: "fr", DefaultContentLanguageInSubdir: false}
sites := createMultiTestSites(t, siteConfig, multiSiteTOMLConfigTemplate)
fs := sites.Fs
- cfg := BuildCfg{Watching: true}
th := testHelper{sites.Cfg, fs, t}
assert := require.New(t)
-
+ cfg := BuildCfg{}
err := sites.Build(cfg)
assert.NoError(err)
diff --git a/hugolib/hugolib.debug b/hugolib/hugolib.debug
new file mode 100644
index 000000000..fa1575f58
--- /dev/null
+++ b/hugolib/hugolib.debug
Binary files differ
diff --git a/hugolib/menu_old_test.go b/hugolib/menu_old_test.go
deleted file mode 100644
index 65db31588..000000000
--- a/hugolib/menu_old_test.go
+++ /dev/null
@@ -1,654 +0,0 @@
-// Copyright 2016 The Hugo Authors. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package hugolib
-
-// TODO(bep) remove this file when the reworked tests in menu_test.go is done.
-// NOTE: Do not add more tests to this file!
-
-import (
- "fmt"
- "strings"
- "testing"
-
- "github.com/gohugoio/hugo/deps"
-
- "path/filepath"
-
- "github.com/BurntSushi/toml"
- "github.com/gohugoio/hugo/source"
- "github.com/stretchr/testify/assert"
- "github.com/stretchr/testify/require"
-)
-
-const (
- confMenu1 = `
-[[menu.main]]
- name = "Go Home"
- url = "/"
- weight = 1
- pre = "<div>"
- post = "</div>"
-[[menu.main]]
- name = "Blog"
- url = "/posts"
-[[menu.main]]
- name = "ext"
- url = "http://gohugo.io"
- identifier = "ext"
-[[menu.main]]
- name = "ext2"
- url = "http://foo.local/Zoo/foo"
- identifier = "ext2"
-[[menu.grandparent]]
- name = "grandparent"
- url = "/grandparent"
- identifier = "grandparentId"
-[[menu.grandparent]]
- name = "parent"
- url = "/parent"
- identifier = "parentId"
- parent = "grandparentId"
-[[menu.grandparent]]
- name = "Go Home3"
- url = "/"
- identifier = "grandchildId"
- parent = "parentId"
-[[menu.tax]]
- name = "Tax1"
- url = "/two/key/"
- identifier="1"
-[[menu.tax]]
- name = "Tax2"
- url = "/two/key/"
- identifier="2"
-[[menu.tax]]
- name = "Tax RSS"
- url = "/two/key.xml"
- identifier="xml"
-[[menu.hash]]
- name = "Tax With #"
- url = "/resource#anchor"
- identifier="hash"
-[[menu.unicode]]
- name = "Unicode Russian"
- identifier = "unicode-russian"
- url = "/новости-проекта" # Russian => "news-project"
-[[menu.with_title]]
- name="entry with title"
- title="a menuentry title"
- url="/title"
- identifier="titled"`
-)
-
-var menuPage1 = []byte(`+++
-title = "One"
-weight = 1
-[menu]
- [menu.p_one]
-+++
-Front Matter with Menu Pages`)
-
-var menuPage2 = []byte(`+++
-title = "Two"
-weight = 2
-[menu]
- [menu.p_one]
- [menu.p_two]
- identifier = "Two"
-
-+++
-Front Matter with Menu Pages`)
-
-var menuPage3 = []byte(`+++
-title = "Three"
-weight = 3
-[menu]
- [menu.p_two]
- Name = "Three"
- Parent = "Two"
-+++
-Front Matter with Menu Pages`)
-
-var menuPage4 = []byte(`+++
-title = "Four"
-weight = 4
-[menu]
- [menu.p_two]
- Name = "Four"
- Parent = "Three"
-+++
-Front Matter with Menu Pages`)
-
-var menuPageSources = []source.ByteSource{
- {Name: filepath.FromSlash("sect/doc1.md"), Content: menuPage1},
- {Name: filepath.FromSlash("sect/doc2.md"), Content: menuPage2},
- {Name: filepath.FromSlash("sect/doc3.md"), Content: menuPage3},
-}
-
-var menuPageSectionsSources = []source.ByteSource{
- {Name: filepath.FromSlash("first/doc1.md"), Content: menuPage1},
- {Name: filepath.FromSlash("first/doc2.md"), Content: menuPage2},
- {Name: filepath.FromSlash("second-section/doc3.md"), Content: menuPage3},
- {Name: filepath.FromSlash("Fish and Chips/doc4.md"), Content: menuPage4},
-}
-
-func tstCreateMenuPageWithNameTOML(title, menu, name string) []byte {
- return []byte(fmt.Sprintf(`+++
-title = "%s"
-weight = 1
-[menu]
- [menu.%s]
- name = "%s"
-+++
-Front Matter with Menu with Name`, title, menu, name))
-}
-
-func tstCreateMenuPageWithIdentifierTOML(title, menu, identifier string) []byte {
- return []byte(fmt.Sprintf(`+++
-title = "%s"
-weight = 1
-[menu]
- [menu.%s]
- identifier = "%s"
- name = "somename"
-+++
-Front Matter with Menu with Identifier`, title, menu, identifier))
-}
-
-func tstCreateMenuPageWithNameYAML(title, menu, name string) []byte {
- return []byte(fmt.Sprintf(`---
-title: "%s"
-weight: 1
-menu:
- %s:
- name: "%s"
----
-Front Matter with Menu with Name`, title, menu, name))
-}
-
-func tstCreateMenuPageWithIdentifierYAML(title, menu, identifier string) []byte {
- return []byte(fmt.Sprintf(`---
-title: "%s"
-weight: 1
-menu:
- %s:
- identifier: "%s"
- name: "somename"
----
-Front Matter with Menu with Identifier`, title, menu, identifier))
-}
-
-// Issue 817 - identifier should trump everything
-func TestPageMenuWithIdentifier(t *testing.T) {
- t.Parallel()
- toml := []source.ByteSource{
- {Name: "sect/doc1.md", Content: tstCreateMenuPageWithIdentifierTOML("t1", "m1", "i1")},
- {Name: "sect/doc2.md", Content: tstCreateMenuPageWithIdentifierTOML("t1", "m1", "i2")},
- {Name: "sect/doc3.md", Content: tstCreateMenuPageWithIdentifierTOML("t1", "m1", "i2")}, // duplicate
- }
-
- yaml := []source.ByteSource{
- {Name: "sect/doc1.md", Content: tstCreateMenuPageWithIdentifierYAML("t1", "m1", "i1")},
- {Name: "sect/doc2.md", Content: tstCreateMenuPageWithIdentifierYAML("t1", "m1", "i2")},
- {Name: "sect/doc3.md", Content: tstCreateMenuPageWithIdentifierYAML("t1", "m1", "i2")}, // duplicate
- }
-
- doTestPageMenuWithIdentifier(t, toml)
- doTestPageMenuWithIdentifier(t, yaml)
-
-}
-
-func doTestPageMenuWithIdentifier(t *testing.T, menuPageSources []source.ByteSource) {
-
- s := setupMenuTests(t, menuPageSources)
-
- assert.Equal(t, 3, len(s.RegularPages), "Not enough pages")
-
- me1 := findTestMenuEntryByID(s, "m1", "i1")
- me2 := findTestMenuEntryByID(s, "m1", "i2")
-
- require.NotNil(t, me1)
- require.NotNil(t, me2)
-
- assert.True(t, strings.Contains(me1.URL, "doc1"), me1.URL)
- assert.True(t, strings.Contains(me2.URL, "doc2") || strings.Contains(me2.URL, "doc3"), me2.URL)
-
-}
-
-// Issue 817 contd - name should be second identifier in
-func TestPageMenuWithDuplicateName(t *testing.T) {
- t.Parallel()
- toml := []source.ByteSource{
- {Name: "sect/doc1.md", Content: tstCreateMenuPageWithNameTOML("t1", "m1", "n1")},
- {Name: "sect/doc2.md", Content: tstCreateMenuPageWithNameTOML("t1", "m1", "n2")},
- {Name: "sect/doc3.md", Content: tstCreateMenuPageWithNameTOML("t1", "m1", "n2")}, // duplicate
- }
-
- yaml := []source.ByteSource{
- {Name: "sect/doc1.md", Content: tstCreateMenuPageWithNameYAML("t1", "m1", "n1")},
- {Name: "sect/doc2.md", Content: tstCreateMenuPageWithNameYAML("t1", "m1", "n2")},
- {Name: "sect/doc3.md", Content: tstCreateMenuPageWithNameYAML("t1", "m1", "n2")}, // duplicate
- }
-
- doTestPageMenuWithDuplicateName(t, toml)
- doTestPageMenuWithDuplicateName(t, yaml)
-
-}
-
-func doTestPageMenuWithDuplicateName(t *testing.T, menuPageSources []source.ByteSource) {
-
- s := setupMenuTests(t, menuPageSources)
-
- assert.Equal(t, 3, len(s.RegularPages), "Not enough pages")
-
- me1 := findTestMenuEntryByName(s, "m1", "n1")
- me2 := findTestMenuEntryByName(s, "m1", "n2")
-
- require.NotNil(t, me1)
- require.NotNil(t, me2)
-
- assert.True(t, strings.Contains(me1.URL, "doc1"), me1.URL)
- assert.True(t, strings.Contains(me2.URL, "doc2") || strings.Contains(me2.URL, "doc3"), me2.URL)
-
-}
-
-func TestPageMenu(t *testing.T) {
- t.Parallel()
- s := setupMenuTests(t, menuPageSources)
-
- if len(s.RegularPages) != 3 {
- t.Fatalf("Posts not created, expected 3 got %d", len(s.RegularPages))
- }
-
- first := s.RegularPages[0]
- second := s.RegularPages[1]
- third := s.RegularPages[2]
-
- pOne := findTestMenuEntryByName(s, "p_one", "One")
- pTwo := findTestMenuEntryByID(s, "p_two", "Two")
-
- for i, this := range []struct {
- menu string
- page *Page
- menuItem *MenuEntry
- isMenuCurrent bool
- hasMenuCurrent bool
- }{
- {"p_one", first, pOne, true, false},
- {"p_one", first, pTwo, false, false},
- {"p_one", second, pTwo, false, false},
- {"p_two", second, pTwo, true, false},
- {"p_two", third, pTwo, false, true},
- {"p_one", third, pTwo, false, false},
- } {
-
- if i != 4 {
- continue
- }
-
- isMenuCurrent := this.page.IsMenuCurrent(this.menu, this.menuItem)
- hasMenuCurrent := this.page.HasMenuCurrent(this.menu, this.menuItem)
-
- if isMenuCurrent != this.isMenuCurrent {
- t.Errorf("[%d] Wrong result from IsMenuCurrent: %v", i, isMenuCurrent)
- }
-
- if hasMenuCurrent != this.hasMenuCurrent {
- t.Errorf("[%d] Wrong result for menuItem %v for HasMenuCurrent: %v", i, this.menuItem, hasMenuCurrent)
- }
-
- }
-
-}
-
-func TestMenuURL(t *testing.T) {
- t.Parallel()
- s := setupMenuTests(t, menuPageSources)
-
- for i, this := range []struct {
- me *MenuEntry
- expectedURL string
- }{
- // issue #888
- {findTestMenuEntryByID(s, "hash", "hash"), "/Zoo/resource#anchor"},
- // issue #1774
- {findTestMenuEntryByID(s, "main", "ext"), "http://gohugo.io"},
- {findTestMenuEntryByID(s, "main", "ext2"), "http://foo.local/Zoo/foo"},
- } {
-
- if this.me == nil {
- t.Errorf("[%d] MenuEntry not found", i)
- continue
- }
-
- if this.me.URL != this.expectedURL {
- t.Errorf("[%d] Got URL %s expected %s", i, this.me.URL, this.expectedURL)
- }
-
- }
-
-}
-
-// Issue #1934
-func TestYAMLMenuWithMultipleEntries(t *testing.T) {
- t.Parallel()
- ps1 := []byte(`---
-title: "Yaml 1"
-weight: 5
-menu: ["p_one", "p_two"]
----
-Yaml Front Matter with Menu Pages`)
-
- ps2 := []byte(`---
-title: "Yaml 2"
-weight: 5
-menu:
- p_three:
- p_four:
----
-Yaml Front Matter with Menu Pages`)
-
- s := setupMenuTests(t, []source.ByteSource{
- {Name: filepath.FromSlash("sect/yaml1.md"), Content: ps1},
- {Name: filepath.FromSlash("sect/yaml2.md"), Content: ps2}})
-
- p1 := s.RegularPages[0]
- assert.Len(t, p1.Menus(), 2, "List YAML")
- p2 := s.RegularPages[1]
- assert.Len(t, p2.Menus(), 2, "Map YAML")
-
-}
-
-// issue #719
-func TestMenuWithUnicodeURLs(t *testing.T) {
- t.Parallel()
- for _, canonifyURLs := range []bool{true, false} {
- doTestMenuWithUnicodeURLs(t, canonifyURLs)
- }
-}
-
-func doTestMenuWithUnicodeURLs(t *testing.T, canonifyURLs bool) {
-
- s := setupMenuTests(t, menuPageSources, "canonifyURLs", canonifyURLs)
-
- unicodeRussian := findTestMenuEntryByID(s, "unicode", "unicode-russian")
-
- expected := "/%D0%BD%D0%BE%D0%B2%D0%BE%D1%81%D1%82%D0%B8-%D0%BF%D1%80%D0%BE%D0%B5%D0%BA%D1%82%D0%B0"
-
- if !canonifyURLs {
- expected = "/Zoo" + expected
- }
-
- assert.Equal(t, expected, unicodeRussian.URL)
-}
-
-func TestMenuWithTitle(t *testing.T) {
- s := setupMenuTests(t, menuPageSources)
- titled := findTestMenuEntryByID(s, "with_title", "titled")
- expected := "a menuentry title"
- assert.Equal(t, expected, titled.Title())
-}
-
-// Issue #1114
-func TestSectionPagesMenu2(t *testing.T) {
- t.Parallel()
- doTestSectionPagesMenu(true, t)
- doTestSectionPagesMenu(false, t)
-}
-
-func doTestSectionPagesMenu(canonifyURLs bool, t *testing.T) {
-
- s := setupMenuTests(t, menuPageSectionsSources,
- "sectionPagesMenu", "spm",
- "canonifyURLs", canonifyURLs,
- )
-
- sects := s.getPage(KindHome).Sections()
-
- require.Equal(t, 3, len(sects))
-
- firstSectionPages := s.getPage(KindSection, "first").Pages
- require.Equal(t, 2, len(firstSectionPages))
- secondSectionPages := s.getPage(KindSection, "second-section").Pages
- require.Equal(t, 1, len(secondSectionPages))
- fishySectionPages := s.getPage(KindSection, "Fish and Chips").Pages
- require.Equal(t, 1, len(fishySectionPages))
-
- nodeFirst := s.getPage(KindSection, "first")
- require.NotNil(t, nodeFirst)
- nodeSecond := s.getPage(KindSection, "second-section")
- require.NotNil(t, nodeSecond)
- nodeFishy := s.getPage(KindSection, "Fish and Chips")
- require.Equal(t, "Fish and Chips", nodeFishy.sections[0])
-
- firstSectionMenuEntry := findTestMenuEntryByID(s, "spm", "first")
- secondSectionMenuEntry := findTestMenuEntryByID(s, "spm", "second-section")
- fishySectionMenuEntry := findTestMenuEntryByID(s, "spm", "Fish and Chips")
-
- require.NotNil(t, firstSectionMenuEntry)
- require.NotNil(t, secondSectionMenuEntry)
- require.NotNil(t, nodeFirst)
- require.NotNil(t, nodeSecond)
- require.NotNil(t, fishySectionMenuEntry)
- require.NotNil(t, nodeFishy)
-
- require.True(t, nodeFirst.IsMenuCurrent("spm", firstSectionMenuEntry))
- require.False(t, nodeFirst.IsMenuCurrent("spm", secondSectionMenuEntry))
- require.False(t, nodeFirst.IsMenuCurrent("spm", fishySectionMenuEntry))
- require.True(t, nodeFishy.IsMenuCurrent("spm", fishySectionMenuEntry))
- require.Equal(t, "Fish and Chips", fishySectionMenuEntry.Name)
-
- for _, p := range firstSectionPages {
- require.True(t, p.HasMenuCurrent("spm", firstSectionMenuEntry))
- require.False(t, p.HasMenuCurrent("spm", secondSectionMenuEntry))
- }
-
- for _, p := range secondSectionPages {
- require.False(t, p.HasMenuCurrent("spm", firstSectionMenuEntry))
- require.True(t, p.HasMenuCurrent("spm", secondSectionMenuEntry))
- }
-
- for _, p := range fishySectionPages {
- require.False(t, p.HasMenuCurrent("spm", firstSectionMenuEntry))
- require.False(t, p.HasMenuCurrent("spm", secondSectionMenuEntry))
- require.True(t, p.HasMenuCurrent("spm", fishySectionMenuEntry))
- }
-}
-
-func TestMenuLimit(t *testing.T) {
- t.Parallel()
- s := setupMenuTests(t, menuPageSources)
- m := *s.Menus["main"]
-
- // main menu has 4 entries
- firstTwo := m.Limit(2)
- assert.Equal(t, 2, len(firstTwo))
- for i := 0; i < 2; i++ {
- assert.Equal(t, m[i], firstTwo[i])
- }
- assert.Equal(t, m, m.Limit(4))
- assert.Equal(t, m, m.Limit(5))
-}
-
-func TestMenuSortByN(t *testing.T) {
- t.Parallel()
- for i, this := range []struct {
- sortFunc func(p Menu) Menu
- assertFunc func(p Menu) bool
- }{
- {(Menu).Sort, func(p Menu) bool { return p[0].Weight == 1 && p[1].Name == "nx" && p[2].Identifier == "ib" }},
- {(Menu).ByWeight, func(p Menu) bool { return p[0].Weight == 1 && p[1].Name == "nx" && p[2].Identifier == "ib" }},
- {(Menu).ByName, func(p Menu) bool { return p[0].Name == "na" }},
- {(Menu).Reverse, func(p Menu) bool { return p[0].Identifier == "ib" && p[len(p)-1].Identifier == "ia" }},
- } {
- menu := Menu{&MenuEntry{Weight: 3, Name: "nb", Identifier: "ia"},
- &MenuEntry{Weight: 1, Name: "na", Identifier: "ic"},
- &MenuEntry{Weight: 1, Name: "nx", Identifier: "ic"},
- &MenuEntry{Weight: 2, Name: "nb", Identifier: "ix"},
- &MenuEntry{Weight: 2, Name: "nb", Identifier: "ib"}}
-
- sorted := this.sortFunc(menu)
-
- if !this.assertFunc(sorted) {
- t.Errorf("[%d] sort error", i)
- }
- }
-
-}
-
-func TestHomeNodeMenu(t *testing.T) {
- t.Parallel()
- s := setupMenuTests(t, menuPageSources,
- "canonifyURLs", true,
- "uglyURLs", false,
- )
-
- home := s.getPage(KindHome)
- homeMenuEntry := &MenuEntry{Name: home.Title, URL: home.URL()}
-
- for i, this := range []struct {
- menu string
- menuItem *MenuEntry
- isMenuCurrent bool
- hasMenuCurrent bool
- }{
- {"main", homeMenuEntry, true, false},
- {"doesnotexist", homeMenuEntry, false, false},
- {"main", &MenuEntry{Name: "Somewhere else", URL: "/somewhereelse"}, false, false},
- {"grandparent", findTestMenuEntryByID(s, "grandparent", "grandparentId"), false, true},
- {"grandparent", findTestMenuEntryByID(s, "grandparent", "parentId"), false, true},
- {"grandparent", findTestMenuEntryByID(s, "grandparent", "grandchildId"), true, false},
- } {
-
- isMenuCurrent := home.IsMenuCurrent(this.menu, this.menuItem)
- hasMenuCurrent := home.HasMenuCurrent(this.menu, this.menuItem)
-
- if isMenuCurrent != this.isMenuCurrent {
- fmt.Println("isMenuCurrent", isMenuCurrent)
- fmt.Printf("this: %#v\n", this)
- t.Errorf("[%d] Wrong result from IsMenuCurrent: %v for %q", i, isMenuCurrent, this.menuItem)
- }
-
- if hasMenuCurrent != this.hasMenuCurrent {
- fmt.Println("hasMenuCurrent", hasMenuCurrent)
- fmt.Printf("this: %#v\n", this)
- t.Errorf("[%d] Wrong result for menu %q menuItem %v for HasMenuCurrent: %v", i, this.menu, this.menuItem, hasMenuCurrent)
- }
- }
-}
-
-func TestHopefullyUniqueID(t *testing.T) {
- t.Parallel()
- assert.Equal(t, "i", (&MenuEntry{Identifier: "i", URL: "u", Name: "n"}).hopefullyUniqueID())
- assert.Equal(t, "u", (&MenuEntry{Identifier: "", URL: "u", Name: "n"}).hopefullyUniqueID())
- assert.Equal(t, "n", (&MenuEntry{Identifier: "", URL: "", Name: "n"}).hopefullyUniqueID())
-}
-
-func TestAddMenuEntryChild(t *testing.T) {
- t.Parallel()
- root := &MenuEntry{Weight: 1}
- root.addChild(&MenuEntry{Weight: 2})
- root.addChild(&MenuEntry{Weight: 1})
- assert.Equal(t, 2, len(root.Children))
- assert.Equal(t, 1, root.Children[0].Weight)
-}
-
-var testMenuIdentityMatcher = func(me *MenuEntry, id string) bool { return me.Identifier == id }
-var testMenuNameMatcher = func(me *MenuEntry, id string) bool { return me.Name == id }
-
-func findTestMenuEntryByID(s *Site, mn string, id string) *MenuEntry {
- return findTestMenuEntry(s, mn, id, testMenuIdentityMatcher)
-}
-func findTestMenuEntryByName(s *Site, mn string, id string) *MenuEntry {
- return findTestMenuEntry(s, mn, id, testMenuNameMatcher)
-}
-
-func findTestMenuEntry(s *Site, mn string, id string, matcher func(me *MenuEntry, id string) bool) *MenuEntry {
- var found *MenuEntry
- if menu, ok := s.Menus[mn]; ok {
- for _, me := range *menu {
-
- if matcher(me, id) {
- if found != nil {
- panic(fmt.Sprintf("Duplicate menu entry in menu %s with id/name %s", mn, id))
- }
- found = me
- }
-
- descendant := findDescendantTestMenuEntry(me, id, matcher)
- if descendant != nil {
- if found != nil {
- panic(fmt.Sprintf("Duplicate menu entry in menu %s with id/name %s", mn, id))
- }
- found = descendant
- }
- }
- }
- return found
-}
-
-func findDescendantTestMenuEntry(parent *MenuEntry, id string, matcher func(me *MenuEntry, id string) bool) *MenuEntry {
- var found *MenuEntry
- if parent.HasChildren() {
- for _, child := range parent.Children {
-
- if matcher(child, id) {
- if found != nil {
- panic(fmt.Sprintf("Duplicate menu entry in menuitem %s with id/name %s", parent.KeyName(), id))
- }
- found = child
- }
-
- descendant := findDescendantTestMenuEntry(child, id, matcher)
- if descendant != nil {
- if found != nil {
- panic(fmt.Sprintf("Duplicate menu entry in menuitem %s with id/name %s", parent.KeyName(), id))
- }
- found = descendant
- }
- }
- }
- return found
-}
-
-func setupMenuTests(t *testing.T, pageSources []source.ByteSource, configKeyValues ...interface{}) *Site {
-
- var (
- cfg, fs = newTestCfg()
- )
-
- menus, err := tomlToMap(confMenu1)
- require.NoError(t, err)
-
- cfg.Set("menu", menus["menu"])
- cfg.Set("baseURL", "http://foo.local/Zoo/")
-
- for i := 0; i < len(configKeyValues); i += 2 {
- cfg.Set(configKeyValues[i].(string), configKeyValues[i+1])
- }
-
- for _, src := range pageSources {
- writeSource(t, fs, filepath.Join("content", src.Name), string(src.Content))
-
- }
-
- return buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
-
-}
-
-func tomlToMap(s string) (map[string]interface{}, error) {
- var data = make(map[string]interface{})
- _, err := toml.Decode(s, &data)
- return data, err
-}
diff --git a/hugolib/page.go b/hugolib/page.go
index dba1ee227..76151060c 100644
--- a/hugolib/page.go
+++ b/hugolib/page.go
@@ -25,6 +25,8 @@ import (
"github.com/bep/gitmap"
"github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/resource"
+
"github.com/gohugoio/hugo/output"
"github.com/gohugoio/hugo/parser"
"github.com/mitchellh/mapstructure"
@@ -80,6 +82,8 @@ const (
kindSitemap = "sitemap"
kindRobotsTXT = "robotsTXT"
kind404 = "404"
+
+ pageResourceType = "page"
)
type Page struct {
@@ -101,6 +105,12 @@ type Page struct {
// This collection will be nil for regular pages.
Pages Pages
+ // Since Hugo 0.32, a Page can have resources such as images and CSS associated
+ // with it. The resources will typically be placed relative to the Page,
+ // but templates should use the links (Permalink and RelPermalink)
+ // provided by the Resource object.
+ Resources resource.Resources
+
// translations will contain references to this page in other language
// if available.
translations Pages
@@ -155,9 +165,6 @@ type Page struct {
// workContent is a copy of rawContent that may be mutated during site build.
workContent []byte
- // state telling if this is a "new page" or if we have rendered it previously.
- rendered bool
-
// whether the content is in a CJK language.
isCJKLanguage bool
@@ -218,8 +225,9 @@ type Page struct {
Sitemap Sitemap
URLPath
- permalink string
- relPermalink string
+ permalink string
+ relPermalink string
+ relPermalinkBase string // relPermalink without extension
layoutDescriptor output.LayoutDescriptor
@@ -263,6 +271,10 @@ func (p *Page) PubDate() time.Time {
return p.Date
}
+func (*Page) ResourceType() string {
+ return pageResourceType
+}
+
func (p *Page) RSSLink() template.URL {
f, found := p.outputFormats.GetByName(output.RSSFormat.Name)
if !found {
@@ -726,22 +738,29 @@ func (p *Page) getRenderingConfig() *helpers.BlackFriday {
}
func (s *Site) newPage(filename string) *Page {
- sp := source.NewSourceSpec(s.Cfg, s.Fs)
- p := &Page{
+ fi := newFileInfo(
+ s.SourceSpec,
+ s.absContentDir(),
+ filename,
+ nil,
+ bundleNot,
+ )
+ return s.newPageFromFile(fi)
+}
+
+func (s *Site) newPageFromFile(fi *fileInfo) *Page {
+ return &Page{
pageInit: &pageInit{},
- Kind: kindFromFilename(filename),
+ Kind: kindFromFilename(fi.Path()),
contentType: "",
- Source: Source{File: *sp.NewFile(filename)},
+ Source: Source{File: fi},
Keywords: []string{}, Sitemap: Sitemap{Priority: -1},
Params: make(map[string]interface{}),
translations: make(Pages, 0),
- sections: sectionsFromFilename(filename),
+ sections: sectionsFromDir(fi.Dir()),
Site: &s.Info,
s: s,
}
-
- s.Log.DEBUG.Println("Reading from", p.File.Path())
- return p
}
func (p *Page) IsRenderable() bool {
@@ -910,8 +929,8 @@ func (p *Page) LinkTitle() string {
}
func (p *Page) shouldBuild() bool {
- return shouldBuild(p.s.Cfg.GetBool("buildFuture"), p.s.Cfg.GetBool("buildExpired"),
- p.s.Cfg.GetBool("buildDrafts"), p.Draft, p.PublishDate, p.ExpiryDate)
+ return shouldBuild(p.s.BuildFuture, p.s.BuildExpired,
+ p.s.BuildDrafts, p.Draft, p.PublishDate, p.ExpiryDate)
}
func shouldBuild(buildFuture bool, buildExpired bool, buildDrafts bool, Draft bool,
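The refactor above replaces repeated config lookups with cached site flags, but the gating itself stays a pure function of those three flags and the page's draft, publish, and expiry fields. A minimal standalone sketch of that logic (illustrative names, not the exact Hugo implementation):

```go
package main

import (
	"fmt"
	"time"
)

// shouldBuildSketch mirrors the intent of hugolib's shouldBuild:
// a page is built unless it is an unpublished draft, scheduled in
// the future, or already expired, with each gate overridable by
// the corresponding site flag.
func shouldBuildSketch(buildFuture, buildExpired, buildDrafts, draft bool,
	publishDate, expiryDate time.Time) bool {
	if draft && !buildDrafts {
		return false
	}
	now := time.Now()
	if !buildFuture && !publishDate.IsZero() && publishDate.After(now) {
		return false
	}
	if !buildExpired && !expiryDate.IsZero() && expiryDate.Before(now) {
		return false
	}
	return true
}

func main() {
	future := time.Now().Add(24 * time.Hour)
	fmt.Println(shouldBuildSketch(false, false, false, false, future, time.Time{})) // false
	fmt.Println(shouldBuildSketch(true, false, false, false, future, time.Time{}))  // true
}
```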
@@ -967,20 +986,91 @@ func (p *Page) RelPermalink() string {
return p.relPermalink
}
-func (p *Page) initURLs() error {
- if len(p.outputFormats) == 0 {
- p.outputFormats = p.s.outputFormats[p.Kind]
+func (p *Page) subResourceLinkFactory(base string) string {
+ return path.Join(p.relPermalinkBase, base)
+}
+
+func (p *Page) prepareForRender(cfg *BuildCfg) error {
+ s := p.s
+
+ if !p.shouldRenderTo(s.rc.Format) {
+ // No need to prepare
+ return nil
+ }
+
+ var shortcodeUpdate bool
+ if p.shortcodeState != nil {
+ shortcodeUpdate = p.shortcodeState.updateDelta()
+ }
+
+ if !shortcodeUpdate && !cfg.whatChanged.other {
+ // No need to process it again.
+ return nil
+ }
+
+ // If we got this far, this is either a new Page pointer, or a template
+ // or similar has changed, so we need to re-render the shortcodes etc.
+
+ // If in watch mode or if we have multiple output formats,
+ // we need to keep the original so we can
+ // potentially repeat this process on rebuild.
+ needsACopy := p.s.running() || len(p.outputFormats) > 1
+ var workContentCopy []byte
+ if needsACopy {
+ workContentCopy = make([]byte, len(p.workContent))
+ copy(workContentCopy, p.workContent)
+ } else {
+ // Just reuse the same slice.
+ workContentCopy = p.workContent
+ }
+
+ if p.Markup == "markdown" {
+ tmpContent, tmpTableOfContents := helpers.ExtractTOC(workContentCopy)
+ p.TableOfContents = helpers.BytesToHTML(tmpTableOfContents)
+ workContentCopy = tmpContent
}
- rel := p.createRelativePermalink()
var err error
- p.permalink, err = p.s.permalinkForOutputFormat(rel, p.outputFormats[0])
- if err != nil {
- return err
+ if workContentCopy, err = handleShortcodes(p, workContentCopy); err != nil {
+ s.Log.ERROR.Printf("Failed to handle shortcodes for page %s: %s", p.BaseFileName(), err)
}
- rel = p.s.PathSpec.PrependBasePath(rel)
- p.relPermalink = rel
- p.layoutDescriptor = p.createLayoutDescriptor()
+
+ if p.Markup != "html" {
+
+ // Now we know enough to create a summary of the page and count some words
+ summaryContent, err := p.setUserDefinedSummaryIfProvided(workContentCopy)
+
+ if err != nil {
+ s.Log.ERROR.Printf("Failed to set user defined summary for page %q: %s", p.Path(), err)
+ } else if summaryContent != nil {
+ workContentCopy = summaryContent.content
+ }
+
+ p.Content = helpers.BytesToHTML(workContentCopy)
+
+ if summaryContent == nil {
+ if err := p.setAutoSummary(); err != nil {
+ s.Log.ERROR.Printf("Failed to set user auto summary for page %q: %s", p.pathOrTitle(), err)
+ }
+ }
+
+ } else {
+ p.Content = helpers.BytesToHTML(workContentCopy)
+ }
+
+ // Analyze for raw stats.
+ p.analyzePage()
+
+ // Handle bundled pages.
+ for _, r := range p.Resources.ByType(pageResourceType) {
+ p.s.PathSpec.ProcessingStats.Incr(&p.s.PathSpec.ProcessingStats.Pages)
+ bp := r.(*Page)
+ if err := bp.prepareForRender(cfg); err != nil {
+ s.Log.ERROR.Printf("Failed to prepare bundled page %q for render: %s", bp.BaseFileName(), err)
+ }
+ }
+
return nil
}
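One subtlety in prepareForRender above: workContent is only deep-copied when it may be needed again, i.e. in watch mode or when the page has more than one output format; otherwise the slice is mutated in place. The same copy-on-need idiom in isolation (a sketch, not Hugo code):

```go
package main

import "fmt"

// copyIfNeeded returns content itself when it can be consumed
// destructively, and a private copy when the caller must be able to
// repeat the work later (e.g. on rebuilds or for extra output formats).
func copyIfNeeded(content []byte, needsCopy bool) []byte {
	if !needsCopy {
		return content
	}
	c := make([]byte, len(content))
	copy(c, content)
	return c
}

func main() {
	orig := []byte("hello")
	work := copyIfNeeded(orig, true)
	work[0] = 'H'
	fmt.Println(string(orig), string(work)) // hello Hello
}
```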
@@ -1849,14 +1939,18 @@ func (p *Page) addLangPathPrefixIfFlagSet(outfile string, should bool) string {
return outfile
}
-func sectionsFromFilename(filename string) []string {
- var sections []string
- dir, _ := filepath.Split(filename)
- dir = strings.TrimSuffix(dir, helpers.FilePathSeparator)
- if dir == "" {
+func sectionsFromDir(dirname string) []string {
+ sections := strings.Split(dirname, helpers.FilePathSeparator)
+ if len(sections) == 1 {
+ if sections[0] == "" {
+ return nil
+ }
return sections
}
- sections = strings.Split(dir, helpers.FilePathSeparator)
+ if len(sections) > 1 && sections[0] == "" {
+ return sections[1:]
+ }
+
return sections
}
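The new sectionsFromDir derives the section hierarchy from the directory alone instead of re-splitting the full filename. A rough standalone equivalent over slash-separated paths (the real code splits on helpers.FilePathSeparator):

```go
package main

import (
	"fmt"
	"strings"
)

// sectionsFromDirSketch splits a content-relative directory into its
// section path, dropping the leading empty element of a rooted path.
func sectionsFromDirSketch(dirname string) []string {
	sections := strings.Split(dirname, "/")
	if len(sections) == 1 {
		if sections[0] == "" {
			return nil // page at the content root
		}
		return sections
	}
	if sections[0] == "" {
		return sections[1:]
	}
	return sections
}

func main() {
	fmt.Println(sectionsFromDirSketch(""))            // []
	fmt.Println(sectionsFromDirSketch("blog"))        // [blog]
	fmt.Println(sectionsFromDirSketch("/blog/posts")) // [blog posts]
}
```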
diff --git a/hugolib/page_bundler.go b/hugolib/page_bundler.go
new file mode 100644
index 000000000..2f6b4d094
--- /dev/null
+++ b/hugolib/page_bundler.go
@@ -0,0 +1,188 @@
+// Copyright 2017-present The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+ "math"
+ "runtime"
+ "strings"
+
+ // Use this until errgroup gets ported to context
+ // See https://github.com/golang/go/issues/19781
+ "golang.org/x/net/context"
+ "golang.org/x/sync/errgroup"
+)
+
+type siteContentProcessor struct {
+ baseDir string
+
+ site *Site
+
+ handleContent contentHandler
+
+ // The input file bundles.
+ fileBundlesChan chan *bundleDir
+
+ // The input file singles.
+ fileSinglesChan chan *fileInfo
+
+ // These assets should be just copied to destination.
+ fileAssetsChan chan []string
+
+ numWorkers int
+
+ // The output Pages
+ pagesChan chan *Page
+
+ // Used for partial rebuilds (aka. live reload)
+ // Will signal replacement of pages in the site collection.
+ partialBuild bool
+}
+
+func newSiteContentProcessor(baseDir string, partialBuild bool, s *Site) *siteContentProcessor {
+ numWorkers := 12
+ if n := runtime.NumCPU() * 3; n > numWorkers {
+ numWorkers = n
+ }
+
+ numWorkers = int(math.Ceil(float64(numWorkers) / float64(len(s.owner.Sites))))
+
+ return &siteContentProcessor{
+ partialBuild: partialBuild,
+ baseDir: baseDir,
+ site: s,
+ handleContent: newHandlerChain(s),
+ fileBundlesChan: make(chan *bundleDir, numWorkers),
+ fileSinglesChan: make(chan *fileInfo, numWorkers),
+ fileAssetsChan: make(chan []string, numWorkers),
+ numWorkers: numWorkers,
+ pagesChan: make(chan *Page, numWorkers),
+ }
+}
+
+func (s *siteContentProcessor) closeInput() {
+ close(s.fileSinglesChan)
+ close(s.fileBundlesChan)
+ close(s.fileAssetsChan)
+}
+
+func (s *siteContentProcessor) process(ctx context.Context) error {
+ g1, ctx := errgroup.WithContext(ctx)
+ g2, _ := errgroup.WithContext(ctx)
+
+ // There can be only one of these per site.
+ g1.Go(func() error {
+ for p := range s.pagesChan {
+ if p.s != s.site {
+ panic(fmt.Sprintf("invalid page site: %v vs %v", p.s, s))
+ }
+
+ if s.partialBuild {
+ s.site.replacePage(p)
+ } else {
+ s.site.addPage(p)
+ }
+ }
+ return nil
+ })
+
+ for i := 0; i < s.numWorkers; i++ {
+ g2.Go(func() error {
+ for {
+ select {
+ case f, ok := <-s.fileSinglesChan:
+ if !ok {
+ return nil
+ }
+ err := s.readAndConvertContentFile(f)
+ if err != nil {
+ return err
+ }
+ case <-ctx.Done():
+ return ctx.Err()
+ }
+ }
+ })
+
+ g2.Go(func() error {
+ for {
+ select {
+ case filenames, ok := <-s.fileAssetsChan:
+ if !ok {
+ return nil
+ }
+ for _, filename := range filenames {
+ name := strings.TrimPrefix(filename, s.baseDir)
+ f, err := s.site.Fs.Source.Open(filename)
+ if err != nil {
+ return err
+ }
+
+ err = s.site.publish(&s.site.PathSpec.ProcessingStats.Files, name, f)
+ f.Close()
+ if err != nil {
+ return err
+ }
+ }
+
+ case <-ctx.Done():
+ return ctx.Err()
+ }
+ }
+ })
+
+ g2.Go(func() error {
+ for {
+ select {
+ case bundle, ok := <-s.fileBundlesChan:
+ if !ok {
+ return nil
+ }
+ err := s.readAndConvertContentBundle(bundle)
+ if err != nil {
+ return err
+ }
+ case <-ctx.Done():
+ return ctx.Err()
+ }
+ }
+ })
+ }
+
+ if err := g2.Wait(); err != nil {
+ return err
+ }
+
+ close(s.pagesChan)
+
+ if err := g1.Wait(); err != nil {
+ return err
+ }
+
+ s.site.rawAllPages.Sort()
+
+ return nil
+
+}
+
+func (s *siteContentProcessor) readAndConvertContentFile(file *fileInfo) error {
+ ctx := &handlerContext{source: file, baseDir: s.baseDir, pages: s.pagesChan}
+ return s.handleContent(ctx).err
+}
+
+func (s *siteContentProcessor) readAndConvertContentBundle(bundle *bundleDir) error {
+ ctx := &handlerContext{bundle: bundle, baseDir: s.baseDir, pages: s.pagesChan}
+ return s.handleContent(ctx).err
+}
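The processor above is a two-stage errgroup pipeline: a single collector goroutine owns the site collection, N workers drain the input channels, and pagesChan is closed only after every worker has returned. A condensed, self-contained sketch of the same shutdown ordering (types and channel names are illustrative, not Hugo's):

```go
package main

import (
	"context"
	"fmt"

	"golang.org/x/sync/errgroup"
)

func main() {
	in := make(chan string, 4)  // files to process
	out := make(chan string, 4) // "pages" ready to collect

	g1, _ := errgroup.WithContext(context.Background())
	g2, ctx := errgroup.WithContext(context.Background())

	// Single collector: the only goroutine mutating the collection.
	var pages []string
	g1.Go(func() error {
		for p := range out {
			pages = append(pages, p)
		}
		return nil
	})

	// Worker pool: convert inputs until the channel is closed.
	for i := 0; i < 4; i++ {
		g2.Go(func() error {
			for {
				select {
				case f, ok := <-in:
					if !ok {
						return nil
					}
					out <- "page:" + f
				case <-ctx.Done():
					return ctx.Err()
				}
			}
		})
	}

	for _, f := range []string{"a.md", "b.md", "c.md"} {
		in <- f
	}
	close(in) // no more input

	if err := g2.Wait(); err != nil { // all workers done...
		panic(err)
	}
	close(out) // ...so it is now safe to close the output

	if err := g1.Wait(); err != nil {
		panic(err)
	}
	fmt.Println(pages)
}
```

Closing the input channel first and the output channel only after g2.Wait() is what lets the collector's range loop terminate cleanly without losing pages.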
diff --git a/hugolib/page_bundler_capture.go b/hugolib/page_bundler_capture.go
new file mode 100644
index 000000000..5574329de
--- /dev/null
+++ b/hugolib/page_bundler_capture.go
@@ -0,0 +1,683 @@
+// Copyright 2017-present The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "errors"
+ "fmt"
+ "os"
+ "path/filepath"
+ "runtime"
+ "strings"
+ "sync"
+
+ "github.com/gohugoio/hugo/helpers"
+
+ "golang.org/x/sync/errgroup"
+
+ "github.com/gohugoio/hugo/source"
+ "github.com/spf13/afero"
+ jww "github.com/spf13/jwalterweatherman"
+)
+
+var errSkipCyclicDir = errors.New("skip potential cyclic dir")
+
+type capturer struct {
+ // To prevent symbolic link cycles: Visit same folder only once.
+ seen map[string]bool
+ seenMu sync.Mutex
+
+ handler captureResultHandler
+
+ sourceSpec *source.SourceSpec
+ fs afero.Fs
+ logger *jww.Notepad
+
+ baseDir string
+
+ // Filenames limits the content to process to a list of filenames/directories.
+ // This is used for partial building in server mode.
+ filenames []string
+
+ // Used to determine how to handle content changes in server mode.
+ contentChanges *contentChangeMap
+
+ // Semaphore used to throttle the concurrent sub directory handling.
+ sem chan bool
+}
+
+func newCapturer(
+ logger *jww.Notepad,
+ sourceSpec *source.SourceSpec,
+ handler captureResultHandler,
+ contentChanges *contentChangeMap,
+ baseDir string, filenames ...string) *capturer {
+
+ numWorkers := 4
+ if n := runtime.NumCPU(); n > numWorkers {
+ numWorkers = n
+ }
+
+ c := &capturer{
+ sem: make(chan bool, numWorkers),
+ handler: handler,
+ sourceSpec: sourceSpec,
+ logger: logger,
+ contentChanges: contentChanges,
+ fs: sourceSpec.Fs.Source, baseDir: baseDir, seen: make(map[string]bool),
+ filenames: filenames}
+
+ return c
+}
+
+// Captured files and bundles ready to be processed will be passed on to
+// these channels.
+type captureResultHandler interface {
+ handleSingles(fis ...*fileInfo)
+ handleCopyFiles(filenames ...string)
+ captureBundlesHandler
+}
+
+type captureBundlesHandler interface {
+ handleBundles(b *bundleDirs)
+}
+
+type captureResultHandlerChain struct {
+ handlers []captureBundlesHandler
+}
+
+func (c *captureResultHandlerChain) handleSingles(fis ...*fileInfo) {
+ for _, h := range c.handlers {
+ if hh, ok := h.(captureResultHandler); ok {
+ hh.handleSingles(fis...)
+ }
+ }
+}
+func (c *captureResultHandlerChain) handleBundles(b *bundleDirs) {
+ for _, h := range c.handlers {
+ h.handleBundles(b)
+ }
+}
+
+func (c *captureResultHandlerChain) handleCopyFiles(filenames ...string) {
+ for _, h := range c.handlers {
+ if hh, ok := h.(captureResultHandler); ok {
+ hh.handleCopyFiles(filenames...)
+ }
+ }
+}
+
+func (c *capturer) capturePartial(filenames ...string) error {
+ handled := make(map[string]bool)
+
+ for _, filename := range filenames {
+ dir, resolvedFilename, tp := c.contentChanges.resolveAndRemove(filename)
+ if handled[resolvedFilename] {
+ continue
+ }
+
+ handled[resolvedFilename] = true
+
+ switch tp {
+ case bundleLeaf:
+ if err := c.handleDir(resolvedFilename); err != nil {
+ return err
+ }
+ case bundleBranch:
+ if err := c.handleBranchDir(resolvedFilename); err != nil {
+ return err
+ }
+ default:
+ fi, _, err := c.getRealFileInfo(resolvedFilename)
+ if os.IsNotExist(err) {
+ // File has been deleted.
+ continue
+ }
+
+ // Just in case the owning dir is a new symlink -- this will
+ // create the proper mapping for it.
+ c.getRealFileInfo(dir)
+
+ f := c.newFileInfo(resolvedFilename, fi, tp)
+ c.copyOrHandleSingle(f)
+ }
+ }
+
+ return nil
+}
+
+func (c *capturer) capture() error {
+ if len(c.filenames) > 0 {
+ return c.capturePartial(c.filenames...)
+ }
+
+ err := c.handleDir(c.baseDir)
+ if err != nil {
+ return err
+ }
+
+ return nil
+}
+
+func (c *capturer) handleNestedDir(dirname string) error {
+ select {
+ case c.sem <- true:
+ var g errgroup.Group
+
+ g.Go(func() error {
+ defer func() {
+ <-c.sem
+ }()
+ return c.handleDir(dirname)
+ })
+ return g.Wait()
+ default:
+ // For deeply nested file trees, waiting for a semaphore will deadlock.
+ return c.handleDir(dirname)
+ }
+}
+
+// This handles a bundle branch and its resources only. It is used
+// in server mode on changes. If this dir no longer represents a bundle
+// branch, the handling is upgraded to the full handleDir method.
+func (c *capturer) handleBranchDir(dirname string) error {
+ files, err := c.readDir(dirname)
+ if err != nil {
+ return err
+ }
+
+ var (
+ dirType bundleDirType
+ )
+
+ for _, fi := range files {
+ if !fi.IsDir() {
+ tp, _ := classifyBundledFile(fi.Name())
+ if dirType == bundleNot {
+ dirType = tp
+ }
+
+ if dirType == bundleLeaf {
+ return c.handleDir(dirname)
+ }
+ }
+ }
+
+ if dirType != bundleBranch {
+ return c.handleDir(dirname)
+ }
+
+ dirs := newBundleDirs(bundleBranch, c)
+
+ for _, fi := range files {
+
+ if fi.IsDir() {
+ continue
+ }
+
+ tp, isContent := classifyBundledFile(fi.Name())
+
+ f := c.newFileInfo(fi.filename, fi.FileInfo, tp)
+ if f.isOwner() {
+ dirs.addBundleHeader(f)
+ } else if !isContent {
+ // This is a partial update -- we only care about the files that
+ // are in this bundle.
+ dirs.addBundleFiles(f)
+ }
+ }
+
+ c.handler.handleBundles(dirs)
+
+ return nil
+
+}
+
+func (c *capturer) handleDir(dirname string) error {
+ files, err := c.readDir(dirname)
+ if err != nil {
+ return err
+ }
+
+ type dirState int
+
+ const (
+ dirStateDefault dirState = iota
+
+ dirStateAssetsOnly
+ dirStateSinglesOnly
+ )
+
+ var (
+ fileBundleTypes = make([]bundleDirType, len(files))
+
+ // Start with the assumption that this dir contains only non-content assets (images etc.).
+ // If that is still true after a first look at the list of files, we
+ // can just copy the files to the destination. We will still have to look at the
+ // sub-folders for potential bundles.
+ state = dirStateAssetsOnly
+
+ // Start with the assumption that this dir is not a bundle.
+ // A directory is a bundle if it contains an index content file,
+ // e.g. index.md (a leaf bundle) or a _index.md (a branch bundle).
+ bundleType = bundleNot
+ )
+
+ /* First check for any content files.
+ - If there are none, then this is an assets-only folder (images etc.)
+ and we can just plainly copy the files to the
+ destination.
+ - If this is a section with no images etc. or similar, we can just handle it
+ as if it were a single content file.
+ */
+ var hasNonContent, isBranch bool
+
+ for i, fi := range files {
+ if !fi.IsDir() {
+ tp, isContent := classifyBundledFile(fi.Name())
+ fileBundleTypes[i] = tp
+ if !isBranch {
+ isBranch = tp == bundleBranch
+ }
+
+ if isContent {
+ // This is not an assets-only folder.
+ state = dirStateDefault
+ } else {
+ hasNonContent = true
+ }
+ }
+ }
+
+ if isBranch && !hasNonContent {
+ // This is a section or similar with no need for any bundle handling.
+ state = dirStateSinglesOnly
+ }
+
+ if state > dirStateDefault {
+ return c.handleNonBundle(dirname, files, state == dirStateSinglesOnly)
+ }
+
+ var fileInfos = make([]*fileInfo, len(files))
+
+ for i, fi := range files {
+ currentType := bundleNot
+
+ if !fi.IsDir() {
+ currentType = fileBundleTypes[i]
+ if bundleType == bundleNot && currentType != bundleNot {
+ bundleType = currentType
+ }
+ }
+
+ if bundleType == bundleNot && currentType != bundleNot {
+ bundleType = currentType
+ }
+
+ fileInfos[i] = c.newFileInfo(fi.filename, fi.FileInfo, currentType)
+ }
+
+ var todo []*fileInfo
+
+ if bundleType != bundleLeaf {
+ for _, fi := range fileInfos {
+ if fi.FileInfo().IsDir() {
+ // Handle potential nested bundles.
+ filename := fi.Filename()
+ if err := c.handleNestedDir(filename); err != nil {
+ return err
+ }
+ } else if bundleType == bundleNot || (!fi.isOwner() && fi.isContentFile()) {
+ // Not in a bundle.
+ c.copyOrHandleSingle(fi)
+ } else {
+ // This is a section folder or similar with non-content files in it.
+ todo = append(todo, fi)
+ }
+ }
+ } else {
+ todo = fileInfos
+ }
+
+ if len(todo) == 0 {
+ return nil
+ }
+
+ dirs, err := c.createBundleDirs(todo, bundleType)
+ if err != nil {
+ return err
+ }
+
+ // Send the bundle to the next step in the processor chain.
+ c.handler.handleBundles(dirs)
+
+ return nil
+}
+
+func (c *capturer) handleNonBundle(
+ dirname string,
+ fileInfos []fileInfoName,
+ singlesOnly bool) error {
+
+ for _, fi := range fileInfos {
+ if fi.IsDir() {
+ if err := c.handleNestedDir(fi.filename); err != nil {
+ return err
+ }
+ } else {
+ if singlesOnly {
+ file := c.newFileInfo(fi.filename, fi, bundleNot)
+ c.handler.handleSingles(file)
+ } else {
+ c.handler.handleCopyFiles(fi.filename)
+ }
+ }
+ }
+
+ return nil
+}
+
+func (c *capturer) copyOrHandleSingle(fi *fileInfo) {
+ if fi.isContentFile() {
+ c.handler.handleSingles(fi)
+ } else {
+ // These do not currently need any further processing.
+ c.handler.handleCopyFiles(fi.Filename())
+ }
+}
+
+func (c *capturer) createBundleDirs(fileInfos []*fileInfo, bundleType bundleDirType) (*bundleDirs, error) {
+ dirs := newBundleDirs(bundleType, c)
+
+ for _, fi := range fileInfos {
+ if fi.FileInfo().IsDir() {
+ var collector func(fis ...*fileInfo)
+
+ if bundleType == bundleBranch {
+ // All files in the current directory are part of this bundle.
+ // Including sub-folders in these bundles would be ambiguous.
+ collector = func(fis ...*fileInfo) {
+ for _, fi := range fis {
+ c.copyOrHandleSingle(fi)
+ }
+ }
+ } else {
+ // All nested files and directories are part of this bundle.
+ collector = func(fis ...*fileInfo) {
+ fileInfos = append(fileInfos, fis...)
+ }
+ }
+ err := c.collectFiles(fi.Filename(), collector)
+ if err != nil {
+ return nil, err
+ }
+
+ } else if fi.isOwner() {
+ // There can be more than one language, so:
+ // 1. Content files must be attached to their language's bundle.
+ // 2. Other files must be attached to all languages.
+ // 3. Every content file needs a bundle header.
+ dirs.addBundleHeader(fi)
+ }
+ }
+
+ for _, fi := range fileInfos {
+ if fi.FileInfo().IsDir() || fi.isOwner() {
+ continue
+ }
+
+ if fi.isContentFile() {
+ if bundleType != bundleBranch {
+ dirs.addBundleContentFile(fi)
+ }
+ } else {
+ dirs.addBundleFiles(fi)
+ }
+ }
+
+ return dirs, nil
+}
+
+func (c *capturer) collectFiles(dirname string, handleFiles func(fis ...*fileInfo)) error {
+ filesInDir, err := c.readDir(dirname)
+ if err != nil {
+ return err
+ }
+
+ for _, fi := range filesInDir {
+ if fi.IsDir() {
+ err := c.collectFiles(fi.filename, handleFiles)
+ if err != nil {
+ return err
+ }
+ } else {
+ handleFiles(c.newFileInfo(fi.filename, fi.FileInfo, bundleNot))
+ }
+ }
+
+ return nil
+}
+
+func (c *capturer) readDir(dirname string) ([]fileInfoName, error) {
+ if c.sourceSpec.IgnoreFile(dirname) {
+ return nil, nil
+ }
+
+ dir, err := c.fs.Open(dirname)
+ if err != nil {
+ return nil, err
+ }
+ defer dir.Close()
+ names, err := dir.Readdirnames(-1)
+ if err != nil {
+ return nil, err
+ }
+
+ fis := make([]fileInfoName, 0, len(names))
+
+ for _, name := range names {
+ filename := filepath.Join(dirname, name)
+ if !c.sourceSpec.IgnoreFile(filename) {
+ fi, _, err := c.getRealFileInfo(filename)
+
+ if err != nil {
+ // It may have been deleted in the meantime.
+ if err == errSkipCyclicDir || os.IsNotExist(err) {
+ continue
+ }
+ return nil, err
+ }
+
+ fis = append(fis, fileInfoName{filename: filename, FileInfo: fi})
+ }
+ }
+
+ return fis, nil
+}
+
+func (c *capturer) newFileInfo(filename string, fi os.FileInfo, tp bundleDirType) *fileInfo {
+ return newFileInfo(c.sourceSpec, c.baseDir, filename, fi, tp)
+}
+
+type singlesHandler func(fis ...*fileInfo)
+type bundlesHandler func(b *bundleDirs)
+
+type fileInfoName struct {
+ os.FileInfo
+ filename string
+}
+
+type bundleDirs struct {
+ tp bundleDirType
+ // Maps languages to bundles.
+ bundles map[string]*bundleDir
+
+ // Keeps track of language overrides for non-content files, e.g. logo.en.png.
+ langOverrides map[string]bool
+
+ c *capturer
+}
+
+func newBundleDirs(tp bundleDirType, c *capturer) *bundleDirs {
+ return &bundleDirs{tp: tp, bundles: make(map[string]*bundleDir), langOverrides: make(map[string]bool), c: c}
+}
+
+type bundleDir struct {
+ tp bundleDirType
+ fi *fileInfo
+
+ resources map[string]*fileInfo
+}
+
+func (b bundleDir) clone() *bundleDir {
+ b.resources = make(map[string]*fileInfo)
+ fic := *b.fi
+ b.fi = &fic
+ return &b
+}
+
+func newBundleDir(fi *fileInfo, bundleType bundleDirType) *bundleDir {
+ return &bundleDir{fi: fi, tp: bundleType, resources: make(map[string]*fileInfo)}
+}
+
+func (b *bundleDirs) addBundleContentFile(fi *fileInfo) {
+ dir, found := b.bundles[fi.Lang()]
+ if !found {
+ // Every bundled content file needs a bundle header.
+ // If one does not exist in its language, we pick the default
+ // language version, or a random one if that doesn't exist either.
+ tl := b.c.sourceSpec.DefaultContentLanguage
+ ldir, found := b.bundles[tl]
+ if !found {
+ // Just pick one.
+ for _, v := range b.bundles {
+ ldir = v
+ break
+ }
+ }
+
+ if ldir == nil {
+ panic(fmt.Sprintf("bundle not found for file %q", fi.Filename()))
+ }
+
+ dir = ldir.clone()
+ dir.fi.overriddenLang = fi.Lang()
+ b.bundles[fi.Lang()] = dir
+ }
+
+ dir.resources[fi.Filename()] = fi
+}
+
+func (b *bundleDirs) addBundleFiles(fi *fileInfo) {
+ dir := filepath.ToSlash(fi.Dir())
+ p := dir + fi.TranslationBaseName() + "." + fi.Ext()
+ for lang, bdir := range b.bundles {
+ key := lang + p
+ // Given mypage.de.md (German translation) and mypage.md, we pick
+ // the most specific one for that language.
+ if fi.Lang() == lang || !b.langOverrides[key] {
+ bdir.resources[key] = fi
+ }
+ b.langOverrides[key] = true
+ }
+}
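addBundleFiles encodes a small precedence rule: a language-specific resource such as logo.en.png should shadow the generic logo.png in the English bundle, while other languages keep the generic file. A simplified sketch of that idea (hypothetical attach helper; the real code keys on language plus translation base name):

```go
package main

import "fmt"

type res struct{ name, lang string }

// attach gives every language a copy of each shared resource, but lets
// a language-specific file (lang "en") shadow the generic one (lang "")
// for that language only, regardless of input order.
func attach(langs []string, resources []res) map[string]string {
	chosen := map[string]string{}
	specific := map[string]bool{}
	for _, lang := range langs {
		for _, r := range resources {
			switch {
			case r.lang == lang:
				chosen[lang] = r.name
				specific[lang] = true
			case r.lang == "" && !specific[lang]:
				chosen[lang] = r.name
			}
		}
	}
	return chosen
}

func main() {
	fmt.Println(attach([]string{"en", "nn"},
		[]res{{"logo.png", ""}, {"logo.en.png", "en"}}))
	// map[en:logo.en.png nn:logo.png]
}
```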
+
+func (b *bundleDirs) addBundleHeader(fi *fileInfo) {
+ b.bundles[fi.Lang()] = newBundleDir(fi, b.tp)
+}
+
+func (c *capturer) isSeen(dirname string) bool {
+ c.seenMu.Lock()
+ defer c.seenMu.Unlock()
+ seen := c.seen[dirname]
+ c.seen[dirname] = true
+ if seen {
+ c.logger.WARN.Printf("Content dir %q already processed; skipped to avoid infinite recursion.", dirname)
+ return true
+
+ }
+ return false
+}
+
+func (c *capturer) getRealFileInfo(path string) (os.FileInfo, string, error) {
+ fileInfo, err := c.lstatIfOs(path)
+ realPath := path
+
+ if err != nil {
+ return nil, "", err
+ }
+
+ if fileInfo.Mode()&os.ModeSymlink == os.ModeSymlink {
+ link, err := filepath.EvalSymlinks(path)
+ if err != nil {
+ return nil, "", fmt.Errorf("Cannot read symbolic link %q, error was: %s", path, err)
+ }
+
+ fileInfo, err = c.lstatIfOs(link)
+ if err != nil {
+ return nil, "", fmt.Errorf("Cannot stat %q, error was: %s", link, err)
+ }
+
+ realPath = link
+
+ if realPath != path && fileInfo.IsDir() && c.isSeen(realPath) {
+ // Avoid cyclic symlinks.
+ // Note that this may prevent some uses that aren't cyclic and are
+ // potentially useful, but this implementation is both robust and simple:
+ // We stop at the first directory that we have seen before, e.g.
+ // /content/blog will only be processed once.
+ return nil, realPath, errSkipCyclicDir
+ }
+
+ if c.contentChanges != nil {
+ // Keep track of symbolic links in watch mode.
+ var from, to string
+ if fileInfo.IsDir() {
+ from = realPath
+ to = path
+
+ if !strings.HasSuffix(to, helpers.FilePathSeparator) {
+ to = to + helpers.FilePathSeparator
+ }
+ if !strings.HasSuffix(from, helpers.FilePathSeparator) {
+ from = from + helpers.FilePathSeparator
+ }
+
+ baseDir := c.baseDir
+ if !strings.HasSuffix(baseDir, helpers.FilePathSeparator) {
+ baseDir = baseDir + helpers.FilePathSeparator
+ }
+
+ if strings.HasPrefix(from, baseDir) {
+ // With symbolic links inside /content we need to keep
+ // a reference to both. This may be confusing with --navigateToChanged
+ // but the user has chosen this themselves.
+ c.contentChanges.addSymbolicLinkMapping(from, from)
+ }
+
+ } else {
+ from = realPath
+ to = path
+ }
+
+ c.contentChanges.addSymbolicLinkMapping(from, to)
+ }
+ }
+
+ return fileInfo, realPath, nil
+}
+
+func (c *capturer) lstatIfOs(path string) (os.FileInfo, error) {
+ return helpers.LstatIfOs(c.fs, path)
+}
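The heart of the capturer's symlink support is getRealFileInfo: resolve a link once, then refuse to descend into a real directory that has been seen before. A compact sketch of that guard with error handling trimmed (the real code also records the link mappings for live reload):

```go
package main

import (
	"errors"
	"fmt"
	"os"
	"path/filepath"
)

var errCyclic = errors.New("skip potential cyclic dir")

type walker struct {
	seen map[string]bool
}

// realInfo resolves symlinks and refuses to revisit a directory,
// which is what breaks /content -> /content style cycles.
func (w *walker) realInfo(path string) (os.FileInfo, string, error) {
	fi, err := os.Lstat(path)
	if err != nil {
		return nil, "", err
	}
	real := path
	if fi.Mode()&os.ModeSymlink != 0 {
		if real, err = filepath.EvalSymlinks(path); err != nil {
			return nil, "", err
		}
		if fi, err = os.Lstat(real); err != nil {
			return nil, "", err
		}
		if real != path && fi.IsDir() && w.seen[real] {
			return nil, real, errCyclic
		}
	}
	if fi.IsDir() {
		w.seen[real] = true
	}
	return fi, real, nil
}

func main() {
	w := &walker{seen: map[string]bool{}}
	if _, real, err := w.realInfo("."); err == nil {
		fmt.Println("resolved to", real)
	}
}
```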
diff --git a/hugolib/page_bundler_capture_test.go b/hugolib/page_bundler_capture_test.go
new file mode 100644
index 000000000..6ff182221
--- /dev/null
+++ b/hugolib/page_bundler_capture_test.go
@@ -0,0 +1,255 @@
+// Copyright 2017-present The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+ "path"
+ "path/filepath"
+ "sort"
+
+ jww "github.com/spf13/jwalterweatherman"
+
+ "strings"
+ "sync"
+ "testing"
+
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/source"
+ "github.com/stretchr/testify/require"
+)
+
+type storeFilenames struct {
+ sync.Mutex
+ filenames []string
+ copyNames []string
+ dirKeys []string
+}
+
+func (s *storeFilenames) handleSingles(fis ...*fileInfo) {
+ s.Lock()
+ defer s.Unlock()
+ for _, fi := range fis {
+ s.filenames = append(s.filenames, filepath.ToSlash(fi.Filename()))
+ }
+}
+
+func (s *storeFilenames) handleBundles(d *bundleDirs) {
+ s.Lock()
+ defer s.Unlock()
+ var keys []string
+ for _, b := range d.bundles {
+ res := make([]string, len(b.resources))
+ i := 0
+ for _, r := range b.resources {
+ res[i] = path.Join(r.Lang(), filepath.ToSlash(r.Filename()))
+ i++
+ }
+ sort.Strings(res)
+ keys = append(keys, path.Join("__bundle", b.fi.Lang(), filepath.ToSlash(b.fi.Filename()), "resources", strings.Join(res, "|")))
+ }
+ s.dirKeys = append(s.dirKeys, keys...)
+}
+
+func (s *storeFilenames) handleCopyFiles(names ...string) {
+ s.Lock()
+ defer s.Unlock()
+ for _, name := range names {
+ s.copyNames = append(s.copyNames, filepath.ToSlash(name))
+ }
+
+}
+
+func (s *storeFilenames) sortedStr() string {
+ s.Lock()
+ defer s.Unlock()
+ sort.Strings(s.filenames)
+ sort.Strings(s.dirKeys)
+ sort.Strings(s.copyNames)
+ return "\nF:\n" + strings.Join(s.filenames, "\n") + "\nD:\n" + strings.Join(s.dirKeys, "\n") +
+ "\nC:\n" + strings.Join(s.copyNames, "\n") + "\n"
+}
+
+func TestPageBundlerCaptureSymlinks(t *testing.T) {
+ assert := require.New(t)
+ cfg, fs, workDir := newTestBundleSymbolicSources(t)
+ contentDir := "base"
+ sourceSpec := source.NewSourceSpec(cfg, fs)
+
+ fileStore := &storeFilenames{}
+ logger := newWarningLogger()
+ c := newCapturer(logger, sourceSpec, fileStore, nil, filepath.Join(workDir, contentDir))
+
+ assert.NoError(c.capture())
+
+ // Symlink back to content is skipped to prevent infinite recursion.
+ assert.Equal(uint64(3), logger.LogCountForLevelsGreaterThanorEqualTo(jww.LevelWarn))
+
+ expected := `
+F:
+/base/a/page_s.md
+/base/a/regular.md
+/base/symbolic1/s1.md
+/base/symbolic1/s2.md
+/base/symbolic3/circus/a/page_s.md
+/base/symbolic3/circus/a/regular.md
+D:
+__bundle/en/base/symbolic2/a1/index.md/resources/en/base/symbolic2/a1/logo.png|en/base/symbolic2/a1/page.md
+C:
+/base/symbolic3/s1.png
+/base/symbolic3/s2.png
+`
+ got := strings.Replace(fileStore.sortedStr(), filepath.ToSlash(workDir), "", -1)
+ got = strings.Replace(got, "//", "/", -1)
+
+ if expected != got {
+ diff := helpers.DiffStringSlices(strings.Fields(expected), strings.Fields(got))
+ t.Log(got)
+ t.Fatalf("Failed:\n%s", diff)
+ }
+}
+
+func TestPageBundlerCapture(t *testing.T) {
+ t.Parallel()
+
+ assert := require.New(t)
+ cfg, fs := newTestBundleSources(t)
+
+ sourceSpec := source.NewSourceSpec(cfg, fs)
+
+ fileStore := &storeFilenames{}
+
+ c := newCapturer(newErrorLogger(), sourceSpec, fileStore, nil, filepath.FromSlash("/work/base"))
+
+ assert.NoError(c.capture())
+
+ expected := `
+F:
+/work/base/_1.md
+/work/base/a/1.md
+/work/base/a/2.md
+/work/base/assets/pages/mypage.md
+D:
+__bundle/en/work/base/_index.md/resources/en/work/base/_1.png
+__bundle/en/work/base/a/b/index.md/resources/en/work/base/a/b/ab1.md
+__bundle/en/work/base/b/index.md/resources/en/work/base/b/1.md|en/work/base/b/2.md|en/work/base/b/c/logo.png|en/work/base/b/custom-mime.bep
+C:
+/work/base/assets/pic1.png
+/work/base/assets/pic2.png
+/work/base/images/hugo-logo.png
+`
+
+ got := fileStore.sortedStr()
+
+ if expected != got {
+ diff := helpers.DiffStringSlices(strings.Fields(expected), strings.Fields(got))
+ t.Log(got)
+ t.Fatalf("Failed:\n%s", diff)
+ }
+}
+
+func TestPageBundlerCaptureMultilingual(t *testing.T) {
+ t.Parallel()
+
+ assert := require.New(t)
+ cfg, fs := newTestBundleSourcesMultilingual(t)
+ sourceSpec := source.NewSourceSpec(cfg, fs)
+ fileStore := &storeFilenames{}
+ c := newCapturer(newErrorLogger(), sourceSpec, fileStore, nil, filepath.FromSlash("/work/base"))
+
+ assert.NoError(c.capture())
+
+ expected := `
+F:
+/work/base/1s/mypage.md
+/work/base/bb/_1.md
+/work/base/bb/_1.nn.md
+/work/base/bb/en.md
+/work/base/bc/page.md
+/work/base/bc/page.nn.md
+/work/base/be/_index.md
+/work/base/be/page.md
+/work/base/be/page.nn.md
+D:
+__bundle/en/work/base/bb/_index.md/resources/en/work/base/bb/a.png|en/work/base/bb/b.png|nn/work/base/bb/c.nn.png
+__bundle/en/work/base/bc/_index.md/resources/en/work/base/bc/logo-bc.png
+__bundle/en/work/base/bd/index.md/resources/en/work/base/bd/page.md
+__bundle/en/work/base/lb/index.md/resources/en/work/base/lb/1.md|en/work/base/lb/2.md|en/work/base/lb/c/d/deep.png|en/work/base/lb/c/logo.png|en/work/base/lb/c/one.png
+__bundle/nn/work/base/bb/_index.nn.md/resources/en/work/base/bb/a.png|nn/work/base/bb/b.nn.png|nn/work/base/bb/c.nn.png
+__bundle/nn/work/base/bd/index.md/resources/nn/work/base/bd/page.nn.md
+__bundle/nn/work/base/lb/index.nn.md/resources/en/work/base/lb/c/d/deep.png|en/work/base/lb/c/one.png|nn/work/base/lb/2.nn.md|nn/work/base/lb/c/logo.nn.png
+C:
+/work/base/1s/mylogo.png
+/work/base/bb/b/d.nn.png
+`
+
+ got := fileStore.sortedStr()
+
+ if expected != got {
+ diff := helpers.DiffStringSlices(strings.Fields(expected), strings.Fields(got))
+ t.Log(got)
+ t.Fatalf("Failed:\n%s", diff)
+ }
+
+}
+
+type noOpFileStore int
+
+func (noOpFileStore) handleSingles(fis ...*fileInfo) {}
+func (noOpFileStore) handleBundles(b *bundleDirs) {}
+func (noOpFileStore) handleCopyFiles(names ...string) {}
+
+func BenchmarkPageBundlerCapture(b *testing.B) {
+ capturers := make([]*capturer, b.N)
+
+ for i := 0; i < b.N; i++ {
+ cfg, fs := newTestCfg()
+ sourceSpec := source.NewSourceSpec(cfg, fs)
+
+ base := fmt.Sprintf("base%d", i)
+ for j := 1; j <= 5; j++ {
+ js := fmt.Sprintf("j%d", j)
+ writeSource(b, fs, filepath.Join(base, js, "index.md"), "content")
+ writeSource(b, fs, filepath.Join(base, js, "logo1.png"), "content")
+ writeSource(b, fs, filepath.Join(base, js, "sub", "logo2.png"), "content")
+ writeSource(b, fs, filepath.Join(base, js, "section", "_index.md"), "content")
+ writeSource(b, fs, filepath.Join(base, js, "section", "logo.png"), "content")
+ writeSource(b, fs, filepath.Join(base, js, "section", "sub", "logo.png"), "content")
+
+ for k := 1; k <= 5; k++ {
+ ks := fmt.Sprintf("k%d", k)
+ writeSource(b, fs, filepath.Join(base, js, ks, "logo1.png"), "content")
+ writeSource(b, fs, filepath.Join(base, js, "section", ks, "logo.png"), "content")
+ }
+ }
+
+ for i := 1; i <= 5; i++ {
+ writeSource(b, fs, filepath.Join(base, "assetsonly", fmt.Sprintf("image%d.png", i)), "image")
+ }
+
+ for i := 1; i <= 5; i++ {
+ writeSource(b, fs, filepath.Join(base, "contentonly", fmt.Sprintf("c%d.md", i)), "content")
+ }
+
+ capturers[i] = newCapturer(newErrorLogger(), sourceSpec, new(noOpFileStore), nil, base)
+ }
+
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ err := capturers[i].capture()
+ if err != nil {
+ b.Fatal(err)
+ }
+ }
+}
diff --git a/hugolib/page_bundler_handlers.go b/hugolib/page_bundler_handlers.go
new file mode 100644
index 000000000..7054f0b79
--- /dev/null
+++ b/hugolib/page_bundler_handlers.go
@@ -0,0 +1,346 @@
+// Copyright 2017-present The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "errors"
+ "fmt"
+ "sort"
+
+ "strings"
+
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/resource"
+)
+
+var (
+ // This should be the only list of valid extensions for content files.
+ contentFileExtensions = []string{
+ "html", "htm",
+ "mdown", "markdown", "md",
+ "asciidoc", "adoc", "ad",
+ "rest", "rst",
+ "mmark",
+ "org",
+ "pandoc", "pdc"}
+
+ contentFileExtensionsSet map[string]bool
+)
+
+func init() {
+ contentFileExtensionsSet = make(map[string]bool)
+ for _, ext := range contentFileExtensions {
+ contentFileExtensionsSet[ext] = true
+ }
+}
+
+func newHandlerChain(s *Site) contentHandler {
+ c := &contentHandlers{s: s}
+
+ contentFlow := c.parsePage(c.processFirstMatch(
+ // Handles all files with a content file extension. See above.
+ c.handlePageContent(),
+
+ // Every HTML file without front matter will be passed on to this handler.
+ c.handleHTMLContent(),
+ ))
+
+ c.rootHandler = c.processFirstMatch(
+ contentFlow,
+
+ // Creates a file resource (image, CSS etc.) if there is a parent
+ // page set on the current context.
+ c.createResource(),
+
+ // Everything that isn't handled above, will just be copied
+ // to destination.
+ c.copyFile(),
+ )
+
+ return c.rootHandler
+
+}
+
+type contentHandlers struct {
+ s *Site
+ rootHandler contentHandler
+}
+
+func (c *contentHandlers) processFirstMatch(handlers ...contentHandler) func(ctx *handlerContext) handlerResult {
+ return func(ctx *handlerContext) handlerResult {
+ for _, h := range handlers {
+ res := h(ctx)
+ if res.handled || res.err != nil {
+ return res
+ }
+ }
+ return handlerResult{err: errors.New("no matching handler found")}
+ }
+}
+
+type handlerContext struct {
+ // These are the pages stored in Site.
+ pages chan<- *Page
+
+ doNotAddToSiteCollections bool
+
+ currentPage *Page
+ parentPage *Page
+
+ bundle *bundleDir
+
+ // The source baseDir, e.g. "/myproject/content/"
+ baseDir string
+
+ source *fileInfo
+
+ // Relative path to the target.
+ target string
+}
+
+func (c *handlerContext) ext() string {
+ if c.currentPage != nil {
+ if c.currentPage.Markup != "" {
+ return c.currentPage.Markup
+ }
+ return c.currentPage.Ext()
+ }
+
+ if c.bundle != nil {
+ return c.bundle.fi.Ext()
+ } else {
+ return c.source.Ext()
+ }
+}
+
+func (c *handlerContext) targetPath() string {
+ if c.target != "" {
+ return c.target
+ }
+
+ return strings.TrimPrefix(c.source.Filename(), c.baseDir)
+}
+
+func (c *handlerContext) file() *fileInfo {
+ if c.bundle != nil {
+ return c.bundle.fi
+ }
+
+ return c.source
+}
+
+// Create a copy with the current context as its parent.
+func (c handlerContext) childCtx(fi *fileInfo) *handlerContext {
+ if c.currentPage == nil {
+ panic("Need a Page to create a child context")
+ }
+
+ c.target = strings.TrimPrefix(fi.Path(), c.bundle.fi.Dir())
+ c.source = fi
+
+ c.doNotAddToSiteCollections = c.bundle != nil && c.bundle.tp != bundleBranch
+
+ c.bundle = nil
+
+ c.parentPage = c.currentPage
+ c.currentPage = nil
+
+ return &c
+}
+
+func (c *handlerContext) supports(exts ...string) bool {
+ ext := c.ext()
+ for _, s := range exts {
+ if s == ext {
+ return true
+ }
+ }
+
+ return false
+}
+
+func (c *handlerContext) isContentFile() bool {
+ return contentFileExtensionsSet[c.ext()]
+}
+
+type (
+ handlerResult struct {
+ err error
+ handled bool
+ resource resource.Resource
+ }
+
+ contentHandlerChain func(h contentHandler) contentHandler
+ contentHandler func(ctx *handlerContext) handlerResult
+)
+
+var (
+ notHandled handlerResult
+	noOpContentHandler = func(ctx *handlerContext) handlerResult {
+ return handlerResult{handled: true}
+ }
+)
+
+func (c *contentHandlers) parsePage(h contentHandler) contentHandler {
+ return func(ctx *handlerContext) handlerResult {
+ if !ctx.isContentFile() {
+ return notHandled
+ }
+
+ result := handlerResult{handled: true}
+ fi := ctx.file()
+
+ f, err := fi.Open()
+ if err != nil {
+ return handlerResult{err: fmt.Errorf("(%s) failed to open content file: %s", fi.Filename(), err)}
+ }
+ defer f.Close()
+
+ p := c.s.newPageFromFile(fi)
+
+ _, err = p.ReadFrom(f)
+ if err != nil {
+ return handlerResult{err: err}
+ }
+
+ if !p.shouldBuild() {
+ if !ctx.doNotAddToSiteCollections {
+ ctx.pages <- p
+ }
+ return result
+ }
+
+ ctx.currentPage = p
+
+ if ctx.bundle != nil {
+ // Add the bundled files
+ for _, fi := range ctx.bundle.resources {
+ childCtx := ctx.childCtx(fi)
+ res := c.rootHandler(childCtx)
+ if res.err != nil {
+ return res
+ }
+ if res.resource != nil {
+ p.Resources = append(p.Resources, res.resource)
+ }
+ }
+
+ sort.SliceStable(p.Resources, func(i, j int) bool {
+ if p.Resources[i].ResourceType() < p.Resources[j].ResourceType() {
+ return true
+ }
+
+ p1, ok1 := p.Resources[i].(*Page)
+ p2, ok2 := p.Resources[j].(*Page)
+
+ if ok1 != ok2 {
+ return ok2
+ }
+
+ if ok1 {
+ return defaultPageSort(p1, p2)
+ }
+
+ return p.Resources[i].RelPermalink() < p.Resources[j].RelPermalink()
+ })
+ }
+
+ return h(ctx)
+ }
+}
+
+func (c *contentHandlers) handlePageContent() contentHandler {
+ return func(ctx *handlerContext) handlerResult {
+ if ctx.supports("html", "htm") {
+ return notHandled
+ }
+
+ p := ctx.currentPage
+
+ // Work on a copy of the raw content from now on.
+ p.createWorkContentCopy()
+
+ if err := p.processShortcodes(); err != nil {
+ p.s.Log.ERROR.Println(err)
+ }
+
+ if c.s.Cfg.GetBool("enableEmoji") {
+ p.workContent = helpers.Emojify(p.workContent)
+ }
+
+ p.workContent = p.replaceDivider(p.workContent)
+ p.workContent = p.renderContent(p.workContent)
+
+ if !ctx.doNotAddToSiteCollections {
+ ctx.pages <- p
+ }
+
+ return handlerResult{handled: true, resource: p}
+ }
+}
+
+func (c *contentHandlers) handleHTMLContent() contentHandler {
+ return func(ctx *handlerContext) handlerResult {
+ if !ctx.supports("html", "htm") {
+ return notHandled
+ }
+
+ p := ctx.currentPage
+
+ p.createWorkContentCopy()
+
+ if err := p.processShortcodes(); err != nil {
+ p.s.Log.ERROR.Println(err)
+ }
+
+ if !ctx.doNotAddToSiteCollections {
+ ctx.pages <- p
+ }
+
+ return handlerResult{handled: true, resource: p}
+ }
+}
+
+func (c *contentHandlers) createResource() contentHandler {
+ return func(ctx *handlerContext) handlerResult {
+ if ctx.parentPage == nil {
+ return notHandled
+ }
+
+ resource, err := c.s.resourceSpec.NewResourceFromFilename(
+ ctx.parentPage.subResourceLinkFactory,
+ c.s.absPublishDir(),
+ ctx.source.Filename(), ctx.target)
+
+ return handlerResult{err: err, handled: true, resource: resource}
+ }
+}
+
+func (c *contentHandlers) copyFile() contentHandler {
+ return func(ctx *handlerContext) handlerResult {
+ f, err := c.s.Fs.Source.Open(ctx.source.Filename())
+ if err != nil {
+ return handlerResult{err: err}
+ }
+
+ target := ctx.targetPath()
+
+ defer f.Close()
+ if err := c.s.publish(&c.s.PathSpec.ProcessingStats.Files, target, f); err != nil {
+ return handlerResult{err: err}
+ }
+
+ return handlerResult{handled: true}
+ }
+}
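The file above is essentially a first-match chain (chain-of-responsibility): `parsePage` wraps the content handlers, and `processFirstMatch` tries each candidate until one reports `handled` or fails, with `copyFile` as the catch-all. A distilled, self-contained sketch of the pattern, independent of Hugo's types:

```go
package main

import (
	"errors"
	"fmt"
)

type result struct {
	handled bool
	err     error
}

type handler func(ext string) result

// firstMatch returns a handler that tries each candidate in order and
// stops at the first one that handles the input or fails.
func firstMatch(handlers ...handler) handler {
	return func(ext string) result {
		for _, h := range handlers {
			if res := h(ext); res.handled || res.err != nil {
				return res
			}
		}
		return result{err: errors.New("no matching handler found")}
	}
}

func main() {
	markdown := func(ext string) result {
		if ext != "md" {
			return result{} // pass to the next handler
		}
		fmt.Println("render markdown")
		return result{handled: true}
	}
	copyFile := func(string) result {
		fmt.Println("copy as-is")
		return result{handled: true} // catch-all
	}

	root := firstMatch(markdown, copyFile)
	root("md")  // render markdown
	root("png") // copy as-is
}
```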
diff --git a/hugolib/page_bundler_test.go b/hugolib/page_bundler_test.go
new file mode 100644
index 000000000..ff50fc67a
--- /dev/null
+++ b/hugolib/page_bundler_test.go
@@ -0,0 +1,379 @@
+// Copyright 2017-present The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "io/ioutil"
+ "os"
+ "runtime"
+ "strings"
+ "testing"
+
+ "github.com/spf13/afero"
+
+ "github.com/gohugoio/hugo/media"
+
+ "path/filepath"
+
+ "fmt"
+
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/hugofs"
+ "github.com/gohugoio/hugo/resource"
+ "github.com/spf13/viper"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestPageBundlerSite(t *testing.T) {
+ t.Parallel()
+
+ for _, ugly := range []bool{false, true} {
+ t.Run(fmt.Sprintf("ugly=%t", ugly),
+ func(t *testing.T) {
+
+ assert := require.New(t)
+ cfg, fs := newTestBundleSources(t)
+
+ cfg.Set("permalinks", map[string]string{
+ "a": ":sections/:filename",
+ "b": ":year/:slug/",
+ })
+
+ cfg.Set("outputFormats", map[string]interface{}{
+ "CUSTOMO": map[string]interface{}{
+ "mediaType": media.HTMLType,
+ "baseName": "cindex",
+ "path": "cpath",
+ },
+ })
+
+ cfg.Set("outputs", map[string]interface{}{
+ "home": []string{"HTML", "CUSTOMO"},
+ "page": []string{"HTML", "CUSTOMO"},
+ "section": []string{"HTML", "CUSTOMO"},
+ })
+
+ cfg.Set("uglyURLs", ugly)
+
+ s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
+
+ th := testHelper{s.Cfg, s.Fs, t}
+
+ // Singles (2), Below home (1), Bundle (1)
+ assert.Len(s.RegularPages, 6)
+
+ singlePage := s.getPage(KindPage, "a/1.md")
+
+ assert.NotNil(singlePage)
+ assert.Contains(singlePage.Content, "TheContent")
+
+ if ugly {
+ assert.Equal("/a/1.html", singlePage.RelPermalink())
+ th.assertFileContent(filepath.FromSlash("/work/public/a/1.html"), "TheContent")
+
+ } else {
+ assert.Equal("/a/1/", singlePage.RelPermalink())
+ th.assertFileContent(filepath.FromSlash("/work/public/a/1/index.html"), "TheContent")
+ }
+
+ th.assertFileContent(filepath.FromSlash("/work/public/images/hugo-logo.png"), "content")
+
+ // This should be just copied to destination.
+ th.assertFileContent(filepath.FromSlash("/work/public/assets/pic1.png"), "content")
+
+ leafBundle1 := s.getPage(KindPage, "b/index.md")
+ assert.NotNil(leafBundle1)
+ leafBundle2 := s.getPage(KindPage, "a/b/index.md")
+ assert.NotNil(leafBundle2)
+
+ pageResources := leafBundle1.Resources.ByType(pageResourceType)
+ assert.Len(pageResources, 2)
+ firstPage := pageResources[0].(*Page)
+ secondPage := pageResources[1].(*Page)
+ assert.Equal(filepath.FromSlash("b/1.md"), firstPage.pathOrTitle(), secondPage.pathOrTitle())
+ assert.Contains(firstPage.Content, "TheContent")
+ assert.Len(leafBundle1.Resources, 4) // 2 pages 1 image 1 custom mime type
+
+ imageResources := leafBundle1.Resources.ByType("image")
+ assert.Len(imageResources, 1)
+ image := imageResources[0]
+
+ altFormat := leafBundle1.OutputFormats().Get("CUSTOMO")
+ assert.NotNil(altFormat)
+
+ assert.Equal(filepath.FromSlash("/work/base/b/c/logo.png"), image.(resource.Source).AbsSourceFilename())
+ assert.Equal("https://example.com/2017/pageslug/c/logo.png", image.Permalink())
+ th.assertFileContent(filepath.FromSlash("/work/public/2017/pageslug/c/logo.png"), "content")
+ th.assertFileContent(filepath.FromSlash("/work/public/cpath/2017/pageslug/c/logo.png"), "content")
+
+ // Custom media type defined in site config.
+ assert.Len(leafBundle1.Resources.ByType("bepsays"), 1)
+
+ if ugly {
+ assert.Equal("/2017/pageslug.html", leafBundle1.RelPermalink())
+ th.assertFileContent(filepath.FromSlash("/work/public/2017/pageslug.html"), "TheContent")
+ th.assertFileContent(filepath.FromSlash("/work/public/cpath/2017/pageslug.html"), "TheContent")
+
+ assert.Equal("/a/b.html", leafBundle2.RelPermalink())
+
+ } else {
+ assert.Equal("/2017/pageslug/", leafBundle1.RelPermalink())
+ th.assertFileContent(filepath.FromSlash("/work/public/2017/pageslug/index.html"), "TheContent")
+ th.assertFileContent(filepath.FromSlash("/work/public/cpath/2017/pageslug/cindex.html"), "TheContent")
+
+ assert.Equal("/a/b/", leafBundle2.RelPermalink())
+
+ }
+
+ })
+ }
+
+}
+
+func TestPageBundlerSiteWithSymbolicLinksInContent(t *testing.T) {
+ assert := require.New(t)
+ cfg, fs, workDir := newTestBundleSymbolicSources(t)
+
+ s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg, Logger: newWarningLogger()}, BuildCfg{})
+
+ th := testHelper{s.Cfg, s.Fs, t}
+
+ assert.Equal(7, len(s.RegularPages))
+ a1Bundle := s.getPage(KindPage, "symbolic2/a1/index.md")
+ assert.NotNil(a1Bundle)
+ assert.Equal(2, len(a1Bundle.Resources))
+ assert.Equal(1, len(a1Bundle.Resources.ByType(pageResourceType)))
+
+ th.assertFileContent(filepath.FromSlash(workDir+"/public/a/page/index.html"), "TheContent")
+ th.assertFileContent(filepath.FromSlash(workDir+"/public/symbolic1/s1/index.html"), "TheContent")
+ th.assertFileContent(filepath.FromSlash(workDir+"/public/symbolic2/a1/index.html"), "TheContent")
+
+}
+
+func newTestBundleSources(t *testing.T) (*viper.Viper, *hugofs.Fs) {
+ cfg, fs := newTestCfg()
+
+ workDir := "/work"
+ cfg.Set("workingDir", workDir)
+ cfg.Set("contentDir", "base")
+ cfg.Set("baseURL", "https://example.com")
+ cfg.Set("mediaTypes", map[string]interface{}{
+ "text/bepsays": map[string]interface{}{
+ "suffix": "bep",
+ },
+ })
+
+ pageContent := `---
+title: "Bundle Galore"
+slug: pageslug
+date: 2017-10-09
+---
+
+TheContent.
+`
+
+ pageContentNoSlug := `---
+title: "Bundle Galore #2"
+date: 2017-10-09
+---
+
+TheContent.
+`
+
+ layout := `{{ .Title }}|{{ .Content }}`
+
+ writeSource(t, fs, filepath.Join(workDir, "layouts", "_default", "single.html"), layout)
+ writeSource(t, fs, filepath.Join(workDir, "layouts", "_default", "list.html"), layout)
+
+ writeSource(t, fs, filepath.Join(workDir, "base", "_index.md"), pageContent)
+ writeSource(t, fs, filepath.Join(workDir, "base", "_1.md"), pageContent)
+ writeSource(t, fs, filepath.Join(workDir, "base", "_1.png"), pageContent)
+
+ writeSource(t, fs, filepath.Join(workDir, "base", "images", "hugo-logo.png"), "content")
+ writeSource(t, fs, filepath.Join(workDir, "base", "a", "2.md"), pageContent)
+ writeSource(t, fs, filepath.Join(workDir, "base", "a", "1.md"), pageContent)
+
+ writeSource(t, fs, filepath.Join(workDir, "base", "a", "b", "index.md"), pageContentNoSlug)
+ writeSource(t, fs, filepath.Join(workDir, "base", "a", "b", "ab1.md"), pageContentNoSlug)
+
+ // Mostly plain static assets in a folder with a page in a sub folder thrown in.
+ writeSource(t, fs, filepath.Join(workDir, "base", "assets", "pic1.png"), "content")
+ writeSource(t, fs, filepath.Join(workDir, "base", "assets", "pic2.png"), "content")
+ writeSource(t, fs, filepath.Join(workDir, "base", "assets", "pages", "mypage.md"), pageContent)
+
+ // Bundle
+ writeSource(t, fs, filepath.Join(workDir, "base", "b", "index.md"), pageContent)
+ writeSource(t, fs, filepath.Join(workDir, "base", "b", "1.md"), pageContent)
+ writeSource(t, fs, filepath.Join(workDir, "base", "b", "2.md"), pageContent)
+ writeSource(t, fs, filepath.Join(workDir, "base", "b", "custom-mime.bep"), "bepsays")
+ writeSource(t, fs, filepath.Join(workDir, "base", "b", "c", "logo.png"), "content")
+
+ return cfg, fs
+}
+
+func newTestBundleSourcesMultilingual(t *testing.T) (*viper.Viper, *hugofs.Fs) {
+ cfg, fs := newTestCfg()
+
+ workDir := "/work"
+ cfg.Set("workingDir", workDir)
+ cfg.Set("contentDir", "base")
+ cfg.Set("baseURL", "https://example.com")
+ cfg.Set("defaultContentLanguage", "en")
+
+ langConfig := map[string]interface{}{
+ "en": map[string]interface{}{
+ "weight": 1,
+ "languageName": "English",
+ },
+ "nn": map[string]interface{}{
+ "weight": 2,
+ "languageName": "Nynorsk",
+ },
+ }
+
+ cfg.Set("languages", langConfig)
+
+ pageContent := `---
+slug: pageslug
+date: 2017-10-09
+---
+
+TheContent.
+`
+
+ layout := `{{ .Title }}|{{ .Content }}|Lang: {{ .Site.Language.Lang }}`
+
+ writeSource(t, fs, filepath.Join(workDir, "layouts", "_default", "single.html"), layout)
+ writeSource(t, fs, filepath.Join(workDir, "layouts", "_default", "list.html"), layout)
+
+ writeSource(t, fs, filepath.Join(workDir, "base", "1s", "mypage.md"), pageContent)
+ writeSource(t, fs, filepath.Join(workDir, "base", "1s", "mylogo.png"), "content")
+
+ writeSource(t, fs, filepath.Join(workDir, "base", "bb", "_index.md"), pageContent)
+ writeSource(t, fs, filepath.Join(workDir, "base", "bb", "_index.nn.md"), pageContent)
+ writeSource(t, fs, filepath.Join(workDir, "base", "bb", "en.md"), pageContent)
+ writeSource(t, fs, filepath.Join(workDir, "base", "bb", "_1.md"), pageContent)
+ writeSource(t, fs, filepath.Join(workDir, "base", "bb", "_1.nn.md"), pageContent)
+ writeSource(t, fs, filepath.Join(workDir, "base", "bb", "a.png"), "content")
+ writeSource(t, fs, filepath.Join(workDir, "base", "bb", "b.png"), "content")
+ writeSource(t, fs, filepath.Join(workDir, "base", "bb", "b.nn.png"), "content")
+ writeSource(t, fs, filepath.Join(workDir, "base", "bb", "c.nn.png"), "content")
+ writeSource(t, fs, filepath.Join(workDir, "base", "bb", "b", "d.nn.png"), "content")
+
+ writeSource(t, fs, filepath.Join(workDir, "base", "bc", "_index.md"), pageContent)
+ writeSource(t, fs, filepath.Join(workDir, "base", "bc", "page.md"), pageContent)
+ writeSource(t, fs, filepath.Join(workDir, "base", "bc", "logo-bc.png"), pageContent)
+ writeSource(t, fs, filepath.Join(workDir, "base", "bc", "page.nn.md"), pageContent)
+
+ writeSource(t, fs, filepath.Join(workDir, "base", "bd", "index.md"), pageContent)
+ writeSource(t, fs, filepath.Join(workDir, "base", "bd", "page.md"), pageContent)
+ writeSource(t, fs, filepath.Join(workDir, "base", "bd", "page.nn.md"), pageContent)
+
+ writeSource(t, fs, filepath.Join(workDir, "base", "be", "_index.md"), pageContent)
+ writeSource(t, fs, filepath.Join(workDir, "base", "be", "page.md"), pageContent)
+ writeSource(t, fs, filepath.Join(workDir, "base", "be", "page.nn.md"), pageContent)
+
+ // Bundle leaf, multilingual
+ writeSource(t, fs, filepath.Join(workDir, "base", "lb", "index.md"), pageContent)
+ writeSource(t, fs, filepath.Join(workDir, "base", "lb", "index.nn.md"), pageContent)
+ writeSource(t, fs, filepath.Join(workDir, "base", "lb", "1.md"), pageContent)
+ writeSource(t, fs, filepath.Join(workDir, "base", "lb", "2.md"), pageContent)
+ writeSource(t, fs, filepath.Join(workDir, "base", "lb", "2.nn.md"), pageContent)
+ writeSource(t, fs, filepath.Join(workDir, "base", "lb", "c", "logo.png"), "content")
+ writeSource(t, fs, filepath.Join(workDir, "base", "lb", "c", "logo.nn.png"), "content")
+ writeSource(t, fs, filepath.Join(workDir, "base", "lb", "c", "one.png"), "content")
+ writeSource(t, fs, filepath.Join(workDir, "base", "lb", "c", "d", "deep.png"), "content")
+
+ return cfg, fs
+}
+
+func newTestBundleSymbolicSources(t *testing.T) (*viper.Viper, *hugofs.Fs, string) {
+ assert := require.New(t)
+ // We need to use the OS fs for this.
+ cfg := viper.New()
+ fs := hugofs.NewFrom(hugofs.Os, cfg)
+ fs.Destination = &afero.MemMapFs{}
+ loadDefaultSettingsFor(cfg)
+
+ workDir, err := ioutil.TempDir("", "hugosym")
+
+ if runtime.GOOS == "darwin" && !strings.HasPrefix(workDir, "/private") {
+		// To get the entry folder in line with the rest. This is a little bit
+ // mysterious, but so be it.
+ workDir = "/private" + workDir
+ }
+
+ contentDir := "base"
+ cfg.Set("workingDir", workDir)
+ cfg.Set("contentDir", contentDir)
+ cfg.Set("baseURL", "https://example.com")
+
+ layout := `{{ .Title }}|{{ .Content }}`
+ pageContent := `---
+slug: %s
+date: 2017-10-09
+---
+
+TheContent.
+`
+
+ fs.Source.MkdirAll(filepath.Join(workDir, "layouts", "_default"), 0777)
+ fs.Source.MkdirAll(filepath.Join(workDir, contentDir), 0777)
+ fs.Source.MkdirAll(filepath.Join(workDir, contentDir, "a"), 0777)
+ for i := 1; i <= 3; i++ {
+ fs.Source.MkdirAll(filepath.Join(workDir, fmt.Sprintf("symcontent%d", i)), 0777)
+
+ }
+ fs.Source.MkdirAll(filepath.Join(workDir, "symcontent2", "a1"), 0777)
+
+ writeSource(t, fs, filepath.Join(workDir, "layouts", "_default", "single.html"), layout)
+ writeSource(t, fs, filepath.Join(workDir, "layouts", "_default", "list.html"), layout)
+
+ writeSource(t, fs, filepath.Join(workDir, contentDir, "a", "regular.md"), fmt.Sprintf(pageContent, "a1"))
+
+ // Regular files inside symlinked folder.
+ writeSource(t, fs, filepath.Join(workDir, "symcontent1", "s1.md"), fmt.Sprintf(pageContent, "s1"))
+ writeSource(t, fs, filepath.Join(workDir, "symcontent1", "s2.md"), fmt.Sprintf(pageContent, "s2"))
+
+ // A bundle
+ writeSource(t, fs, filepath.Join(workDir, "symcontent2", "a1", "index.md"), fmt.Sprintf(pageContent, ""))
+ writeSource(t, fs, filepath.Join(workDir, "symcontent2", "a1", "page.md"), fmt.Sprintf(pageContent, "page"))
+ writeSource(t, fs, filepath.Join(workDir, "symcontent2", "a1", "logo.png"), "image")
+
+ // Assets
+ writeSource(t, fs, filepath.Join(workDir, "symcontent3", "s1.png"), "image")
+ writeSource(t, fs, filepath.Join(workDir, "symcontent3", "s2.png"), "image")
+
+ // Symlinked sections inside content.
+ os.Chdir(filepath.Join(workDir, contentDir))
+ for i := 1; i <= 3; i++ {
+ assert.NoError(os.Symlink(filepath.FromSlash(fmt.Sprintf(("../symcontent%d"), i)), fmt.Sprintf("symbolic%d", i)))
+ }
+
+ os.Chdir(filepath.Join(workDir, contentDir, "a"))
+
+ // Create a symlink to one single content file
+ assert.NoError(os.Symlink(filepath.FromSlash("../../symcontent2/a1/page.md"), "page_s.md"))
+
+ os.Chdir(filepath.FromSlash("../../symcontent3"))
+
+ // Create a circular symlink. Will print some warnings.
+ assert.NoError(os.Symlink(filepath.Join("..", contentDir), filepath.FromSlash("circus")))
+
+ os.Chdir(workDir)
+ assert.NoError(err)
+
+ return cfg, fs, workDir
+}
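For orientation when reading these fixtures: a directory containing `index.md` is a leaf bundle whose sibling files become that page's resources, while `_index.md` marks a branch bundle (section). A toy model of the `b/` bundle above and the by-type lookup the tests exercise; the types here are illustrative stand-ins, not Hugo's:

```go
package main

import "fmt"

// A minimal stand-in for a leaf bundle: one index page owning a set of
// typed resources (the test above asserts 2 pages, 1 image, 1 custom type).
type res struct{ name, typ string }

type bundle struct {
	index     string
	resources []res
}

// byType mirrors the Resources.ByType lookup used in the test.
func (b bundle) byType(t string) []res {
	var out []res
	for _, r := range b.resources {
		if r.typ == t {
			out = append(out, r)
		}
	}
	return out
}

func main() {
	b := bundle{
		index: "b/index.md",
		resources: []res{
			{"b/1.md", "page"}, {"b/2.md", "page"},
			{"b/c/logo.png", "image"}, {"b/custom-mime.bep", "bepsays"},
		},
	}
	fmt.Println(len(b.byType("page")), "pages,", len(b.byType("image")), "image")
}
```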
diff --git a/hugolib/page_collections.go b/hugolib/page_collections.go
index 1eda67b19..6eae2e479 100644
--- a/hugolib/page_collections.go
+++ b/hugolib/page_collections.go
@@ -151,14 +151,18 @@ func (c *PageCollections) removePageByPathPrefix(path string) {
func (c *PageCollections) removePageByPath(path string) {
if i := c.rawAllPages.findPagePosByFilePath(path); i >= 0 {
+ c.clearResourceCacheForPage(c.rawAllPages[i])
c.rawAllPages = append(c.rawAllPages[:i], c.rawAllPages[i+1:]...)
}
+
}
func (c *PageCollections) removePage(page *Page) {
if i := c.rawAllPages.findPagePos(page); i >= 0 {
+ c.clearResourceCacheForPage(c.rawAllPages[i])
c.rawAllPages = append(c.rawAllPages[:i], c.rawAllPages[i+1:]...)
}
+
}
func (c *PageCollections) findPagesByShortcode(shortcode string) Pages {
@@ -179,3 +183,12 @@ func (c *PageCollections) replacePage(page *Page) {
c.removePage(page)
c.addPage(page)
}
+
+func (c *PageCollections) clearResourceCacheForPage(page *Page) {
+ if len(page.Resources) > 0 {
+ first := page.Resources[0]
+ dir := path.Dir(first.RelPermalink())
+ // This is done to keep the memory usage in check when doing live reloads.
+ page.s.resourceSpec.DeleteCacheByPrefix(dir)
+ }
+}
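`clearResourceCacheForPage` keeps memory bounded during live reload by evicting every processed image whose key falls under the page's resource directory. A minimal sketch of prefix-based eviction, assuming a simple map-backed cache (the real `resource.Spec` cache lives in another package):

```go
package main

import (
	"fmt"
	"strings"
	"sync"
)

// imageCache is a stand-in for the processed-image cache.
type imageCache struct {
	mu    sync.Mutex
	items map[string][]byte // keyed by relative permalink
}

// deleteByPrefix evicts all entries under one page's resource directory,
// e.g. "/2017/pageslug/".
func (c *imageCache) deleteByPrefix(prefix string) {
	c.mu.Lock()
	defer c.mu.Unlock()
	for k := range c.items {
		if strings.HasPrefix(k, prefix) {
			delete(c.items, k)
		}
	}
}

func main() {
	c := &imageCache{items: map[string][]byte{
		"/2017/pageslug/c/logo_100x0_resize.png": nil,
		"/other/pic.png":                         nil,
	}}
	c.deleteByPrefix("/2017/pageslug/")
	fmt.Println(len(c.items)) // 1
}
```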
diff --git a/hugolib/page_collections_test.go b/hugolib/page_collections_test.go
index aee99040c..292218ba4 100644
--- a/hugolib/page_collections_test.go
+++ b/hugolib/page_collections_test.go
@@ -133,7 +133,7 @@ func TestGetPage(t *testing.T) {
errorMsg := fmt.Sprintf("Test %d", i)
page := s.getPage(test.kind, test.path...)
assert.NotNil(page, errorMsg)
- assert.Equal(test.kind, page.Kind)
+ assert.Equal(test.kind, page.Kind, errorMsg)
assert.Equal(test.expectedTitle, page.Title)
}
diff --git a/hugolib/page_output.go b/hugolib/page_output.go
index 4739e6936..993dcb7a2 100644
--- a/hugolib/page_output.go
+++ b/hugolib/page_output.go
@@ -16,9 +16,12 @@ package hugolib
import (
"fmt"
"html/template"
+ "os"
"strings"
"sync"
+ "github.com/gohugoio/hugo/resource"
+
"github.com/gohugoio/hugo/media"
"github.com/gohugoio/hugo/helpers"
@@ -34,6 +37,10 @@ type PageOutput struct {
paginator *Pager
paginatorInit sync.Once
+ // Page output specific resources
+ resources resource.Resources
+ resourcesInit sync.Once
+
// Keep this to create URL/path variations, i.e. paginators.
targetPathDescriptor targetPathDescriptor
@@ -51,10 +58,7 @@ func (p *PageOutput) targetPath(addends ...string) (string, error) {
func newPageOutput(p *Page, createCopy bool, f output.Format) (*PageOutput, error) {
// TODO(bep) This is only needed for tests and we should get rid of it.
if p.targetPathDescriptorPrototype == nil {
- if err := p.initTargetPathDescriptor(); err != nil {
- return nil, err
- }
- if err := p.initURLs(); err != nil {
+ if err := p.initPaths(); err != nil {
return nil, err
}
}
@@ -241,6 +245,68 @@ func (p *PageOutput) AlternativeOutputFormats() (OutputFormats, error) {
return o, nil
}
+// deleteResource removes the resource at index i from both this PageOutput and the
+// Page. The two resource slices always have the same length, but may contain
+// different elements.
+func (p *PageOutput) deleteResource(i int) {
+ p.resources = append(p.resources[:i], p.resources[i+1:]...)
+ p.Page.Resources = append(p.Page.Resources[:i], p.Page.Resources[i+1:]...)
+
+}
+
+func (p *PageOutput) Resources() resource.Resources {
+ p.resourcesInit.Do(func() {
+ // If the current out shares the same path as the main page output, we reuse
+ // the resource set. For the "amp" use case, we need to clone them with new
+ // base folder.
+ ff := p.outputFormats[0]
+ if p.outputFormat.Path == ff.Path {
+ p.resources = p.Page.Resources
+ return
+ }
+
+ // Clone it with new base.
+ resources := make(resource.Resources, len(p.Page.Resources))
+
+ for i, r := range p.Page.Resources {
+ if c, ok := r.(resource.Cloner); ok {
+ // Clone the same resource with a new target.
+ resources[i] = c.WithNewBase(p.outputFormat.Path)
+ } else {
+ resources[i] = r
+ }
+ }
+
+ p.resources = resources
+ })
+
+ return p.resources
+}
+
+func (p *PageOutput) renderResources() error {
+
+ for i, r := range p.Resources() {
+ src, ok := r.(resource.Source)
+ if !ok {
+			// Pages get rendered with the owning page.
+ continue
+ }
+
+ if err := src.Publish(); err != nil {
+ if os.IsNotExist(err) {
+ // The resource has been deleted from the file system.
+ // This should be extremely rare, but can happen on live reload in server
+ // mode when the same resource is member of different page bundles.
+ p.deleteResource(i)
+ } else {
+ p.s.Log.ERROR.Printf("Failed to publish %q for page %q: %s", src.AbsSourceFilename(), p.pathOrTitle(), err)
+ }
+ } else {
+ p.s.PathSpec.ProcessingStats.Incr(&p.s.PathSpec.ProcessingStats.Files)
+ }
+ }
+ return nil
+}
+
// AlternativeOutputFormats is only available on the top level rendering
// entry point, and not inside range loops on the Page collections.
// This method is just here to inform users of that restriction.
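`Resources()` clones only when the alternative output format renders under a different path (the "amp" case); otherwise the main page's resource set is shared. A distilled sketch of the clone-with-new-base idea, using a stand-in resource type:

```go
package main

import (
	"fmt"
	"path"
)

type resource struct{ relPermalink string }

// withNewBase mirrors the Cloner.WithNewBase call above: same source,
// new target folder for an alternative output format such as "amp".
func (r resource) withNewBase(base string) resource {
	return resource{relPermalink: path.Join("/", base, r.relPermalink)}
}

func main() {
	logo := resource{relPermalink: "/2017/pageslug/c/logo.png"}
	fmt.Println(logo.withNewBase("amp").relPermalink) // /amp/2017/pageslug/c/logo.png
}
```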
diff --git a/hugolib/page_paths.go b/hugolib/page_paths.go
index a72304651..3eb1add80 100644
--- a/hugolib/page_paths.go
+++ b/hugolib/page_paths.go
@@ -82,7 +82,6 @@ func (p *Page) createTargetPathDescriptor(t output.Format) (targetPathDescriptor
}
func (p *Page) initTargetPathDescriptor() error {
-
d := &targetPathDescriptor{
PathSpec: p.s.PathSpec,
Kind: p.Kind,
@@ -126,6 +125,35 @@ func (p *Page) initTargetPathDescriptor() error {
}
+func (p *Page) initURLs() error {
+ if len(p.outputFormats) == 0 {
+ p.outputFormats = p.s.outputFormats[p.Kind]
+ }
+ rel := p.createRelativePermalink()
+
+ var err error
+ f := p.outputFormats[0]
+ p.permalink, err = p.s.permalinkForOutputFormat(rel, f)
+ if err != nil {
+ return err
+ }
+ rel = p.s.PathSpec.PrependBasePath(rel)
+ p.relPermalink = rel
+ p.relPermalinkBase = strings.TrimSuffix(rel, f.MediaType.FullSuffix())
+ p.layoutDescriptor = p.createLayoutDescriptor()
+ return nil
+}
+
+func (p *Page) initPaths() error {
+ if err := p.initTargetPathDescriptor(); err != nil {
+ return err
+ }
+ if err := p.initURLs(); err != nil {
+ return err
+ }
+ return nil
+}
+
// createTargetPath creates the target filename for this Page for the given
// output.Format. Some additional URL parts can also be provided, the typical
// use case being pagination.
@@ -156,12 +184,7 @@ func createTargetPath(d targetPathDescriptor) string {
isUgly := d.UglyURLs && !d.Type.NoUgly
- // If the page output format's base name is the same as the page base name,
- // we treat it as an ugly path, i.e.
- // my-blog-post-1/index.md => my-blog-post-1/index.html
- // (given the default values for that content file, i.e. no slug set etc.).
- // This introduces the behaviour from < Hugo 0.20, see issue #3396.
- if d.BaseName != "" && d.BaseName == d.Type.BaseName {
+ if d.ExpandedPermalink == "" && d.BaseName != "" && d.BaseName == d.Type.BaseName {
isUgly = true
}
@@ -247,6 +270,9 @@ func createTargetPath(d targetPathDescriptor) string {
func (p *Page) createRelativePermalink() string {
if len(p.outputFormats) == 0 {
+ if p.Kind == kindUnknown {
+ panic(fmt.Sprintf("Page %q has unknown kind", p.Title))
+ }
panic(fmt.Sprintf("Page %q missing output format(s)", p.Title))
}
@@ -264,6 +290,7 @@ func (p *Page) createRelativePermalinkForOutputFormat(f output.Format) string {
p.s.Log.ERROR.Printf("Failed to create permalink for page %q: %s", p.FullFilePath(), err)
return ""
}
+
// For /index.json etc. we must use the full path.
if strings.HasSuffix(f.BaseFilename(), "html") {
tp = strings.TrimSuffix(tp, f.BaseFilename())
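The changed guard in `createTargetPath` is the interesting bit: a page whose base name equals the output format's base name (`index.md` → `index.html`) is still treated as ugly, but no longer when an expanded permalink such as `:year/:slug/` is in play. A distilled sketch of that decision:

```go
package main

import "fmt"

// isUglyPath mirrors the guard above: matching base names force an ugly
// path unless an expanded permalink overrides the layout.
func isUglyPath(uglyURLs, noUgly bool, baseName, typeBaseName, expandedPermalink string) bool {
	isUgly := uglyURLs && !noUgly
	if expandedPermalink == "" && baseName != "" && baseName == typeBaseName {
		isUgly = true
	}
	return isUgly
}

func main() {
	fmt.Println(isUglyPath(false, false, "index", "index", ""))                // true
	fmt.Println(isUglyPath(false, false, "index", "index", "/2017/pageslug/")) // false
}
```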
diff --git a/hugolib/page_paths_test.go b/hugolib/page_paths_test.go
index 4147ffb85..149505ee4 100644
--- a/hugolib/page_paths_test.go
+++ b/hugolib/page_paths_test.go
@@ -79,7 +79,6 @@ func TestPageTargetPath(t *testing.T) {
Type: output.HTMLFormat}, "/a/b/mypage/index.html"},
{
- // Issue #3396
"HTML page with index as base", targetPathDescriptor{
Kind: KindPage,
Dir: "/a/b",
diff --git a/hugolib/page_resource.go b/hugolib/page_resource.go
new file mode 100644
index 000000000..808a692da
--- /dev/null
+++ b/hugolib/page_resource.go
@@ -0,0 +1,23 @@
+// Copyright 2017-present The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "github.com/gohugoio/hugo/resource"
+)
+
+var (
+ _ resource.Resource = (*Page)(nil)
+ _ resource.Resource = (*PageOutput)(nil)
+)
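The two blank-identifier declarations above are the standard Go idiom for compile-time interface checks: the package fails to build if `Page` or `PageOutput` ever stops satisfying `resource.Resource`. The general form, in a self-contained example:

```go
package main

import "fmt"

type Stringer interface{ String() string }

type thing struct{}

func (thing) String() string { return "thing" }

// Compile-time assertion: if thing loses its String method, this line
// becomes a build error rather than a runtime surprise.
var _ Stringer = (*thing)(nil)

func main() { fmt.Println(thing{}) }
```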
diff --git a/hugolib/page_test.go b/hugolib/page_test.go
index 973e8509b..7b6dd646a 100644
--- a/hugolib/page_test.go
+++ b/hugolib/page_test.go
@@ -1458,8 +1458,7 @@ func TestTranslationKey(t *testing.T) {
assert.Equal("page/k1", s.RegularPages[0].TranslationKey())
p2 := s.RegularPages[1]
- // This is a single language setup
- assert.Equal("page/sect/simple.en", p2.TranslationKey())
+ assert.Equal("page/sect/simple", p2.TranslationKey())
}
@@ -1582,6 +1581,7 @@ tags:
*some blog content*`))
s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
+
require.Len(t, s.RegularPages, 4)
pathFunc := func(s string) string {
diff --git a/hugolib/pagination.go b/hugolib/pagination.go
index 894f467a4..6d27f65ca 100644
--- a/hugolib/pagination.go
+++ b/hugolib/pagination.go
@@ -300,7 +300,6 @@ func (p *PageOutput) Paginator(options ...interface{}) (*Pager, error) {
p.paginator = pagers[0]
p.paginator.source = "paginator"
p.paginator.options = options
- p.Site.addToPaginationPageCount(uint64(p.paginator.TotalPages()))
}
})
@@ -353,7 +352,6 @@ func (p *PageOutput) Paginate(seq interface{}, options ...interface{}) (*Pager,
p.paginator = pagers[0]
p.paginator.source = seq
p.paginator.options = options
- p.Site.addToPaginationPageCount(uint64(p.paginator.TotalPages()))
}
})
@@ -417,6 +415,10 @@ func paginatePages(td targetPathDescriptor, seq interface{}, pagerSize int) (pag
}
func toPages(seq interface{}) (Pages, error) {
+ if seq == nil {
+ return Pages{}, nil
+ }
+
switch seq.(type) {
case Pages:
return seq.(Pages), nil
diff --git a/hugolib/permalinks.go b/hugolib/permalinks.go
index 14d0cba5e..8f64614b8 100644
--- a/hugolib/permalinks.go
+++ b/hugolib/permalinks.go
@@ -17,6 +17,7 @@ import (
"errors"
"fmt"
"path"
+ "path/filepath"
"regexp"
"strconv"
"strings"
@@ -156,9 +157,13 @@ func pageToPermalinkTitle(p *Page, _ string) (string, error) {
// pageToPermalinkFilename returns the URL-safe form of the filename
func pageToPermalinkFilename(p *Page, _ string) (string, error) {
- //var extension = p.Source.Ext
- //var name = p.Source.Path()[0 : len(p.Source.Path())-len(extension)]
- return p.s.PathSpec.URLize(p.Source.TranslationBaseName()), nil
+ name := p.File.TranslationBaseName()
+ if name == "index" {
+	// Page bundles; the surrounding directory will hopefully have a better name.
+ _, name = filepath.Split(p.File.Dir())
+ }
+
+ return p.s.PathSpec.URLize(name), nil
}
// if the page has a slug, return the slug, else return the title
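With `:filename` permalinks, a leaf bundle's content file is always `index.md`, so the bundle directory supplies the meaningful name. A sketch of that fallback (a `Clean` is added here to tolerate a trailing path separator; the input values are illustrative):

```go
package main

import (
	"fmt"
	"path/filepath"
)

// permalinkName mirrors the fallback above: bundles use the bundle
// directory's name instead of the uninformative "index".
func permalinkName(translationBaseName, dir string) string {
	name := translationBaseName
	if name == "index" {
		_, name = filepath.Split(filepath.Clean(dir))
	}
	return name
}

func main() {
	fmt.Println(permalinkName("my-post", "/content/a/")) // my-post
	fmt.Println(permalinkName("index", "/content/a/b/")) // b
}
```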
diff --git a/hugolib/prune_resources.go b/hugolib/prune_resources.go
new file mode 100644
index 000000000..8eddafb53
--- /dev/null
+++ b/hugolib/prune_resources.go
@@ -0,0 +1,84 @@
+// Copyright 2017-present The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+ "io"
+ "os"
+ "strings"
+
+ "github.com/spf13/afero"
+)
+
+// GC requires a build first.
+func (h *HugoSites) GC() (int, error) {
+ s := h.Sites[0]
+ imageCacheDir := s.resourceSpec.AbsGenImagePath
+ if len(imageCacheDir) < 10 {
+ panic("invalid image cache")
+ }
+
+ isInUse := func(filename string) bool {
+ key := strings.TrimPrefix(filename, imageCacheDir)
+ for _, site := range h.Sites {
+ if site.resourceSpec.IsInCache(key) {
+ return true
+ }
+ }
+
+ return false
+ }
+
+ counter := 0
+
+ err := afero.Walk(s.Fs.Source, imageCacheDir, func(path string, info os.FileInfo, err error) error {
+ if info == nil {
+ return nil
+ }
+
+ if !strings.HasPrefix(path, imageCacheDir) {
+ return fmt.Errorf("Invalid state, walk outside of resource dir: %q", path)
+ }
+
+ if info.IsDir() {
+ f, err := s.Fs.Source.Open(path)
+ if err != nil {
+ return nil
+ }
+ defer f.Close()
+ _, err = f.Readdirnames(1)
+ if err == io.EOF {
+ // Empty dir.
+ s.Fs.Source.Remove(path)
+ }
+
+ return nil
+ }
+
+ inUse := isInUse(path)
+ if !inUse {
+ err := s.Fs.Source.Remove(path)
+ if err != nil && !os.IsNotExist(err) {
+ s.Log.ERROR.Printf("Failed to remove %q: %s", path, err)
+ } else {
+ counter++
+ }
+ }
+ return nil
+ })
+
+ return counter, err
+
+}
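Two details in `GC` are worth noting: cache membership is keyed by the path relative to `imageCacheDir`, and directory emptiness is probed with `Readdirnames(1)`, which returns `io.EOF` exactly when the directory has no entries. A self-contained sketch of that emptiness probe:

```go
package main

import (
	"fmt"
	"io"
	"io/ioutil"
	"os"
)

// isEmptyDir uses the same probe as the GC walk above: Readdirnames(1)
// returns io.EOF if and only if the directory is empty.
func isEmptyDir(dir string) (bool, error) {
	f, err := os.Open(dir)
	if err != nil {
		return false, err
	}
	defer f.Close()
	_, err = f.Readdirnames(1)
	if err == io.EOF {
		return true, nil
	}
	return false, err
}

func main() {
	dir, err := ioutil.TempDir("", "gc-demo")
	if err != nil {
		panic(err)
	}
	defer os.RemoveAll(dir)
	empty, _ := isEmptyDir(dir)
	fmt.Println(empty) // true
}
```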
diff --git a/hugolib/rss_test.go b/hugolib/rss_test.go
index 268b13073..9b5130507 100644
--- a/hugolib/rss_test.go
+++ b/hugolib/rss_test.go
@@ -38,7 +38,7 @@ func TestRSSOutput(t *testing.T) {
cfg.Set("rssLimit", rssLimit)
for _, src := range weightedSources {
- writeSource(t, fs, filepath.Join("content", "sect", src.Name), string(src.Content))
+ writeSource(t, fs, filepath.Join("content", "sect", src[0]), src[1])
}
buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
diff --git a/hugolib/shortcode_test.go b/hugolib/shortcode_test.go
index 7672befc7..0b5c29e94 100644
--- a/hugolib/shortcode_test.go
+++ b/hugolib/shortcode_test.go
@@ -32,7 +32,6 @@ import (
"github.com/gohugoio/hugo/deps"
"github.com/gohugoio/hugo/helpers"
- "github.com/gohugoio/hugo/source"
"github.com/gohugoio/hugo/tpl"
"github.com/stretchr/testify/require"
)
@@ -530,10 +529,10 @@ tags:
"<p><strong>Tags:</strong> 2</p>\n"},
}
- sources := make([]source.ByteSource, len(tests))
+ sources := make([][2]string, len(tests))
for i, test := range tests {
- sources[i] = source.ByteSource{Name: filepath.FromSlash(test.contentPath), Content: []byte(test.content)}
+ sources[i] = [2]string{filepath.FromSlash(test.contentPath), test.content}
}
addTemplates := func(templ tpl.TemplateHandler) error {
diff --git a/hugolib/site.go b/hugolib/site.go
index 936584580..a5e2144e1 100644
--- a/hugolib/site.go
+++ b/hugolib/site.go
@@ -25,16 +25,18 @@ import (
"sort"
"strconv"
"strings"
- "sync"
"time"
+ "github.com/gohugoio/hugo/resource"
+
+ "golang.org/x/sync/errgroup"
+
"github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/media"
"github.com/markbates/inflect"
-
- "sync/atomic"
+ "golang.org/x/net/context"
"github.com/fsnotify/fsnotify"
bp "github.com/gohugoio/hugo/bufferpool"
@@ -81,7 +83,6 @@ type Site struct {
*PageCollections
- Files []*source.File
Taxonomies TaxonomyList
// Plural is what we get in the folder, so keep track of this mapping
@@ -93,7 +94,6 @@ type Site struct {
// is set.
taxonomiesOrigKey map[string]string
- Source source.Input
Sections Taxonomy
Info SiteInfo
Menus Menus
@@ -104,8 +104,9 @@ type Site struct {
draftCount int
futureCount int
expiredCount int
- Data map[string]interface{}
- Language *helpers.Language
+
+ Data map[string]interface{}
+ Language *helpers.Language
disabledKinds map[string]bool
@@ -131,14 +132,13 @@ type Site struct {
renderFormats output.Formats
// Logger etc.
- *deps.Deps `json:"-"`
+ *deps.Deps `json:"-"`
+ resourceSpec *resource.Spec
// The func used to title case titles.
titleFunc func(s string) string
relatedDocsHandler *relatedDocsHandler
-
- siteStats *siteStats
}
type siteRenderingContext struct {
@@ -161,11 +161,6 @@ func (s *Site) initRenderFormats() {
s.renderFormats = formats
}
-type siteStats struct {
- pageCount int
- pageCountRegular int
-}
-
func (s *Site) isEnabled(kind string) bool {
if kind == kindUnknown {
panic("Unknown kind")
@@ -183,6 +178,7 @@ func (s *Site) reset() *Site {
outputFormats: s.outputFormats,
outputFormatsConfig: s.outputFormatsConfig,
mediaTypesConfig: s.mediaTypesConfig,
+ resourceSpec: s.resourceSpec,
Language: s.Language,
owner: s.owner,
PageCollections: newPageCollections()}
@@ -342,20 +338,10 @@ func NewSiteForCfg(cfg deps.DepsCfg) (*Site, error) {
}
type SiteInfo struct {
- // atomic requires 64-bit alignment for struct field access
- // According to the docs, " The first word in a global variable or in an
- // allocated struct or slice can be relied upon to be 64-bit aligned."
- // Moving paginationPageCount to the top of this struct didn't do the
- // magic, maybe due to the way SiteInfo is embedded.
- // Adding the 4 byte padding below does the trick.
- _ [4]byte
- paginationPageCount uint64
-
Taxonomies TaxonomyList
Authors AuthorList
Social SiteSocial
*PageCollections
- Files *[]*source.File
Menus *Menus
Hugo *HugoInfo
Title string
@@ -385,6 +371,11 @@ type SiteInfo struct {
sectionPagesMenu string
}
+func (s *SiteInfo) Files() []source.File {
+ helpers.Deprecated(".Site", "Files", "", true)
+ return nil
+}
+
func (s *SiteInfo) String() string {
return fmt.Sprintf("Site(%q)", s.Title)
}
@@ -530,16 +521,8 @@ func (s *SiteInfo) RelRef(ref string, page *Page, options ...string) (string, er
return s.refLink(ref, page, true, outputFormat)
}
-func (s *SiteInfo) addToPaginationPageCount(cnt uint64) {
- atomic.AddUint64(&s.paginationPageCount, cnt)
-}
-
-type runmode struct {
- Watching bool
-}
-
func (s *Site) running() bool {
- return s.owner.runMode.Watching
+ return s.owner.running
}
func init() {
@@ -567,32 +550,105 @@ func (s *Site) RegisterMediaTypes() {
}
}
+func (s *Site) filterFileEvents(events []fsnotify.Event) []fsnotify.Event {
+ var filtered []fsnotify.Event
+ seen := make(map[fsnotify.Event]bool)
+
+ for _, ev := range events {
+ // Avoid processing the same event twice.
+ if seen[ev] {
+ continue
+ }
+ seen[ev] = true
+
+ if s.SourceSpec.IgnoreFile(ev.Name) {
+ continue
+ }
+
+ // Throw away any directories
+ isRegular, err := s.SourceSpec.IsRegularSourceFile(ev.Name)
+ if err != nil && os.IsNotExist(err) && (ev.Op&fsnotify.Remove == fsnotify.Remove || ev.Op&fsnotify.Rename == fsnotify.Rename) {
+ // Force keep of event
+ isRegular = true
+ }
+ if !isRegular {
+ continue
+ }
+
+ filtered = append(filtered, ev)
+ }
+
+ return filtered
+}
+
+func (s *Site) translateFileEvents(events []fsnotify.Event) []fsnotify.Event {
+ var filtered []fsnotify.Event
+
+ eventMap := make(map[string][]fsnotify.Event)
+
+ // We often get a Remove etc. followed by a Create, a Create followed by a Write.
+	// Remove the superfluous events to make the update logic simpler.
+ for _, ev := range events {
+ eventMap[ev.Name] = append(eventMap[ev.Name], ev)
+ }
+
+ for _, ev := range events {
+ mapped := eventMap[ev.Name]
+
+ // Keep one
+ found := false
+ var kept fsnotify.Event
+ for i, ev2 := range mapped {
+ if i == 0 {
+ kept = ev2
+ }
+
+ if ev2.Op&fsnotify.Write == fsnotify.Write {
+ kept = ev2
+ found = true
+ }
+
+ if !found && ev2.Op&fsnotify.Create == fsnotify.Create {
+ kept = ev2
+ }
+ }
+
+ filtered = append(filtered, kept)
+ }
+
+ return filtered
+}
+
// reBuild partially rebuilds a site given the filesystem events.
// It returns whether the content source was changed.
-func (s *Site) reProcess(events []fsnotify.Event) (whatChanged, error) {
+// TODO(bep) clean up/rewrite this method.
+func (s *Site) processPartial(events []fsnotify.Event) (whatChanged, error) {
+
+ events = s.filterFileEvents(events)
+ events = s.translateFileEvents(events)
+
s.Log.DEBUG.Printf("Rebuild for events %q", events)
+ h := s.owner
+
s.timerStep("initialize rebuild")
// First we need to determine what changed
- sourceChanged := []fsnotify.Event{}
- sourceReallyChanged := []fsnotify.Event{}
- tmplChanged := []fsnotify.Event{}
- dataChanged := []fsnotify.Event{}
- i18nChanged := []fsnotify.Event{}
- shortcodesChanged := make(map[string]bool)
- // prevent spamming the log on changes
- logger := helpers.NewDistinctFeedbackLogger()
- seen := make(map[fsnotify.Event]bool)
+ var (
+ sourceChanged = []fsnotify.Event{}
+ sourceReallyChanged = []fsnotify.Event{}
+ contentFilesChanged []string
+ tmplChanged = []fsnotify.Event{}
+ dataChanged = []fsnotify.Event{}
+ i18nChanged = []fsnotify.Event{}
+ shortcodesChanged = make(map[string]bool)
+
+ // prevent spamming the log on changes
+ logger = helpers.NewDistinctFeedbackLogger()
+ )
for _, ev := range events {
- // Avoid processing the same event twice.
- if seen[ev] {
- continue
- }
- seen[ev] = true
-
if s.isContentDirEvent(ev) {
logger.Println("Source changed", ev)
sourceChanged = append(sourceChanged, ev)
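`filterFileEvents` and `translateFileEvents` normalize the noisy bursts editors produce: duplicates and ignorable files are dropped, then the Remove/Create/Write sequence for each path collapses to one representative event, preferring Write. A simplified, self-contained sketch of the coalescing step (the selection rule is a close but not line-for-line match):

```go
package main

import "fmt"

type op int

const (
	create op = iota
	write
	remove
)

type event struct {
	name string
	op   op
}

// coalesce keeps one event per path: a Write if any occurred, else a
// Create, else the first event seen.
func coalesce(events []event) []event {
	byName := map[string]event{}
	var order []string
	for _, ev := range events {
		prev, seen := byName[ev.name]
		if !seen {
			byName[ev.name] = ev
			order = append(order, ev.name)
			continue
		}
		if ev.op == write && prev.op != write {
			byName[ev.name] = ev
		} else if ev.op == create && prev.op != write && prev.op != create {
			byName[ev.name] = ev
		}
	}
	out := make([]event, 0, len(order))
	for _, n := range order {
		out = append(out, byName[n])
	}
	return out
}

func main() {
	evs := []event{{"a.md", remove}, {"a.md", create}, {"a.md", write}, {"b.md", create}}
	fmt.Println(coalesce(evs)) // one event per path, Write preferred
}
```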
@@ -647,49 +703,11 @@ func (s *Site) reProcess(events []fsnotify.Event) (whatChanged, error) {
}
}
- // If a content file changes, we need to reload only it and re-render the entire site.
-
- // First step is to read the changed files and (re)place them in site.AllPages
- // This includes processing any meta-data for that content
-
- // The second step is to convert the content into HTML
- // This includes processing any shortcodes that may be present.
-
- // We do this in parallel... even though it's likely only one file at a time.
- // We need to process the reading prior to the conversion for each file, but
- // we can convert one file while another one is still reading.
- errs := make(chan error, 2)
- readResults := make(chan HandledResult)
- filechan := make(chan *source.File)
- convertResults := make(chan HandledResult)
- pageChan := make(chan *Page)
- fileConvChan := make(chan *source.File)
- coordinator := make(chan bool)
-
- wg := &sync.WaitGroup{}
- wg.Add(2)
- for i := 0; i < 2; i++ {
- go sourceReader(s, filechan, readResults, wg)
- }
-
- wg2 := &sync.WaitGroup{}
- wg2.Add(4)
- for i := 0; i < 2; i++ {
- go fileConverter(s, fileConvChan, convertResults, wg2)
- go pageConverter(pageChan, convertResults, wg2)
- }
-
- sp := source.NewSourceSpec(s.Cfg, s.Fs)
- fs := sp.NewFilesystem("")
-
for _, ev := range sourceChanged {
- // The incrementalReadCollator below will also make changes to the site's pages,
- // so we do this first to prevent races.
+ removed := false
+
if ev.Op&fsnotify.Remove == fsnotify.Remove {
- //remove the file & a create will follow
- path, _ := helpers.GetRelativePath(ev.Name, s.getContentDir(ev.Name))
- s.removePageByPathPrefix(path)
- continue
+ removed = true
}
// Some editors (Vim) sometimes issue only a Rename operation when writing an existing file
@@ -698,38 +716,16 @@ func (s *Site) reProcess(events []fsnotify.Event) (whatChanged, error) {
if ev.Op&fsnotify.Rename == fsnotify.Rename {
// If the file is still on disk, it's only been updated, if it's not, it's been moved
if ex, err := afero.Exists(s.Fs.Source, ev.Name); !ex || err != nil {
- path, _ := helpers.GetRelativePath(ev.Name, s.getContentDir(ev.Name))
- s.removePageByPath(path)
- continue
+ removed = true
}
}
+ if removed && isContentFile(ev.Name) {
+ path, _ := helpers.GetRelativePath(ev.Name, s.getContentDir(ev.Name))
- // ignore files shouldn't be proceed
- if fi, err := s.Fs.Source.Stat(ev.Name); err != nil {
- continue
- } else {
- if ok, err := fs.ShouldRead(ev.Name, fi); err != nil || !ok {
- continue
- }
+ h.removePageByPath(path)
}
sourceReallyChanged = append(sourceReallyChanged, ev)
- }
-
- go incrementalReadCollator(s, readResults, pageChan, fileConvChan, coordinator, errs)
- go converterCollator(convertResults, errs)
-
- for _, ev := range sourceReallyChanged {
-
- file, err := s.reReadFile(ev.Name)
-
- if err != nil {
- s.Log.ERROR.Println("Error reading file", ev.Name, ";", err)
- }
-
- if file != nil {
- filechan <- file
- }
}
@@ -740,39 +736,25 @@ func (s *Site) reProcess(events []fsnotify.Event) (whatChanged, error) {
// and then creates the shortcode on the file system.
// To handle these scenarios, we must do a full reprocessing of the
// pages that keeps a reference to the changed shortcode.
- pagesWithShortcode := s.findPagesByShortcode(shortcode)
+ pagesWithShortcode := h.findPagesByShortcode(shortcode)
for _, p := range pagesWithShortcode {
- p.rendered = false
- pageChan <- p
+ contentFilesChanged = append(contentFilesChanged, p.File.Filename())
}
}
- // we close the filechan as we have sent everything we want to send to it.
- // this will tell the sourceReaders to stop iterating on that channel
- close(filechan)
-
- // waiting for the sourceReaders to all finish
- wg.Wait()
- // Now closing readResults as this will tell the incrementalReadCollator to
- // stop iterating over that.
- close(readResults)
-
- // once readResults is finished it will close coordinator and move along
- <-coordinator
- // allow that routine to finish, then close page & fileconvchan as we've sent
- // everything to them we need to.
- close(pageChan)
- close(fileConvChan)
-
- wg2.Wait()
- close(convertResults)
+ if len(sourceReallyChanged) > 0 || len(contentFilesChanged) > 0 {
+ var filenamesChanged []string
+ for _, e := range sourceReallyChanged {
+ filenamesChanged = append(filenamesChanged, e.Name)
+ }
+ if len(contentFilesChanged) > 0 {
+ filenamesChanged = append(filenamesChanged, contentFilesChanged...)
+ }
- s.timerStep("read & convert pages from source")
+ filenamesChanged = helpers.UniqueStrings(filenamesChanged)
- for i := 0; i < 2; i++ {
- err := <-errs
- if err != nil {
- s.Log.ERROR.Println(err)
+ if err := s.readAndProcessContent(filenamesChanged...); err != nil {
+ return whatChanged{}, err
}
}
@@ -785,88 +767,111 @@ func (s *Site) reProcess(events []fsnotify.Event) (whatChanged, error) {
}
-func (s *Site) loadData(sources []source.Input) (err error) {
- s.Log.DEBUG.Printf("Load Data from %d source(s)", len(sources))
+func (s *Site) loadData(sourceDirs []string) (err error) {
+ s.Log.DEBUG.Printf("Load Data from %d source(s)", len(sourceDirs))
s.Data = make(map[string]interface{})
- var current map[string]interface{}
- for _, currentSource := range sources {
- for _, r := range currentSource.Files() {
- // Crawl in data tree to insert data
- current = s.Data
- for _, key := range strings.Split(r.Dir(), helpers.FilePathSeparator) {
- if key != "" {
- if _, ok := current[key]; !ok {
- current[key] = make(map[string]interface{})
- }
- current = current[key].(map[string]interface{})
- }
+ for _, sourceDir := range sourceDirs {
+ fs := s.SourceSpec.NewFilesystem(sourceDir)
+ for _, r := range fs.Files() {
+ if err := s.handleDataFile(r); err != nil {
+ return err
}
+ }
+ }
- data, err := s.readData(r)
- if err != nil {
- s.Log.WARN.Printf("Failed to read data from %s: %s", filepath.Join(r.Path(), r.LogicalName()), err)
- continue
- }
+ return
+}
- if data == nil {
- continue
- }
+func (s *Site) handleDataFile(r source.ReadableFile) error {
+ var current map[string]interface{}
- // Copy content from current to data when needed
- if _, ok := current[r.BaseFileName()]; ok {
- data := data.(map[string]interface{})
-
- for key, value := range current[r.BaseFileName()].(map[string]interface{}) {
- if _, override := data[key]; override {
- // filepath.Walk walks the files in lexical order, '/' comes before '.'
- // this warning could happen if
- // 1. A theme uses the same key; the main data folder wins
- // 2. A sub folder uses the same key: the sub folder wins
- s.Log.WARN.Printf("Data for key '%s' in path '%s' is overridden in subfolder", key, r.Path())
- }
- data[key] = value
- }
+ f, err := r.Open()
+ if err != nil {
+ return fmt.Errorf("Failed to open data file %q: %s", r.LogicalName(), err)
+ }
+ defer f.Close()
+
+ // Crawl in data tree to insert data
+ current = s.Data
+ for _, key := range strings.Split(r.Dir(), helpers.FilePathSeparator) {
+ if key != "" {
+ if _, ok := current[key]; !ok {
+ current[key] = make(map[string]interface{})
}
+ current = current[key].(map[string]interface{})
+ }
+ }
- // Insert data
- current[r.BaseFileName()] = data
+ data, err := s.readData(r)
+ if err != nil {
+ s.Log.WARN.Printf("Failed to read data from %s: %s", filepath.Join(r.Path(), r.LogicalName()), err)
+ return nil
+ }
+
+ if data == nil {
+ return nil
+ }
+
+ // Copy content from current to data when needed
+ if _, ok := current[r.BaseFileName()]; ok {
+ data := data.(map[string]interface{})
+
+ for key, value := range current[r.BaseFileName()].(map[string]interface{}) {
+ if _, override := data[key]; override {
+ // filepath.Walk walks the files in lexical order, '/' comes before '.'
+ // this warning could happen if
+ // 1. A theme uses the same key; the main data folder wins
+ // 2. A sub folder uses the same key: the sub folder wins
+ s.Log.WARN.Printf("Data for key '%s' in path '%s' is overridden in subfolder", key, r.Path())
+ }
+ data[key] = value
}
}
- return
+ // Insert data
+ current[r.BaseFileName()] = data
+
+ return nil
}
-func (s *Site) readData(f *source.File) (interface{}, error) {
+func (s *Site) readData(f source.ReadableFile) (interface{}, error) {
+ file, err := f.Open()
+ if err != nil {
+ return nil, err
+ }
+ defer file.Close()
+ content := helpers.ReaderToBytes(file)
+
switch f.Extension() {
case "yaml", "yml":
- return parser.HandleYAMLMetaData(f.Bytes())
+ return parser.HandleYAMLMetaData(content)
case "json":
- return parser.HandleJSONMetaData(f.Bytes())
+ return parser.HandleJSONMetaData(content)
case "toml":
- return parser.HandleTOMLMetaData(f.Bytes())
+ return parser.HandleTOMLMetaData(content)
default:
return nil, fmt.Errorf("Data not supported for extension '%s'", f.Extension())
}
}
func (s *Site) readDataFromSourceFS() error {
- sp := source.NewSourceSpec(s.Cfg, s.Fs)
- dataSources := make([]source.Input, 0, 2)
- dataSources = append(dataSources, sp.NewFilesystem(s.absDataDir()))
+ var dataSourceDirs []string
// have to be last - duplicate keys in earlier entries will win
themeDataDir, err := s.PathSpec.GetThemeDataDirPath()
if err == nil {
- dataSources = append(dataSources, sp.NewFilesystem(themeDataDir))
+ dataSourceDirs = []string{s.absDataDir(), themeDataDir}
+ } else {
+ dataSourceDirs = []string{s.absDataDir()}
+
}
- err = s.loadData(dataSources)
+ err = s.loadData(dataSourceDirs)
s.timerStep("load data")
return err
}
func (s *Site) process(config BuildCfg) (err error) {
- s.timerStep("Go initialization")
if err = s.initialize(); err != nil {
return
}
@@ -877,7 +882,13 @@ func (s *Site) process(config BuildCfg) (err error) {
}
s.timerStep("load i18n")
- return s.createPages()
+
+ if err := s.readAndProcessContent(); err != nil {
+ return err
+ }
+ s.timerStep("read and convert pages from source")
+
+ return err
}
@@ -967,19 +978,10 @@ func (s *Site) initialize() (err error) {
defer s.initializeSiteInfo()
s.Menus = Menus{}
- // May be supplied in tests.
- if s.Source != nil && len(s.Source.Files()) > 0 {
- s.Log.DEBUG.Println("initialize: Source is already set")
- return
- }
-
if err = s.checkDirectories(); err != nil {
return err
}
- sp := source.NewSourceSpec(s.Cfg, s.Fs)
- s.Source = sp.NewFilesystem(s.absContentDir())
-
return
}
@@ -1053,7 +1055,6 @@ func (s *Site) initializeSiteInfo() {
uglyURLs: s.Cfg.GetBool("uglyURLs"),
preserveTaxonomyNames: lang.GetBool("preserveTaxonomyNames"),
PageCollections: s.PageCollections,
- Files: &s.Files,
Menus: &s.Menus,
Params: params,
Permalinks: permalinks,
@@ -1144,7 +1145,7 @@ func (s *Site) getThemeLayoutDir(path string) string {
}
func (s *Site) absContentDir() string {
- return s.PathSpec.AbsPathify(s.Cfg.GetString("contentDir"))
+ return s.PathSpec.AbsPathify(s.PathSpec.ContentDir())
}
func (s *Site) isContentDirEvent(e fsnotify.Event) bool {
@@ -1190,241 +1191,86 @@ func (s *Site) checkDirectories() (err error) {
return
}
-// reReadFile resets file to be read from disk again
-func (s *Site) reReadFile(absFilePath string) (*source.File, error) {
- s.Log.INFO.Println("rereading", absFilePath)
- var file *source.File
-
- reader, err := source.NewLazyFileReader(s.Fs.Source, absFilePath)
- if err != nil {
- return nil, err
- }
-
- sp := source.NewSourceSpec(s.Cfg, s.Fs)
- file, err = sp.NewFileFromAbs(s.getContentDir(absFilePath), absFilePath, reader)
-
- if err != nil {
- return nil, err
- }
-
- return file, nil
+type contentCaptureResultHandler struct {
+ contentProcessors map[string]*siteContentProcessor
}
-func (s *Site) readPagesFromSource() chan error {
- if s.Source == nil {
- panic(fmt.Sprintf("s.Source not set %s", s.absContentDir()))
- }
-
- s.Log.DEBUG.Printf("Read %d pages from source", len(s.Source.Files()))
-
- errs := make(chan error)
- if len(s.Source.Files()) < 1 {
- close(errs)
- return errs
- }
-
- files := s.Source.Files()
- results := make(chan HandledResult)
- filechan := make(chan *source.File)
- wg := &sync.WaitGroup{}
- numWorkers := getGoMaxProcs() * 4
- wg.Add(numWorkers)
- for i := 0; i < numWorkers; i++ {
- go sourceReader(s, filechan, results, wg)
- }
-
- // we can only have exactly one result collator, since it makes changes that
- // must be synchronized.
- go readCollator(s, results, errs)
+func (c *contentCaptureResultHandler) handleSingles(fis ...*fileInfo) {
+ for _, fi := range fis {
+ // May be connected to a language (content files)
+ proc, found := c.contentProcessors[fi.Lang()]
+ if !found {
+ panic("proc not found")
+ }
+ proc.fileSinglesChan <- fi
-	for _, file := range files {
-		filechan <- file
-	}
-
-	close(filechan)
-	wg.Wait()
-	close(results)
-
-	return errs
-}
-
-func (s *Site) convertSource() chan error {
-	errs := make(chan error)
-	results := make(chan HandledResult)
-	pageChan := make(chan *Page)
-	fileConvChan := make(chan *source.File)
-	numWorkers := getGoMaxProcs() * 4
-	wg := &sync.WaitGroup{}
-
-	for i := 0; i < numWorkers; i++ {
-		wg.Add(2)
-		go fileConverter(s, fileConvChan, results, wg)
-		go pageConverter(pageChan, results, wg)
-	}
-
-	go converterCollator(results, errs)
-
-	for _, p := range s.rawAllPages {
-		if p.shouldBuild() {
-			pageChan <- p
-		}
-	}
-
-	for _, f := range s.Files {
-		fileConvChan <- f
-	}
-
-	close(pageChan)
-	close(fileConvChan)
-	wg.Wait()
-	close(results)
-
-	return errs
-}
-
-func (s *Site) createPages() error {
-	readErrs := <-s.readPagesFromSource()
-	s.timerStep("read pages from source")
-
-	renderErrs := <-s.convertSource()
-	s.timerStep("convert source")
-
-	if renderErrs == nil && readErrs == nil {
-		return nil
-	}
-	if renderErrs == nil {
-		return readErrs
-	}
-	if readErrs == nil {
-		return renderErrs
-	}
-
-	return fmt.Errorf("%s\n%s", readErrs, renderErrs)
-}
-
-func sourceReader(s *Site, files <-chan *source.File, results chan<- HandledResult, wg *sync.WaitGroup) {
-	defer wg.Done()
-	for file := range files {
-		readSourceFile(s, file, results)
-	}
-}
-
-func readSourceFile(s *Site, file *source.File, results chan<- HandledResult) {
-	h := NewMetaHandler(file.Extension())
-	if h != nil {
-		h.Read(file, s, results)
-	} else {
-		s.Log.ERROR.Println("Unsupported File Type", file.Path())
-	}
-}
-
-func pageConverter(pages <-chan *Page, results HandleResults, wg *sync.WaitGroup) {
-	defer wg.Done()
-	for page := range pages {
-		var h *MetaHandle
-		if page.Markup != "" {
-			h = NewMetaHandler(page.Markup)
-		} else {
-			h = NewMetaHandler(page.File.Extension())
-		}
-		if h != nil {
-			// Note that we convert pages from the site's rawAllPages collection,
-			// which may contain pages from multiple sites, so we use the Page's site
-			// for the conversion.
-			h.Convert(page, page.s, results)
-		}
-	}
-}
-
-func fileConverter(s *Site, files <-chan *source.File, results HandleResults, wg *sync.WaitGroup) {
-	defer wg.Done()
-	for file := range files {
-		h := NewMetaHandler(file.Extension())
-		if h != nil {
-			h.Convert(file, s, results)
-		}
-	}
-}
-
-func converterCollator(results <-chan HandledResult, errs chan<- error) {
-	errMsgs := []string{}
-	for r := range results {
-		if r.err != nil {
-			errMsgs = append(errMsgs, r.err.Error())
-			continue
-		}
-	}
-	if len(errMsgs) == 0 {
-		errs <- nil
-		return
-	}
-	errs <- fmt.Errorf("Errors rendering pages: %s", strings.Join(errMsgs, "\n"))
-}
-
-func (s *Site) replaceFile(sf *source.File) {
-	for i, f := range s.Files {
-		if f.Path() == sf.Path() {
-			s.Files[i] = sf
-			return
-		}
-	}
-
-	// If a match isn't found, then append it
-	s.Files = append(s.Files, sf)
-}
-
-func incrementalReadCollator(s *Site, results <-chan HandledResult, pageChan chan *Page, fileConvChan chan *source.File, coordinator chan bool, errs chan<- error) {
-	errMsgs := []string{}
-	for r := range results {
-		if r.err != nil {
-			errMsgs = append(errMsgs, r.Error())
-			continue
-		}
-
-		if r.page == nil {
-			s.replaceFile(r.file)
-			fileConvChan <- r.file
-		} else {
-			s.replacePage(r.page)
-			pageChan <- r.page
-		}
-	}
-
-	s.rawAllPages.Sort()
-	close(coordinator)
-
-	if len(errMsgs) == 0 {
-		errs <- nil
-		return
-	}
-	errs <- fmt.Errorf("Errors reading pages: %s", strings.Join(errMsgs, "\n"))
-}
-
-func readCollator(s *Site, results <-chan HandledResult, errs chan<- error) {
-	if s.PageCollections == nil {
-		panic("No page collections")
-	}
-	errMsgs := []string{}
-	for r := range results {
-		if r.err != nil {
-			errMsgs = append(errMsgs, r.Error())
-			continue
-		}
-
-		// !page == file
-		if r.page == nil {
-			s.Files = append(s.Files, r.file)
-		} else {
-			s.addPage(r.page)
-		}
-	}
-
-	s.rawAllPages.Sort()
-	if len(errMsgs) == 0 {
-		errs <- nil
-		return
-	}
-	errs <- fmt.Errorf("Errors reading pages: %s", strings.Join(errMsgs, "\n"))
-}
+func (c *contentCaptureResultHandler) handleBundles(d *bundleDirs) {
+	for _, b := range d.bundles {
+		lang := b.fi.Lang()
+		proc, found := c.contentProcessors[lang]
+		if !found {
+			panic("proc not found")
+		}
+		proc.fileBundlesChan <- b
+	}
+}
+
+func (c *contentCaptureResultHandler) handleCopyFiles(filenames ...string) {
+	for _, proc := range c.contentProcessors {
+		proc.fileAssetsChan <- filenames
+	}
+}
+
+func (s *Site) readAndProcessContent(filenames ...string) error {
+	ctx := context.Background()
+	g, ctx := errgroup.WithContext(ctx)
+
+	sourceSpec := source.NewSourceSpec(s.owner.Cfg, s.Fs)
+	baseDir := s.absContentDir()
+
+	contentProcessors := make(map[string]*siteContentProcessor)
+	sites := s.owner.langSite()
+	for k, v := range sites {
+		proc := newSiteContentProcessor(baseDir, len(filenames) > 0, v)
+		contentProcessors[k] = proc
+
+		g.Go(func() error {
+			return proc.process(ctx)
+		})
+	}
+
+	var (
+		handler   captureResultHandler
+		bundleMap *contentChangeMap
+	)
+
+	mainHandler := &contentCaptureResultHandler{contentProcessors: contentProcessors}
+
+	if s.running() {
+		// Need to track changes.
+		bundleMap = s.owner.ContentChanges
+		handler = &captureResultHandlerChain{handlers: []captureBundlesHandler{mainHandler, bundleMap}}
+	} else {
+		handler = mainHandler
+	}
+
+	c := newCapturer(s.Log, sourceSpec, handler, bundleMap, baseDir, filenames...)
+
+	if err := c.capture(); err != nil {
+		return err
+	}
+
+	for _, proc := range contentProcessors {
+		proc.closeInput()
+	}
+
+	return g.Wait()
+}
func (s *Site) buildSiteMeta() (err error) {
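The new `readAndProcessContent` replaces the hand-rolled channel/WaitGroup/collator plumbing deleted above with `golang.org/x/sync/errgroup`: one consumer goroutine per language site, a capture phase that feeds them, explicit input-channel closes, and a single `g.Wait()` that surfaces the first error. A minimal, self-contained sketch of that pattern (the `processor` type and item names are hypothetical stand-ins, not Hugo's actual types):

```go
package main

import (
	"context"
	"fmt"

	"golang.org/x/sync/errgroup"
)

// processor is a stand-in for siteContentProcessor:
// one per language, consuming work items from a channel.
type processor struct {
	lang  string
	items chan string
}

func (p *processor) process(ctx context.Context) error {
	for {
		select {
		case it, ok := <-p.items:
			if !ok {
				return nil // input closed, like proc.closeInput()
			}
			fmt.Println(p.lang, "handled", it)
		case <-ctx.Done():
			return ctx.Err()
		}
	}
}

func main() {
	g, ctx := errgroup.WithContext(context.Background())

	procs := map[string]*processor{
		"en": {lang: "en", items: make(chan string)},
		"nn": {lang: "nn", items: make(chan string)},
	}
	for _, p := range procs {
		p := p // pin the loop variable for the closure (pre-Go 1.22)
		g.Go(func() error { return p.process(ctx) })
	}

	// Capture phase: fan each discovered file out to every language processor.
	for _, f := range []string{"a.md", "b.md"} {
		for _, p := range procs {
			p.items <- f
		}
	}
	for _, p := range procs {
		close(p.items)
	}

	if err := g.Wait(); err != nil {
		fmt.Println("build failed:", err)
	}
}
```

A production version would also select on `ctx.Done()` when sending, so a processor that fails early cannot leave the capture phase blocked.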
@@ -1647,7 +1493,6 @@ func (s *Site) resetBuildState() {
// TODO(bep) get rid of this double
s.Info.PageCollections = s.PageCollections
- s.Info.paginationPageCount = 0
s.draftCount = 0
s.futureCount = 0
@@ -1661,6 +1506,10 @@ func (s *Site) resetBuildState() {
}
func (s *Site) kindFromSections(sections []string) string {
+ if len(sections) == 0 {
+ return KindSection
+ }
+
if _, isTaxonomy := s.Taxonomies[sections[0]]; isTaxonomy {
if len(sections) == 1 {
return KindTaxonomyTerm
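The added guard makes the empty-sections case explicit instead of indexing into an empty slice. For reference, a sketch of how the branches above map inputs to kinds, assuming "tags" is a configured taxonomy (illustration only, not from the patch):

```go
// kindFromSections(nil)                      == KindSection      // the new zero-length guard
// kindFromSections([]string{"tags"})         == KindTaxonomyTerm // taxonomy root
// kindFromSections([]string{"tags", "go"})   == KindTaxonomy     // a term's page
// kindFromSections([]string{"blog"})         == KindSection      // not a taxonomy
```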
@@ -1738,28 +1587,6 @@ func (s *Site) appendThemeTemplates(in []string) []string {
}
-// Stats prints Hugo builds stats to the console.
-// This is what you see after a successful hugo build.
-func (s *Site) Stats() {
- s.Log.FEEDBACK.Printf("\nBuilt site for language %s:\n", s.Language.Lang)
- s.Log.FEEDBACK.Println(s.draftStats())
- s.Log.FEEDBACK.Println(s.futureStats())
- s.Log.FEEDBACK.Println(s.expiredStats())
- s.Log.FEEDBACK.Printf("%d regular pages created\n", s.siteStats.pageCountRegular)
- s.Log.FEEDBACK.Printf("%d other pages created\n", (s.siteStats.pageCount - s.siteStats.pageCountRegular))
- s.Log.FEEDBACK.Printf("%d non-page files copied\n", len(s.Files))
- s.Log.FEEDBACK.Printf("%d paginator pages created\n", s.Info.paginationPageCount)
-
- if s.isEnabled(KindTaxonomy) {
- taxonomies := s.Language.GetStringMapString("taxonomies")
-
- for _, pl := range taxonomies {
- s.Log.FEEDBACK.Printf("%d %s created\n", len(s.Taxonomies[pl]), pl)
- }
- }
-
-}
-
// GetPage looks up a page of a given type in the path given.
// {{ with .Site.GetPage "section" "blog" }}{{ .Title }}{{ end }}
//
@@ -1783,23 +1610,15 @@ func (s *Site) permalinkForOutputFormat(link string, f output.Format) (string, e
} else {
baseURL = s.PathSpec.BaseURL.String()
}
- return s.permalinkForBaseURL(link, baseURL), nil
+ return s.PathSpec.PermalinkForBaseURL(link, baseURL), nil
}
func (s *Site) permalink(link string) string {
- return s.permalinkForBaseURL(link, s.PathSpec.BaseURL.String())
+ return s.PathSpec.PermalinkForBaseURL(link, s.PathSpec.BaseURL.String())
}
-func (s *Site) permalinkForBaseURL(link, baseURL string) string {
- link = strings.TrimPrefix(link, "/")
- if !strings.HasSuffix(baseURL, "/") {
- baseURL += "/"
- }
- return baseURL + link
-}
-
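The private `permalinkForBaseURL` helper is deleted in favour of a method on `PathSpec`; judging by the removed body, the logic itself is unchanged. A sketch of what the moved method presumably does (the package placement is an assumption):

```go
package helpers

import "strings"

// PermalinkForBaseURL joins a relative link onto a base URL with exactly one
// slash between them, e.g. ("/post/", "http://example.com") -> "http://example.com/post/".
func PermalinkForBaseURL(link, baseURL string) string {
	link = strings.TrimPrefix(link, "/")
	if !strings.HasSuffix(baseURL, "/") {
		baseURL += "/"
	}
	return baseURL + link
}
```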
-func (s *Site) renderAndWriteXML(name string, dest string, d interface{}, layouts ...string) error {
+func (s *Site) renderAndWriteXML(statCounter *uint64, name string, dest string, d interface{}, layouts ...string) error {
s.Log.DEBUG.Printf("Render XML for %q to %q", name, dest)
renderBuffer := bp.GetBuffer()
defer bp.PutBuffer(renderBuffer)
@@ -1829,11 +1648,11 @@ func (s *Site) renderAndWriteXML(name string, dest string, d interface{}, layout
return nil
}
- return s.publish(dest, outBuffer)
+ return s.publish(statCounter, dest, outBuffer)
}
-func (s *Site) renderAndWritePage(name string, dest string, p *PageOutput, layouts ...string) error {
+func (s *Site) renderAndWritePage(statCounter *uint64, name string, dest string, p *PageOutput, layouts ...string) error {
renderBuffer := bp.GetBuffer()
defer bp.PutBuffer(renderBuffer)
@@ -1888,7 +1707,7 @@ func (s *Site) renderAndWritePage(name string, dest string, p *PageOutput, layou
return nil
}
- return s.publish(dest, outBuffer)
+ return s.publish(statCounter, dest, outBuffer)
}
func (s *Site) renderForLayouts(name string, d interface{}, w io.Writer, layouts ...string) (err error) {
@@ -1915,7 +1734,15 @@ func (s *Site) renderForLayouts(name string, d interface{}, w io.Writer, layouts
if err = templ.Execute(w, d); err != nil {
// Behavior here should be dependent on if running in server or watch mode.
- helpers.DistinctErrorLog.Printf("Error while rendering %q: %s", name, err)
+ if p, ok := d.(*PageOutput); ok {
+ if p.File != nil {
+ helpers.DistinctErrorLog.Printf("Error while rendering %q in %q: %s", name, p.File.Dir(), err)
+ } else {
+ helpers.DistinctErrorLog.Printf("Error while rendering %q: %s", name, err)
+ }
+ } else {
+ helpers.DistinctErrorLog.Printf("Error while rendering %q: %s", name, err)
+ }
if !s.running() && !testMode {
// TODO(bep) check if this can be propagated
os.Exit(-1)
@@ -1936,8 +1763,11 @@ func (s *Site) findFirstTemplate(layouts ...string) tpl.Template {
return nil
}
-func (s *Site) publish(path string, r io.Reader) (err error) {
+func (s *Site) publish(statCounter *uint64, path string, r io.Reader) (err error) {
+ s.PathSpec.ProcessingStats.Incr(statCounter)
+
path = filepath.Join(s.absPublishDir(), path)
+
return helpers.WriteToDisk(path, r, s.Fs.Destination)
}
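`renderAndWriteXML`, `renderAndWritePage` and `publish` now thread a `*uint64` stat counter through to `ProcessingStats.Incr`, which is how the new build-summary table gets its numbers. The pointer signature suggests plain atomic counters; a minimal sketch under that assumption (type and field names here are invented, not Hugo's):

```go
package main

import (
	"fmt"
	"sync/atomic"
)

// stats holds ProcessingStats-style counters, safe for concurrent render workers.
type stats struct {
	Pages          uint64
	PaginatorPages uint64
	Sitemaps       uint64
}

// Incr atomically bumps whichever counter the caller points at.
func (s *stats) Incr(counter *uint64) {
	atomic.AddUint64(counter, 1)
}

func main() {
	s := &stats{}
	for i := 0; i < 3; i++ {
		s.Incr(&s.Pages) // what publish(&s.PathSpec.ProcessingStats.Pages, ...) boils down to
	}
	fmt.Println(atomic.LoadUint64(&s.Pages)) // 3
}
```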
@@ -2012,6 +1842,7 @@ func (s *Site) newNodePage(typ string, sections ...string) *Page {
language: s.Language,
pageInit: &pageInit{},
Kind: typ,
+ Source: Source{File: &source.FileInfo{}},
Data: make(map[string]interface{}),
Site: &s.Info,
sections: sections,
diff --git a/hugolib/siteJSONEncode_test.go b/hugolib/siteJSONEncode_test.go
index 9c83899fd..5bb6e52e8 100644
--- a/hugolib/siteJSONEncode_test.go
+++ b/hugolib/siteJSONEncode_test.go
@@ -29,11 +29,13 @@ func TestEncodePage(t *testing.T) {
t.Parallel()
cfg, fs := newTestCfg()
-	// borrowed from menu_test.go
-	for _, src := range menuPageSources {
-		writeSource(t, fs, filepath.Join("content", src.Name), string(src.Content))
-	}
+	writeSource(t, fs, filepath.Join("content", "page.md"), `---
+title: Simple
+---
+Summary text
+<!--more-->
+`)
s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
diff --git a/hugolib/site_render.go b/hugolib/site_render.go
index c14592c89..497433ff6 100644
--- a/hugolib/site_render.go
+++ b/hugolib/site_render.go
@@ -98,6 +98,26 @@ func pageRenderer(s *Site, pages <-chan *Page, results chan<- error, wg *sync.Wa
continue
}
+ // We only need to re-publish the resources if the output format is different
+ // from all of the previous (e.g. the "amp" use case).
+ shouldRender := i == 0
+ if i > 0 {
+				for j := i - 1; j >= 0; j-- {
+					if outFormat.Path != page.outputFormats[j].Path {
+						shouldRender = true
+					} else {
+						shouldRender = false
+						break
+					}
+ }
+ }
+
+ if shouldRender {
+ if err := pageOutput.renderResources(); err != nil {
+ s.Log.ERROR.Printf("Failed to render resources for page %q: %s", page, err)
+ continue
+ }
+ }
+
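Read forwards, the check above amounts to: publish this format's resources only if no earlier output format already published to the same path (so "amp" gets its own copies, while a second format sharing the HTML path does not). A runnable toy version of that predicate, with types and paths invented for illustration:

```go
package main

import "fmt"

type format struct{ Name, Path string }

func main() {
	outputFormats := []format{
		{"html", "/"},
		{"amp", "/amp/"},
		{"json", "/"}, // same publish path as "html"
	}
	for i, outFormat := range outputFormats {
		shouldRender := true
		for j := 0; j < i; j++ {
			if outFormat.Path == outputFormats[j].Path {
				shouldRender = false // an earlier format already wrote here
				break
			}
		}
		fmt.Println(outFormat.Name, "render resources:", shouldRender) // html true, amp true, json false
	}
}
```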
var layouts []string
if page.selfLayout != "" {
@@ -125,7 +145,7 @@ func pageRenderer(s *Site, pages <-chan *Page, results chan<- error, wg *sync.Wa
s.Log.DEBUG.Printf("Render %s to %q with layouts %q", pageOutput.Kind, targetPath, layouts)
- if err := s.renderAndWritePage("page "+pageOutput.FullFilePath(), targetPath, pageOutput, layouts...); err != nil {
+ if err := s.renderAndWritePage(&s.PathSpec.ProcessingStats.Pages, "page "+pageOutput.FullFilePath(), targetPath, pageOutput, layouts...); err != nil {
results <- err
}
@@ -191,6 +211,7 @@ func (s *Site) renderPaginator(p *PageOutput) error {
}
if err := s.renderAndWritePage(
+ &s.PathSpec.ProcessingStats.PaginatorPages,
pagerNode.Title,
targetPath, pagerNode, layouts...); err != nil {
return err
@@ -232,7 +253,7 @@ func (s *Site) renderRSS(p *PageOutput) error {
return err
}
- return s.renderAndWriteXML(p.Title,
+ return s.renderAndWriteXML(&s.PathSpec.ProcessingStats.Pages, p.Title,
targetPath, p, layouts...)
}
@@ -271,7 +292,7 @@ func (s *Site) render404() error {
s.Log.ERROR.Printf("Failed to create target path for page %q: %s", p, err)
}
- return s.renderAndWritePage("404 page", targetPath, pageOutput, s.appendThemeTemplates(nfLayouts)...)
+ return s.renderAndWritePage(&s.PathSpec.ProcessingStats.Pages, "404 page", targetPath, pageOutput, s.appendThemeTemplates(nfLayouts)...)
}
func (s *Site) renderSitemap() error {
@@ -325,7 +346,7 @@ func (s *Site) renderSitemap() error {
smLayouts := []string{"sitemap.xml", "_default/sitemap.xml", "_internal/_default/sitemap.xml"}
addLanguagePrefix := n.Site.IsMultiLingual()
- return s.renderAndWriteXML("sitemap",
+ return s.renderAndWriteXML(&s.PathSpec.ProcessingStats.Sitemaps, "sitemap",
n.addLangPathPrefixIfFlagSet(page.Sitemap.Filename, addLanguagePrefix), n, s.appendThemeTemplates(smLayouts)...)
}
@@ -357,7 +378,7 @@ func (s *Site) renderRobotsTXT() error {
return nil
}
- return s.publish("robots.txt", outBuffer)
+ return s.publish(&s.PathSpec.ProcessingStats.Pages, "robots.txt", outBuffer)
}
// renderAliases renders shell pages that simply have a redirect in the header.
diff --git a/hugolib/site_sections_test.go b/hugolib/site_sections_test.go
index 37752467b..8b5b37fcc 100644
--- a/hugolib/site_sections_test.go
+++ b/hugolib/site_sections_test.go
@@ -114,6 +114,7 @@ PAG|{{ .Title }}|{{ $sect.InSection . }}
cfg.Set("paginate", 2)
s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
+
require.Len(t, s.RegularPages, 21)
tests := []struct {
@@ -264,6 +265,8 @@ PAG|{{ .Title }}|{{ $sect.InSection . }}
}},
}
+ home := s.getPage(KindHome)
+
for _, test := range tests {
sections := strings.Split(test.sections, ",")
p := s.getPage(KindSection, sections...)
@@ -276,8 +279,6 @@ PAG|{{ .Title }}|{{ $sect.InSection . }}
test.verify(p)
}
- home := s.getPage(KindHome)
-
assert.NotNil(home)
assert.Len(home.Sections(), 9)
diff --git a/hugolib/site_stats_test.go b/hugolib/site_stats_test.go
new file mode 100644
index 000000000..522b5636b
--- /dev/null
+++ b/hugolib/site_stats_test.go
@@ -0,0 +1,101 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "bytes"
+ "fmt"
+ "io/ioutil"
+ "testing"
+
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/spf13/afero"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestSiteStats(t *testing.T) {
+ t.Parallel()
+
+ assert := require.New(t)
+
+ siteConfig := `
+baseURL = "http://example.com/blog"
+
+paginate = 1
+defaultContentLanguage = "nn"
+
+[languages]
+[languages.nn]
+languageName = "Nynorsk"
+weight = 1
+title = "Hugo på norsk"
+
+[languages.en]
+languageName = "English"
+weight = 2
+title = "Hugo in English"
+
+`
+
+ pageTemplate := `---
+title: "T%d"
+tags:
+%s
+categories:
+%s
+aliases: [Ali%d]
+---
+# Doc
+`
+
+ th, h := newTestSitesFromConfig(t, afero.NewMemMapFs(), siteConfig,
+ "layouts/_default/single.html", "Single|{{ .Title }}|{{ .Content }}",
+ "layouts/_default/list.html", `List|{{ .Title }}|Pages: {{ .Paginator.TotalPages }}|{{ .Content }}`,
+ "layouts/_default/terms.html", "Terms List|{{ .Title }}|{{ .Content }}",
+ )
+ require.Len(t, h.Sites, 2)
+
+ fs := th.Fs
+
+ for i := 0; i < 2; i++ {
+ for j := 0; j < 2; j++ {
+ pageID := i + j + 1
+ writeSource(t, fs, fmt.Sprintf("content/sect/p%d.md", pageID),
+ fmt.Sprintf(pageTemplate, pageID, fmt.Sprintf("- tag%d", j), fmt.Sprintf("- category%d", j), pageID))
+ }
+ }
+
+ for i := 0; i < 5; i++ {
+ writeSource(t, fs, fmt.Sprintf("content/assets/image%d.png", i+1), "image")
+ }
+
+ err := h.Build(BuildCfg{})
+
+ assert.NoError(err)
+
+ stats := []*helpers.ProcessingStats{
+ h.Sites[0].PathSpec.ProcessingStats,
+ h.Sites[1].PathSpec.ProcessingStats}
+
+ stats[0].Table(ioutil.Discard)
+ stats[1].Table(ioutil.Discard)
+
+ var buff bytes.Buffer
+
+ helpers.ProcessingStatsTable(&buff, stats...)
+
+ assert.Contains(buff.String(), "Pages | 19 | 6")
+
+}
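For orientation: `helpers.ProcessingStatsTable` writes one column per site, so the asserted fragment `Pages | 19 | 6` presumably means 19 pages were built for the default `nn` site and 6 for `en`, the split the multilingual build produces for the fixtures above.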
diff --git a/hugolib/site_test.go b/hugolib/site_test.go
index 7c5af6671..6c5c28c54 100644
--- a/hugolib/site_test.go
+++ b/hugolib/site_test.go
@@ -23,7 +23,6 @@ import (
jww "github.com/spf13/jwalterweatherman"
"github.com/gohugoio/hugo/helpers"
- "github.com/gohugoio/hugo/source"
"github.com/gohugoio/hugo/deps"
"github.com/gohugoio/hugo/hugofs"
@@ -74,11 +73,11 @@ func TestRenderWithInvalidTemplate(t *testing.T) {
func TestDraftAndFutureRender(t *testing.T) {
t.Parallel()
- sources := []source.ByteSource{
- {Name: filepath.FromSlash("sect/doc1.md"), Content: []byte("---\ntitle: doc1\ndraft: true\npublishdate: \"2414-05-29\"\n---\n# doc1\n*some content*")},
- {Name: filepath.FromSlash("sect/doc2.md"), Content: []byte("---\ntitle: doc2\ndraft: true\npublishdate: \"2012-05-29\"\n---\n# doc2\n*some content*")},
- {Name: filepath.FromSlash("sect/doc3.md"), Content: []byte("---\ntitle: doc3\ndraft: false\npublishdate: \"2414-05-29\"\n---\n# doc3\n*some content*")},
- {Name: filepath.FromSlash("sect/doc4.md"), Content: []byte("---\ntitle: doc4\ndraft: false\npublishdate: \"2012-05-29\"\n---\n# doc4\n*some content*")},
+ sources := [][2]string{
+ {filepath.FromSlash("sect/doc1.md"), "---\ntitle: doc1\ndraft: true\npublishdate: \"2414-05-29\"\n---\n# doc1\n*some content*"},
+ {filepath.FromSlash("sect/doc2.md"), "---\ntitle: doc2\ndraft: true\npublishdate: \"2012-05-29\"\n---\n# doc2\n*some content*"},
+ {filepath.FromSlash("sect/doc3.md"), "---\ntitle: doc3\ndraft: false\npublishdate: \"2414-05-29\"\n---\n# doc3\n*some content*"},
+ {filepath.FromSlash("sect/doc4.md"), "---\ntitle: doc4\ndraft: false\npublishdate: \"2012-05-29\"\n---\n# doc4\n*some content*"},
}
siteSetup := func(t *testing.T, configKeyValues ...interface{}) *Site {
@@ -91,7 +90,7 @@ func TestDraftAndFutureRender(t *testing.T) {
}
for _, src := range sources {
- writeSource(t, fs, filepath.Join("content", src.Name), string(src.Content))
+ writeSource(t, fs, filepath.Join("content", src[0]), src[1])
}
@@ -132,9 +131,9 @@ func TestDraftAndFutureRender(t *testing.T) {
func TestFutureExpirationRender(t *testing.T) {
t.Parallel()
- sources := []source.ByteSource{
- {Name: filepath.FromSlash("sect/doc3.md"), Content: []byte("---\ntitle: doc1\nexpirydate: \"2400-05-29\"\n---\n# doc1\n*some content*")},
- {Name: filepath.FromSlash("sect/doc4.md"), Content: []byte("---\ntitle: doc2\nexpirydate: \"2000-05-29\"\n---\n# doc2\n*some content*")},
+ sources := [][2]string{
+ {filepath.FromSlash("sect/doc3.md"), "---\ntitle: doc1\nexpirydate: \"2400-05-29\"\n---\n# doc1\n*some content*"},
+ {filepath.FromSlash("sect/doc4.md"), "---\ntitle: doc2\nexpirydate: \"2000-05-29\"\n---\n# doc2\n*some content*"},
}
siteSetup := func(t *testing.T) *Site {
@@ -142,7 +141,7 @@ func TestFutureExpirationRender(t *testing.T) {
cfg.Set("baseURL", "http://auth/bub")
for _, src := range sources {
- writeSource(t, fs, filepath.Join("content", src.Name), string(src.Content))
+ writeSource(t, fs, filepath.Join("content", src[0]), src[1])
}
@@ -234,29 +233,29 @@ func doTestCrossrefs(t *testing.T, relative, uglyURLs bool) {
doc3Slashed := filepath.FromSlash("/sect/doc3.md")
- sources := []source.ByteSource{
+ sources := [][2]string{
{
- Name: filepath.FromSlash("sect/doc1.md"),
- Content: []byte(fmt.Sprintf(`Ref 2: {{< %s "sect/doc2.md" >}}`, refShortcode)),
+ filepath.FromSlash("sect/doc1.md"),
+ fmt.Sprintf(`Ref 2: {{< %s "sect/doc2.md" >}}`, refShortcode),
},
	// Issue #1148: Make sure that no P-tags are added around shortcodes.
{
- Name: filepath.FromSlash("sect/doc2.md"),
- Content: []byte(fmt.Sprintf(`**Ref 1:**
+ filepath.FromSlash("sect/doc2.md"),
+ fmt.Sprintf(`**Ref 1:**
{{< %s "sect/doc1.md" >}}
-THE END.`, refShortcode)),
+THE END.`, refShortcode),
},
// Issue #1753: Should not add a trailing newline after shortcode.
{
- Name: filepath.FromSlash("sect/doc3.md"),
- Content: []byte(fmt.Sprintf(`**Ref 1:**{{< %s "sect/doc3.md" >}}.`, refShortcode)),
+ filepath.FromSlash("sect/doc3.md"),
+ fmt.Sprintf(`**Ref 1:**{{< %s "sect/doc3.md" >}}.`, refShortcode),
},
// Issue #3703
{
- Name: filepath.FromSlash("sect/doc4.md"),
- Content: []byte(fmt.Sprintf(`**Ref 1:**{{< %s "%s" >}}.`, refShortcode, doc3Slashed)),
+ filepath.FromSlash("sect/doc4.md"),
+ fmt.Sprintf(`**Ref 1:**{{< %s "%s" >}}.`, refShortcode, doc3Slashed),
},
}
@@ -267,7 +266,7 @@ THE END.`, refShortcode)),
cfg.Set("verbose", true)
for _, src := range sources {
- writeSource(t, fs, filepath.Join("content", src.Name), string(src.Content))
+ writeSource(t, fs, filepath.Join("content", src[0]), src[1])
}
s := buildSingleSite(
@@ -323,13 +322,13 @@ func doTestShouldAlwaysHaveUglyURLs(t *testing.T, uglyURLs bool) {
cfg.Set("uglyURLs", uglyURLs)
- sources := []source.ByteSource{
- {Name: filepath.FromSlash("sect/doc1.md"), Content: []byte("---\nmarkup: markdown\n---\n# title\nsome *content*")},
- {Name: filepath.FromSlash("sect/doc2.md"), Content: []byte("---\nurl: /ugly.html\nmarkup: markdown\n---\n# title\ndoc2 *content*")},
+ sources := [][2]string{
+ {filepath.FromSlash("sect/doc1.md"), "---\nmarkup: markdown\n---\n# title\nsome *content*"},
+ {filepath.FromSlash("sect/doc2.md"), "---\nurl: /ugly.html\nmarkup: markdown\n---\n# title\ndoc2 *content*"},
}
for _, src := range sources {
- writeSource(t, fs, filepath.Join("content", src.Name), string(src.Content))
+ writeSource(t, fs, filepath.Join("content", src[0]), src[1])
}
writeSource(t, fs, filepath.Join("layouts", "index.html"), "Home Sweet {{ if.IsHome }}Home{{ end }}.")
@@ -402,7 +401,9 @@ func TestSectionNaming(t *testing.T) {
for _, canonify := range []bool{true, false} {
for _, uglify := range []bool{true, false} {
for _, pluralize := range []bool{true, false} {
- doTestSectionNaming(t, canonify, uglify, pluralize)
+ t.Run(fmt.Sprintf("canonify=%t,uglify=%t,pluralize=%t", canonify, uglify, pluralize), func(t *testing.T) {
+ doTestSectionNaming(t, canonify, uglify, pluralize)
+ })
}
}
}
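Because each permutation now runs under `t.Run` with a descriptive name, a single failing combination can be reproduced in isolation, e.g. `go test -run 'TestSectionNaming/canonify=true,uglify=false,pluralize=true' ./hugolib` (standard go test behavior, not specific to this patch).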
@@ -418,12 +419,12 @@ func doTestSectionNaming(t *testing.T, canonify, uglify, pluralize bool) {
expectedPathSuffix = "/index.html"
}
- sources := []source.ByteSource{
- {Name: filepath.FromSlash("sect/doc1.html"), Content: []byte("doc1")},
+ sources := [][2]string{
+ {filepath.FromSlash("sect/doc1.html"), "doc1"},
// Add one more page to sect to make sure sect is picked in mainSections
- {Name: filepath.FromSlash("sect/sect.html"), Content: []byte("sect")},
- {Name: filepath.FromSlash("Fish and Chips/doc2.html"), Content: []byte("doc2")},
- {Name: filepath.FromSlash("ラーメン/doc3.html"), Content: []byte("doc3")},
+ {filepath.FromSlash("sect/sect.html"), "sect"},
+ {filepath.FromSlash("Fish and Chips/doc2.html"), "doc2"},
+ {filepath.FromSlash("ラーメン/doc3.html"), "doc3"},
}
cfg, fs := newTestCfg()
@@ -433,8 +434,8 @@ func doTestSectionNaming(t *testing.T, canonify, uglify, pluralize bool) {
cfg.Set("pluralizeListTitles", pluralize)
cfg.Set("canonifyURLs", canonify)
- for _, source := range sources {
- writeSource(t, fs, filepath.Join("content", source.Name), string(source.Content))
+ for _, src := range sources {
+ writeSource(t, fs, filepath.Join("content", src[0]), src[1])
}
writeSource(t, fs, filepath.Join("layouts", "_default/single.html"), "{{.Content}}")
@@ -472,17 +473,17 @@ func doTestSectionNaming(t *testing.T, canonify, uglify, pluralize bool) {
}
func TestSkipRender(t *testing.T) {
t.Parallel()
- sources := []source.ByteSource{
- {Name: filepath.FromSlash("sect/doc1.html"), Content: []byte("---\nmarkup: markdown\n---\n# title\nsome *content*")},
- {Name: filepath.FromSlash("sect/doc2.html"), Content: []byte("<!doctype html><html><body>more content</body></html>")},
- {Name: filepath.FromSlash("sect/doc3.md"), Content: []byte("# doc3\n*some* content")},
- {Name: filepath.FromSlash("sect/doc4.md"), Content: []byte("---\ntitle: doc4\n---\n# doc4\n*some content*")},
- {Name: filepath.FromSlash("sect/doc5.html"), Content: []byte("<!doctype html><html>{{ template \"head\" }}<body>body5</body></html>")},
- {Name: filepath.FromSlash("sect/doc6.html"), Content: []byte("<!doctype html><html>{{ template \"head_abs\" }}<body>body5</body></html>")},
- {Name: filepath.FromSlash("doc7.html"), Content: []byte("<html><body>doc7 content</body></html>")},
- {Name: filepath.FromSlash("sect/doc8.html"), Content: []byte("---\nmarkup: md\n---\n# title\nsome *content*")},
+ sources := [][2]string{
+ {filepath.FromSlash("sect/doc1.html"), "---\nmarkup: markdown\n---\n# title\nsome *content*"},
+ {filepath.FromSlash("sect/doc2.html"), "<!doctype html><html><body>more content</body></html>"},
+ {filepath.FromSlash("sect/doc3.md"), "# doc3\n*some* content"},
+ {filepath.FromSlash("sect/doc4.md"), "---\ntitle: doc4\n---\n# doc4\n*some content*"},
+ {filepath.FromSlash("sect/doc5.html"), "<!doctype html><html>{{ template \"head\" }}<body>body5</body></html>"},
+ {filepath.FromSlash("sect/doc6.html"), "<!doctype html><html>{{ template \"head_abs\" }}<body>body5</body></html>"},
+ {filepath.FromSlash("doc7.html"), "<html><body>doc7 content</body></html>"},
+ {filepath.FromSlash("sect/doc8.html"), "---\nmarkup: md\n---\n# title\nsome *content*"},
// Issue #3021
- {Name: filepath.FromSlash("doc9.html"), Content: []byte("<html><body>doc9: {{< myshortcode >}}</body></html>")},
+ {filepath.FromSlash("doc9.html"), "<html><body>doc9: {{< myshortcode >}}</body></html>"},
}
cfg, fs := newTestCfg()
@@ -493,7 +494,7 @@ func TestSkipRender(t *testing.T) {
cfg.Set("baseURL", "http://auth/bub")
for _, src := range sources {
- writeSource(t, fs, filepath.Join("content", src.Name), string(src.Content))
+ writeSource(t, fs, filepath.Join("content", src[0]), src[1])
}
@@ -535,9 +536,9 @@ func TestSkipRender(t *testing.T) {
func TestAbsURLify(t *testing.T) {
t.Parallel()
- sources := []source.ByteSource{
- {Name: filepath.FromSlash("sect/doc1.html"), Content: []byte("<!doctype html><html><head></head><body><a href=\"#frag1\">link</a></body></html>")},
- {Name: filepath.FromSlash("blue/doc2.html"), Content: []byte("---\nf: t\n---\n<!doctype html><html><body>more content</body></html>")},
+ sources := [][2]string{
+ {filepath.FromSlash("sect/doc1.html"), "<!doctype html><html><head></head><body><a href=\"#frag1\">link</a></body></html>"},
+ {filepath.FromSlash("blue/doc2.html"), "---\nf: t\n---\n<!doctype html><html><body>more content</body></html>"},
}
for _, baseURL := range []string{"http://auth/bub", "http://base", "//base"} {
for _, canonify := range []bool{true, false} {
@@ -549,7 +550,7 @@ func TestAbsURLify(t *testing.T) {
cfg.Set("baseURL", baseURL)
for _, src := range sources {
- writeSource(t, fs, filepath.Join("content", src.Name), string(src.Content))
+ writeSource(t, fs, filepath.Join("content", src[0]), src[1])
}
@@ -584,23 +585,23 @@ func TestAbsURLify(t *testing.T) {
}
}
-var weightedPage1 = []byte(`+++
+var weightedPage1 = `+++
weight = "2"
title = "One"
my_param = "foo"
my_date = 1979-05-27T07:32:00Z
+++
-Front Matter with Ordered Pages`)
+Front Matter with Ordered Pages`
-var weightedPage2 = []byte(`+++
+var weightedPage2 = `+++
weight = "6"
title = "Two"
publishdate = "2012-03-05"
my_param = "foo"
+++
-Front Matter with Ordered Pages 2`)
+Front Matter with Ordered Pages 2`
-var weightedPage3 = []byte(`+++
+var weightedPage3 = `+++
weight = "4"
title = "Three"
date = "2012-04-06"
@@ -609,9 +610,9 @@ my_param = "bar"
only_one = "yes"
my_date = 2010-05-27T07:32:00Z
+++
-Front Matter with Ordered Pages 3`)
+Front Matter with Ordered Pages 3`
-var weightedPage4 = []byte(`+++
+var weightedPage4 = `+++
weight = "4"
title = "Four"
date = "2012-01-01"
@@ -620,13 +621,13 @@ my_param = "baz"
my_date = 2010-05-27T07:32:00Z
categories = [ "hugo" ]
+++
-Front Matter with Ordered Pages 4. This is longer content`)
+Front Matter with Ordered Pages 4. This is longer content`
-var weightedSources = []source.ByteSource{
- {Name: filepath.FromSlash("sect/doc1.md"), Content: weightedPage1},
- {Name: filepath.FromSlash("sect/doc2.md"), Content: weightedPage2},
- {Name: filepath.FromSlash("sect/doc3.md"), Content: weightedPage3},
- {Name: filepath.FromSlash("sect/doc4.md"), Content: weightedPage4},
+var weightedSources = [][2]string{
+ {filepath.FromSlash("sect/doc1.md"), weightedPage1},
+ {filepath.FromSlash("sect/doc2.md"), weightedPage2},
+ {filepath.FromSlash("sect/doc3.md"), weightedPage3},
+ {filepath.FromSlash("sect/doc4.md"), weightedPage4},
}
func TestOrderedPages(t *testing.T) {
@@ -635,7 +636,7 @@ func TestOrderedPages(t *testing.T) {
cfg.Set("baseURL", "http://auth/bub")
for _, src := range weightedSources {
- writeSource(t, fs, filepath.Join("content", src.Name), string(src.Content))
+ writeSource(t, fs, filepath.Join("content", src[0]), src[1])
}
@@ -678,11 +679,11 @@ func TestOrderedPages(t *testing.T) {
}
}
-var groupedSources = []source.ByteSource{
- {Name: filepath.FromSlash("sect1/doc1.md"), Content: weightedPage1},
- {Name: filepath.FromSlash("sect1/doc2.md"), Content: weightedPage2},
- {Name: filepath.FromSlash("sect2/doc3.md"), Content: weightedPage3},
- {Name: filepath.FromSlash("sect3/doc4.md"), Content: weightedPage4},
+var groupedSources = [][2]string{
+ {filepath.FromSlash("sect1/doc1.md"), weightedPage1},
+ {filepath.FromSlash("sect1/doc2.md"), weightedPage2},
+ {filepath.FromSlash("sect2/doc3.md"), weightedPage3},
+ {filepath.FromSlash("sect3/doc4.md"), weightedPage4},
}
func TestGroupedPages(t *testing.T) {
@@ -822,16 +823,16 @@ func TestGroupedPages(t *testing.T) {
}
}
-var pageWithWeightedTaxonomies1 = []byte(`+++
+var pageWithWeightedTaxonomies1 = `+++
tags = [ "a", "b", "c" ]
tags_weight = 22
categories = ["d"]
title = "foo"
categories_weight = 44
+++
-Front Matter with weighted tags and categories`)
+Front Matter with weighted tags and categories`
-var pageWithWeightedTaxonomies2 = []byte(`+++
+var pageWithWeightedTaxonomies2 = `+++
tags = "a"
tags_weight = 33
title = "bar"
@@ -840,23 +841,23 @@ categories_weight = 11
alias = "spf13"
date = 1979-05-27T07:32:00Z
+++
-Front Matter with weighted tags and categories`)
+Front Matter with weighted tags and categories`
-var pageWithWeightedTaxonomies3 = []byte(`+++
+var pageWithWeightedTaxonomies3 = `+++
title = "bza"
categories = [ "e" ]
categories_weight = 11
alias = "spf13"
date = 2010-05-27T07:32:00Z
+++
-Front Matter with weighted tags and categories`)
+Front Matter with weighted tags and categories`
func TestWeightedTaxonomies(t *testing.T) {
t.Parallel()
- sources := []source.ByteSource{
- {Name: filepath.FromSlash("sect/doc1.md"), Content: pageWithWeightedTaxonomies2},
- {Name: filepath.FromSlash("sect/doc2.md"), Content: pageWithWeightedTaxonomies1},
- {Name: filepath.FromSlash("sect/doc3.md"), Content: pageWithWeightedTaxonomies3},
+ sources := [][2]string{
+ {filepath.FromSlash("sect/doc1.md"), pageWithWeightedTaxonomies2},
+ {filepath.FromSlash("sect/doc2.md"), pageWithWeightedTaxonomies1},
+ {filepath.FromSlash("sect/doc3.md"), pageWithWeightedTaxonomies3},
}
taxonomies := make(map[string]string)
@@ -884,39 +885,23 @@ func TestWeightedTaxonomies(t *testing.T) {
}
}
-func findPage(site *Site, f string) *Page {
- sp := source.NewSourceSpec(site.Cfg, site.Fs)
- currentPath := sp.NewFile(filepath.FromSlash(f))
- //t.Logf("looking for currentPath: %s", currentPath.Path())
-
- for _, page := range site.Pages {
- //t.Logf("page: %s", page.Source.Path())
- if page.Source.Path() == currentPath.Path() {
- return page
- }
- }
- return nil
-}
-
func setupLinkingMockSite(t *testing.T) *Site {
-	sources := []source.ByteSource{
-		{Name: filepath.FromSlash("level2/unique.md"), Content: []byte("")},
-		{Name: filepath.FromSlash("index.md"), Content: []byte("")},
-		{Name: filepath.FromSlash("rootfile.md"), Content: []byte("")},
-		{Name: filepath.FromSlash("root-image.png"), Content: []byte("")},
-		{Name: filepath.FromSlash("level2/2-root.md"), Content: []byte("")},
-		{Name: filepath.FromSlash("level2/index.md"), Content: []byte("")},
-		{Name: filepath.FromSlash("level2/common.md"), Content: []byte("")},
-		{Name: filepath.FromSlash("level2/2-image.png"), Content: []byte("")},
-		{Name: filepath.FromSlash("level2/common.png"), Content: []byte("")},
-		{Name: filepath.FromSlash("level2/level3/3-root.md"), Content: []byte("")},
-		{Name: filepath.FromSlash("level2/level3/index.md"), Content: []byte("")},
-		{Name: filepath.FromSlash("level2/level3/common.md"), Content: []byte("")},
-		{Name: filepath.FromSlash("level2/level3/3-image.png"), Content: []byte("")},
-		{Name: filepath.FromSlash("level2/level3/common.png"), Content: []byte("")},
+	sources := [][2]string{
+		{filepath.FromSlash("level2/unique.md"), ""},
+		{filepath.FromSlash("rootfile.md"), ""},
+		{filepath.FromSlash("root-image.png"), ""},
+		{filepath.FromSlash("level2/2-root.md"), ""},
+		{filepath.FromSlash("level2/common.md"), ""},
+		{filepath.FromSlash("level2/2-image.png"), ""},
+		{filepath.FromSlash("level2/common.png"), ""},
+		{filepath.FromSlash("level2/level3/start.md"), ""},
+		{filepath.FromSlash("level2/level3/3-root.md"), ""},
+		{filepath.FromSlash("level2/level3/common.md"), ""},
+		{filepath.FromSlash("level2/level3/3-image.png"), ""},
+		{filepath.FromSlash("level2/level3/common.png"), ""},
}
cfg, fs := newTestCfg()
@@ -939,7 +924,7 @@ func TestRefLinking(t *testing.T) {
t.Parallel()
site := setupLinkingMockSite(t)
- currentPage := findPage(site, "level2/level3/index.md")
+ currentPage := site.getPage(KindPage, "level2/level3/start.md")
if currentPage == nil {
t.Fatalf("failed to find current page in site")
}
@@ -953,8 +938,6 @@ func TestRefLinking(t *testing.T) {
{"unique.md", "", true, "/level2/unique/"},
{"level2/common.md", "", true, "/level2/common/"},
{"3-root.md", "", true, "/level2/level3/3-root/"},
- {"level2/level3/index.md", "amp", true, "/amp/level2/level3/"},
- {"level2/index.md", "amp", false, "http://auth/amp/level2/"},
} {
if out, err := site.Info.refLink(test.link, currentPage, test.relative, test.outputFormat); err != nil || out != test.expected {
t.Errorf("[%d] Expected %s to resolve to (%s), got (%s) - error: %s", i, test.link, test.expected, out, err)
diff --git a/hugolib/site_url_test.go b/hugolib/site_url_test.go
index 272c78c7e..479967673 100644
--- a/hugolib/site_url_test.go
+++ b/hugolib/site_url_test.go
@@ -20,7 +20,6 @@ import (
"html/template"
"github.com/gohugoio/hugo/deps"
- "github.com/gohugoio/hugo/source"
"github.com/stretchr/testify/require"
)
@@ -33,9 +32,9 @@ slug: slug-doc-2
slug doc 2 content
`
-var urlFakeSource = []source.ByteSource{
- {Name: filepath.FromSlash("content/blue/doc1.md"), Content: []byte(slugDoc1)},
- {Name: filepath.FromSlash("content/blue/doc2.md"), Content: []byte(slugDoc2)},
+var urlFakeSource = [][2]string{
+ {filepath.FromSlash("content/blue/doc1.md"), slugDoc1},
+ {filepath.FromSlash("content/blue/doc2.md"), slugDoc2},
}
// Issue #1105
diff --git a/hugolib/testhelpers_test.go b/hugolib/testhelpers_test.go
index 3db2d9d51..349c39ebc 100644
--- a/hugolib/testhelpers_test.go
+++ b/hugolib/testhelpers_test.go
@@ -14,7 +14,6 @@ import (
"github.com/spf13/afero"
"github.com/gohugoio/hugo/helpers"
- "github.com/gohugoio/hugo/source"
"github.com/gohugoio/hugo/tpl"
"github.com/spf13/viper"
@@ -169,6 +168,11 @@ func newDebugLogger() *jww.Notepad {
func newErrorLogger() *jww.Notepad {
return jww.NewNotepad(jww.LevelError, jww.LevelError, os.Stdout, ioutil.Discard, "", log.Ldate|log.Ltime)
}
+
+func newWarningLogger() *jww.Notepad {
+ return jww.NewNotepad(jww.LevelWarn, jww.LevelError, os.Stdout, ioutil.Discard, "", log.Ldate|log.Ltime)
+}
+
func createWithTemplateFromNameValues(additionalTemplates ...string) func(templ tpl.TemplateHandler) error {
return func(templ tpl.TemplateHandler) error {
@@ -203,9 +207,17 @@ func buildSingleSiteExpected(t testing.TB, expectBuildError bool, depsCfg deps.D
return h.Sites[0]
}
-func writeSourcesToSource(t *testing.T, base string, fs *hugofs.Fs, sources ...source.ByteSource) {
+func writeSourcesToSource(t *testing.T, base string, fs *hugofs.Fs, sources ...[2]string) {
for _, src := range sources {
- writeSource(t, fs, filepath.Join(base, src.Name), string(src.Content))
+ writeSource(t, fs, filepath.Join(base, src[0]), src[1])
+ }
+}
+
+func dumpPages(pages ...*Page) {
+ for i, p := range pages {
+ fmt.Printf("%d: Kind: %s Title: %-10s RelPermalink: %-10s Path: %-10s sections: %s Len Sections(): %d\n",
+ i+1,
+ p.Kind, p.Title, p.RelPermalink(), p.Path(), p.sections, len(p.Sections()))
}
}