Diffstat (limited to 'hugolib/site.go')
-rw-r--r--  hugolib/site.go  736
1 file changed, 211 insertions(+), 525 deletions(-)
diff --git a/hugolib/site.go b/hugolib/site.go
index c682eebc9..312f6b97f 100644
--- a/hugolib/site.go
+++ b/hugolib/site.go
@@ -1,4 +1,4 @@
-// Copyright 2019 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -19,18 +19,18 @@ import (
"io"
"mime"
"net/url"
- "path"
"path/filepath"
"runtime"
"sort"
"strings"
+ "sync"
"time"
"github.com/bep/logg"
- "github.com/gohugoio/hugo/common/herrors"
"github.com/gohugoio/hugo/common/htime"
"github.com/gohugoio/hugo/common/hugio"
"github.com/gohugoio/hugo/common/types"
+ "github.com/gohugoio/hugo/hugolib/doctree"
"golang.org/x/text/unicode/norm"
"github.com/gohugoio/hugo/common/paths"
@@ -41,11 +41,6 @@ import (
"github.com/gohugoio/hugo/markup/converter"
- "github.com/gohugoio/hugo/hugofs/files"
- hglob "github.com/gohugoio/hugo/hugofs/glob"
-
- "github.com/gohugoio/hugo/common/maps"
-
"github.com/gohugoio/hugo/common/text"
"github.com/gohugoio/hugo/publisher"
@@ -55,19 +50,14 @@ import (
"github.com/gohugoio/hugo/resources/kinds"
"github.com/gohugoio/hugo/resources/page"
- "github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/lazy"
"github.com/fsnotify/fsnotify"
bp "github.com/gohugoio/hugo/bufferpool"
- "github.com/gohugoio/hugo/deps"
"github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/navigation"
"github.com/gohugoio/hugo/output"
- "github.com/gohugoio/hugo/source"
"github.com/gohugoio/hugo/tpl"
-
- "github.com/spf13/afero"
)
func (s *Site) Taxonomies() page.TaxonomyList {
@@ -75,25 +65,32 @@ func (s *Site) Taxonomies() page.TaxonomyList {
return s.taxonomies
}
-type taxonomiesConfig map[string]string
+type (
+ taxonomiesConfig map[string]string
+ taxonomiesConfigValues struct {
+ views []viewName
+ viewsByTreeKey map[string]viewName
+ }
+)
-func (t taxonomiesConfig) Values() []viewName {
- var vals []viewName
+func (t taxonomiesConfig) Values() taxonomiesConfigValues {
+ var views []viewName
for k, v := range t {
- vals = append(vals, viewName{singular: k, plural: v})
+ views = append(views, viewName{singular: k, plural: v, pluralTreeKey: cleanTreeKey(v)})
}
- sort.Slice(vals, func(i, j int) bool {
- return vals[i].plural < vals[j].plural
+ sort.Slice(views, func(i, j int) bool {
+ return views[i].plural < views[j].plural
})
- return vals
-}
+ viewsByTreeKey := make(map[string]viewName)
+ for _, v := range views {
+ viewsByTreeKey[v.pluralTreeKey] = v
+ }
-type siteConfigHolder struct {
- sitemap config.SitemapConfig
- taxonomiesConfig taxonomiesConfig
- timeout time.Duration
- hasCJKLanguage bool
+ return taxonomiesConfigValues{
+ views: views,
+ viewsByTreeKey: viewsByTreeKey,
+ }
}
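Values() now returns both the sorted view list and a lookup map keyed by the plural tree key. A minimal, self-contained sketch of the same pattern; cleanKey is a hypothetical stand-in for Hugo's cleanTreeKey:

package main

import (
	"fmt"
	"sort"
	"strings"
)

type viewName struct {
	singular, plural, pluralTreeKey string
}

// cleanKey is a simplified stand-in for Hugo's cleanTreeKey normalization.
func cleanKey(s string) string {
	return "/" + strings.Trim(strings.ToLower(s), "/")
}

// values builds the sorted view list plus a lookup map keyed by tree key.
func values(taxonomies map[string]string) ([]viewName, map[string]viewName) {
	var views []viewName
	for singular, plural := range taxonomies {
		views = append(views, viewName{singular, plural, cleanKey(plural)})
	}
	// Map iteration order is random; sort by plural for determinism.
	sort.Slice(views, func(i, j int) bool { return views[i].plural < views[j].plural })

	byKey := make(map[string]viewName, len(views))
	for _, v := range views {
		byKey[v.pluralTreeKey] = v
	}
	return views, byKey
}

func main() {
	views, byKey := values(map[string]string{"tag": "tags", "category": "categories"})
	fmt.Println(views)                   // [{category categories /categories} {tag tags /tags}]
	fmt.Println(byKey["/tags"].singular) // tag
}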
// Lazily loaded site dependencies.
@@ -111,15 +108,6 @@ func (init *siteInit) Reset() {
init.taxonomies.Reset()
}
-func (s *Site) initInit(ctx context.Context, init *lazy.Init, pctx pageContext) bool {
- _, err := init.Do(ctx)
-
- if err != nil {
- s.h.FatalError(pctx.wrapError(err))
- }
- return err == nil
-}
-
func (s *Site) prepareInits() {
s.init = &siteInit{}
@@ -153,11 +141,6 @@ func (s *Site) prepareInits() {
})
s.init.prevNextInSection = init.Branch(func(context.Context) (any, error) {
- var sections page.Pages
- s.home.treeRef.m.collectSectionsRecursiveIncludingSelf(pageMapQuery{Prefix: s.home.treeRef.key}, func(n *contentNode) {
- sections = append(sections, n.p)
- })
-
setNextPrev := func(pas page.Pages) {
for i, p := range pas {
np, ok := p.(nextPrevInSectionProvider)
@@ -183,40 +166,35 @@ func (s *Site) prepareInits() {
}
}
- for _, sect := range sections {
- treeRef := sect.(treeRefProvider).getTreeRef()
-
- var pas page.Pages
- treeRef.m.collectPages(pageMapQuery{Prefix: treeRef.key + cmBranchSeparator}, func(c *contentNode) {
- pas = append(pas, c.p)
- })
- page.SortByDefault(pas)
+ sections := s.pageMap.getPagesInSection(
+ pageMapQueryPagesInSection{
+ pageMapQueryPagesBelowPath: pageMapQueryPagesBelowPath{
+ Path: "",
+ KeyPart: "sectionorhome",
+ Include: pagePredicates.KindSection.Or(pagePredicates.KindHome),
+ },
+ IncludeSelf: true,
+ Recursive: true,
+ },
+ )
- setNextPrev(pas)
+ for _, section := range sections {
+ setNextPrev(section.RegularPages())
}
- // The root section only goes one level down.
- treeRef := s.home.getTreeRef()
-
- var pas page.Pages
- treeRef.m.collectPages(pageMapQuery{Prefix: treeRef.key + cmBranchSeparator}, func(c *contentNode) {
- pas = append(pas, c.p)
- })
- page.SortByDefault(pas)
-
- setNextPrev(pas)
-
return nil, nil
})
s.init.menus = init.Branch(func(context.Context) (any, error) {
- s.assembleMenus()
- return nil, nil
+ err := s.assembleMenus()
+ return nil, err
})
- s.init.taxonomies = init.Branch(func(context.Context) (any, error) {
- err := s.pageMap.assembleTaxonomies()
- return nil, err
+ s.init.taxonomies = init.Branch(func(ctx context.Context) (any, error) {
+ if err := s.pageMap.CreateSiteTaxonomies(ctx); err != nil {
+ return nil, err
+ }
+ return s.taxonomies, nil
})
}
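The prev/next wiring now pulls the sections from the page map query above and links each section's regular pages. A reduced sketch of the linking step, assuming Hugo's default sort (newest first), so next points at the lower index:

package main

import "fmt"

type page struct {
	title      string
	next, prev *page
}

// setNextPrev links neighbours in an already-sorted page list; the ends stay nil.
func setNextPrev(pages []*page) {
	for i, p := range pages {
		if i > 0 {
			p.next = pages[i-1]
		}
		if i < len(pages)-1 {
			p.prev = pages[i+1]
		}
	}
}

func main() {
	pages := []*page{{title: "newest"}, {title: "middle"}, {title: "oldest"}}
	setNextPrev(pages)
	fmt.Println(pages[1].next.title, pages[1].prev.title) // newest oldest
}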
@@ -232,20 +210,25 @@ func (s *Site) Menus() navigation.Menus {
func (s *Site) initRenderFormats() {
formatSet := make(map[string]bool)
formats := output.Formats{}
- rssDisabled := !s.conf.IsKindEnabled("rss")
- s.pageMap.pageTrees.WalkRenderable(func(s string, n *contentNode) bool {
- for _, f := range n.p.m.configuredOutputFormats {
- if rssDisabled && f.Name == "rss" {
- // legacy
- continue
- }
- if !formatSet[f.Name] {
- formats = append(formats, f)
- formatSet[f.Name] = true
+
+ w := &doctree.NodeShiftTreeWalker[contentNodeI]{
+ Tree: s.pageMap.treePages,
+ Handle: func(key string, n contentNodeI, match doctree.DimensionFlag) (bool, error) {
+ if p, ok := n.(*pageState); ok {
+ for _, f := range p.m.configuredOutputFormats {
+ if !formatSet[f.Name] {
+ formats = append(formats, f)
+ formatSet[f.Name] = true
+ }
+ }
}
- }
- return false
- })
+ return false, nil
+ },
+ }
+
+ if err := w.Walk(context.TODO()); err != nil {
+ panic(err)
+ }
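initRenderFormats now walks the page tree via doctree.NodeShiftTreeWalker instead of the old contentNode walker, but the accumulation is the same set-plus-slice dedup. A stand-alone sketch of that accumulation, with format as a stand-in for output.Format:

package main

import "fmt"

type format struct{ Name string }

// collectFormats keeps the first occurrence of each format name, in walk order.
func collectFormats(perPage [][]format) []format {
	seen := make(map[string]bool)
	var formats []format
	for _, fs := range perPage {
		for _, f := range fs {
			if !seen[f.Name] {
				seen[f.Name] = true
				formats = append(formats, f)
			}
		}
	}
	return formats
}

func main() {
	fmt.Println(collectFormats([][]format{
		{{Name: "html"}, {Name: "rss"}},
		{{Name: "html"}, {Name: "json"}},
	})) // [{html} {rss} {json}]
}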
// Add the per kind configured output formats
for _, kind := range kinds.AllKindsInPages {
@@ -275,10 +258,6 @@ func (s *Site) Languages() langs.Languages {
return s.h.Configs.Languages
}
-func (s *Site) isEnabled(kind string) bool {
- return s.conf.IsKindEnabled(kind)
-}
-
type siteRefLinker struct {
s *Site
@@ -303,7 +282,7 @@ func (s siteRefLinker) logNotFound(ref, what string, p page.Page, position text.
} else if p == nil {
s.errorLogger.Logf("[%s] REF_NOT_FOUND: Ref %q: %s", s.s.Lang(), ref, what)
} else {
- s.errorLogger.Logf("[%s] REF_NOT_FOUND: Ref %q from page %q: %s", s.s.Lang(), ref, p.Pathc(), what)
+ s.errorLogger.Logf("[%s] REF_NOT_FOUND: Ref %q from page %q: %s", s.s.Lang(), ref, p.Path(), what)
}
}
@@ -391,8 +370,26 @@ func (s *Site) watching() bool {
}
type whatChanged struct {
- source bool
- files map[string]bool
+ mu sync.Mutex
+
+ contentChanged bool
+ identitySet identity.Identities
+}
+
+func (w *whatChanged) Add(ids ...identity.Identity) {
+ w.mu.Lock()
+ defer w.mu.Unlock()
+
+ for _, id := range ids {
+ w.identitySet[id] = true
+ }
+}
+
+func (w *whatChanged) Changes() []identity.Identity {
+ if w == nil || w.identitySet == nil {
+ return nil
+ }
+ return w.identitySet.AsSlice()
}
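whatChanged is now a mutex-guarded identity set rather than a pair of flags. A simplified, dependency-free sketch of that pattern, using string IDs in place of identity.Identity:

package main

import (
	"fmt"
	"sync"
)

// changeSet records which identities changed; Add may be called concurrently.
type changeSet struct {
	mu  sync.Mutex
	ids map[string]bool
}

func (c *changeSet) Add(ids ...string) {
	c.mu.Lock()
	defer c.mu.Unlock()
	if c.ids == nil {
		c.ids = make(map[string]bool)
	}
	for _, id := range ids {
		c.ids[id] = true
	}
}

// Changes returns a snapshot of the recorded identities.
func (c *changeSet) Changes() []string {
	if c == nil {
		return nil
	}
	c.mu.Lock()
	defer c.mu.Unlock()
	out := make([]string, 0, len(c.ids))
	for id := range c.ids {
		out = append(out, id)
	}
	return out
}

func main() {
	var c changeSet
	c.Add("/posts/first.md", "/posts/second.md")
	fmt.Println(len(c.Changes())) // 2
}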
// RegisterMediaTypes will register the Site's media types in the mime
@@ -405,10 +402,10 @@ func (s *Site) RegisterMediaTypes() {
}
}
-func (s *Site) filterFileEvents(events []fsnotify.Event) []fsnotify.Event {
- var filtered []fsnotify.Event
+func (h *HugoSites) fileEventsFilter(events []fsnotify.Event) []fsnotify.Event {
seen := make(map[fsnotify.Event]bool)
+ n := 0
for _, ev := range events {
// Avoid processing the same event twice.
if seen[ev] {
@@ -416,17 +413,7 @@ func (s *Site) filterFileEvents(events []fsnotify.Event) []fsnotify.Event {
}
seen[ev] = true
- if s.SourceSpec.IgnoreFile(ev.Name) {
- continue
- }
-
- // Throw away any directories
- isRegular, err := s.SourceSpec.IsRegularSourceFile(ev.Name)
- if err != nil && herrors.IsNotExist(err) && (ev.Op&fsnotify.Remove == fsnotify.Remove || ev.Op&fsnotify.Rename == fsnotify.Rename) {
- // Force keep of event
- isRegular = true
- }
- if !isRegular {
+ if h.SourceSpec.IgnoreFile(ev.Name) {
continue
}
@@ -434,23 +421,22 @@ func (s *Site) filterFileEvents(events []fsnotify.Event) []fsnotify.Event {
ev.Name = norm.NFC.String(ev.Name)
}
- filtered = append(filtered, ev)
+ events[n] = ev
+ n++
}
-
- return filtered
+ return events[:n]
}
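fileEventsFilter now filters in place instead of building a second slice: kept events are compacted to the front and the slice is re-sliced to the new length. The same idiom in isolation (fsnotify.Event is comparable, so it can key the seen map):

package main

import (
	"fmt"

	"github.com/fsnotify/fsnotify"
)

// dedupe drops duplicate events in place and returns the shortened slice.
func dedupe(events []fsnotify.Event) []fsnotify.Event {
	seen := make(map[fsnotify.Event]bool)
	n := 0
	for _, ev := range events {
		if seen[ev] {
			continue
		}
		seen[ev] = true
		events[n] = ev
		n++
	}
	return events[:n]
}

func main() {
	evs := []fsnotify.Event{
		{Name: "a.md", Op: fsnotify.Write},
		{Name: "a.md", Op: fsnotify.Write},
		{Name: "b.md", Op: fsnotify.Create},
	}
	fmt.Println(len(dedupe(evs))) // 2
}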
-func (s *Site) translateFileEvents(events []fsnotify.Event) []fsnotify.Event {
- var filtered []fsnotify.Event
-
+func (h *HugoSites) fileEventsTranslate(events []fsnotify.Event) []fsnotify.Event {
eventMap := make(map[string][]fsnotify.Event)
// We often get a Remove etc. followed by a Create, a Create followed by a Write.
- // Remove the superfluous events to mage the update logic simpler.
+ // Remove the superfluous events to make the update logic simpler.
for _, ev := range events {
eventMap[ev.Name] = append(eventMap[ev.Name], ev)
}
+ n := 0
for _, ev := range events {
mapped := eventMap[ev.Name]
@@ -472,236 +458,77 @@ func (s *Site) translateFileEvents(events []fsnotify.Event) []fsnotify.Event {
}
}
- filtered = append(filtered, kept)
+ events[n] = kept
+ n++
}
- return filtered
+ return events
}
-// reBuild partially rebuilds a site given the filesystem events.
-// It returns whatever the content source was changed.
-// TODO(bep) clean up/rewrite this method.
-func (s *Site) processPartial(config *BuildCfg, init func(config *BuildCfg) error, events []fsnotify.Event) error {
- events = s.filterFileEvents(events)
- events = s.translateFileEvents(events)
-
- changeIdentities := make(identity.Identities)
-
- s.Log.Debugf("Rebuild for events %q", events)
+func (h *HugoSites) fileEventsContentPaths(p []pathChange) []pathChange {
+ var bundles []pathChange
+ var dirs []pathChange
+ var regular []pathChange
- h := s.h
-
- // First we need to determine what changed
-
- var (
- sourceChanged = []fsnotify.Event{}
- sourceReallyChanged = []fsnotify.Event{}
- contentFilesChanged []string
-
- tmplChanged bool
- tmplAdded bool
- dataChanged bool
- i18nChanged bool
-
- sourceFilesChanged = make(map[string]bool)
- )
-
- var cacheBusters []func(string) bool
- bcfg := s.conf.Build
-
- for _, ev := range events {
- component, relFilename := s.BaseFs.MakePathRelative(ev.Name)
- if relFilename != "" {
- p := hglob.NormalizePath(path.Join(component, relFilename))
- g, err := bcfg.MatchCacheBuster(s.Log, p)
- if err == nil && g != nil {
- cacheBusters = append(cacheBusters, g)
- }
- }
-
- id, found := s.eventToIdentity(ev)
- if found {
- changeIdentities[id] = id
-
- switch id.Type {
- case files.ComponentFolderContent:
- s.Log.Println("Source changed", ev)
- sourceChanged = append(sourceChanged, ev)
- case files.ComponentFolderLayouts:
- tmplChanged = true
- if !s.Tmpl().HasTemplate(id.Path) {
- tmplAdded = true
- }
- if tmplAdded {
- s.Log.Println("Template added", ev)
- } else {
- s.Log.Println("Template changed", ev)
- }
-
- case files.ComponentFolderData:
- s.Log.Println("Data changed", ev)
- dataChanged = true
- case files.ComponentFolderI18n:
- s.Log.Println("i18n changed", ev)
- i18nChanged = true
-
- }
+ var others []pathChange
+ for _, p := range p {
+ if p.isDir {
+ dirs = append(dirs, p)
+ } else {
+ others = append(others, p)
}
}
- changed := &whatChanged{
- source: len(sourceChanged) > 0,
- files: sourceFilesChanged,
- }
-
- config.whatChanged = changed
-
- if err := init(config); err != nil {
- return err
- }
-
- var cacheBusterOr func(string) bool
- if len(cacheBusters) > 0 {
- cacheBusterOr = func(s string) bool {
- for _, cb := range cacheBusters {
- if cb(s) {
- return true
+ // Remove all files below dir.
+ if len(dirs) > 0 {
+ n := 0
+ for _, d := range dirs {
+ dir := d.p.Path() + "/"
+ for _, o := range others {
+ if !strings.HasPrefix(o.p.Path(), dir) {
+ others[n] = o
+ n++
}
}
- return false
- }
- }
-
- // These in memory resource caches will be rebuilt on demand.
- if len(cacheBusters) > 0 {
- s.h.ResourceSpec.ResourceCache.DeleteMatches(cacheBusterOr)
- }
-
- if tmplChanged || i18nChanged {
- s.h.init.Reset()
- var prototype *deps.Deps
- for i, s := range s.h.Sites {
- if err := s.Deps.Compile(prototype); err != nil {
- return err
- }
- if i == 0 {
- prototype = s.Deps
- }
- }
- }
-
- if dataChanged {
- s.h.init.data.Reset()
- }
-
- for _, ev := range sourceChanged {
- removed := false
-
- if ev.Op&fsnotify.Remove == fsnotify.Remove {
- removed = true
- }
-
- // Some editors (Vim) sometimes issue only a Rename operation when writing an existing file
- // Sometimes a rename operation means that file has been renamed other times it means
- // it's been updated
- if ev.Op&fsnotify.Rename == fsnotify.Rename {
- // If the file is still on disk, it's only been updated, if it's not, it's been moved
- if ex, err := afero.Exists(s.Fs.Source, ev.Name); !ex || err != nil {
- removed = true
- }
- }
-
- if removed && files.IsContentFile(ev.Name) {
- h.removePageByFilename(ev.Name)
- }
-
- sourceReallyChanged = append(sourceReallyChanged, ev)
- sourceFilesChanged[ev.Name] = true
- }
- if config.ErrRecovery || tmplAdded || dataChanged {
- h.resetPageState()
- } else {
- h.resetPageStateFromEvents(changeIdentities)
- }
-
- if len(sourceReallyChanged) > 0 || len(contentFilesChanged) > 0 {
- var filenamesChanged []string
- for _, e := range sourceReallyChanged {
- filenamesChanged = append(filenamesChanged, e.Name)
}
- if len(contentFilesChanged) > 0 {
- filenamesChanged = append(filenamesChanged, contentFilesChanged...)
- }
-
- filenamesChanged = helpers.UniqueStringsReuse(filenamesChanged)
-
- if err := s.readAndProcessContent(*config, filenamesChanged...); err != nil {
- return err
- }
-
+ others = others[:n]
}
- return nil
-}
-
-func (s *Site) process(config BuildCfg) (err error) {
- if err = s.readAndProcessContent(config); err != nil {
- err = fmt.Errorf("readAndProcessContent: %w", err)
- return
- }
- return err
-}
-
-func (s *Site) render(ctx *siteRenderContext) (err error) {
- if err := page.Clear(); err != nil {
- return err
- }
-
- if ctx.outIdx == 0 {
- // Note that even if disableAliases is set, the aliases themselves are
- // preserved on page. The motivation with this is to be able to generate
- // 301 redirects in a .htacess file and similar using a custom output format.
- if !s.conf.DisableAliases {
- // Aliases must be rendered before pages.
- // Some sites, Hugo docs included, have faulty alias definitions that point
- // to itself or another real page. These will be overwritten in the next
- // step.
- if err = s.renderAliases(); err != nil {
- return
- }
+ for _, p := range others {
+ if p.p.IsBundle() {
+ bundles = append(bundles, p)
+ } else {
+ regular = append(regular, p)
}
}
- if err = s.renderPages(ctx); err != nil {
- return
- }
-
- if ctx.outIdx == 0 {
- if err = s.renderSitemap(); err != nil {
- return
- }
+ // Remove any files below leaf bundles.
+ // Remove any files in the same folder as branch bundles.
+ var keepers []pathChange
- if ctx.multihost {
- if err = s.renderRobotsTXT(); err != nil {
- return
+ for _, o := range regular {
+ keep := true
+ for _, b := range bundles {
+ prefix := b.p.Base() + "/"
+ if b.p.IsLeafBundle() && strings.HasPrefix(o.p.Path(), prefix) {
+ keep = false
+ break
+ } else if b.p.IsBranchBundle() && o.p.Dir() == b.p.Dir() {
+ keep = false
+ break
}
}
- if err = s.render404(); err != nil {
- return
+ if keep {
+ keepers = append(keepers, o)
}
}
- if !ctx.renderSingletonPages() {
- return
- }
+ keepers = append(dirs, keepers...)
+ keepers = append(bundles, keepers...)
- if err = s.renderMainLanguageRedirect(); err != nil {
- return
- }
-
- return
+ return keepers
}
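fileEventsContentPaths collapses a change list so only the broadest changes survive: directories cover files below them, leaf bundles cover files inside them, and branch bundles cover siblings in the same folder. A simplified sketch over plain path strings, with the flags standing in for the pathChange/paths.Path methods:

package main

import (
	"fmt"
	"path"
	"strings"
)

type change struct {
	p        string // content path, e.g. "/posts/post/index.md"
	isDir    bool
	isLeaf   bool // leaf bundle (index.md)
	isBranch bool // branch bundle (_index.md)
}

// prune drops changes already covered by a broader one.
func prune(in []change) []change {
	var dirs, bundles, regular []change
	for _, c := range in {
		switch {
		case c.isDir:
			dirs = append(dirs, c)
		case c.isLeaf || c.isBranch:
			bundles = append(bundles, c)
		default:
			regular = append(regular, c)
		}
	}

	covered := func(c change) bool {
		for _, d := range dirs {
			if strings.HasPrefix(c.p, d.p+"/") {
				return true // below a changed directory
			}
		}
		for _, b := range bundles {
			if b.isLeaf && strings.HasPrefix(c.p, path.Dir(b.p)+"/") {
				return true // inside a leaf bundle
			}
			if b.isBranch && path.Dir(c.p) == path.Dir(b.p) {
				return true // sibling of a branch bundle
			}
		}
		return false
	}

	keepers := append(append([]change{}, dirs...), bundles...)
	for _, c := range regular {
		if !covered(c) {
			keepers = append(keepers, c)
		}
	}
	return keepers
}

func main() {
	out := prune([]change{
		{p: "/posts", isDir: true},
		{p: "/posts/a.md"}, // below /posts: dropped
		{p: "/docs/guide/index.md", isLeaf: true},
		{p: "/docs/guide/figure.png"}, // inside the leaf bundle: dropped
		{p: "/notes/b.md"},            // kept
	})
	fmt.Println(len(out)) // 3
}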
// HomeAbsURL is a convenience method giving the absolute URL to the home page.
@@ -723,47 +550,20 @@ func (s *Site) SitemapAbsURL() string {
return p
}
-func (s *Site) eventToIdentity(e fsnotify.Event) (identity.PathIdentity, bool) {
- for _, fs := range s.BaseFs.SourceFilesystems.FileSystems() {
- if p := fs.Path(e.Name); p != "" {
- return identity.NewPathIdentity(fs.Name, filepath.ToSlash(p)), true
- }
- }
- return identity.PathIdentity{}, false
-}
-
-func (s *Site) readAndProcessContent(buildConfig BuildCfg, filenames ...string) error {
- if s.Deps == nil {
- panic("nil deps on site")
- }
-
- sourceSpec := source.NewSourceSpec(s.PathSpec, buildConfig.ContentInclusionFilter, s.BaseFs.Content.Fs)
-
- proc := newPagesProcessor(s.h, sourceSpec)
-
- c := newPagesCollector(sourceSpec, s.h.getContentMaps(), s.Log, s.h.ContentChanges, proc, filenames...)
-
- if err := c.Collect(); err != nil {
- return err
- }
-
- return nil
-}
-
func (s *Site) createNodeMenuEntryURL(in string) string {
if !strings.HasPrefix(in, "/") {
return in
}
// make it match the nodes
menuEntryURL := in
- menuEntryURL = helpers.SanitizeURLKeepTrailingSlash(s.s.PathSpec.URLize(menuEntryURL))
+ menuEntryURL = s.s.PathSpec.URLize(menuEntryURL)
if !s.conf.CanonifyURLs {
menuEntryURL = paths.AddContextRoot(s.s.PathSpec.Cfg.BaseURL().String(), menuEntryURL)
}
return menuEntryURL
}
-func (s *Site) assembleMenus() {
+func (s *Site) assembleMenus() error {
s.menus = make(navigation.Menus)
type twoD struct {
@@ -775,14 +575,9 @@ func (s *Site) assembleMenus() {
// add menu entries from config to flat hash
for name, menu := range s.conf.Menus.Config {
for _, me := range menu {
- if types.IsNil(me.Page) {
- if me.PageRef != "" {
- // Try to resolve the page.
- p, _ := s.getPageNew(nil, me.PageRef)
- if !types.IsNil(p) {
- navigation.SetPageValues(me, p)
- }
- }
+ if types.IsNil(me.Page) && me.PageRef != "" {
+ // Try to resolve the page.
+ me.Page, _ = s.getPage(nil, me.PageRef)
}
// If the page is still nil, we must make sure that we have a URL that considers baseURL etc.
@@ -797,37 +592,32 @@ func (s *Site) assembleMenus() {
sectionPagesMenu := s.conf.SectionPagesMenu
if sectionPagesMenu != "" {
- s.pageMap.sections.Walk(func(s string, v any) bool {
- p := v.(*contentNode).p
- if p.IsHome() {
- return false
+ if err := s.pageMap.forEachPage(pagePredicates.ShouldListGlobal, func(p *pageState) (bool, error) {
+ if p.IsHome() || !p.m.shouldBeCheckedForMenuDefinitions() {
+ return false, nil
}
- // From Hugo 0.22 we have nested sections, but until we get a
- // feel of how that would work in this setting, let us keep
- // this menu for the top level only.
+ // The section pages menus are attached to the top level section.
id := p.Section()
if _, ok := flat[twoD{sectionPagesMenu, id}]; ok {
- return false
+ return false, nil
}
-
me := navigation.MenuEntry{
MenuConfig: navigation.MenuConfig{
Identifier: id,
Name: p.LinkTitle(),
Weight: p.Weight(),
},
+ Page: p,
}
navigation.SetPageValues(&me, p)
flat[twoD{sectionPagesMenu, me.KeyName()}] = &me
-
- return false
- })
+ return false, nil
+ }); err != nil {
+ return err
+ }
}
-
// Add menu entries provided by pages
- s.pageMap.pageTrees.WalkRenderable(func(ss string, n *contentNode) bool {
- p := n.p
-
+ if err := s.pageMap.forEachPage(pagePredicates.ShouldListGlobal, func(p *pageState) (bool, error) {
for name, me := range p.pageMenus.menus() {
if _, ok := flat[twoD{name, me.KeyName()}]; ok {
err := p.wrapError(fmt.Errorf("duplicate menu entry with identifier %q in menu %q", me.KeyName(), name))
@@ -836,9 +626,10 @@ func (s *Site) assembleMenus() {
}
flat[twoD{name, me.KeyName()}] = me
}
-
- return false
- })
+ return false, nil
+ }); err != nil {
+ return err
+ }
// Create Children Menus First
for _, e := range flat {
@@ -871,6 +662,8 @@ func (s *Site) assembleMenus() {
s.menus[menu.MenuName] = s.menus[menu.MenuName].Add(e)
}
}
+
+ return nil
}
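assembleMenus now reports errors from the page walk instead of returning nothing, and entries from config, section menus and pages all land in one flat map keyed by (menu name, entry identifier). A tiny sketch of that duplicate check; addEntry is a hypothetical helper, not Hugo's API:

package main

import "fmt"

type menuKey struct{ menu, id string }

// addEntry registers an entry and rejects a second definition of the same key.
func addEntry(flat map[menuKey]string, menu, id, title string) error {
	k := menuKey{menu, id}
	if _, dup := flat[k]; dup {
		return fmt.Errorf("duplicate menu entry with identifier %q in menu %q", id, menu)
	}
	flat[k] = title
	return nil
}

func main() {
	flat := make(map[menuKey]string)
	fmt.Println(addEntry(flat, "main", "docs", "Documentation")) // <nil>
	fmt.Println(addEntry(flat, "main", "docs", "Docs again"))    // duplicate menu entry ...
}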
// get any language code to prefix the target file path with.
@@ -893,39 +686,12 @@ func (s *Site) getLanguagePermalinkLang(alwaysInSubDir bool) string {
}
return s.GetLanguagePrefix()
-
-}
-
-func (s *Site) getTaxonomyKey(key string) string {
- if s.conf.DisablePathToLower {
- return s.PathSpec.MakePath(key)
- }
- return strings.ToLower(s.PathSpec.MakePath(key))
}
// Prepare site for a new full build.
func (s *Site) resetBuildState(sourceChanged bool) {
s.relatedDocsHandler = s.relatedDocsHandler.Clone()
s.init.Reset()
-
- if sourceChanged {
- s.pageMap.contentMap.pageReverseIndex.Reset()
- s.PageCollections = newPageCollections(s.pageMap)
- s.pageMap.withEveryBundlePage(func(p *pageState) bool {
- p.pagePages = &pagePages{}
- if p.bucket != nil {
- p.bucket.pagesMapBucketPages = &pagesMapBucketPages{}
- }
- p.parent = nil
- p.Scratcher = maps.NewScratcher()
- return false
- })
- } else {
- s.pageMap.withEveryBundlePage(func(p *pageState) bool {
- p.Scratcher = maps.NewScratcher()
- return false
- })
- }
}
func (s *Site) errorCollator(results <-chan error, errs chan<- error) {
@@ -947,7 +713,7 @@ func (s *Site) errorCollator(results <-chan error, errs chan<- error) {
// as possible for existing sites. Most sites will use {{ .Site.GetPage "section" "my/section" }},
// i.e. 2 arguments, so we test for that.
func (s *Site) GetPage(ref ...string) (page.Page, error) {
- p, err := s.s.getPageOldVersion(ref...)
+ p, err := s.s.getPageForRefs(ref...)
if p == nil {
// The nil struct has meaning in some situations, mostly to avoid breaking
@@ -959,22 +725,6 @@ func (s *Site) GetPage(ref ...string) (page.Page, error) {
return p, err
}
-func (s *Site) GetPageWithTemplateInfo(info tpl.Info, ref ...string) (page.Page, error) {
- p, err := s.GetPage(ref...)
- if p != nil {
- // Track pages referenced by templates/shortcodes
- // when in server mode.
- if im, ok := info.(identity.Manager); ok {
- im.Add(p)
- }
- }
- return p, err
-}
-
-func (s *Site) permalink(link string) string {
- return s.PathSpec.PermalinkForBaseURL(link, s.PathSpec.Cfg.BaseURL().String())
-}
-
func (s *Site) absURLPath(targetPath string) string {
var path string
if s.conf.RelativeURLs {
@@ -990,46 +740,23 @@ func (s *Site) absURLPath(targetPath string) string {
return path
}
-func (s *Site) lookupLayouts(layouts ...string) tpl.Template {
- for _, l := range layouts {
- if templ, found := s.Tmpl().Lookup(l); found {
- return templ
- }
- }
-
- return nil
-}
-
-func (s *Site) renderAndWriteXML(ctx context.Context, statCounter *uint64, name string, targetPath string, d any, templ tpl.Template) error {
- renderBuffer := bp.GetBuffer()
- defer bp.PutBuffer(renderBuffer)
-
- if err := s.renderForTemplate(ctx, name, "", d, renderBuffer, templ); err != nil {
- return err
- }
-
- pd := publisher.Descriptor{
- Src: renderBuffer,
- TargetPath: targetPath,
- StatCounter: statCounter,
- // For the minification part of XML,
- // we currently only use the MIME type.
- OutputFormat: output.RSSFormat,
- AbsURLPath: s.absURLPath(targetPath),
- }
-
- return s.publisher.Publish(pd)
-}
+const (
+ pageDependencyScopeDefault int = iota
+ pageDependencyScopeGlobal
+)
-func (s *Site) renderAndWritePage(statCounter *uint64, name string, targetPath string, p *pageState, templ tpl.Template) error {
- s.h.IncrPageRender()
+func (s *Site) renderAndWritePage(statCounter *uint64, name string, targetPath string, p *pageState, d any, templ tpl.Template) error {
+ s.h.buildCounters.pageRenderCounter.Add(1)
renderBuffer := bp.GetBuffer()
defer bp.PutBuffer(renderBuffer)
of := p.outputFormat()
- ctx := tpl.SetPageInContext(context.Background(), p)
+ p.incrRenderState()
+
+ ctx := tpl.Context.Page.Set(context.Background(), p)
+ ctx = tpl.Context.DependencyManagerScopedProvider.Set(ctx, p)
- if err := s.renderForTemplate(ctx, p.Kind(), of.Name, p, renderBuffer, templ); err != nil {
+ if err := s.renderForTemplate(ctx, p.Kind(), of.Name, d, renderBuffer, templ); err != nil {
return err
}
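renderAndWritePage keeps the render-into-a-pooled-buffer flow but now takes the template data separately and tags the context with the page and its dependency-manager scope. The buffer-pool part of that flow, reduced to a dependency-free sketch:

package main

import (
	"bytes"
	"fmt"
	"io"
	"strings"
	"sync"
)

var bufPool = sync.Pool{New: func() any { return new(bytes.Buffer) }}

// renderToBuffer executes a render function into a pooled buffer and hands the
// result to publish, avoiding one buffer allocation per rendered page.
func renderToBuffer(render func(io.Writer) error, publish func(io.Reader) error) error {
	b := bufPool.Get().(*bytes.Buffer)
	b.Reset()
	defer bufPool.Put(b)

	if err := render(b); err != nil {
		return err
	}
	return publish(b)
}

func main() {
	err := renderToBuffer(
		func(w io.Writer) error { _, err := io.WriteString(w, "<p>hi</p>"); return err },
		func(r io.Reader) error {
			var sb strings.Builder
			_, err := io.Copy(&sb, r)
			fmt.Println(sb.String())
			return err
		},
	)
	fmt.Println(err)
}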
@@ -1078,7 +805,6 @@ var infoOnMissingLayout = map[string]bool{
// where ITEM is the thing being hooked.
type hookRendererTemplate struct {
templateHandler tpl.TemplateHandler
- identity.SearchProvider
templ tpl.Template
resolvePosition func(ctx any) text.Position
}
@@ -1119,92 +845,17 @@ func (s *Site) renderForTemplate(ctx context.Context, name, outputFormat string,
return
}
-func (s *Site) lookupTemplate(layouts ...string) (tpl.Template, bool) {
- for _, l := range layouts {
- if templ, found := s.Tmpl().Lookup(l); found {
- return templ, true
- }
- }
-
- return nil, false
-}
-
-func (s *Site) publish(statCounter *uint64, path string, r io.Reader, fs afero.Fs) (err error) {
- s.PathSpec.ProcessingStats.Incr(statCounter)
-
- return helpers.WriteToDisk(filepath.Clean(path), r, fs)
-}
-
-func (s *Site) kindFromFileInfoOrSections(fi *fileInfo, sections []string) string {
- if fi.TranslationBaseName() == "_index" {
- if fi.Dir() == "" {
- return kinds.KindHome
- }
-
- return s.kindFromSections(sections)
-
- }
-
- return kinds.KindPage
-}
-
-func (s *Site) kindFromSections(sections []string) string {
- if len(sections) == 0 {
- return kinds.KindHome
- }
-
- return s.kindFromSectionPath(path.Join(sections...))
-}
-
-func (s *Site) kindFromSectionPath(sectionPath string) string {
- var taxonomiesConfig taxonomiesConfig = s.conf.Taxonomies
- for _, plural := range taxonomiesConfig {
- if plural == sectionPath {
- return kinds.KindTaxonomy
- }
-
- if strings.HasPrefix(sectionPath, plural) {
- return kinds.KindTerm
- }
-
- }
-
- return kinds.KindSection
-}
-
-func (s *Site) newPage(
- n *contentNode,
- parentbBucket *pagesMapBucket,
- kind, title string,
- sections ...string) *pageState {
- m := map[string]any{}
- if title != "" {
- m["title"] = title
- }
-
- p, err := newPageFromMeta(
- n,
- parentbBucket,
- m,
- &pageMeta{
- s: s,
- kind: kind,
- sections: sections,
- })
- if err != nil {
- panic(err)
- }
-
- return p
-}
-
func (s *Site) shouldBuild(p page.Page) bool {
+ if !s.conf.IsKindEnabled(p.Kind()) {
+ return false
+ }
return shouldBuild(s.Conf.BuildFuture(), s.Conf.BuildExpired(),
s.Conf.BuildDrafts(), p.Draft(), p.PublishDate(), p.ExpiryDate())
}
func shouldBuild(buildFuture bool, buildExpired bool, buildDrafts bool, Draft bool,
- publishDate time.Time, expiryDate time.Time) bool {
+ publishDate time.Time, expiryDate time.Time,
+) bool {
if !(buildDrafts || !Draft) {
return false
}
@@ -1217,3 +868,38 @@ func shouldBuild(buildFuture bool, buildExpired bool, buildDrafts bool, Draft bo
}
return true
}
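shouldBuild now also consults the configured page kinds via IsKindEnabled before the draft and date checks. The date-window part, restated as a self-contained sketch; this is an approximation, not Hugo's exact implementation, whose body is largely unchanged and elided from this diff:

package main

import (
	"fmt"
	"time"
)

// shouldBuild reports whether a page falls inside the publish window.
func shouldBuild(buildFuture, buildExpired, buildDrafts, draft bool, publish, expiry time.Time) bool {
	if draft && !buildDrafts {
		return false
	}
	now := time.Now()
	if !buildFuture && !publish.IsZero() && publish.After(now) {
		return false
	}
	if !buildExpired && !expiry.IsZero() && expiry.Before(now) {
		return false
	}
	return true
}

func main() {
	future := time.Now().Add(24 * time.Hour)
	fmt.Println(shouldBuild(false, false, false, false, future, time.Time{})) // false: publishes tomorrow
	fmt.Println(shouldBuild(true, false, false, false, future, time.Time{}))  // true: --buildFuture set
}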
+
+func (s *Site) render(ctx *siteRenderContext) (err error) {
+ if err := page.Clear(); err != nil {
+ return err
+ }
+
+ if ctx.outIdx == 0 {
+ // Note that even if disableAliases is set, the aliases themselves are
+ // preserved on page. The motivation with this is to be able to generate
+ // 301 redirects in a .htaccess file and similar using a custom output format.
+ if !s.conf.DisableAliases {
+ // Aliases must be rendered before pages.
+ // Some sites, Hugo docs included, have faulty alias definitions that point
+ // to themselves or to another real page. These will be overwritten in the next
+ // step.
+ if err = s.renderAliases(); err != nil {
+ return
+ }
+ }
+ }
+
+ if err = s.renderPages(ctx); err != nil {
+ return
+ }
+
+ if !ctx.shouldRenderStandalonePage("") {
+ return
+ }
+
+ if err = s.renderMainLanguageRedirect(); err != nil {
+ return
+ }
+
+ return
+}