diff options
437 files changed, 18891 insertions, 17971 deletions
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 3a76b768c..1fd70492e 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -87,6 +87,12 @@ jobs: curl -LJO "https://github.com/sass/dart-sass/releases/download/${env:SASS_VERSION}/dart-sass-${env:SASS_VERSION}-windows-x64.zip"; Expand-Archive -Path "dart-sass-${env:SASS_VERSION}-windows-x64.zip" -DestinationPath .; echo "$env:GITHUB_WORKSPACE/dart-sass/" | Out-File -FilePath $Env:GITHUB_PATH -Encoding utf-8 -Append + - if: matrix.os == 'ubuntu-latest' + name: Install staticcheck + run: go install honnef.co/go/tools/cmd/staticcheck@latest + - if: matrix.os == 'ubuntu-latest' + name: Run staticcheck + run: staticcheck ./... - if: matrix.os != 'windows-latest' name: Check run: | diff --git a/cache/dynacache/dynacache.go b/cache/dynacache/dynacache.go new file mode 100644 index 000000000..bb3f7b098 --- /dev/null +++ b/cache/dynacache/dynacache.go @@ -0,0 +1,550 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package dynacache + +import ( + "context" + "fmt" + "math" + "path" + "regexp" + "runtime" + "sync" + "time" + + "github.com/bep/lazycache" + "github.com/bep/logg" + "github.com/gohugoio/hugo/common/herrors" + "github.com/gohugoio/hugo/common/loggers" + "github.com/gohugoio/hugo/common/paths" + "github.com/gohugoio/hugo/common/rungroup" + "github.com/gohugoio/hugo/config" + "github.com/gohugoio/hugo/helpers" + "github.com/gohugoio/hugo/identity" + "github.com/gohugoio/hugo/resources/resource" +) + +const minMaxSize = 10 + +// New creates a new cache. +func New(opts Options) *Cache { + if opts.CheckInterval == 0 { + opts.CheckInterval = time.Second * 2 + } + + if opts.MaxSize == 0 { + opts.MaxSize = 100000 + } + if opts.Log == nil { + panic("nil Log") + } + + if opts.MinMaxSize == 0 { + opts.MinMaxSize = 30 + } + + stats := &stats{ + opts: opts, + adjustmentFactor: 1.0, + currentMaxSize: opts.MaxSize, + availableMemory: config.GetMemoryLimit(), + } + + infol := opts.Log.InfoCommand("dynacache") + + c := &Cache{ + partitions: make(map[string]PartitionManager), + opts: opts, + stats: stats, + infol: infol, + } + + c.stop = c.start() + + return c +} + +// Options for the cache. +type Options struct { + Log loggers.Logger + CheckInterval time.Duration + MaxSize int + MinMaxSize int + Running bool +} + +// Options for a partition. +type OptionsPartition struct { + // When to clear the this partition. + ClearWhen ClearWhen + + // Weight is a number between 1 and 100 that indicates how, in general, how big this partition may get. + Weight int +} + +func (o OptionsPartition) WeightFraction() float64 { + return float64(o.Weight) / 100 +} + +func (o OptionsPartition) CalculateMaxSize(maxSizePerPartition int) int { + return int(math.Floor(float64(maxSizePerPartition) * o.WeightFraction())) +} + +// A dynamic partitioned cache. 
+type Cache struct { + mu sync.RWMutex + + partitions map[string]PartitionManager + opts Options + infol logg.LevelLogger + + stats *stats + stopOnce sync.Once + stop func() +} + +// ClearMatching clears all partition for which the predicate returns true. +func (c *Cache) ClearMatching(predicate func(k, v any) bool) { + g := rungroup.Run[PartitionManager](context.Background(), rungroup.Config[PartitionManager]{ + NumWorkers: len(c.partitions), + Handle: func(ctx context.Context, partition PartitionManager) error { + partition.clearMatching(predicate) + return nil + }, + }) + + for _, p := range c.partitions { + g.Enqueue(p) + } + + g.Wait() +} + +// ClearOnRebuild prepares the cache for a new rebuild taking the given changeset into account. +func (c *Cache) ClearOnRebuild(changeset ...identity.Identity) { + g := rungroup.Run[PartitionManager](context.Background(), rungroup.Config[PartitionManager]{ + NumWorkers: len(c.partitions), + Handle: func(ctx context.Context, partition PartitionManager) error { + partition.clearOnRebuild(changeset...) + return nil + }, + }) + + for _, p := range c.partitions { + g.Enqueue(p) + } + + g.Wait() + + // Clear any entries marked as stale above. + g = rungroup.Run[PartitionManager](context.Background(), rungroup.Config[PartitionManager]{ + NumWorkers: len(c.partitions), + Handle: func(ctx context.Context, partition PartitionManager) error { + partition.clearStale() + return nil + }, + }) + + for _, p := range c.partitions { + g.Enqueue(p) + } + + g.Wait() +} + +type keysProvider interface { + Keys() []string +} + +// Keys returns a list of keys in all partitions. 
+func (c *Cache) Keys(predicate func(s string) bool) []string { + if predicate == nil { + predicate = func(s string) bool { return true } + } + var keys []string + for pn, g := range c.partitions { + pkeys := g.(keysProvider).Keys() + for _, k := range pkeys { + p := path.Join(pn, k) + if predicate(p) { + keys = append(keys, p) + } + } + + } + return keys +} + +func calculateMaxSizePerPartition(maxItemsTotal, totalWeightQuantity, numPartitions int) int { + if numPartitions == 0 { + panic("numPartitions must be > 0") + } + if totalWeightQuantity == 0 { + panic("totalWeightQuantity must be > 0") + } + + avgWeight := float64(totalWeightQuantity) / float64(numPartitions) + return int(math.Floor(float64(maxItemsTotal) / float64(numPartitions) * (100.0 / avgWeight))) +} + +// Stop stops the cache. +func (c *Cache) Stop() { + c.stopOnce.Do(func() { + c.stop() + }) +} + +func (c *Cache) adjustCurrentMaxSize() { + c.mu.RLock() + defer c.mu.RUnlock() + + if len(c.partitions) == 0 { + return + } + var m runtime.MemStats + runtime.ReadMemStats(&m) + s := c.stats + s.memstatsCurrent = m + // fmt.Printf("\n\nAvailable = %v\nAlloc = %v\nTotalAlloc = %v\nSys = %v\nNumGC = %v\nMaxSize = %d\nAdjustmentFactor=%f\n\n", helpers.FormatByteCount(s.availableMemory), helpers.FormatByteCount(m.Alloc), helpers.FormatByteCount(m.TotalAlloc), helpers.FormatByteCount(m.Sys), m.NumGC, c.stats.currentMaxSize, s.adjustmentFactor) + + if s.availableMemory >= s.memstatsCurrent.Alloc { + if s.adjustmentFactor <= 1.0 { + s.adjustmentFactor += 0.2 + } + } else { + // We're low on memory. 
+ s.adjustmentFactor -= 0.4 + } + + if s.adjustmentFactor <= 0 { + s.adjustmentFactor = 0.05 + } + + if !s.adjustCurrentMaxSize() { + return + } + + totalWeight := 0 + for _, pm := range c.partitions { + totalWeight += pm.getOptions().Weight + } + + maxSizePerPartition := calculateMaxSizePerPartition(c.stats.currentMaxSize, totalWeight, len(c.partitions)) + + evicted := 0 + for _, p := range c.partitions { + evicted += p.adjustMaxSize(p.getOptions().CalculateMaxSize(maxSizePerPartition)) + } + + if evicted > 0 { + c.infol. + WithFields( + logg.Fields{ + {Name: "evicted", Value: evicted}, + {Name: "numGC", Value: m.NumGC}, + {Name: "limit", Value: helpers.FormatByteCount(c.stats.availableMemory)}, + {Name: "alloc", Value: helpers.FormatByteCount(m.Alloc)}, + {Name: "totalAlloc", Value: helpers.FormatByteCount(m.TotalAlloc)}, + }, + ).Logf("adjusted partitions' max size") + } +} + +func (c *Cache) start() func() { + ticker := time.NewTicker(c.opts.CheckInterval) + quit := make(chan struct{}) + + go func() { + for { + select { + case <-ticker.C: + c.adjustCurrentMaxSize() + case <-quit: + ticker.Stop() + return + } + } + }() + + return func() { + close(quit) + } +} + +var partitionNameRe = regexp.MustCompile(`^\/[a-zA-Z0-9]{4}(\/[a-zA-Z0-9]+)?(\/[a-zA-Z0-9]+)?`) + +// GetOrCreatePartition gets or creates a partition with the given name. +func GetOrCreatePartition[K comparable, V any](c *Cache, name string, opts OptionsPartition) *Partition[K, V] { + if c == nil { + panic("nil Cache") + } + if opts.Weight < 1 || opts.Weight > 100 { + panic("invalid Weight, must be between 1 and 100") + } + + if partitionNameRe.FindString(name) != name { + panic(fmt.Sprintf("invalid partition name %q", name)) + } + + c.mu.RLock() + p, found := c.partitions[name] + c.mu.RUnlock() + if found { + return p.(*Partition[K, V]) + } + + c.mu.Lock() + defer c.mu.Unlock() + + // Double check. 
+ p, found = c.partitions[name] + if found { + return p.(*Partition[K, V]) + } + + // At this point, we don't know the the number of partitions or their configuration, but + // this will be re-adjusted later. + const numberOfPartitionsEstimate = 10 + maxSize := opts.CalculateMaxSize(c.opts.MaxSize / numberOfPartitionsEstimate) + + // Create a new partition and cache it. + partition := &Partition[K, V]{ + c: lazycache.New(lazycache.Options[K, V]{MaxEntries: maxSize}), + maxSize: maxSize, + trace: c.opts.Log.Logger().WithLevel(logg.LevelTrace).WithField("partition", name), + opts: opts, + } + c.partitions[name] = partition + + return partition +} + +// Partition is a partition in the cache. +type Partition[K comparable, V any] struct { + c *lazycache.Cache[K, V] + + zero V + + trace logg.LevelLogger + opts OptionsPartition + + maxSize int +} + +// GetOrCreate gets or creates a value for the given key. +func (p *Partition[K, V]) GetOrCreate(key K, create func(key K) (V, error)) (V, error) { + v, _, err := p.c.GetOrCreate(key, create) + return v, err +} + +// GetOrCreateWitTimeout gets or creates a value for the given key and times out if the create function +// takes too long. 
+func (p *Partition[K, V]) GetOrCreateWitTimeout(key K, duration time.Duration, create func(key K) (V, error)) (V, error) { + resultch := make(chan V, 1) + errch := make(chan error, 1) + + go func() { + v, _, err := p.c.GetOrCreate(key, create) + if err != nil { + errch <- err + return + } + resultch <- v + }() + + select { + case v := <-resultch: + return v, nil + case err := <-errch: + return p.zero, err + case <-time.After(duration): + return p.zero, &herrors.TimeoutError{ + Duration: duration, + } + } +} + +func (p *Partition[K, V]) clearMatching(predicate func(k, v any) bool) { + p.c.DeleteFunc(func(key K, v V) bool { + if predicate(key, v) { + p.trace.Log( + logg.StringFunc( + func() string { + return fmt.Sprintf("clearing cache key %v", key) + }, + ), + ) + return true + } + return false + }) +} + +func (p *Partition[K, V]) clearOnRebuild(changeset ...identity.Identity) { + opts := p.getOptions() + if opts.ClearWhen == ClearNever { + return + } + + if opts.ClearWhen == ClearOnRebuild { + // Clear all. + p.Clear() + return + } + + depsFinder := identity.NewFinder(identity.FinderConfig{}) + + shouldDelete := func(key K, v V) bool { + // We always clear elements marked as stale. + if resource.IsStaleAny(v) { + return true + } + + // Now check if this entry has changed based on the changeset + // based on filesystem events. + if len(changeset) == 0 { + // Nothing changed. + return false + } + + var probablyDependent bool + identity.WalkIdentitiesShallow(v, func(level int, id2 identity.Identity) bool { + for _, id := range changeset { + if r := depsFinder.Contains(id, id2, -1); r > 0 { + // It's probably dependent, evict from cache. + probablyDependent = true + return true + } + } + return false + }) + + return probablyDependent + } + + // First pass. + // Second pass needs to be done in a separate loop to catch any + // elements marked as stale in the other partitions. 
+ p.c.DeleteFunc(func(key K, v V) bool { + if shouldDelete(key, v) { + p.trace.Log( + logg.StringFunc( + func() string { + return fmt.Sprintf("first pass: clearing cache key %v", key) + }, + ), + ) + resource.MarkStale(v) + return true + } + return false + }) +} + +func (p *Partition[K, V]) Keys() []K { + var keys []K + p.c.DeleteFunc(func(key K, v V) bool { + keys = append(keys, key) + return false + }) + return keys +} + +func (p *Partition[K, V]) clearStale() { + p.c.DeleteFunc(func(key K, v V) bool { + isStale := resource.IsStaleAny(v) + if isStale { + p.trace.Log( + logg.StringFunc( + func() string { + return fmt.Sprintf("second pass: clearing cache key %v", key) + }, + ), + ) + } + + return isStale + }) +} + +// adjustMaxSize adjusts the max size of the and returns the number of items evicted. +func (p *Partition[K, V]) adjustMaxSize(newMaxSize int) int { + if newMaxSize < minMaxSize { + newMaxSize = minMaxSize + } + p.maxSize = newMaxSize + // fmt.Println("Adjusting max size of partition from", oldMaxSize, "to", newMaxSize) + return p.c.Resize(newMaxSize) +} + +func (p *Partition[K, V]) getMaxSize() int { + return p.maxSize +} + +func (p *Partition[K, V]) getOptions() OptionsPartition { + return p.opts +} + +func (p *Partition[K, V]) Clear() { + p.c.DeleteFunc(func(key K, v V) bool { + return true + }) +} + +func (p *Partition[K, V]) Get(ctx context.Context, key K) (V, bool) { + return p.c.Get(key) +} + +type PartitionManager interface { + adjustMaxSize(addend int) int + getMaxSize() int + getOptions() OptionsPartition + clearOnRebuild(changeset ...identity.Identity) + clearMatching(predicate func(k, v any) bool) + clearStale() +} + +const ( + ClearOnRebuild ClearWhen = iota + 1 + ClearOnChange + ClearNever +) + +type ClearWhen int + +type stats struct { + opts Options + memstatsCurrent runtime.MemStats + currentMaxSize int + availableMemory uint64 + + adjustmentFactor float64 +} + +func (s *stats) adjustCurrentMaxSize() bool { + newCurrentMaxSize := 
int(math.Floor(float64(s.opts.MaxSize) * s.adjustmentFactor)) + + if newCurrentMaxSize < s.opts.MaxSize { + newCurrentMaxSize = int(s.opts.MinMaxSize) + } + changed := newCurrentMaxSize != s.currentMaxSize + s.currentMaxSize = newCurrentMaxSize + return changed +} + +// CleanKey turns s into a format suitable for a cache key for this package. +// The key will be a Unix-styled path with a leading slash but no trailing slash. +func CleanKey(s string) string { + return path.Clean(paths.ToSlashPreserveLeading(s)) +} diff --git a/cache/dynacache/dynacache_test.go b/cache/dynacache/dynacache_test.go new file mode 100644 index 000000000..53de2385e --- /dev/null +++ b/cache/dynacache/dynacache_test.go @@ -0,0 +1,175 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package dynacache + +import ( + "path/filepath" + "testing" + + qt "github.com/frankban/quicktest" + "github.com/gohugoio/hugo/common/loggers" + "github.com/gohugoio/hugo/identity" + "github.com/gohugoio/hugo/resources/resource" +) + +var ( + _ resource.StaleInfo = (*testItem)(nil) + _ identity.Identity = (*testItem)(nil) +) + +type testItem struct { + name string + isStale bool +} + +func (t testItem) IsStale() bool { + return t.isStale +} + +func (t testItem) IdentifierBase() string { + return t.name +} + +func TestCache(t *testing.T) { + t.Parallel() + c := qt.New(t) + + cache := New(Options{ + Log: loggers.NewDefault(), + }) + + c.Cleanup(func() { + cache.Stop() + }) + + opts := OptionsPartition{Weight: 30} + + c.Assert(cache, qt.Not(qt.IsNil)) + + p1 := GetOrCreatePartition[string, testItem](cache, "/aaaa/bbbb", opts) + c.Assert(p1, qt.Not(qt.IsNil)) + + p2 := GetOrCreatePartition[string, testItem](cache, "/aaaa/bbbb", opts) + + c.Assert(func() { GetOrCreatePartition[string, testItem](cache, "foo bar", opts) }, qt.PanicMatches, ".*invalid partition name.*") + c.Assert(func() { GetOrCreatePartition[string, testItem](cache, "/aaaa/cccc", OptionsPartition{Weight: 1234}) }, qt.PanicMatches, ".*invalid Weight.*") + + c.Assert(p2, qt.Equals, p1) + + p3 := GetOrCreatePartition[string, testItem](cache, "/aaaa/cccc", opts) + c.Assert(p3, qt.Not(qt.IsNil)) + c.Assert(p3, qt.Not(qt.Equals), p1) + + c.Assert(func() { New(Options{}) }, qt.PanicMatches, ".*nil Log.*") +} + +func TestCalculateMaxSizePerPartition(t *testing.T) { + t.Parallel() + c := qt.New(t) + + c.Assert(calculateMaxSizePerPartition(1000, 500, 5), qt.Equals, 200) + c.Assert(calculateMaxSizePerPartition(1000, 250, 5), qt.Equals, 400) + c.Assert(func() { calculateMaxSizePerPartition(1000, 250, 0) }, qt.PanicMatches, ".*must be > 0.*") + c.Assert(func() { calculateMaxSizePerPartition(1000, 0, 1) }, qt.PanicMatches, ".*must be > 0.*") +} + +func TestCleanKey(t *testing.T) { + c := qt.New(t) + + 
c.Assert(CleanKey("a/b/c"), qt.Equals, "/a/b/c") + c.Assert(CleanKey("/a/b/c"), qt.Equals, "/a/b/c") + c.Assert(CleanKey("a/b/c/"), qt.Equals, "/a/b/c") + c.Assert(CleanKey(filepath.FromSlash("/a/b/c/")), qt.Equals, "/a/b/c") +} + +func newTestCache(t *testing.T) *Cache { + cache := New( + Options{ + Log: loggers.NewDefault(), + }, + ) + + p1 := GetOrCreatePartition[string, testItem](cache, "/aaaa/bbbb", OptionsPartition{Weight: 30, ClearWhen: ClearOnRebuild}) + p2 := GetOrCreatePartition[string, testItem](cache, "/aaaa/cccc", OptionsPartition{Weight: 30, ClearWhen: ClearOnChange}) + + p1.GetOrCreate("clearOnRebuild", func(string) (testItem, error) { + return testItem{}, nil + }) + + p2.GetOrCreate("clearBecauseStale", func(string) (testItem, error) { + return testItem{ + isStale: true, + }, nil + }) + + p2.GetOrCreate("clearBecauseIdentityChanged", func(string) (testItem, error) { + return testItem{ + name: "changed", + }, nil + }) + + p2.GetOrCreate("clearNever", func(string) (testItem, error) { + return testItem{ + isStale: false, + }, nil + }) + + t.Cleanup(func() { + cache.Stop() + }) + + return cache +} + +func TestClear(t *testing.T) { + t.Parallel() + c := qt.New(t) + + predicateAll := func(string) bool { + return true + } + + cache := newTestCache(t) + + c.Assert(cache.Keys(predicateAll), qt.HasLen, 4) + + cache.ClearOnRebuild() + + // Stale items are always cleared. 
+ c.Assert(cache.Keys(predicateAll), qt.HasLen, 2) + + cache = newTestCache(t) + cache.ClearOnRebuild(identity.StringIdentity("changed")) + + c.Assert(cache.Keys(nil), qt.HasLen, 1) + + cache = newTestCache(t) + + cache.ClearMatching(func(k, v any) bool { + return k.(string) == "clearOnRebuild" + }) + + c.Assert(cache.Keys(predicateAll), qt.HasLen, 3) + + cache.adjustCurrentMaxSize() +} + +func TestAdjustCurrentMaxSize(t *testing.T) { + t.Parallel() + c := qt.New(t) + cache := newTestCache(t) + alloc := cache.stats.memstatsCurrent.Alloc + cache.adjustCurrentMaxSize() + c.Assert(cache.stats.memstatsCurrent.Alloc, qt.Not(qt.Equals), alloc) +} diff --git a/cache/filecache/filecache.go b/cache/filecache/filecache.go index 414478ee2..093d2941c 100644 --- a/cache/filecache/filecache.go +++ b/cache/filecache/filecache.go @@ -24,6 +24,7 @@ import ( "time" "github.com/gohugoio/hugo/common/hugio" + "github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/helpers" @@ -109,7 +110,7 @@ func (l *lockedFile) Close() error { func (c *Cache) init() error { c.initOnce.Do(func() { // Create the base dir if it does not exist. - if err := c.Fs.MkdirAll("", 0777); err != nil && !os.IsExist(err) { + if err := c.Fs.MkdirAll("", 0o777); err != nil && !os.IsExist(err) { c.initErr = err } }) @@ -146,7 +147,8 @@ func (c *Cache) WriteCloser(id string) (ItemInfo, io.WriteCloser, error) { // it when done. 
func (c *Cache) ReadOrCreate(id string, read func(info ItemInfo, r io.ReadSeeker) error, - create func(info ItemInfo, w io.WriteCloser) error) (info ItemInfo, err error) { + create func(info ItemInfo, w io.WriteCloser) error, +) (info ItemInfo, err error) { if err := c.init(); err != nil { return ItemInfo{}, err } @@ -380,7 +382,7 @@ func NewCaches(p *helpers.PathSpec) (Caches, error) { baseDir := v.DirCompiled - bfs := afero.NewBasePathFs(cfs, baseDir) + bfs := hugofs.NewBasePathFs(cfs, baseDir) var pruneAllRootDir string if k == CacheKeyModules { diff --git a/cache/filecache/filecache_test.go b/cache/filecache/filecache_test.go index 61f9eda64..59fb09276 100644 --- a/cache/filecache/filecache_test.go +++ b/cache/filecache/filecache_test.go @@ -1,4 +1,4 @@ -// Copyright 2018 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -17,7 +17,6 @@ import ( "errors" "fmt" "io" - "path/filepath" "strings" "sync" "testing" @@ -86,17 +85,8 @@ dir = ":cacheDir/c" cache := caches.Get("GetJSON") c.Assert(cache, qt.Not(qt.IsNil)) - bfs, ok := cache.Fs.(*afero.BasePathFs) - c.Assert(ok, qt.Equals, true) - filename, err := bfs.RealPath("key") - c.Assert(err, qt.IsNil) - cache = caches.Get("Images") c.Assert(cache, qt.Not(qt.IsNil)) - bfs, ok = cache.Fs.(*afero.BasePathFs) - c.Assert(ok, qt.Equals, true) - filename, _ = bfs.RealPath("key") - c.Assert(filename, qt.Equals, filepath.FromSlash("_gen/images/key")) rf := func(s string) func() (io.ReadCloser, error) { return func() (io.ReadCloser, error) { diff --git a/cache/filecache/integration_test.go b/cache/filecache/integration_test.go index a8a45988e..1e920c29f 100644 --- a/cache/filecache/integration_test.go +++ b/cache/filecache/integration_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. 
+// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -15,7 +15,6 @@ package filecache_test import ( "path/filepath" - "testing" "time" @@ -47,7 +46,6 @@ title: "Home" _, err := b.H.BaseFs.ResourcesCache.Stat(filepath.Join("_gen", "images")) b.Assert(err, qt.IsNil) - } func TestPruneImages(t *testing.T) { @@ -55,6 +53,7 @@ func TestPruneImages(t *testing.T) { // TODO(bep) t.Skip("skip flaky test on CI server") } + t.Skip("skip flaky test") files := ` -- hugo.toml -- baseURL = "https://example.com" @@ -92,7 +91,7 @@ iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNkYPhfDwAChwGA60e6kgAA // TODO(bep) we need a way to test full rebuilds. // For now, just sleep a little so the cache elements expires. - time.Sleep(300 * time.Millisecond) + time.Sleep(500 * time.Millisecond) b.RenameFile("assets/a/pixel.png", "assets/b/pixel2.png").Build() @@ -104,5 +103,4 @@ iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNkYPhfDwAChwGA60e6kgAA b.Assert(err, qt.Not(qt.IsNil)) _, err = b.H.BaseFs.ResourcesCache.Stat(imagesCacheDir) b.Assert(err, qt.IsNil) - } diff --git a/cache/namedmemcache/named_cache.go b/cache/namedmemcache/named_cache.go deleted file mode 100644 index 7fb4fe8ed..000000000 --- a/cache/namedmemcache/named_cache.go +++ /dev/null @@ -1,78 +0,0 @@ -// Copyright 2018 The Hugo Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. - -// Package namedmemcache provides a memory cache with a named lock. This is suitable -// for situations where creating the cached resource can be time consuming or otherwise -// resource hungry, or in situations where a "once only per key" is a requirement. -package namedmemcache - -import ( - "sync" - - "github.com/BurntSushi/locker" -) - -// Cache holds the cached values. -type Cache struct { - nlocker *locker.Locker - cache map[string]cacheEntry - mu sync.RWMutex -} - -type cacheEntry struct { - value any - err error -} - -// New creates a new cache. -func New() *Cache { - return &Cache{ - nlocker: locker.NewLocker(), - cache: make(map[string]cacheEntry), - } -} - -// Clear clears the cache state. -func (c *Cache) Clear() { - c.mu.Lock() - defer c.mu.Unlock() - - c.cache = make(map[string]cacheEntry) - c.nlocker = locker.NewLocker() -} - -// GetOrCreate tries to get the value with the given cache key, if not found -// create will be called and cached. -// This method is thread safe. It also guarantees that the create func for a given -// key is invoked only once for this cache. -func (c *Cache) GetOrCreate(key string, create func() (any, error)) (any, error) { - c.mu.RLock() - entry, found := c.cache[key] - c.mu.RUnlock() - - if found { - return entry.value, entry.err - } - - c.nlocker.Lock(key) - defer c.nlocker.Unlock(key) - - // Create it. - value, err := create() - - c.mu.Lock() - c.cache[key] = cacheEntry{value: value, err: err} - c.mu.Unlock() - - return value, err -} diff --git a/cache/namedmemcache/named_cache_test.go b/cache/namedmemcache/named_cache_test.go deleted file mode 100644 index 2db923d76..000000000 --- a/cache/namedmemcache/named_cache_test.go +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright 2018 The Hugo Authors. All rights reserved. 
-// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package namedmemcache - -import ( - "fmt" - "sync" - "testing" - - qt "github.com/frankban/quicktest" -) - -func TestNamedCache(t *testing.T) { - t.Parallel() - c := qt.New(t) - - cache := New() - - counter := 0 - create := func() (any, error) { - counter++ - return counter, nil - } - - for i := 0; i < 5; i++ { - v1, err := cache.GetOrCreate("a1", create) - c.Assert(err, qt.IsNil) - c.Assert(v1, qt.Equals, 1) - v2, err := cache.GetOrCreate("a2", create) - c.Assert(err, qt.IsNil) - c.Assert(v2, qt.Equals, 2) - } - - cache.Clear() - - v3, err := cache.GetOrCreate("a2", create) - c.Assert(err, qt.IsNil) - c.Assert(v3, qt.Equals, 3) -} - -func TestNamedCacheConcurrent(t *testing.T) { - t.Parallel() - - c := qt.New(t) - - var wg sync.WaitGroup - - cache := New() - - create := func(i int) func() (any, error) { - return func() (any, error) { - return i, nil - } - } - - for i := 0; i < 10; i++ { - wg.Add(1) - go func() { - defer wg.Done() - for j := 0; j < 100; j++ { - id := fmt.Sprintf("id%d", j) - v, err := cache.GetOrCreate(id, create(j)) - c.Assert(err, qt.IsNil) - c.Assert(v, qt.Equals, j) - } - }() - } - wg.Wait() -} diff --git a/commands/commandeer.go b/commands/commandeer.go index 5d414b04a..1aac08c42 100644 --- a/commands/commandeer.go +++ b/commands/commandeer.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. 
// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -259,7 +259,7 @@ func (r *rootCommand) ConfigFromProvider(key int32, cfg config.Provider) (*commo publishDirStatic := cfg.GetString("publishDirStatic") workingDir := cfg.GetString("workingDir") absPublishDirStatic := paths.AbsPathify(workingDir, publishDirStatic) - staticFs := afero.NewBasePathFs(afero.NewOsFs(), absPublishDirStatic) + staticFs := hugofs.NewBasePathFs(afero.NewOsFs(), absPublishDirStatic) // Serve from both the static and dynamic fs, // the first will take priority. @@ -405,8 +405,14 @@ func (r *rootCommand) PreRun(cd, runner *simplecobra.Commandeer) error { return err } - r.commonConfigs = lazycache.New[int32, *commonConfig](lazycache.Options{MaxEntries: 5}) - r.hugoSites = lazycache.New[int32, *hugolib.HugoSites](lazycache.Options{MaxEntries: 5}) + r.commonConfigs = lazycache.New(lazycache.Options[int32, *commonConfig]{MaxEntries: 5}) + // We don't want to keep stale HugoSites in memory longer than needed. + r.hugoSites = lazycache.New(lazycache.Options[int32, *hugolib.HugoSites]{ + MaxEntries: 1, + OnEvict: func(key int32, value *hugolib.HugoSites) { + value.Close() + }, + }) return nil } diff --git a/commands/commands.go b/commands/commands.go index 9d707b841..e21d743ab 100644 --- a/commands/commands.go +++ b/commands/commands.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -37,5 +37,4 @@ func newExec() (*simplecobra.Exec, error) { } return simplecobra.New(rootCmd) - } diff --git a/commands/config.go b/commands/config.go index 63ee4f7c8..dfe54cba2 100644 --- a/commands/config.go +++ b/commands/config.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. 
+// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -37,7 +37,6 @@ func newConfigCommand() *configCommand { &configMountsCommand{}, }, } - } type configCommand struct { @@ -190,7 +189,6 @@ func (m *configModMounts) MarshalJSON() ([]byte, error) { Dir: m.m.Dir(), Mounts: mounts, }) - } type configMountsCommand struct { diff --git a/commands/convert.go b/commands/convert.go index 702c9227f..c81ec792a 100644 --- a/commands/convert.go +++ b/commands/convert.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -134,7 +134,7 @@ func (c *convertCommand) convertAndSavePage(p page.Page, site *hugolib.Site, tar } } - if p.File().IsZero() { + if p.File() == nil { // No content file. return nil } @@ -209,7 +209,7 @@ func (c *convertCommand) convertContents(format metadecoders.Format) error { var pagesBackedByFile page.Pages for _, p := range site.AllPages() { - if p.File().IsZero() { + if p.File() == nil { continue } pagesBackedByFile = append(pagesBackedByFile, p) diff --git a/commands/deploy.go b/commands/deploy.go index ce1af9546..ca6e4d60e 100644 --- a/commands/deploy.go +++ b/commands/deploy.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -14,7 +14,7 @@ //go:build !nodeploy // +build !nodeploy -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. 
// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -38,7 +38,6 @@ import ( ) func newDeployCommand() simplecobra.Commander { - return &simpleCommand{ name: "deploy", short: "Deploy your site to a Cloud provider.", diff --git a/commands/deploy_off.go b/commands/deploy_off.go index 3150dba16..8a481bd96 100644 --- a/commands/deploy_off.go +++ b/commands/deploy_off.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -14,7 +14,7 @@ //go:build nodeploy // +build nodeploy -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/commands/env.go b/commands/env.go index 0652deb87..8e4f03c55 100644 --- a/commands/env.go +++ b/commands/env.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/commands/gen.go b/commands/gen.go index 534eb0df5..11c32d778 100644 --- a/commands/gen.go +++ b/commands/gen.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -101,7 +101,7 @@ See https://xyproto.github.io/splash/docs/all.html for a preview of the availabl } if found, _ := helpers.Exists(genmandir, hugofs.Os); !found { r.Println("Directory", genmandir, "does not exist, creating...") - if err := hugofs.Os.MkdirAll(genmandir, 0777); err != nil { + if err := hugofs.Os.MkdirAll(genmandir, 0o777); err != nil { return err } } @@ -150,7 +150,7 @@ url: %s } if found, _ := helpers.Exists(gendocdir, hugofs.Os); !found { r.Println("Directory", gendocdir, "does not exist, creating...") - if err := hugofs.Os.MkdirAll(gendocdir, 0777); err != nil { + if err := hugofs.Os.MkdirAll(gendocdir, 0o777); err != nil { return err } } @@ -177,7 +177,6 @@ url: %s cmd.PersistentFlags().SetAnnotation("dir", cobra.BashCompSubdirsInDir, []string{}) }, } - } var docsHelperTarget string @@ -241,7 +240,6 @@ url: %s newDocsHelper(), }, } - } type genCommand struct { diff --git a/commands/helpers.go b/commands/helpers.go index 3b0c50159..a13bdebc2 100644 --- a/commands/helpers.go +++ b/commands/helpers.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -110,12 +110,11 @@ func flagsToCfgWithAdditionalConfigBase(cd *simplecobra.Commandeer, cfg config.P }) return cfg - } func mkdir(x ...string) { p := filepath.Join(x...) - err := os.MkdirAll(p, 0777) // before umask + err := os.MkdirAll(p, 0o777) // before umask if err != nil { log.Fatal(err) } diff --git a/commands/hugo_windows.go b/commands/hugo_windows.go index 169c6288f..c354e889d 100644 --- a/commands/hugo_windows.go +++ b/commands/hugo_windows.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. 
// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/commands/hugobuilder.go b/commands/hugobuilder.go index d2b43cc77..41f42ae6d 100644 --- a/commands/hugobuilder.go +++ b/commands/hugobuilder.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -24,6 +24,7 @@ import ( "runtime/trace" "strings" "sync" + "sync/atomic" "time" "github.com/bep/logg" @@ -34,6 +35,7 @@ import ( "github.com/gohugoio/hugo/common/hugo" "github.com/gohugoio/hugo/common/loggers" "github.com/gohugoio/hugo/common/maps" + "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/common/terminal" "github.com/gohugoio/hugo/common/types" "github.com/gohugoio/hugo/config" @@ -83,7 +85,6 @@ func (c *hugoBuilder) withConf(fn func(conf *commonConfig)) { c.confmu.Lock() defer c.confmu.Unlock() fn(c.conf) - } type hugoBuilderErrState struct { @@ -135,46 +136,12 @@ func (c *hugoBuilder) errCount() int { // getDirList provides NewWatcher() with a list of directories to watch for changes. 
func (c *hugoBuilder) getDirList() ([]string, error) { - var filenames []string - - walkFn := func(path string, fi hugofs.FileMetaInfo, err error) error { - if err != nil { - c.r.logger.Errorln("walker: ", err) - return nil - } - - if fi.IsDir() { - if fi.Name() == ".git" || - fi.Name() == "node_modules" || fi.Name() == "bower_components" { - return filepath.SkipDir - } - - filenames = append(filenames, fi.Meta().Filename) - } - - return nil - } - h, err := c.hugo() if err != nil { return nil, err } - watchFiles := h.PathSpec.BaseFs.WatchDirs() - for _, fi := range watchFiles { - if !fi.IsDir() { - filenames = append(filenames, fi.Meta().Filename) - continue - } - - w := hugofs.NewWalkway(hugofs.WalkwayConfig{Logger: c.r.logger, Info: fi, WalkFn: walkFn}) - if err := w.Walk(); err != nil { - c.r.logger.Errorln("walker: ", err) - } - } - filenames = helpers.UniqueStringsSorted(filenames) - - return filenames, nil + return helpers.UniqueStringsSorted(h.PathSpec.BaseFs.WatchFilenames()), nil } func (c *hugoBuilder) initCPUProfile() (func(), error) { @@ -441,7 +408,7 @@ func (c *hugoBuilder) copyStatic() (map[string]uint64, error) { } func (c *hugoBuilder) copyStaticTo(sourceFs *filesystems.SourceFilesystem) (uint64, error) { - infol := c.r.logger.InfoCommand("copy static") + infol := c.r.logger.InfoCommand("static") publishDir := helpers.FilePathSeparator if sourceFs.PublishFolder != "" { @@ -467,11 +434,11 @@ func (c *hugoBuilder) copyStaticTo(sourceFs *filesystems.SourceFilesystem) (uint if syncer.Delete { infol.Logf("removing all files from destination that don't exist in static dirs") - syncer.DeleteFilter = func(f os.FileInfo) bool { + syncer.DeleteFilter = func(f fsync.FileInfo) bool { return f.IsDir() && strings.HasPrefix(f.Name(), ".") } } - infol.Logf("syncing static files to %s", publishDir) + start := time.Now() // because we are using a baseFs (to get the union right). 
// set sync src to root @@ -479,9 +446,10 @@ func (c *hugoBuilder) copyStaticTo(sourceFs *filesystems.SourceFilesystem) (uint if err != nil { return 0, err } + loggers.TimeTrackf(infol, start, nil, "syncing static files to %s", publishDir) - // Sync runs Stat 3 times for every source file (which sounds much) - numFiles := fs.statCounter / 3 + // Sync runs Stat 2 times for every source file. + numFiles := fs.statCounter / 2 return numFiles, err } @@ -652,13 +620,31 @@ func (c *hugoBuilder) handleBuildErr(err error, msg string) { func (c *hugoBuilder) handleEvents(watcher *watcher.Batcher, staticSyncer *staticSyncer, evs []fsnotify.Event, - configSet map[string]bool) { + configSet map[string]bool, +) { defer func() { c.errState.setWasErr(false) }() var isHandled bool + // Filter out ghost events (from deleted, renamed directories). + // This seems to be a bug in fsnotify, or possibly MacOS. + var n int + for _, ev := range evs { + keep := true + if ev.Has(fsnotify.Create) || ev.Has(fsnotify.Write) { + if _, err := os.Stat(ev.Name); err != nil { + keep = false + } + } + if keep { + evs[n] = ev + n++ + } + } + evs = evs[:n] + for _, ev := range evs { isConfig := configSet[ev.Name] configChangeType := configChangeConfig @@ -726,48 +712,25 @@ func (c *hugoBuilder) handleEvents(watcher *watcher.Batcher, return } - c.r.logger.Infoln("Received System Events:", evs) + c.r.logger.Debugln("Received System Events:", evs) staticEvents := []fsnotify.Event{} dynamicEvents := []fsnotify.Event{} - filtered := []fsnotify.Event{} h, err := c.hugo() if err != nil { c.r.logger.Errorln("Error getting the Hugo object:", err) return } + n = 0 for _, ev := range evs { if h.ShouldSkipFileChangeEvent(ev) { continue } - // Check the most specific first, i.e. files. 
- contentMapped := h.ContentChanges.GetSymbolicLinkMappings(ev.Name) - if len(contentMapped) > 0 { - for _, mapped := range contentMapped { - filtered = append(filtered, fsnotify.Event{Name: mapped, Op: ev.Op}) - } - continue - } - - // Check for any symbolic directory mapping. - - dir, name := filepath.Split(ev.Name) - - contentMapped = h.ContentChanges.GetSymbolicLinkMappings(dir) - - if len(contentMapped) == 0 { - filtered = append(filtered, ev) - continue - } - - for _, mapped := range contentMapped { - mappedFilename := filepath.Join(mapped, name) - filtered = append(filtered, fsnotify.Event{Name: mappedFilename, Op: ev.Op}) - } + evs[n] = ev + n++ } - - evs = filtered + evs = evs[:n] for _, ev := range evs { ext := filepath.Ext(ev.Name) @@ -788,6 +751,7 @@ func (c *hugoBuilder) handleEvents(watcher *watcher.Batcher, if istemp { continue } + if h.Deps.SourceSpec.IgnoreFile(ev.Name) { continue } @@ -811,7 +775,7 @@ func (c *hugoBuilder) handleEvents(watcher *watcher.Batcher, continue } - walkAdder := func(path string, f hugofs.FileMetaInfo, err error) error { + walkAdder := func(path string, f hugofs.FileMetaInfo) error { if f.IsDir() { c.r.logger.Println("adding created directory to watchlist", path) if err := watcher.Add(path); err != nil { @@ -827,11 +791,10 @@ func (c *hugoBuilder) handleEvents(watcher *watcher.Batcher, } // recursively add new directories to watch list - // When mkdir -p is used, only the top directory triggers an event (at least on OSX) - if ev.Op&fsnotify.Create == fsnotify.Create { + if ev.Has(fsnotify.Create) || ev.Has(fsnotify.Rename) { c.withConf(func(conf *commonConfig) { if s, err := conf.fs.Source.Stat(ev.Name); err == nil && s.Mode().IsDir() { - _ = helpers.SymbolicWalk(conf.fs.Source, ev.Name, walkAdder) + _ = helpers.Walk(conf.fs.Source, ev.Name, walkAdder) } }) } @@ -872,7 +835,7 @@ func (c *hugoBuilder) handleEvents(watcher *watcher.Batcher, return } path := h.BaseFs.SourceFilesystems.MakeStaticPathRelative(ev.Name) - path = 
h.RelURL(helpers.ToSlashTrimLeading(path), false) + path = h.RelURL(paths.ToSlashTrimLeading(path), false) livereload.RefreshPath(path) } else { @@ -909,7 +872,7 @@ func (c *hugoBuilder) handleEvents(watcher *watcher.Batcher, // Nothing has changed. return } else if len(changed) == 1 { - pathToRefresh := h.PathSpec.RelURL(helpers.ToSlashTrimLeading(changed[0]), false) + pathToRefresh := h.PathSpec.RelURL(paths.ToSlashTrimLeading(changed[0]), false) livereload.RefreshPath(pathToRefresh) } else { livereload.ForceRefresh() @@ -944,7 +907,6 @@ func (c *hugoBuilder) hugo() (*hugolib.HugoSites, error) { var err error h, err = c.r.HugFromConfig(conf) return err - }); err != nil { return nil, err } @@ -1000,6 +962,7 @@ func (c *hugoBuilder) loadConfig(cd *simplecobra.Commandeer, running bool) error } if len(conf.configs.LoadingInfo.ConfigFiles) == 0 { + //lint:ignore ST1005 end user message. return errors.New("Unable to locate config file or config directory. Perhaps you need to create a new site.\nRun `hugo help new` for details.") } @@ -1011,15 +974,16 @@ func (c *hugoBuilder) loadConfig(cd *simplecobra.Commandeer, running bool) error } return nil - } +var rebuildCounter atomic.Uint64 + func (c *hugoBuilder) printChangeDetected(typ string) { msg := "\nChange" if typ != "" { msg += " of " + typ } - msg += " detected, rebuilding site." 
+ msg += fmt.Sprintf(" detected, rebuilding site (#%d).", rebuildCounter.Add(1)) c.r.logger.Println(msg) const layout = "2006-01-02 15:04:05.000 -0700" @@ -1034,25 +998,12 @@ func (c *hugoBuilder) rebuildSites(events []fsnotify.Event) error { } } c.errState.setBuildErr(nil) - visited := c.visitedURLs.PeekAllSet() h, err := c.hugo() if err != nil { return err } - if c.fastRenderMode { - c.withConf(func(conf *commonConfig) { - // Make sure we always render the home pages - for _, l := range conf.configs.ConfigLangs() { - langPath := l.LanguagePrefix() - if langPath != "" { - langPath = langPath + "/" - } - home := h.PrependBasePath("/"+langPath, false) - visited[home] = true - } - }) - } - return h.Build(hugolib.BuildCfg{NoBuildLock: true, RecentlyVisited: visited, ErrRecovery: c.errState.wasErr()}, events...) + + return h.Build(hugolib.BuildCfg{NoBuildLock: true, RecentlyVisited: c.visitedURLs, ErrRecovery: c.errState.wasErr()}, events...) } func (c *hugoBuilder) reloadConfig() error { diff --git a/commands/import.go b/commands/import.go index 18ed7b328..947b6d11f 100644 --- a/commands/import.go +++ b/commands/import.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -23,7 +23,6 @@ import ( "os" "path/filepath" "regexp" - "strconv" "strings" "time" @@ -66,7 +65,6 @@ Import from Jekyll requires two paths, e.g. 
` + "`hugo import jekyll jekyll_root } return c - } type importCommand struct { @@ -312,7 +310,7 @@ func (c *importCommand) convertJekyllPost(path, relPath, targetDir string, draft targetFile := filepath.Join(targetDir, relPath) targetParentDir := filepath.Dir(targetFile) - os.MkdirAll(targetParentDir, 0777) + os.MkdirAll(targetParentDir, 0o777) contentBytes, err := os.ReadFile(path) if err != nil { @@ -398,7 +396,6 @@ func (c *importCommand) copyJekyllFilesAndFolders(jekyllRoot, dest string, jekyl } func (c *importCommand) importFromJekyll(args []string) error { - jekyllRoot, err := filepath.Abs(filepath.Clean(args[0])) if err != nil { return newUserError("path error:", args[0]) @@ -429,11 +426,7 @@ func (c *importCommand) importFromJekyll(args []string) error { c.r.Println("Importing...") fileCount := 0 - callback := func(path string, fi hugofs.FileMetaInfo, err error) error { - if err != nil { - return err - } - + callback := func(path string, fi hugofs.FileMetaInfo) error { if fi.IsDir() { return nil } @@ -462,7 +455,7 @@ func (c *importCommand) importFromJekyll(args []string) error { for jekyllPostDir, hasAnyPostInDir := range jekyllPostDirs { if hasAnyPostInDir { - if err = helpers.SymbolicWalk(hugofs.Os, filepath.Join(jekyllRoot, jekyllPostDir), callback); err != nil { + if err = helpers.Walk(hugofs.Os, filepath.Join(jekyllRoot, jekyllPostDir), callback); err != nil { return err } } diff --git a/commands/list.go b/commands/list.go index 6690ea9ee..41a45e402 100644 --- a/commands/list.go +++ b/commands/list.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -31,7 +31,6 @@ import ( // newListCommand creates a new list command and its subcommands. 
func newListCommand() *listCommand { - createRecord := func(workingDir string, p page.Page) []string { return []string{ filepath.ToSlash(strings.TrimPrefix(p.File().Filename(), workingDir+string(os.PathSeparator))), @@ -83,7 +82,6 @@ func newListCommand() *listCommand { } return nil - } return &listCommand{ @@ -94,11 +92,10 @@ func newListCommand() *listCommand { long: `List all of the drafts in your content directory.`, run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error { shouldInclude := func(p page.Page) bool { - if !p.Draft() || p.File().IsZero() { + if !p.Draft() || p.File() == nil { return false } return true - } return list(cd, r, shouldInclude, "buildDrafts", true, @@ -113,11 +110,10 @@ func newListCommand() *listCommand { long: `List all of the posts in your content directory which will be posted in the future.`, run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error { shouldInclude := func(p page.Page) bool { - if !resource.IsFuture(p) || p.File().IsZero() { + if !resource.IsFuture(p) || p.File() == nil { return false } return true - } return list(cd, r, shouldInclude, "buildFuture", true, @@ -131,7 +127,7 @@ func newListCommand() *listCommand { long: `List all of the posts in your content directory which has already expired.`, run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error { shouldInclude := func(p page.Page) bool { - if !resource.IsExpired(p) || p.File().IsZero() { + if !resource.IsExpired(p) || p.File() == nil { return false } return true @@ -148,14 +144,13 @@ func newListCommand() *listCommand { long: `List all of the posts in your content directory, include drafts, future and expired pages.`, run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error { shouldInclude := func(p page.Page) bool { - return !p.File().IsZero() + return p.File() != nil } return list(cd, r, shouldInclude, 
"buildDrafts", true, "buildFuture", true, "buildExpired", true) }, }, }, } - } type listCommand struct { diff --git a/commands/mod.go b/commands/mod.go index 20b9d3960..d64d2a983 100644 --- a/commands/mod.go +++ b/commands/mod.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -69,7 +69,7 @@ so this may/will change in future versions of Hugo. if err != nil { return err } - return npm.Pack(h.BaseFs.SourceFs, h.BaseFs.Assets.Dirs) + return npm.Pack(h.BaseFs.ProjectSourceFs, h.BaseFs.AssetsWithDuplicatesPreserved.Fs) }, }, }, diff --git a/commands/new.go b/commands/new.go index 8e348366d..79d2c9e7e 100644 --- a/commands/new.go +++ b/commands/new.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -64,7 +64,6 @@ Ensure you run this within the root directory of your site.`, cmd.Flags().String("editor", "", "edit new content with this editor, if provided") cmd.Flags().BoolVarP(&force, "force", "f", false, "overwrite file if it already exists") applyLocalFlagsBuildConfig(cmd, r) - }, }, &simpleCommand{ @@ -143,7 +142,6 @@ according to your needs.`, } return c - } type newCommand struct { diff --git a/commands/release.go b/commands/release.go index 54cf936e8..1d1aaad53 100644 --- a/commands/release.go +++ b/commands/release.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -24,7 +24,6 @@ import ( // Note: This is a command only meant for internal use and must be run // via "go run -tags release main.go release" on the actual code base that is in the release. func newReleaseCommand() simplecobra.Commander { - var ( step int skipPush bool diff --git a/commands/server.go b/commands/server.go index 63c09fccd..97cf405b7 100644 --- a/commands/server.go +++ b/commands/server.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -27,20 +27,19 @@ import ( "net/http" "net/url" "os" - "sync" - "sync/atomic" - - "github.com/bep/mclib" - "os/signal" "path" "path/filepath" "regexp" "strconv" "strings" + "sync" + "sync/atomic" "syscall" "time" + "github.com/bep/mclib" + "github.com/bep/debounce" "github.com/bep/simplecobra" "github.com/fsnotify/fsnotify" @@ -83,10 +82,14 @@ const ( ) func newHugoBuilder(r *rootCommand, s *serverCommand, onConfigLoaded ...func(reloaded bool) error) *hugoBuilder { + var visitedURLs *types.EvictingStringQueue + if s != nil && !s.disableFastRender { + visitedURLs = types.NewEvictingStringQueue(20) + } return &hugoBuilder{ r: r, s: s, - visitedURLs: types.NewEvictingStringQueue(100), + visitedURLs: visitedURLs, fullRebuildSem: semaphore.NewWeighted(1), debounce: debounce.New(4 * time.Second), onConfigLoaded: func(reloaded bool) error { @@ -120,7 +123,6 @@ func newServerCommand() *serverCommand { }, withc: func(cmd *cobra.Command, r *rootCommand) { cmd.Flags().BoolVar(&uninstall, "uninstall", false, "Uninstall the local CA (but do not delete it).") - }, }, }, @@ -219,7 +221,7 @@ func (f *fileChangeDetector) filterIrrelevant(in []string) []string { } type fileServer struct { - baseURLs []string + baseURLs []urls.BaseURL roots []string errorTemplate func(err any) (io.Reader, error) c *serverCommand @@ 
-255,12 +257,6 @@ func (f *fileServer) createEndpoint(i int) (*http.ServeMux, net.Listener, string r.Println("Running in Fast Render Mode. For full rebuilds on change: hugo server --disableFastRender") } - // We're only interested in the path - u, err := url.Parse(baseURL) - if err != nil { - return nil, nil, "", "", fmt.Errorf("invalid baseURL: %w", err) - } - decorate := func(h http.Handler) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if f.c.showErrorInBrowser { @@ -280,7 +276,7 @@ func (f *fileServer) createEndpoint(i int) (*http.ServeMux, net.Listener, string port = lrport } }) - lr := *u + lr := baseURL.URL() lr.Host = fmt.Sprintf("%s:%d", lr.Hostname(), port) fmt.Fprint(w, injectLiveReloadScript(r, lr)) @@ -311,7 +307,7 @@ func (f *fileServer) createEndpoint(i int) (*http.ServeMux, net.Listener, string // This matches Netlify's behaviour and is needed for SPA behaviour. // See https://docs.netlify.com/routing/redirects/rewrites-proxies/ if !redirect.Force { - path := filepath.Clean(strings.TrimPrefix(requestURI, u.Path)) + path := filepath.Clean(strings.TrimPrefix(requestURI, baseURL.Path())) if root != "" { path = filepath.Join(root, path) } @@ -338,7 +334,7 @@ func (f *fileServer) createEndpoint(i int) (*http.ServeMux, net.Listener, string switch redirect.Status { case 404: w.WriteHeader(404) - file, err := fs.Open(strings.TrimPrefix(redirect.To, u.Path)) + file, err := fs.Open(strings.TrimPrefix(redirect.To, baseURL.Path())) if err == nil { defer file.Close() io.Copy(w, file) @@ -347,7 +343,7 @@ func (f *fileServer) createEndpoint(i int) (*http.ServeMux, net.Listener, string } return case 200: - if r2 := f.rewriteRequest(r, strings.TrimPrefix(redirect.To, u.Path)); r2 != nil { + if r2 := f.rewriteRequest(r, strings.TrimPrefix(redirect.To, baseURL.Path())); r2 != nil { requestURI = redirect.To r = r2 } @@ -385,10 +381,10 @@ func (f *fileServer) createEndpoint(i int) (*http.ServeMux, net.Listener, string fileserver 
:= decorate(http.FileServer(fs)) mu := http.NewServeMux() - if u.Path == "" || u.Path == "/" { + if baseURL.Path() == "" || baseURL.Path() == "/" { mu.Handle("/", fileserver) } else { - mu.Handle(u.Path, http.StripPrefix(u.Path, fileserver)) + mu.Handle(baseURL.Path(), http.StripPrefix(baseURL.Path(), fileserver)) } if r.IsTestRun() { var shutDownOnce sync.Once @@ -401,7 +397,7 @@ func (f *fileServer) createEndpoint(i int) (*http.ServeMux, net.Listener, string endpoint := net.JoinHostPort(f.c.serverInterface, strconv.Itoa(port)) - return mu, listener, u.String(), endpoint, nil + return mu, listener, baseURL.String(), endpoint, nil } func (f *fileServer) rewriteRequest(r *http.Request, toPath string) *http.Request { @@ -469,7 +465,6 @@ func (c *serverCommand) Name() string { } func (c *serverCommand) Run(ctx context.Context, cd *simplecobra.Commandeer, args []string) error { - // Watch runs its own server as part of the routine if c.serverWatch { @@ -676,7 +671,7 @@ func (c *serverCommand) createCertificates(conf *commonConfig) error { // Create the directory if it doesn't exist. if _, err := os.Stat(keyDir); os.IsNotExist(err) { - if err := os.MkdirAll(keyDir, 0777); err != nil { + if err := os.MkdirAll(keyDir, 0o777); err != nil { return err } } @@ -701,7 +696,6 @@ func (c *serverCommand) createCertificates(conf *commonConfig) error { // Yes, this is unfortunate, but it's currently the only way to use Mkcert as a library. 
os.Args = []string{"-cert-file", c.tlsCertFile, "-key-file", c.tlsKeyFile, hostname} return mclib.RunMain() - } func (c *serverCommand) verifyCert(rootPEM, certPEM []byte, name string) error { @@ -831,9 +825,9 @@ func (c *serverCommand) partialReRender(urls ...string) error { c.errState.setWasErr(false) }() c.errState.setBuildErr(nil) - visited := make(map[string]bool) + visited := types.NewEvictingStringQueue(len(urls)) for _, url := range urls { - visited[url] = true + visited.Add(url) } h, err := c.hugo() @@ -846,7 +840,7 @@ func (c *serverCommand) partialReRender(urls ...string) error { func (c *serverCommand) serve() error { var ( - baseURLs []string + baseURLs []urls.BaseURL roots []string h *hugolib.HugoSites ) @@ -863,18 +857,17 @@ func (c *serverCommand) serve() error { if isMultiHost { for _, l := range conf.configs.ConfigLangs() { - baseURLs = append(baseURLs, l.BaseURL().String()) + baseURLs = append(baseURLs, l.BaseURL()) roots = append(roots, l.Language().Lang) } } else { l := conf.configs.GetFirstLanguageConfig() - baseURLs = []string{l.BaseURL().String()} + baseURLs = []urls.BaseURL{l.BaseURL()} roots = []string{""} } return nil }) - if err != nil { return err } @@ -946,13 +939,9 @@ func (c *serverCommand) serve() error { servers = append(servers, srv) if doLiveReload { - u, err := url.Parse(helpers.SanitizeURL(baseURLs[i])) - if err != nil { - return err - } - - mu.HandleFunc(u.Path+"/livereload.js", livereload.ServeJS) - mu.HandleFunc(u.Path+"/livereload", livereload.Handler) + baseURL := baseURLs[i] + mu.HandleFunc(baseURL.Path()+"livereload.js", livereload.ServeJS) + mu.HandleFunc(baseURL.Path()+"livereload", livereload.Handler) } c.r.Printf("Web Server is available at %s (bind address %s) %s\n", serverURL, c.serverInterface, roots[i]) wg1.Go(func() error { @@ -971,8 +960,12 @@ func (c *serverCommand) serve() error { if c.r.IsTestRun() { // Write a .ready file to disk to signal ready status. // This is where the test is run from. 
+ var baseURLs []string + for _, baseURL := range srv.baseURLs { + baseURLs = append(baseURLs, baseURL.String()) + } testInfo := map[string]any{ - "baseURLs": srv.baseURLs, + "baseURLs": baseURLs, } dir := os.Getenv("WORK") @@ -983,7 +976,7 @@ func (c *serverCommand) serve() error { if err != nil { return err } - err = os.WriteFile(readyFile, b, 0777) + err = os.WriteFile(readyFile, b, 0o777) if err != nil { return err } @@ -1167,7 +1160,7 @@ func cleanErrorLog(content string) string { return strings.Join(keep, ": ") } -func injectLiveReloadScript(src io.Reader, baseURL url.URL) string { +func injectLiveReloadScript(src io.Reader, baseURL *url.URL) string { var b bytes.Buffer chain := transform.Chain{livereloadinject.New(baseURL)} chain.Apply(&b, src) diff --git a/common/constants/constants.go b/common/constants/constants.go index 6afb9e283..e4f5a63a2 100644 --- a/common/constants/constants.go +++ b/common/constants/constants.go @@ -20,3 +20,24 @@ const ( ErrRemoteGetJSON = "error-remote-getjson" ErrRemoteGetCSV = "error-remote-getcsv" ) + +// Field/method names with special meaning. +const ( + FieldRelPermalink = "RelPermalink" + FieldPermalink = "Permalink" +) + +// IsFieldRelOrPermalink returns whether the given name is a RelPermalink or Permalink. +func IsFieldRelOrPermalink(name string) bool { + return name == FieldRelPermalink || name == FieldPermalink +} + +// Resource transformations. +const ( + ResourceTransformationFingerprint = "fingerprint" +) + +// IsResourceTransformationLinkChange returns whether the given name is a resource transformation that changes the permalink based on the content. +func IsResourceTransformationPermalinkHash(name string) bool { + return name == ResourceTransformationFingerprint +} diff --git a/common/hcontext/context.go b/common/hcontext/context.go new file mode 100644 index 000000000..9524ef284 --- /dev/null +++ b/common/hcontext/context.go @@ -0,0 +1,46 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package hcontext + +import "context" + +// ContextDispatcher is a generic interface for setting and getting values from a context. +type ContextDispatcher[T any] interface { + Set(ctx context.Context, value T) context.Context + Get(ctx context.Context) T +} + +// NewContextDispatcher creates a new ContextDispatcher with the given key. +func NewContextDispatcher[T any, R comparable](key R) ContextDispatcher[T] { + return keyInContext[T, R]{ + id: key, + } +} + +type keyInContext[T any, R comparable] struct { + zero T + id R +} + +func (f keyInContext[T, R]) Get(ctx context.Context) T { + v := ctx.Value(f.id) + if v == nil { + return f.zero + } + return v.(T) +} + +func (f keyInContext[T, R]) Set(ctx context.Context, value T) context.Context { + return context.WithValue(ctx, f.id, value) +} diff --git a/common/herrors/error_locator.go b/common/herrors/error_locator.go index b880fe045..1ece0cca4 100644 --- a/common/herrors/error_locator.go +++ b/common/herrors/error_locator.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -74,7 +74,6 @@ func ContainsMatcher(text string) func(m LineMatcher) int { // ErrorContext contains contextual information about an error. 
This will // typically be the lines surrounding some problem in a file. type ErrorContext struct { - // If a match will contain the matched line and up to 2 lines before and after. // Will be empty if no match. Lines []string diff --git a/common/herrors/error_locator_test.go b/common/herrors/error_locator_test.go index 6135657d8..62f15213d 100644 --- a/common/herrors/error_locator_test.go +++ b/common/herrors/error_locator_test.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/common/herrors/errors.go b/common/herrors/errors.go index 8e62b2c99..59739a86a 100644 --- a/common/herrors/errors.go +++ b/common/herrors/errors.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -21,6 +21,7 @@ import ( "os" "runtime" "runtime/debug" + "time" ) // PrintStackTrace prints the current stacktrace to w. @@ -47,6 +48,24 @@ func Recover(args ...any) { } } +// IsTimeoutError returns true if the given error is or contains a TimeoutError. +func IsTimeoutError(err error) bool { + return errors.Is(err, &TimeoutError{}) +} + +type TimeoutError struct { + Duration time.Duration +} + +func (e *TimeoutError) Error() string { + return fmt.Sprintf("timeout after %s", e.Duration) +} + +func (e *TimeoutError) Is(target error) bool { + _, ok := target.(*TimeoutError) + return ok +} + // IsFeatureNotAvailableError returns true if the given error is or contains a FeatureNotAvailableError. 
func IsFeatureNotAvailableError(err error) bool { return errors.Is(err, &FeatureNotAvailableError{}) diff --git a/common/herrors/errors_test.go b/common/herrors/errors_test.go index 223782e23..2f53a1e89 100644 --- a/common/herrors/errors_test.go +++ b/common/herrors/errors_test.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -42,5 +42,4 @@ func TestIsFeatureNotAvailableError(t *testing.T) { c.Assert(IsFeatureNotAvailableError(ErrFeatureNotAvailable), qt.Equals, true) c.Assert(IsFeatureNotAvailableError(&FeatureNotAvailableError{}), qt.Equals, true) c.Assert(IsFeatureNotAvailableError(errors.New("asdf")), qt.Equals, false) - } diff --git a/common/herrors/file_error.go b/common/herrors/file_error.go index f8bcecd34..32a6f0081 100644 --- a/common/herrors/file_error.go +++ b/common/herrors/file_error.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -15,13 +15,13 @@ package herrors import ( "encoding/json" - - godartsassv1 "github.com/bep/godartsass" - + "errors" "fmt" "io" "path/filepath" + godartsassv1 "github.com/bep/godartsass" + "github.com/bep/godartsass/v2" "github.com/bep/golibsass/libsass/libsasserrors" "github.com/gohugoio/hugo/common/paths" @@ -29,8 +29,6 @@ import ( "github.com/pelletier/go-toml/v2" "github.com/spf13/afero" "github.com/tdewolff/parse/v2" - - "errors" ) // FileError represents an error when handling a file: Parsing a config file, @@ -48,6 +46,9 @@ type FileError interface { // UpdateContent updates the error with a new ErrorContext from the content of the file. 
UpdateContent(r io.Reader, linematcher LineMatcherFn) FileError + + // SetFilename sets the filename of the error. + SetFilename(filename string) FileError } // Unwrapper can unwrap errors created with fmt.Errorf. @@ -60,6 +61,11 @@ var ( _ Unwrapper = (*fileError)(nil) ) +func (fe *fileError) SetFilename(filename string) FileError { + fe.position.Filename = filename + return fe +} + func (fe *fileError) UpdatePosition(pos text.Position) FileError { oldFilename := fe.Position().Filename if pos.Filename != "" && fe.fileType == "" { @@ -115,7 +121,6 @@ func (fe *fileError) UpdateContent(r io.Reader, linematcher LineMatcherFn) FileE } return fe - } type fileError struct { @@ -181,7 +186,6 @@ func NewFileErrorFromName(err error, name string) FileError { } return &fileError{cause: err, fileType: fileType, position: pos} - } // NewFileErrorFromPos will use the filename and line number from pos to create a new FileError, wrapping err. @@ -192,7 +196,6 @@ func NewFileErrorFromPos(err error, pos text.Position) FileError { _, fileType = paths.FileAndExtNoDelimiter(filepath.Clean(pos.Filename)) } return &fileError{cause: err, fileType: fileType, position: pos} - } func NewFileErrorFromFileInErr(err error, fs afero.Fs, linematcher LineMatcherFn) FileError { @@ -249,7 +252,6 @@ func openFile(filename string, fs afero.Fs) (afero.File, string, error) { }); ok { realFilename = s.Filename() } - } f, err2 := fs.Open(filename) diff --git a/common/herrors/file_error_test.go b/common/herrors/file_error_test.go index 0b260a255..7aca08405 100644 --- a/common/herrors/file_error_test.go +++ b/common/herrors/file_error_test.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -14,12 +14,11 @@ package herrors import ( + "errors" "fmt" "strings" "testing" - "errors" - "github.com/gohugoio/hugo/common/text" qt "github.com/frankban/quicktest" @@ -48,7 +47,6 @@ func TestNewFileError(t *testing.T) { c.Assert(errorContext.Lines, qt.DeepEquals, []string{"line 30", "line 31", "line 32", "line 33", "line 34"}) c.Assert(errorContext.LinesPos, qt.Equals, 2) c.Assert(errorContext.ChromaLexer, qt.Equals, "go-html-template") - } func TestNewFileErrorExtractFromMessage(t *testing.T) { diff --git a/common/hreflect/helpers.go b/common/hreflect/helpers.go index 17afbf912..b5a8bacc9 100644 --- a/common/hreflect/helpers.go +++ b/common/hreflect/helpers.go @@ -1,4 +1,4 @@ -// Copyright 2019 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // Some functions in this file (see comments) is based on the Go source code, // copyright The Go Authors and governed by a BSD-style license. // @@ -23,6 +23,7 @@ import ( "time" "github.com/gohugoio/hugo/common/htime" + "github.com/gohugoio/hugo/common/maps" "github.com/gohugoio/hugo/common/types" ) @@ -188,6 +189,20 @@ func IsTime(tp reflect.Type) bool { return false } +// IsValid returns whether v is not nil and a valid value. +func IsValid(v reflect.Value) bool { + if !v.IsValid() { + return false + } + + switch v.Kind() { + case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice: + return !v.IsNil() + } + + return true +} + // AsTime returns v as a time.Time if possible. // The given location is only used if the value implements AsTimeProvider (e.g. go-toml local). // A zero Time and false is returned if this isn't possible. 
@@ -217,7 +232,7 @@ func CallMethodByName(cxt context.Context, name string, v reflect.Value) []refle panic("not supported") } first := tp.In(0) - if first.Implements(ContextInterface) { + if IsContextType(first) { args = append(args, reflect.ValueOf(cxt)) } } @@ -236,4 +251,24 @@ func indirectInterface(v reflect.Value) reflect.Value { return v.Elem() } -var ContextInterface = reflect.TypeOf((*context.Context)(nil)).Elem() +var contextInterface = reflect.TypeOf((*context.Context)(nil)).Elem() + +var isContextCache = maps.NewCache[reflect.Type, bool]() + +type k string + +var contextTypeValue = reflect.TypeOf(context.WithValue(context.Background(), k("key"), 32)) + +// IsContextType returns whether tp is a context.Context type. +func IsContextType(tp reflect.Type) bool { + if tp == contextTypeValue { + return true + } + if tp == contextInterface { + return true + } + + return isContextCache.GetOrCreate(tp, func() bool { + return tp.Implements(contextInterface) + }) +} diff --git a/common/hreflect/helpers_test.go b/common/hreflect/helpers_test.go index d16b9b9b3..27b774337 100644 --- a/common/hreflect/helpers_test.go +++ b/common/hreflect/helpers_test.go @@ -14,6 +14,7 @@ package hreflect import ( + "context" "reflect" "testing" "time" @@ -40,6 +41,42 @@ func TestGetMethodByName(t *testing.T) { c.Assert(GetMethodIndexByName(tp, "Foo"), qt.Equals, -1) } +func TestIsContextType(t *testing.T) { + c := qt.New(t) + type k string + ctx := context.Background() + valueCtx := context.WithValue(ctx, k("key"), 32) + c.Assert(IsContextType(reflect.TypeOf(ctx)), qt.IsTrue) + c.Assert(IsContextType(reflect.TypeOf(valueCtx)), qt.IsTrue) +} + +func BenchmarkIsContextType(b *testing.B) { + type k string + b.Run("value", func(b *testing.B) { + ctx := context.Background() + ctxs := make([]reflect.Type, b.N) + for i := 0; i < b.N; i++ { + ctxs[i] = reflect.TypeOf(context.WithValue(ctx, k("key"), i)) + } + + b.ResetTimer() + for i := 0; i < b.N; i++ { + if !IsContextType(ctxs[i]) { + 
b.Fatal("not context") + } + } + }) + + b.Run("background", func(b *testing.B) { + var ctxt reflect.Type = reflect.TypeOf(context.Background()) + for i := 0; i < b.N; i++ { + if !IsContextType(ctxt) { + b.Fatal("not context") + } + } + }) +} + func BenchmarkIsTruthFul(b *testing.B) { v := reflect.ValueOf("Hugo") diff --git a/common/hstrings/strings.go b/common/hstrings/strings.go index 88df97607..d9426ab5d 100644 --- a/common/hstrings/strings.go +++ b/common/hstrings/strings.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -122,3 +122,8 @@ func InSlicEqualFold(arr []string, el string) bool { } return false } + +type Tuple struct { + First string + Second string +} diff --git a/common/hstrings/strings_test.go b/common/hstrings/strings_test.go index 85068bdf9..d8e9e204a 100644 --- a/common/hstrings/strings_test.go +++ b/common/hstrings/strings_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -33,7 +33,6 @@ func TestStringEqualFold(t *testing.T) { c.Assert(StringEqualFold(s1).EqualFold("b"), qt.Equals, false) c.Assert(StringEqualFold(s1).Eq(s2), qt.Equals, true) c.Assert(StringEqualFold(s1).Eq("b"), qt.Equals, false) - } func TestGetOrCompileRegexp(t *testing.T) { @@ -42,7 +41,6 @@ func TestGetOrCompileRegexp(t *testing.T) { re, err := GetOrCompileRegexp(`\d+`) c.Assert(err, qt.IsNil) c.Assert(re.MatchString("123"), qt.Equals, true) - } func BenchmarkGetOrCompileRegexp(b *testing.B) { diff --git a/common/htime/integration_test.go b/common/htime/integration_test.go index e72c216d9..983fff1f7 100644 --- a/common/htime/integration_test.go +++ b/common/htime/integration_test.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/common/hugio/copy.go b/common/hugio/copy.go index 8dbadc48c..31d679dfc 100644 --- a/common/hugio/copy.go +++ b/common/hugio/copy.go @@ -16,6 +16,7 @@ package hugio import ( "fmt" "io" + iofs "io/fs" "path/filepath" "github.com/spf13/afero" @@ -60,12 +61,16 @@ func CopyDir(fs afero.Fs, from, to string, shouldCopy func(filename string) bool return fmt.Errorf("%q is not a directory", from) } - err = fs.MkdirAll(to, 0777) // before umask + err = fs.MkdirAll(to, 0o777) // before umask if err != nil { return err } - entries, _ := afero.ReadDir(fs, from) + d, err := fs.Open(from) + if err != nil { + return err + } + entries, _ := d.(iofs.ReadDirFile).ReadDir(-1) for _, entry := range entries { fromFilename := filepath.Join(from, entry.Name()) toFilename := filepath.Join(to, entry.Name()) diff --git a/common/hugio/hasBytesWriter.go b/common/hugio/hasBytesWriter.go index 7b7d7a5d7..5148c82f9 100644 --- a/common/hugio/hasBytesWriter.go +++ b/common/hugio/hasBytesWriter.go @@ -1,4 +1,4 @@ -// Copyright 2022 The 
+// NewReadSeekerNoOpCloserFromBytes uses bytes.NewReader to create a new ReadSeekerNoOpCloser +// from the given bytes slice. +func NewReadSeekerNoOpCloserFromBytes(content []byte) ReadSeekerNoOpCloser { + return ReadSeekerNoOpCloser{bytes.NewReader(content)} +} + +// NewOpenReadSeekCloser creates a new OpenReadSeekCloser from the given ReadSeekCloser. +// The ReadSeekCloser will be seeked to the beginning before returned. +func NewOpenReadSeekCloser(r ReadSeekCloser) OpenReadSeekCloser {
+type OpenReadSeekCloser func() (ReadSeekCloser, error) diff --git a/common/hugo/hugo.go b/common/hugo/hugo.go index 67d52f6c8..be43e2a38 100644 --- a/common/hugo/hugo.go +++ b/common/hugo/hugo.go @@ -35,6 +35,8 @@ import ( "github.com/spf13/afero" + iofs "io/fs" + "github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/hugofs" ) @@ -159,7 +161,12 @@ func GetExecEnviron(workDir string, cfg config.AllProvider, fs afero.Fs) []strin config.SetEnvVars(&env, "HUGO_PUBLISHDIR", filepath.Join(workDir, cfg.BaseConfig().PublishDir)) if fs != nil { - fis, err := afero.ReadDir(fs, files.FolderJSConfig) + var fis []iofs.DirEntry + d, err := fs.Open(files.FolderJSConfig) + if err == nil { + fis, err = d.(iofs.ReadDirFile).ReadDir(-1) + } + if err == nil { for _, fi := range fis { key := fmt.Sprintf("HUGO_FILE_%s", strings.ReplaceAll(strings.ToUpper(fi.Name()), ".", "_")) diff --git a/common/loggers/handlerdefault.go b/common/loggers/handlerdefault.go index bb48895bc..bc3c7eec2 100644 --- a/common/loggers/handlerdefault.go +++ b/common/loggers/handlerdefault.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // Some functions in this file (see comments) is based on the Go source code, // copyright The Go Authors and governed by a BSD-style license. // @@ -27,10 +27,9 @@ import ( "github.com/fatih/color" ) -var bold = color.New(color.Bold) - // levelColor mapping. var levelColor = [...]*color.Color{ + logg.LevelTrace: color.New(color.FgWhite), logg.LevelDebug: color.New(color.FgWhite), logg.LevelInfo: color.New(color.FgBlue), logg.LevelWarn: color.New(color.FgYellow), @@ -39,6 +38,7 @@ var levelColor = [...]*color.Color{ // levelString mapping. 
var levelString = [...]string{ + logg.LevelTrace: "TRACE", logg.LevelDebug: "DEBUG", logg.LevelInfo: "INFO ", logg.LevelWarn: "WARN ", diff --git a/common/loggers/handlersmisc.go b/common/loggers/handlersmisc.go index 5c9d6c091..55bf8b940 100644 --- a/common/loggers/handlersmisc.go +++ b/common/loggers/handlersmisc.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // Some functions in this file (see comments) is based on the Go source code, // copyright The Go Authors and governed by a BSD-style license. // @@ -69,7 +69,7 @@ func (h *logLevelCounter) HandleLog(e *logg.Entry) error { return nil } -var stopError = fmt.Errorf("stop") +var errStop = fmt.Errorf("stop") type logOnceHandler struct { threshold logg.Level @@ -87,7 +87,7 @@ func (h *logOnceHandler) HandleLog(e *logg.Entry) error { defer h.mu.Unlock() hash := identity.HashUint64(e.Level, e.Message, e.Fields) if h.seen[hash] { - return stopError + return errStop } h.seen[hash] = true return nil @@ -107,7 +107,7 @@ type stopHandler struct { func (h *stopHandler) HandleLog(e *logg.Entry) error { for _, handler := range h.handlers { if err := handler.HandleLog(e); err != nil { - if err == stopError { + if err == errStop { return nil } return err @@ -124,26 +124,13 @@ func (h *suppressStatementsHandler) HandleLog(e *logg.Entry) error { for _, field := range e.Fields { if field.Name == FieldNameStatementID { if h.statements[field.Value.(string)] { - return stopError + return errStop } } } return nil } -// replacer creates a new log handler that does string replacement in log messages. 
-func replacer(repl *strings.Replacer) logg.Handler { - return logg.HandlerFunc(func(e *logg.Entry) error { - e.Message = repl.Replace(e.Message) - for i, field := range e.Fields { - if s, ok := field.Value.(string); ok { - e.Fields[i].Value = repl.Replace(s) - } - } - return nil - }) -} - // whiteSpaceTrimmer creates a new log handler that trims whitespace from log messages and string fields. func whiteSpaceTrimmer() logg.Handler { return logg.HandlerFunc(func(e *logg.Entry) error { diff --git a/common/loggers/handlerterminal.go b/common/loggers/handlerterminal.go index e3d377bbf..53f6e41da 100644 --- a/common/loggers/handlerterminal.go +++ b/common/loggers/handlerterminal.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // Some functions in this file (see comments) is based on the Go source code, // copyright The Go Authors and governed by a BSD-style license. // @@ -81,7 +81,7 @@ func (h *noColoursHandler) HandleLog(e *logg.Entry) error { if strings.HasPrefix(field.Name, reservedFieldNamePrefix) { continue } - fmt.Fprintf(w, " %s %q", field.Name, field.Value) + fmt.Fprintf(w, " %s %v", field.Name, field.Value) } fmt.Fprintln(w) diff --git a/common/loggers/logger.go b/common/loggers/logger.go index bc64ae0e5..c4d81fb83 100644 --- a/common/loggers/logger.go +++ b/common/loggers/logger.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // Some functions in this file (see comments) is based on the Go source code, // copyright The Go Authors and governed by a BSD-style license. 
// @@ -68,11 +68,24 @@ func New(opts Options) Logger { errorsw := &strings.Builder{} logCounters := newLogLevelCounter() handlers := []logg.Handler{ - whiteSpaceTrimmer(), - logHandler, logCounters, } + if opts.Level == logg.LevelTrace { + // Trace is used during development only, and it's useful to + // only see the trace messages. + handlers = append(handlers, + logg.HandlerFunc(func(e *logg.Entry) error { + if e.Level != logg.LevelTrace { + return logg.ErrStopLogEntry + } + return nil + }), + ) + } + + handlers = append(handlers, whiteSpaceTrimmer(), logHandler) + if opts.HandlerPost != nil { var hookHandler logg.HandlerFunc = func(e *logg.Entry) error { opts.HandlerPost(e) @@ -127,6 +140,7 @@ func New(opts Options) Logger { out: opts.Stdout, level: opts.Level, logger: logger, + tracel: l.WithLevel(logg.LevelTrace), debugl: l.WithLevel(logg.LevelDebug), infol: l.WithLevel(logg.LevelInfo), warnl: l.WithLevel(logg.LevelWarn), @@ -145,11 +159,22 @@ func NewDefault() Logger { return New(opts) } +func NewTrace() Logger { + opts := Options{ + DistinctLevel: logg.LevelWarn, + Level: logg.LevelTrace, + Stdout: os.Stdout, + Stderr: os.Stdout, + } + return New(opts) +} + func LevelLoggerToWriter(l logg.LevelLogger) io.Writer { return logWriter{l: l} } type Logger interface { + Debug() logg.LevelLogger Debugf(format string, v ...any) Debugln(v ...any) Error() logg.LevelLogger @@ -174,6 +199,7 @@ type Logger interface { Warnf(format string, v ...any) Warnln(v ...any) Deprecatef(fail bool, format string, v ...any) + Trace(s logg.StringFunc) } type logAdapter struct { @@ -183,12 +209,17 @@ type logAdapter struct { out io.Writer level logg.Level logger logg.Logger + tracel logg.LevelLogger debugl logg.LevelLogger infol logg.LevelLogger warnl logg.LevelLogger errorl logg.LevelLogger } +func (l *logAdapter) Debug() logg.LevelLogger { + return l.debugl +} + func (l *logAdapter) Debugf(format string, v ...any) { l.debugl.Logf(format, v...) 
} @@ -294,6 +325,10 @@ func (l *logAdapter) Errorsf(id, format string, v ...any) { l.errorl.WithField(FieldNameStatementID, id).Logf(format, v...) } +func (l *logAdapter) Trace(s logg.StringFunc) { + l.tracel.Log(s) +} + func (l *logAdapter) sprint(v ...any) string { return strings.TrimRight(fmt.Sprintln(v...), "\n") } @@ -315,3 +350,19 @@ func (w logWriter) Write(p []byte) (n int, err error) { w.l.Log(logg.String(string(p))) return len(p), nil } + +func TimeTrackf(l logg.LevelLogger, start time.Time, fields logg.Fields, format string, a ...any) { + elapsed := time.Since(start) + if fields != nil { + l = l.WithFields(fields) + } + l.WithField("duration", elapsed).Logf(format, a...) +} + +func TimeTrackfn(fn func() (logg.LevelLogger, error)) error { + start := time.Now() + l, err := fn() + elapsed := time.Since(start) + l.WithField("duration", elapsed).Logf("") + return err +} diff --git a/common/loggers/logger_test.go b/common/loggers/logger_test.go index 6f589aafe..dcf94b123 100644 --- a/common/loggers/logger_test.go +++ b/common/loggers/logger_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // Some functions in this file (see comments) is based on the Go source code, // copyright The Go Authors and governed by a BSD-style license. // diff --git a/common/loggers/loggerglobal.go b/common/loggers/loggerglobal.go index 6fd474a69..c3e2970d0 100644 --- a/common/loggers/loggerglobal.go +++ b/common/loggers/loggerglobal.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // Some functions in this file (see comments) is based on the Go source code, // copyright The Go Authors and governed by a BSD-style license. 
+// Get returns the value for the given key and whether it was found. +func (c *Cache[K, T]) Get(key K) (T, bool) {
+type SliceCache[T any] struct { + m map[string][]T + sync.RWMutex +} + +func NewSliceCache[T any]() *SliceCache[T] { + return &SliceCache[T]{m: make(map[string][]T)} +} + +func (c *SliceCache[T]) Get(key string) ([]T, bool) { + c.RLock() + v, found := c.m[key] + c.RUnlock() + return v, found +} + +func (c *SliceCache[T]) Append(key string, values ...T) { + c.Lock() + c.m[key] = append(c.m[key], values...) + c.Unlock() +} + +func (c *SliceCache[T]) Reset() { + c.Lock() + c.m = make(map[string][]T) + c.Unlock() +} diff --git a/common/maps/maps.go b/common/maps/maps.go index f0fd3d5ce..2686baad6 100644 --- a/common/maps/maps.go +++ b/common/maps/maps.go @@ -29,7 +29,7 @@ func ToStringMapE(in any) (map[string]any, error) { case Params: return vv, nil case map[string]string: - var m = map[string]any{} + m := map[string]any{} for k, v := range vv { m[k] = v } @@ -192,21 +192,20 @@ func (KeyRenamer) keyPath(k1, k2 string) string { } func (r KeyRenamer) renamePath(parentKeyPath string, m map[string]any) { - for key, val := range m { - keyPath := r.keyPath(parentKeyPath, key) - switch val.(type) { + for k, v := range m { + keyPath := r.keyPath(parentKeyPath, k) + switch vv := v.(type) { case map[any]any: - val = cast.ToStringMap(val) - r.renamePath(keyPath, val.(map[string]any)) + r.renamePath(keyPath, cast.ToStringMap(vv)) case map[string]any: - r.renamePath(keyPath, val.(map[string]any)) + r.renamePath(keyPath, vv) } newKey := r.getNewKey(keyPath) if newKey != "" { - delete(m, key) - m[newKey] = val + delete(m, k) + m[newKey] = v } } } diff --git a/common/maps/params.go b/common/maps/params.go index d94d16f9d..a8cbba555 100644 --- a/common/maps/params.go +++ b/common/maps/params.go @@ -61,7 +61,7 @@ func SetParams(dst, src Params) { // IsZero returns true if p is considered empty. 
func (p Params) IsZero() bool { - if p == nil || len(p) == 0 { + if len(p) == 0 { return true } @@ -74,7 +74,6 @@ func (p Params) IsZero() bool { } return false - } // MergeParamsWithStrategy transfers values from src to dst for new keys using the merge strategy given. @@ -93,7 +92,7 @@ func MergeParams(dst, src Params) { func (p Params) merge(ps ParamsMergeStrategy, pp Params) { ns, found := p.GetMergeStrategy() - var ms = ns + ms := ns if !found && ps != "" { ms = ps } @@ -248,7 +247,7 @@ const ( // CleanConfigStringMapString removes any processing instructions from m, // m will never be modified. func CleanConfigStringMapString(m map[string]string) map[string]string { - if m == nil || len(m) == 0 { + if len(m) == 0 { return m } if _, found := m[MergeStrategyKey]; !found { @@ -267,7 +266,7 @@ func CleanConfigStringMapString(m map[string]string) map[string]string { // CleanConfigStringMap is the same as CleanConfigStringMapString but for // map[string]any. func CleanConfigStringMap(m map[string]any) map[string]any { - if m == nil || len(m) == 0 { + if len(m) == 0 { return m } if _, found := m[MergeStrategyKey]; !found { @@ -291,7 +290,6 @@ func CleanConfigStringMap(m map[string]any) map[string]any { } return m2 - } func toMergeStrategy(v any) ParamsMergeStrategy { diff --git a/common/paths/path.go b/common/paths/path.go index 5d211c5e0..da99b16ac 100644 --- a/common/paths/path.go +++ b/common/paths/path.go @@ -16,14 +16,18 @@ package paths import ( "errors" "fmt" + "net/url" "path" "path/filepath" - "regexp" "strings" + "unicode" ) // FilePathSeparator as defined by os.Separator. 
-const FilePathSeparator = string(filepath.Separator) +const ( + FilePathSeparator = string(filepath.Separator) + slash = "/" +) // filepathPathBridge is a bridge for common functionality in filepath vs path type filepathPathBridge interface { @@ -72,6 +76,30 @@ func AbsPathify(workingDir, inPath string) string { return filepath.Join(workingDir, inPath) } +// AddTrailingSlash adds a trailing Unix styled slash (/) if not already +// there. +func AddTrailingSlash(path string) string { + if !strings.HasSuffix(path, "/") { + path += "/" + } + return path +} + +// AddLeadingSlash adds a leading Unix styled slash (/) if not already +// there. +func AddLeadingSlash(path string) string { + if !strings.HasPrefix(path, "/") { + path = "/" + path + } + return path +} + +// AddTrailingAndLeadingSlash adds a leading and trailing Unix styled slash (/) if not already +// there. +func AddLeadingAndTrailingSlash(path string) string { + return AddTrailingSlash(AddLeadingSlash(path)) +} + // MakeTitle converts the path given to a suitable title, trimming whitespace // and replacing hyphens with whitespace. func MakeTitle(inpath string) string { @@ -94,43 +122,6 @@ func makePathRelative(inPath string, possibleDirectories ...string) (string, err return inPath, errors.New("can't extract relative path, unknown prefix") } -// Should be good enough for Hugo. -var isFileRe = regexp.MustCompile(`.*\..{1,6}$`) - -// GetDottedRelativePath expects a relative path starting after the content directory. -// It returns a relative path with dots ("..") navigating up the path structure. -func GetDottedRelativePath(inPath string) string { - inPath = path.Clean(filepath.ToSlash(inPath)) - - if inPath == "." 
{ - return "./" - } - - if !isFileRe.MatchString(inPath) && !strings.HasSuffix(inPath, "/") { - inPath += "/" - } - - if !strings.HasPrefix(inPath, "/") { - inPath = "/" + inPath - } - - dir, _ := filepath.Split(inPath) - - sectionCount := strings.Count(dir, "/") - - if sectionCount == 0 || dir == "/" { - return "./" - } - - var dottedPath string - - for i := 1; i < sectionCount; i++ { - dottedPath += "../" - } - - return dottedPath -} - // ExtNoDelimiter takes a path and returns the extension, excluding the delimiter, i.e. "md". func ExtNoDelimiter(in string) string { return strings.TrimPrefix(Ext(in), ".") @@ -167,12 +158,6 @@ func Filename(in string) (name string) { return } -// PathNoExt takes a path, strips out the extension, -// and returns the name of the file. -func PathNoExt(in string) string { - return strings.TrimSuffix(in, path.Ext(in)) -} - // FileAndExt returns the filename and any extension of a file path as // two separate strings. // @@ -252,16 +237,125 @@ func prettifyPath(in string, b filepathPathBridge) string { return b.Join(b.Dir(in), name, "index"+ext) } -type NamedSlice struct { - Name string - Slice []string +// CommonDir returns the common directory of the given paths. +func CommonDir(path1, path2 string) string { + if path1 == "" || path2 == "" { + return "" + } + + p1 := strings.Split(path1, "/") + p2 := strings.Split(path2, "/") + + var common []string + + for i := 0; i < len(p1) && i < len(p2); i++ { + if p1[i] == p2[i] { + common = append(common, p1[i]) + } else { + break + } + } + + return strings.Join(common, "/") +} + +// Sanitize sanitizes string to be used in Hugo's file paths and URLs, allowing only +// a predefined set of special Unicode characters. +// +// Spaces will be replaced with a single hyphen. +// +// This function is the core function used to normalize paths in Hugo. 
+// +// Note that this is the first common step for URL/path sanitization, +// the final URL/path may end up looking different if the user has stricter rules defined (e.g. removePathAccents=true).
|| r == '/' || r == '\\' || r == '_' || r == '#' || r == '+' || r == '~' || r == '-' || r == '@' + isAllowed = isAllowed || unicode.IsMark(r) + isAllowed = isAllowed || (r == '%' && i+2 < len(s) && ishex(s[i+1]) && ishex(s[i+2])) + return isAllowed } -func (n NamedSlice) String() string { - if len(n.Slice) == 0 { - return n.Name +// From https://golang.org/src/net/url/url.go +func ishex(c byte) bool { + switch { + case '0' <= c && c <= '9': + return true + case 'a' <= c && c <= 'f': + return true + case 'A' <= c && c <= 'F': + return true } - return fmt.Sprintf("%s%s{%s}", n.Name, FilePathSeparator, strings.Join(n.Slice, ",")) + return false +} + +var slashFunc = func(r rune) bool { + return r == '/' +} + +// Dir behaves like path.Dir without the path.Clean step. +// +// The returned path ends in a slash only if it is the root "/". +func Dir(s string) string { + dir, _ := path.Split(s) + if len(dir) > 1 && dir[len(dir)-1] == '/' { + return dir[:len(dir)-1] + } + return dir +} + +// FieldsSlash cuts s into fields separated with '/'. +func FieldsSlash(s string) []string { + f := strings.FieldsFunc(s, slashFunc) + return f } // DirFile holds the result from path.Split. @@ -274,3 +368,27 @@ type DirFile struct { func (df DirFile) String() string { return fmt.Sprintf("%s|%s", df.Dir, df.File) } + +// PathEscape escapes unicode letters in pth. +// Use URLEscape to escape full URLs including scheme, query etc. +// This is slightly faster for the common case. +// Note, there is a url.PathEscape function, but that also +// escapes /. +func PathEscape(pth string) string { + u, err := url.Parse(pth) + if err != nil { + panic(err) + } + return u.EscapedPath() +} + +// ToSlashTrimLeading is just a filepath.ToSlash with an added / prefix trimmer. 
+func ToSlashTrimLeading(s string) string { + return strings.TrimPrefix(filepath.ToSlash(s), "/") +} + +// ToSlashPreserveLeading converts the path given to a forward slash separated path +// and preserves the leading slash if present trimming any trailing slash. +func ToSlashPreserveLeading(s string) string { + return "/" + strings.Trim(filepath.ToSlash(s), "/") +} diff --git a/common/paths/path_test.go b/common/paths/path_test.go index 2400f16ab..3605bfc43 100644 --- a/common/paths/path_test.go +++ b/common/paths/path_test.go @@ -1,4 +1,4 @@ -// Copyright 2021 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -75,44 +75,6 @@ func TestMakePathRelative(t *testing.T) { } } -func TestGetDottedRelativePath(t *testing.T) { - // on Windows this will receive both kinds, both country and western ... - for _, f := range []func(string) string{filepath.FromSlash, func(s string) string { return s }} { - doTestGetDottedRelativePath(f, t) - } -} - -func doTestGetDottedRelativePath(urlFixer func(string) string, t *testing.T) { - type test struct { - input, expected string - } - data := []test{ - {"", "./"}, - {urlFixer("/"), "./"}, - {urlFixer("post"), "../"}, - {urlFixer("/post"), "../"}, - {urlFixer("post/"), "../"}, - {urlFixer("tags/foo.html"), "../"}, - {urlFixer("/tags/foo.html"), "../"}, - {urlFixer("/post/"), "../"}, - {urlFixer("////post/////"), "../"}, - {urlFixer("/foo/bar/index.html"), "../../"}, - {urlFixer("/foo/bar/foo/"), "../../../"}, - {urlFixer("/foo/bar/foo"), "../../../"}, - {urlFixer("foo/bar/foo/"), "../../../"}, - {urlFixer("foo/bar/foo/bar"), "../../../../"}, - {"404.html", "./"}, - {"404.xml", "./"}, - {"/404.html", "./"}, - } - for i, d := range data { - output := GetDottedRelativePath(d.input) - if d.expected != output { - t.Errorf("Test %d failed. 
Expected %q got %q", i, d.expected, output) - } - } -} - func TestMakeTitle(t *testing.T) { type test struct { input, expected string @@ -226,3 +188,77 @@ func TestFileAndExt(t *testing.T) { } } } + +func TestSanitize(t *testing.T) { + c := qt.New(t) + tests := []struct { + input string + expected string + }{ + {" Foo bar ", "Foo-bar"}, + {"Foo.Bar/foo_Bar-Foo", "Foo.Bar/foo_Bar-Foo"}, + {"fOO,bar:foobAR", "fOObarfoobAR"}, + {"FOo/BaR.html", "FOo/BaR.html"}, + {"FOo/Ba---R.html", "FOo/Ba---R.html"}, /// See #10104 + {"FOo/Ba R.html", "FOo/Ba-R.html"}, + {"трям/трям", "трям/трям"}, + {"은행", "은행"}, + {"Банковский кассир", "Банковский-кассир"}, + // Issue #1488 + {"संस्कृत", "संस्कृत"}, + {"a%C3%B1ame", "a%C3%B1ame"}, // Issue #1292 + {"this+is+a+test", "this+is+a+test"}, // Issue #1290 + {"~foo", "~foo"}, // Issue #2177 + + } + + for _, test := range tests { + c.Assert(Sanitize(test.input), qt.Equals, test.expected) + } +} + +func BenchmarkSanitize(b *testing.B) { + const ( + allAlowedPath = "foo/bar" + spacePath = "foo bar" + ) + + // This should not allocate any memory. + b.Run("All allowed", func(b *testing.B) { + for i := 0; i < b.N; i++ { + got := Sanitize(allAlowedPath) + if got != allAlowedPath { + b.Fatal(got) + } + } + }) + + // This will allocate some memory. 
+ b.Run("Spaces", func(b *testing.B) { + for i := 0; i < b.N; i++ { + got := Sanitize(spacePath) + if got != "foo-bar" { + b.Fatal(got) + } + } + }) +} + +func TestDir(t *testing.T) { + c := qt.New(t) + c.Assert(Dir("/a/b/c/d"), qt.Equals, "/a/b/c") + c.Assert(Dir("/a"), qt.Equals, "/") + c.Assert(Dir("/"), qt.Equals, "/") + c.Assert(Dir(""), qt.Equals, "") +} + +func TestFieldsSlash(t *testing.T) { + c := qt.New(t) + + c.Assert(FieldsSlash("a/b/c"), qt.DeepEquals, []string{"a", "b", "c"}) + c.Assert(FieldsSlash("/a/b/c"), qt.DeepEquals, []string{"a", "b", "c"}) + c.Assert(FieldsSlash("/a/b/c/"), qt.DeepEquals, []string{"a", "b", "c"}) + c.Assert(FieldsSlash("a/b/c/"), qt.DeepEquals, []string{"a", "b", "c"}) + c.Assert(FieldsSlash("/"), qt.DeepEquals, []string{}) + c.Assert(FieldsSlash(""), qt.DeepEquals, []string{}) +} diff --git a/common/paths/pathparser.go b/common/paths/pathparser.go new file mode 100644 index 000000000..842d9307b --- /dev/null +++ b/common/paths/pathparser.go @@ -0,0 +1,494 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package paths + +import ( + "path" + "path/filepath" + "runtime" + "strings" + + "github.com/gohugoio/hugo/common/types" + "github.com/gohugoio/hugo/hugofs/files" +) + +var defaultPathParser PathParser + +// PathParser parses a path into a Path. +type PathParser struct { + // Maps the language code to its index in the languages/sites slice. 
+ LanguageIndex map[string]int +} + +// Parse parses component c with path s into Path using the default path parser. +func Parse(c, s string) *Path { + return defaultPathParser.Parse(c, s) +} + +// NormalizePathString returns a normalized path string using the very basic Hugo rules. +func NormalizePathStringBasic(s string) string { + // All lower case. + s = strings.ToLower(s) + + // Replace spaces with hyphens. + s = strings.ReplaceAll(s, " ", "-") + + return s +} + +// Parse parses component c with path s into Path using Hugo's content path rules. +func (parser PathParser) Parse(c, s string) *Path { + p, err := parser.parse(c, s) + if err != nil { + panic(err) + } + return p +} + +func (pp *PathParser) parse(component, s string) (*Path, error) { + ss := NormalizePathStringBasic(s) + + p, err := pp.doParse(component, ss) + if err != nil { + return nil, err + } + + if s != ss { + var err error + // Preserve the original case for titles etc. + p.unnormalized, err = pp.doParse(component, s) + + if err != nil { + return nil, err + } + } else { + p.unnormalized = p + } + + return p, nil +} + +func (pp *PathParser) doParse(component, s string) (*Path, error) { + p := &Path{ + component: component, + posContainerLow: -1, + posContainerHigh: -1, + posSectionHigh: -1, + posIdentifierLanguage: -1, + } + + hasLang := pp.LanguageIndex != nil + hasLang = hasLang && (component == files.ComponentFolderContent || component == files.ComponentFolderLayouts) + + if runtime.GOOS == "windows" { + s = path.Clean(filepath.ToSlash(s)) + if s == "." { + s = "" + } + } + + if s == "" { + s = "/" + } + + // Leading slash, no trailing slash. 
+ if !strings.HasPrefix(s, "/") { + s = "/" + s + } + + if s != "/" && s[len(s)-1] == '/' { + s = s[:len(s)-1] + } + + p.s = s + slashCount := 0 + + for i := len(s) - 1; i >= 0; i-- { + c := s[i] + + switch c { + case '.': + if p.posContainerHigh == -1 { + var high int + if len(p.identifiers) > 0 { + high = p.identifiers[len(p.identifiers)-1].Low - 1 + } else { + high = len(p.s) + } + id := types.LowHigh{Low: i + 1, High: high} + if len(p.identifiers) == 0 { + p.identifiers = append(p.identifiers, id) + } else if len(p.identifiers) == 1 { + // Check for a valid language. + s := p.s[id.Low:id.High] + + if hasLang { + if _, found := pp.LanguageIndex[s]; found { + p.posIdentifierLanguage = 1 + p.identifiers = append(p.identifiers, id) + } + } + } + } + case '/': + slashCount++ + if p.posContainerHigh == -1 { + p.posContainerHigh = i + 1 + } else if p.posContainerLow == -1 { + p.posContainerLow = i + 1 + } + if i > 0 { + p.posSectionHigh = i + } + } + } + + isContentComponent := p.component == files.ComponentFolderContent || p.component == files.ComponentFolderArchetypes + isContent := isContentComponent && files.IsContentExt(p.Ext()) + + if isContent { + id := p.identifiers[len(p.identifiers)-1] + b := p.s[p.posContainerHigh : id.Low-1] + switch b { + case "index": + p.bundleType = PathTypeLeaf + case "_index": + p.bundleType = PathTypeBranch + default: + p.bundleType = PathTypeContentSingle + } + + if slashCount == 2 && p.IsLeafBundle() { + p.posSectionHigh = 0 + } + } + + return p, nil +} + +func ModifyPathBundleTypeResource(p *Path) { + if p.IsContent() { + p.bundleType = PathTypeContentResource + } else { + p.bundleType = PathTypeFile + } +} + +type PathType int + +const ( + // A generic resource, e.g. a JSON file. + PathTypeFile PathType = iota + + // All below are content files. + // A resource of a content type with front matter. + PathTypeContentResource + + // E.g. /blog/my-post.md + PathTypeContentSingle + + // All bewlow are bundled content files. 
+ + // Leaf bundles, e.g. /blog/my-post/index.md + PathTypeLeaf + + // Branch bundles, e.g. /blog/_index.md + PathTypeBranch +) + +type Path struct { + s string + + posContainerLow int + posContainerHigh int + posSectionHigh int + + component string + bundleType PathType + + identifiers []types.LowHigh + + posIdentifierLanguage int + + trimLeadingSlash bool + + unnormalized *Path +} + +// TrimLeadingSlash returns a copy of the Path with the leading slash removed. +func (p Path) TrimLeadingSlash() *Path { + p.trimLeadingSlash = true + return &p +} + +func (p *Path) norm(s string) string { + if p.trimLeadingSlash { + s = strings.TrimPrefix(s, "/") + } + return s +} + +// IdentifierBase satifies identity.Identity. +func (p *Path) IdentifierBase() string { + return p.Base()[1:] +} + +// Component returns the component for this path (e.g. "content"). +func (p *Path) Component() string { + return p.component +} + +// Container returns the base name of the container directory for this path. +func (p *Path) Container() string { + if p.posContainerLow == -1 { + return "" + } + return p.norm(p.s[p.posContainerLow : p.posContainerHigh-1]) +} + +// ContainerDir returns the container directory for this path. +// For content bundles this will be the parent directory. +func (p *Path) ContainerDir() string { + if p.posContainerLow == -1 || !p.IsBundle() { + return p.Dir() + } + return p.norm(p.s[:p.posContainerLow-1]) +} + +// Section returns the first path element (section). +func (p *Path) Section() string { + if p.posSectionHigh <= 0 { + return "" + } + return p.norm(p.s[1:p.posSectionHigh]) +} + +// IsContent returns true if the path is a content file (e.g. mypost.md). +// Note that this will also return true for content files in a bundle. +func (p *Path) IsContent() bool { + return p.BundleType() >= PathTypeContentResource +} + +// isContentPage returns true if the path is a content file (e.g. mypost.md), +// but nof if inside a leaf bundle. 
+func (p *Path) isContentPage() bool { + return p.BundleType() >= PathTypeContentSingle +} + +// Name returns the last element of path. +func (p *Path) Name() string { + if p.posContainerHigh > 0 { + return p.s[p.posContainerHigh:] + } + return p.s +} + +// Name returns the last element of path withhout any extension. +func (p *Path) NameNoExt() string { + if i := p.identifierIndex(0); i != -1 { + return p.s[p.posContainerHigh : p.identifiers[i].Low-1] + } + return p.s[p.posContainerHigh:] +} + +// Name returns the last element of path withhout any language identifier. +func (p *Path) NameNoLang() string { + i := p.identifierIndex(p.posIdentifierLanguage) + if i == -1 { + return p.Name() + } + + return p.s[p.posContainerHigh:p.identifiers[i].Low-1] + p.s[p.identifiers[i].High:] +} + +// BaseNameNoIdentifier returns the logcical base name for a resource without any idenifier (e.g. no extension). +// For bundles this will be the containing directory's name, e.g. "blog". +func (p *Path) BaseNameNoIdentifier() string { + if p.IsBundle() { + return p.Container() + } + return p.NameNoIdentifier() +} + +// NameNoIdentifier returns the last element of path withhout any identifier (e.g. no extension). +func (p *Path) NameNoIdentifier() string { + if len(p.identifiers) > 0 { + return p.s[p.posContainerHigh : p.identifiers[len(p.identifiers)-1].Low-1] + } + return p.s[p.posContainerHigh:] +} + +// Dir returns all but the last element of path, typically the path's directory. +func (p *Path) Dir() (d string) { + if p.posContainerHigh > 0 { + d = p.s[:p.posContainerHigh-1] + } + if d == "" { + d = "/" + } + d = p.norm(d) + return +} + +// Path returns the full path. +func (p *Path) Path() (d string) { + return p.norm(p.s) +} + +// Unmormalized returns the Path with the original case preserved. +func (p *Path) Unmormalized() *Path { + return p.unnormalized +} + +// PathNoLang returns the Path but with any language identifier removed. 
+func (p *Path) PathNoLang() string { + return p.base(true, false) +} + +// PathNoIdentifier returns the Path but with any identifier (ext, lang) removed. +func (p *Path) PathNoIdentifier() string { + return p.base(false, false) +} + +// PathRel returns the path relativeto the given owner. +func (p *Path) PathRel(owner *Path) string { + ob := owner.Base() + if !strings.HasSuffix(ob, "/") { + ob += "/" + } + return strings.TrimPrefix(p.Path(), ob) +} + +// BaseRel returns the base path relative to the given owner. +func (p *Path) BaseRel(owner *Path) string { + ob := owner.Base() + if ob == "/" { + ob = "" + } + return p.Base()[len(ob)+1:] +} + +// For content files, Base returns the path without any identifiers (extension, language code etc.). +// Any 'index' as the last path element is ignored. +// +// For other files (Resources), any extension is kept. +func (p *Path) Base() string { + return p.base(!p.isContentPage(), p.IsBundle()) +} + +// BaseNoLeadingSlash returns the base path without the leading slash. +func (p *Path) BaseNoLeadingSlash() string { + return p.Base()[1:] +} + +func (p *Path) base(preserveExt, isBundle bool) string { + if len(p.identifiers) == 0 { + return p.norm(p.s) + } + + if preserveExt && len(p.identifiers) == 1 { + // Preserve extension. + return p.norm(p.s) + } + + id := p.identifiers[len(p.identifiers)-1] + high := id.Low - 1 + + if isBundle { + high = p.posContainerHigh - 1 + } + + if high == 0 { + high++ + } + + if !preserveExt { + return p.norm(p.s[:high]) + } + + // For txt files etc. we want to preserve the extension. 
+ id = p.identifiers[0] + + return p.norm(p.s[:high] + p.s[id.Low-1:id.High]) +} + +func (p *Path) Ext() string { + return p.identifierAsString(0) +} + +func (p *Path) Lang() string { + return p.identifierAsString(1) +} + +func (p *Path) Identifier(i int) string { + return p.identifierAsString(i) +} + +func (p *Path) Identifiers() []string { + ids := make([]string, len(p.identifiers)) + for i, id := range p.identifiers { + ids[i] = p.s[id.Low:id.High] + } + return ids +} + +func (p *Path) IsHTML() bool { + return files.IsHTML(p.Ext()) +} + +func (p *Path) BundleType() PathType { + return p.bundleType +} + +func (p *Path) IsBundle() bool { + return p.bundleType >= PathTypeLeaf +} + +func (p *Path) IsBranchBundle() bool { + return p.bundleType == PathTypeBranch +} + +func (p *Path) IsLeafBundle() bool { + return p.bundleType == PathTypeLeaf +} + +func (p *Path) identifierAsString(i int) string { + i = p.identifierIndex(i) + if i == -1 { + return "" + } + + id := p.identifiers[i] + return p.s[id.Low:id.High] +} + +func (p *Path) identifierIndex(i int) int { + if i < 0 || i >= len(p.identifiers) { + return -1 + } + return i +} + +// HasExt returns true if the Unix styled path has an extension. +func HasExt(p string) bool { + for i := len(p) - 1; i >= 0; i-- { + if p[i] == '.' { + return true + } + if p[i] == '/' { + return false + } + } + return false +} diff --git a/common/paths/pathparser_test.go b/common/paths/pathparser_test.go new file mode 100644 index 000000000..3546b6605 --- /dev/null +++ b/common/paths/pathparser_test.go @@ -0,0 +1,351 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package paths + +import ( + "path/filepath" + "testing" + + "github.com/gohugoio/hugo/hugofs/files" + + qt "github.com/frankban/quicktest" +) + +var testParser = &PathParser{ + LanguageIndex: map[string]int{ + "no": 0, + "en": 1, + }, +} + +func TestParse(t *testing.T) { + c := qt.New(t) + + tests := []struct { + name string + path string + assert func(c *qt.C, p *Path) + }{ + { + "Basic text file", + "/a/b.txt", + func(c *qt.C, p *Path) { + c.Assert(p.Name(), qt.Equals, "b.txt") + c.Assert(p.Base(), qt.Equals, "/a/b.txt") + c.Assert(p.Container(), qt.Equals, "a") + c.Assert(p.Dir(), qt.Equals, "/a") + c.Assert(p.Ext(), qt.Equals, "txt") + c.Assert(p.IsContent(), qt.IsFalse) + }, + }, + { + "Basic text file, upper case", + "/A/B.txt", + func(c *qt.C, p *Path) { + c.Assert(p.Name(), qt.Equals, "b.txt") + c.Assert(p.NameNoExt(), qt.Equals, "b") + c.Assert(p.NameNoIdentifier(), qt.Equals, "b") + c.Assert(p.BaseNameNoIdentifier(), qt.Equals, "b") + c.Assert(p.Base(), qt.Equals, "/a/b.txt") + c.Assert(p.Ext(), qt.Equals, "txt") + }, + }, + { + "Basic text file, 1 space in dir", + "/a b/c.txt", + func(c *qt.C, p *Path) { + c.Assert(p.Base(), qt.Equals, "/a-b/c.txt") + }, + }, + { + "Basic text file, 2 spaces in dir", + "/a b/c.txt", + func(c *qt.C, p *Path) { + c.Assert(p.Base(), qt.Equals, "/a--b/c.txt") + }, + }, + { + "Basic text file, 1 space in filename", + "/a/b c.txt", + func(c *qt.C, p *Path) { + c.Assert(p.Base(), qt.Equals, "/a/b-c.txt") + }, + }, + { + "Basic text file, 2 spaces in filename", + "/a/b c.txt", + func(c *qt.C, p *Path) { + 
c.Assert(p.Base(), qt.Equals, "/a/b--c.txt") + }, + }, + { + "Basic text file, mixed case and spaces, unnormalized", + "/a/Foo BAR.txt", + func(c *qt.C, p *Path) { + pp := p.Unmormalized() + c.Assert(pp, qt.IsNotNil) + c.Assert(pp.BaseNameNoIdentifier(), qt.Equals, "Foo BAR") + }, + }, + { + "Basic Markdown file", + "/a/b/c.md", + func(c *qt.C, p *Path) { + c.Assert(p.IsContent(), qt.IsTrue) + c.Assert(p.IsLeafBundle(), qt.IsFalse) + c.Assert(p.Name(), qt.Equals, "c.md") + c.Assert(p.Base(), qt.Equals, "/a/b/c") + c.Assert(p.Section(), qt.Equals, "a") + c.Assert(p.BaseNameNoIdentifier(), qt.Equals, "c") + c.Assert(p.Path(), qt.Equals, "/a/b/c.md") + c.Assert(p.Dir(), qt.Equals, "/a/b") + c.Assert(p.Container(), qt.Equals, "b") + c.Assert(p.ContainerDir(), qt.Equals, "/a/b") + c.Assert(p.Ext(), qt.Equals, "md") + }, + }, + { + "Content resource", + "/a/b.md", + func(c *qt.C, p *Path) { + c.Assert(p.Name(), qt.Equals, "b.md") + c.Assert(p.Base(), qt.Equals, "/a/b") + c.Assert(p.BaseNoLeadingSlash(), qt.Equals, "a/b") + c.Assert(p.Section(), qt.Equals, "a") + c.Assert(p.BaseNameNoIdentifier(), qt.Equals, "b") + + // Reclassify it as a content resource. 
+ ModifyPathBundleTypeResource(p) + c.Assert(p.BundleType(), qt.Equals, PathTypeContentResource) + c.Assert(p.IsContent(), qt.IsTrue) + c.Assert(p.Name(), qt.Equals, "b.md") + c.Assert(p.Base(), qt.Equals, "/a/b.md") + }, + }, + { + "No ext", + "/a/b", + func(c *qt.C, p *Path) { + c.Assert(p.Name(), qt.Equals, "b") + c.Assert(p.NameNoExt(), qt.Equals, "b") + c.Assert(p.Base(), qt.Equals, "/a/b") + c.Assert(p.Ext(), qt.Equals, "") + }, + }, + { + "No ext, trailing slash", + "/a/b/", + func(c *qt.C, p *Path) { + c.Assert(p.Name(), qt.Equals, "b") + c.Assert(p.Base(), qt.Equals, "/a/b") + c.Assert(p.Ext(), qt.Equals, "") + }, + }, + { + "Identifiers", + "/a/b.a.b.no.txt", + func(c *qt.C, p *Path) { + c.Assert(p.Name(), qt.Equals, "b.a.b.no.txt") + c.Assert(p.NameNoIdentifier(), qt.Equals, "b.a.b") + c.Assert(p.NameNoLang(), qt.Equals, "b.a.b.txt") + c.Assert(p.Identifiers(), qt.DeepEquals, []string{"txt", "no"}) + c.Assert(p.Base(), qt.Equals, "/a/b.a.b.txt") + c.Assert(p.BaseNoLeadingSlash(), qt.Equals, "a/b.a.b.txt") + c.Assert(p.PathNoLang(), qt.Equals, "/a/b.a.b.txt") + c.Assert(p.Ext(), qt.Equals, "txt") + c.Assert(p.PathNoIdentifier(), qt.Equals, "/a/b.a.b") + }, + }, + { + "Home branch cundle", + "/_index.md", + func(c *qt.C, p *Path) { + c.Assert(p.Base(), qt.Equals, "/") + c.Assert(p.Path(), qt.Equals, "/_index.md") + c.Assert(p.Container(), qt.Equals, "") + c.Assert(p.ContainerDir(), qt.Equals, "/") + }, + }, + { + "Index content file in root", + "/a/index.md", + func(c *qt.C, p *Path) { + c.Assert(p.Base(), qt.Equals, "/a") + c.Assert(p.BaseNameNoIdentifier(), qt.Equals, "a") + c.Assert(p.Container(), qt.Equals, "a") + c.Assert(p.Container(), qt.Equals, "a") + c.Assert(p.ContainerDir(), qt.Equals, "") + c.Assert(p.Dir(), qt.Equals, "/a") + c.Assert(p.Ext(), qt.Equals, "md") + c.Assert(p.Identifiers(), qt.DeepEquals, []string{"md"}) + c.Assert(p.IsBranchBundle(), qt.IsFalse) + c.Assert(p.IsBundle(), qt.IsTrue) + c.Assert(p.IsLeafBundle(), qt.IsTrue) + 
c.Assert(p.Lang(), qt.Equals, "") + c.Assert(p.NameNoExt(), qt.Equals, "index") + c.Assert(p.NameNoIdentifier(), qt.Equals, "index") + c.Assert(p.NameNoLang(), qt.Equals, "index.md") + c.Assert(p.Section(), qt.Equals, "") + }, + }, + { + "Index content file with lang", + "/a/b/index.no.md", + func(c *qt.C, p *Path) { + c.Assert(p.Base(), qt.Equals, "/a/b") + c.Assert(p.BaseNameNoIdentifier(), qt.Equals, "b") + c.Assert(p.Container(), qt.Equals, "b") + c.Assert(p.ContainerDir(), qt.Equals, "/a") + c.Assert(p.Dir(), qt.Equals, "/a/b") + c.Assert(p.Ext(), qt.Equals, "md") + c.Assert(p.Identifiers(), qt.DeepEquals, []string{"md", "no"}) + c.Assert(p.IsBranchBundle(), qt.IsFalse) + c.Assert(p.IsBundle(), qt.IsTrue) + c.Assert(p.IsLeafBundle(), qt.IsTrue) + c.Assert(p.Lang(), qt.Equals, "no") + c.Assert(p.NameNoExt(), qt.Equals, "index.no") + c.Assert(p.NameNoIdentifier(), qt.Equals, "index") + c.Assert(p.NameNoLang(), qt.Equals, "index.md") + c.Assert(p.PathNoLang(), qt.Equals, "/a/b/index.md") + c.Assert(p.Section(), qt.Equals, "a") + }, + }, + { + "Index branch content file", + "/a/b/_index.no.md", + func(c *qt.C, p *Path) { + c.Assert(p.Base(), qt.Equals, "/a/b") + c.Assert(p.BaseNameNoIdentifier(), qt.Equals, "b") + c.Assert(p.Container(), qt.Equals, "b") + c.Assert(p.ContainerDir(), qt.Equals, "/a") + c.Assert(p.Ext(), qt.Equals, "md") + c.Assert(p.Identifiers(), qt.DeepEquals, []string{"md", "no"}) + c.Assert(p.IsBranchBundle(), qt.IsTrue) + c.Assert(p.IsBundle(), qt.IsTrue) + c.Assert(p.IsLeafBundle(), qt.IsFalse) + c.Assert(p.NameNoExt(), qt.Equals, "_index.no") + c.Assert(p.NameNoLang(), qt.Equals, "_index.md") + }, + }, + { + "Index root no slash", + "_index.md", + func(c *qt.C, p *Path) { + c.Assert(p.Base(), qt.Equals, "/") + c.Assert(p.Ext(), qt.Equals, "md") + c.Assert(p.Name(), qt.Equals, "_index.md") + }, + }, + { + "Index root", + "/_index.md", + func(c *qt.C, p *Path) { + c.Assert(p.Base(), qt.Equals, "/") + c.Assert(p.Ext(), qt.Equals, "md") + 
c.Assert(p.Name(), qt.Equals, "_index.md") + }, + }, + { + "Index first", + "/a/_index.md", + func(c *qt.C, p *Path) { + c.Assert(p.Section(), qt.Equals, "a") + }, + }, + { + "Index text file", + "/a/b/index.no.txt", + func(c *qt.C, p *Path) { + c.Assert(p.Base(), qt.Equals, "/a/b/index.txt") + c.Assert(p.Ext(), qt.Equals, "txt") + c.Assert(p.Identifiers(), qt.DeepEquals, []string{"txt", "no"}) + c.Assert(p.IsLeafBundle(), qt.IsFalse) + c.Assert(p.PathNoIdentifier(), qt.Equals, "/a/b/index") + }, + }, + { + "Empty", + "", + func(c *qt.C, p *Path) { + c.Assert(p.Base(), qt.Equals, "/") + c.Assert(p.Ext(), qt.Equals, "") + c.Assert(p.Name(), qt.Equals, "") + c.Assert(p.Path(), qt.Equals, "/") + }, + }, + { + "Slash", + "/", + func(c *qt.C, p *Path) { + c.Assert(p.Base(), qt.Equals, "/") + c.Assert(p.Ext(), qt.Equals, "") + c.Assert(p.Name(), qt.Equals, "") + }, + }, + { + "Trim Leading Slash bundle", + "foo/bar/index.no.md", + func(c *qt.C, p *Path) { + c.Assert(p.Path(), qt.Equals, "/foo/bar/index.no.md") + pp := p.TrimLeadingSlash() + c.Assert(pp.Path(), qt.Equals, "foo/bar/index.no.md") + c.Assert(pp.PathNoLang(), qt.Equals, "foo/bar/index.md") + c.Assert(pp.Base(), qt.Equals, "foo/bar") + c.Assert(pp.Dir(), qt.Equals, "foo/bar") + c.Assert(pp.ContainerDir(), qt.Equals, "foo") + c.Assert(pp.Container(), qt.Equals, "bar") + c.Assert(pp.BaseNameNoIdentifier(), qt.Equals, "bar") + }, + }, + { + "Trim Leading Slash file", + "foo/bar.txt", + func(c *qt.C, p *Path) { + c.Assert(p.Path(), qt.Equals, "/foo/bar.txt") + pp := p.TrimLeadingSlash() + c.Assert(pp.Path(), qt.Equals, "foo/bar.txt") + c.Assert(pp.PathNoLang(), qt.Equals, "foo/bar.txt") + c.Assert(pp.Base(), qt.Equals, "foo/bar.txt") + c.Assert(pp.Dir(), qt.Equals, "foo") + c.Assert(pp.ContainerDir(), qt.Equals, "foo") + c.Assert(pp.Container(), qt.Equals, "foo") + c.Assert(pp.BaseNameNoIdentifier(), qt.Equals, "bar") + }, + }, + { + "File separator", + filepath.FromSlash("/a/b/c.txt"), + func(c *qt.C, p *Path) { 
+ c.Assert(p.Base(), qt.Equals, "/a/b/c.txt") + c.Assert(p.Ext(), qt.Equals, "txt") + c.Assert(p.Name(), qt.Equals, "c.txt") + c.Assert(p.Path(), qt.Equals, "/a/b/c.txt") + }, + }, + } + for _, test := range tests { + c.Run(test.name, func(c *qt.C) { + test.assert(c, testParser.Parse(files.ComponentFolderContent, test.path)) + }) + } +} + +func TestHasExt(t *testing.T) { + c := qt.New(t) + + c.Assert(HasExt("/a/b/c.txt"), qt.IsTrue) + c.Assert(HasExt("/a/b.c/d.txt"), qt.IsTrue) + c.Assert(HasExt("/a/b/c"), qt.IsFalse) + c.Assert(HasExt("/a/b.c/d"), qt.IsFalse) +} diff --git a/common/paths/paths_integration_test.go b/common/paths/paths_integration_test.go new file mode 100644 index 000000000..62d40f527 --- /dev/null +++ b/common/paths/paths_integration_test.go @@ -0,0 +1,80 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package paths_test + +import ( + "testing" + + "github.com/gohugoio/hugo/hugolib" +) + +func TestRemovePathAccents(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +disableKinds = ["taxonomy", "term"] +defaultContentLanguage = "en" +defaultContentLanguageInSubdir = true +[languages] +[languages.en] +weight = 1 +[languages.fr] +weight = 2 +removePathAccents = true +-- content/διακριτικός.md -- +-- content/διακριτικός.fr.md -- +-- layouts/_default/single.html -- +{{ .Language.Lang }}|Single. 
+-- layouts/_default/list.html -- +List +` + b := hugolib.Test(t, files) + + b.AssertFileContent("public/en/διακριτικός/index.html", "en|Single") + b.AssertFileContent("public/fr/διακριτικος/index.html", "fr|Single") +} + +func TestDisablePathToLower(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +disableKinds = ["taxonomy", "term"] +defaultContentLanguage = "en" +defaultContentLanguageInSubdir = true +[languages] +[languages.en] +weight = 1 +[languages.fr] +weight = 2 +disablePathToLower = true +-- content/MySection/MyPage.md -- +-- content/MySection/MyPage.fr.md -- +-- content/MySection/MyBundle/index.md -- +-- content/MySection/MyBundle/index.fr.md -- +-- layouts/_default/single.html -- +{{ .Language.Lang }}|Single. +-- layouts/_default/list.html -- +{{ .Language.Lang }}|List. +` + b := hugolib.Test(t, files) + + b.AssertFileContent("public/en/mysection/index.html", "en|List") + b.AssertFileContent("public/en/mysection/mypage/index.html", "en|Single") + b.AssertFileContent("public/fr/MySection/index.html", "fr|List") + b.AssertFileContent("public/fr/MySection/MyPage/index.html", "fr|Single") + b.AssertFileContent("public/en/mysection/mybundle/index.html", "en|Single") + b.AssertFileContent("public/fr/MySection/MyBundle/index.html", "fr|Single") +} diff --git a/common/paths/pathtype_string.go b/common/paths/pathtype_string.go new file mode 100644 index 000000000..7a99f8a03 --- /dev/null +++ b/common/paths/pathtype_string.go @@ -0,0 +1,27 @@ +// Code generated by "stringer -type=PathType"; DO NOT EDIT. + +package paths + +import "strconv" + +func _() { + // An "invalid array index" compiler error signifies that the constant values have changed. + // Re-run the stringer command to generate them again. 
+ var x [1]struct{} + _ = x[PathTypeFile-0] + _ = x[PathTypeContentResource-1] + _ = x[PathTypeContentSingle-2] + _ = x[PathTypeLeaf-3] + _ = x[PathTypeBranch-4] +} + +const _PathType_name = "PathTypeFilePathTypeContentResourcePathTypeContentSinglePathTypeLeafPathTypeBranch" + +var _PathType_index = [...]uint8{0, 12, 35, 56, 68, 82} + +func (i PathType) String() string { + if i < 0 || i >= PathType(len(_PathType_index)-1) { + return "PathType(" + strconv.FormatInt(int64(i), 10) + ")" + } + return _PathType_name[_PathType_index[i]:_PathType_index[i+1]] +} diff --git a/common/paths/url.go b/common/paths/url.go index 093ba9ff7..4c4a7f2dc 100644 --- a/common/paths/url.go +++ b/common/paths/url.go @@ -184,3 +184,13 @@ func UrlToFilename(s string) (string, bool) { return p, true } + +// URLEscape escapes unicode letters. +func URLEscape(uri string) string { + // escape unicode letters + u, err := url.Parse(uri) + if err != nil { + panic(err) + } + return u.String() +} diff --git a/common/predicate/predicate.go b/common/predicate/predicate.go new file mode 100644 index 000000000..f9cb1bb2b --- /dev/null +++ b/common/predicate/predicate.go @@ -0,0 +1,72 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package predicate + +// P is a predicate function that tests whether a value of type T satisfies some condition. 
+type P[T any] func(T) bool + +// And returns a predicate that is a short-circuiting logical AND of this and the given predicates. +func (p P[T]) And(ps ...P[T]) P[T] { + return func(v T) bool { + for _, pp := range ps { + if !pp(v) { + return false + } + } + return p(v) + } +} + +// Or returns a predicate that is a short-circuiting logical OR of this and the given predicates. +func (p P[T]) Or(ps ...P[T]) P[T] { + return func(v T) bool { + for _, pp := range ps { + if pp(v) { + return true + } + } + return p(v) + } +} + +// Negate returns a predicate that is a logical negation of this predicate. +func (p P[T]) Negate() P[T] { + return func(v T) bool { + return !p(v) + } +} + +// Filter returns a new slice holding only the elements of s that satisfy p. +// Filter modifies the contents of the slice s and returns the modified slice, which may have a smaller length. +func (p P[T]) Filter(s []T) []T { + var n int + for _, v := range s { + if p(v) { + s[n] = v + n++ + } + } + return s[:n] +} + +// FilterCopy returns a new slice holding only the elements of s that satisfy p. +func (p P[T]) FilterCopy(s []T) []T { + var result []T + for _, v := range s { + if p(v) { + result = append(result, v) + } + } + return result +} diff --git a/common/predicate/predicate_test.go b/common/predicate/predicate_test.go new file mode 100644 index 000000000..1e1ec004b --- /dev/null +++ b/common/predicate/predicate_test.go @@ -0,0 +1,83 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package predicate_test + +import ( + "testing" + + qt "github.com/frankban/quicktest" + "github.com/gohugoio/hugo/common/predicate" +) + +func TestAdd(t *testing.T) { + c := qt.New(t) + + var p predicate.P[int] = intP1 + + c.Assert(p(1), qt.IsTrue) + c.Assert(p(2), qt.IsFalse) + + neg := p.Negate() + c.Assert(neg(1), qt.IsFalse) + c.Assert(neg(2), qt.IsTrue) + + and := p.And(intP2) + c.Assert(and(1), qt.IsFalse) + c.Assert(and(2), qt.IsFalse) + c.Assert(and(10), qt.IsTrue) + + or := p.Or(intP2) + c.Assert(or(1), qt.IsTrue) + c.Assert(or(2), qt.IsTrue) + c.Assert(or(10), qt.IsTrue) + c.Assert(or(11), qt.IsFalse) +} + +func TestFilter(t *testing.T) { + c := qt.New(t) + + var p predicate.P[int] = intP1 + p = p.Or(intP2) + + ints := []int{1, 2, 3, 4, 1, 6, 7, 8, 2} + + c.Assert(p.Filter(ints), qt.DeepEquals, []int{1, 2, 1, 2}) + c.Assert(ints, qt.DeepEquals, []int{1, 2, 1, 2, 1, 6, 7, 8, 2}) +} + +func TestFilterCopy(t *testing.T) { + c := qt.New(t) + + var p predicate.P[int] = intP1 + p = p.Or(intP2) + + ints := []int{1, 2, 3, 4, 1, 6, 7, 8, 2} + + c.Assert(p.FilterCopy(ints), qt.DeepEquals, []int{1, 2, 1, 2}) + c.Assert(ints, qt.DeepEquals, []int{1, 2, 3, 4, 1, 6, 7, 8, 2}) +} + +var intP1 = func(i int) bool { + if i == 10 { + return true + } + return i == 1 +} + +var intP2 = func(i int) bool { + if i == 10 { + return true + } + return i == 2 +} diff --git a/common/rungroup/rungroup.go b/common/rungroup/rungroup.go new file mode 100644 index 000000000..96ec57883 --- /dev/null +++ b/common/rungroup/rungroup.go @@ -0,0 +1,93 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package rungroup + +import ( + "context" + + "golang.org/x/sync/errgroup" +) + +// Group is a group of workers that can be used to enqueue work and wait for +// them to finish. +type Group[T any] interface { + Enqueue(T) error + Wait() error +} + +type runGroup[T any] struct { + ctx context.Context + g *errgroup.Group + ch chan T +} + +// Config is the configuration for a new Group. +type Config[T any] struct { + NumWorkers int + Handle func(context.Context, T) error +} + +// Run creates a new Group with the given configuration. +func Run[T any](ctx context.Context, cfg Config[T]) Group[T] { + if cfg.NumWorkers <= 0 { + cfg.NumWorkers = 1 + } + if cfg.Handle == nil { + panic("Handle must be set") + } + + g, ctx := errgroup.WithContext(ctx) + // Buffered for performance. + ch := make(chan T, cfg.NumWorkers) + + for i := 0; i < cfg.NumWorkers; i++ { + g.Go(func() error { + for { + select { + case <-ctx.Done(): + return nil + case v, ok := <-ch: + if !ok { + return nil + } + if err := cfg.Handle(ctx, v); err != nil { + return err + } + } + } + }) + } + + return &runGroup[T]{ + ctx: ctx, + g: g, + ch: ch, + } +} + +// Enqueue enqueues a new item to be handled by the workers. +func (r *runGroup[T]) Enqueue(t T) error { + select { + case <-r.ctx.Done(): + return nil + case r.ch <- t: + } + return nil +} + +// Wait waits for all workers to finish and returns the first error. 
+func (r *runGroup[T]) Wait() error { + close(r.ch) + return r.g.Wait() +} diff --git a/common/rungroup/rungroup_test.go b/common/rungroup/rungroup_test.go new file mode 100644 index 000000000..ac902079e --- /dev/null +++ b/common/rungroup/rungroup_test.go @@ -0,0 +1,44 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package rungroup + +import ( + "context" + "testing" + + qt "github.com/frankban/quicktest" +) + +func TestNew(t *testing.T) { + c := qt.New(t) + + var result int + adder := func(ctx context.Context, i int) error { + result += i + return nil + } + + g := Run[int]( + context.Background(), + Config[int]{ + Handle: adder, + }, + ) + + c.Assert(g, qt.IsNotNil) + g.Enqueue(32) + g.Enqueue(33) + c.Assert(g.Wait(), qt.IsNil) + c.Assert(result, qt.Equals, 65) +} diff --git a/common/terminal/colors.go b/common/terminal/colors.go index c4a78291e..8aa0e1af2 100644 --- a/common/terminal/colors.go +++ b/common/terminal/colors.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/common/types/css/csstypes.go b/common/types/css/csstypes.go index a31df00e7..061acfe64 100644 --- a/common/types/css/csstypes.go +++ b/common/types/css/csstypes.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/common/types/evictingqueue.go b/common/types/evictingqueue.go index 884762426..88add59d5 100644 --- a/common/types/evictingqueue.go +++ b/common/types/evictingqueue.go @@ -35,11 +35,11 @@ func NewEvictingStringQueue(size int) *EvictingStringQueue { } // Add adds a new string to the tail of the queue if it's not already there. -func (q *EvictingStringQueue) Add(v string) { +func (q *EvictingStringQueue) Add(v string) *EvictingStringQueue { q.mu.Lock() if q.set[v] { q.mu.Unlock() - return + return q } if len(q.set) == q.size { @@ -50,6 +50,17 @@ func (q *EvictingStringQueue) Add(v string) { q.set[v] = true q.vals = append(q.vals, v) q.mu.Unlock() + + return q +} + +func (q *EvictingStringQueue) Len() int { + if q == nil { + return 0 + } + q.mu.Lock() + defer q.mu.Unlock() + return len(q.vals) } // Contains returns whether the queue contains v. diff --git a/common/types/hstring/stringtypes.go b/common/types/hstring/stringtypes.go index 601218e0e..5e8e3a23d 100644 --- a/common/types/hstring/stringtypes.go +++ b/common/types/hstring/stringtypes.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/common/types/hstring/stringtypes_test.go b/common/types/hstring/stringtypes_test.go index 8fa1c9760..2f1f865c8 100644 --- a/common/types/hstring/stringtypes_test.go +++ b/common/types/hstring/stringtypes_test.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/common/types/types.go b/common/types/types.go index c36c51b3e..11683c196 100644 --- a/common/types/types.go +++ b/common/types/types.go @@ -92,5 +92,18 @@ type DevMarker interface { DevOnly() } +// Unwrapper is implemented by types that can unwrap themselves. +type Unwrapper interface { + // Unwrapv is for internal use only. + // It got its slightly odd name to prevent collisions with user types. + Unwrapv() any +} + +// LowHigh is typically used to represent a slice boundary. +type LowHigh struct { + Low int + High int +} + // This is only used for debugging purposes. var InvocationCounter atomic.Int64 diff --git a/common/urls/baseURL.go b/common/urls/baseURL.go index df26730ec..2958a2a04 100644 --- a/common/urls/baseURL.go +++ b/common/urls/baseURL.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -23,10 +23,12 @@ import ( // A BaseURL in Hugo is normally on the form scheme://path, but the // form scheme: is also valid (mailto:[email protected]). 
type BaseURL struct { - url *url.URL - WithPath string - WithoutPath string - BasePath string + url *url.URL + WithPath string + WithPathNoTrailingSlash string + WithoutPath string + BasePath string + BasePathNoTrailingSlash string } func (b BaseURL) String() string { @@ -92,19 +94,19 @@ func NewBaseURLFromString(b string) (BaseURL, error) { return BaseURL{}, err } return newBaseURLFromURL(u) - } func newBaseURLFromURL(u *url.URL) (BaseURL, error) { - baseURL := BaseURL{url: u, WithPath: u.String()} - var baseURLNoPath = baseURL.URL() + // A baseURL should always have a trailing slash, see #11669. + if !strings.HasSuffix(u.Path, "/") { + u.Path += "/" + } + baseURL := BaseURL{url: u, WithPath: u.String(), WithPathNoTrailingSlash: strings.TrimSuffix(u.String(), "/")} + baseURLNoPath := baseURL.URL() baseURLNoPath.Path = "" baseURL.WithoutPath = baseURLNoPath.String() - - basePath := u.Path - if basePath != "" && basePath != "/" { - baseURL.BasePath = basePath - } + baseURL.BasePath = u.Path + baseURL.BasePathNoTrailingSlash = strings.TrimSuffix(u.Path, "/") return baseURL, nil } diff --git a/common/urls/baseURL_test.go b/common/urls/baseURL_test.go index 95dc73339..ba337aac8 100644 --- a/common/urls/baseURL_test.go +++ b/common/urls/baseURL_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -21,17 +21,24 @@ import ( func TestBaseURL(t *testing.T) { c := qt.New(t) - b, err := NewBaseURLFromString("http://example.com") + + b, err := NewBaseURLFromString("http://example.com/") + c.Assert(err, qt.IsNil) + c.Assert(b.String(), qt.Equals, "http://example.com/") + + b, err = NewBaseURLFromString("http://example.com") c.Assert(err, qt.IsNil) - c.Assert(b.String(), qt.Equals, "http://example.com") + c.Assert(b.String(), qt.Equals, "http://example.com/") + c.Assert(b.WithPathNoTrailingSlash, qt.Equals, "http://example.com") + c.Assert(b.BasePath, qt.Equals, "/") p, err := b.WithProtocol("webcal://") c.Assert(err, qt.IsNil) - c.Assert(p.String(), qt.Equals, "webcal://example.com") + c.Assert(p.String(), qt.Equals, "webcal://example.com/") p, err = b.WithProtocol("webcal") c.Assert(err, qt.IsNil) - c.Assert(p.String(), qt.Equals, "webcal://example.com") + c.Assert(p.String(), qt.Equals, "webcal://example.com/") _, err = b.WithProtocol("mailto:") c.Assert(err, qt.Not(qt.IsNil)) @@ -57,11 +64,18 @@ func TestBaseURL(t *testing.T) { b, err = NewBaseURLFromString("") c.Assert(err, qt.IsNil) - c.Assert(b.String(), qt.Equals, "") + c.Assert(b.String(), qt.Equals, "/") // BaseURL with sub path b, err = NewBaseURLFromString("http://example.com/sub") c.Assert(err, qt.IsNil) - c.Assert(b.String(), qt.Equals, "http://example.com/sub") + c.Assert(b.String(), qt.Equals, "http://example.com/sub/") + c.Assert(b.WithPathNoTrailingSlash, qt.Equals, "http://example.com/sub") + c.Assert(b.BasePath, qt.Equals, "/sub/") + c.Assert(b.BasePathNoTrailingSlash, qt.Equals, "/sub") + + b, err = NewBaseURLFromString("http://example.com/sub/") + c.Assert(err, qt.IsNil) + c.Assert(b.String(), qt.Equals, "http://example.com/sub/") c.Assert(b.HostURL(), qt.Equals, "http://example.com") } diff --git a/compare/compare.go b/compare/compare.go index 67bb1c125..fd15bd087 100644 --- a/compare/compare.go +++ b/compare/compare.go @@ -52,3 +52,16 @@ func Eq(v1, v2 any) bool { return v1 == v2 } + +// 
ProbablyEq returns whether v1 is probably equal to v2. +func ProbablyEq(v1, v2 any) bool { + if Eq(v1, v2) { + return true + } + + if peqer, ok := v1.(ProbablyEqer); ok { + return peqer.ProbablyEq(v2) + } + + return false +} diff --git a/config/allconfig/allconfig.go b/config/allconfig/allconfig.go index 9f0d73ecd..5788e792b 100644 --- a/config/allconfig/allconfig.go +++ b/config/allconfig/allconfig.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -30,6 +30,7 @@ import ( "github.com/gohugoio/hugo/common/hugo" "github.com/gohugoio/hugo/common/loggers" "github.com/gohugoio/hugo/common/maps" + "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/common/urls" "github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/config/privacy" @@ -283,12 +284,13 @@ func (c *Config) CompileConfig(logger loggers.Logger) error { disabledLangs := make(map[string]bool) for _, lang := range c.DisableLanguages { - if lang == c.DefaultContentLanguage { - return fmt.Errorf("cannot disable default content language %q", lang) - } disabledLangs[lang] = true } for lang, language := range c.Languages { + if !language.Disabled && disabledLangs[lang] { + language.Disabled = true + c.Languages[lang] = language + } if language.Disabled { disabledLangs[lang] = true if lang == c.DefaultContentLanguage { @@ -408,15 +410,19 @@ type ConfigCompiled struct { } // This may be set after the config is compiled. -func (c *ConfigCompiled) SetMainSectionsIfNotSet(sections []string) { +func (c *ConfigCompiled) SetMainSections(sections []string) { c.mu.Lock() defer c.mu.Unlock() - if c.MainSections != nil { - return - } c.MainSections = sections } +// IsMainSectionsSet returns whether the main sections have been set. 
+func (c *ConfigCompiled) IsMainSectionsSet() bool { + c.mu.Lock() + defer c.mu.Unlock() + return c.MainSections != nil +} + // This is set after the config is compiled by the server command. func (c *ConfigCompiled) SetBaseURL(baseURL, baseURLLiveReload urls.BaseURL) { c.BaseURL = baseURL @@ -425,7 +431,6 @@ func (c *ConfigCompiled) SetBaseURL(baseURL, baseURLLiveReload urls.BaseURL) { // RootConfig holds all the top-level configuration options in Hugo type RootConfig struct { - // The base URL of the site. // Note that the default value is empty, but Hugo requires a valid URL (e.g. "https://example.com/") to work properly. // <docsmeta>{"identifiers": ["URL"] }</docsmeta> @@ -648,13 +653,16 @@ type Configs struct { LanguageConfigMap map[string]*Config LanguageConfigSlice []*Config - IsMultihost bool - Languages langs.Languages - LanguagesDefaultFirst langs.Languages + IsMultihost bool Modules modules.Modules ModulesClient *modules.Client + // All below is set in Init. + Languages langs.Languages + LanguagesDefaultFirst langs.Languages + ContentPathParser paths.PathParser + configLangs []config.AllProvider } @@ -674,6 +682,58 @@ func (c *Configs) IsZero() bool { } func (c *Configs) Init() error { + var languages langs.Languages + defaultContentLanguage := c.Base.DefaultContentLanguage + for k, v := range c.LanguageConfigMap { + c.LanguageConfigSlice = append(c.LanguageConfigSlice, v) + languageConf := v.Languages[k] + language, err := langs.NewLanguage(k, defaultContentLanguage, v.TimeZone, languageConf) + if err != nil { + return err + } + languages = append(languages, language) + } + + // Sort the sites by language weight (if set) or lang. 
+ sort.Slice(languages, func(i, j int) bool { + li := languages[i] + lj := languages[j] + if li.Weight != lj.Weight { + return li.Weight < lj.Weight + } + return li.Lang < lj.Lang + }) + + for _, l := range languages { + c.LanguageConfigSlice = append(c.LanguageConfigSlice, c.LanguageConfigMap[l.Lang]) + } + + // Filter out disabled languages. + var n int + for _, l := range languages { + if !l.Disabled { + languages[n] = l + n++ + } + } + languages = languages[:n] + + var languagesDefaultFirst langs.Languages + for _, l := range languages { + if l.Lang == defaultContentLanguage { + languagesDefaultFirst = append(languagesDefaultFirst, l) + } + } + for _, l := range languages { + if l.Lang != defaultContentLanguage { + languagesDefaultFirst = append(languagesDefaultFirst, l) + } + } + + c.Languages = languages + c.LanguagesDefaultFirst = languagesDefaultFirst + c.ContentPathParser = paths.PathParser{LanguageIndex: languagesDefaultFirst.AsIndexSet()} + c.configLangs = make([]config.AllProvider, len(c.Languages)) for i, l := range c.LanguagesDefaultFirst { c.configLangs[i] = ConfigLanguage{ @@ -751,7 +811,6 @@ func fromLoadConfigResult(fs afero.Fs, logger loggers.Logger, res config.LoadCon } langConfigMap := make(map[string]*Config) - var langConfigs []*Config languagesConfig := cfg.GetStringMap("languages") var isMultiHost bool @@ -848,65 +907,24 @@ func fromLoadConfigResult(fs afero.Fs, logger loggers.Logger, res config.LoadCon } } - var languages langs.Languages - defaultContentLanguage := all.DefaultContentLanguage - for k, v := range langConfigMap { - languageConf := v.Languages[k] - language, err := langs.NewLanguage(k, defaultContentLanguage, v.TimeZone, languageConf) - if err != nil { - return nil, err - } - languages = append(languages, language) - } - - // Sort the sites by language weight (if set) or lang. 
- sort.Slice(languages, func(i, j int) bool { - li := languages[i] - lj := languages[j] - if li.Weight != lj.Weight { - return li.Weight < lj.Weight - } - return li.Lang < lj.Lang - }) - - for _, l := range languages { - langConfigs = append(langConfigs, langConfigMap[l.Lang]) - } - - var languagesDefaultFirst langs.Languages - for _, l := range languages { - if l.Lang == defaultContentLanguage { - languagesDefaultFirst = append(languagesDefaultFirst, l) - } - } - for _, l := range languages { - if l.Lang != defaultContentLanguage { - languagesDefaultFirst = append(languagesDefaultFirst, l) - } - } - bcfg.PublishDir = all.PublishDir res.BaseConfig = bcfg all.CommonDirs.CacheDir = bcfg.CacheDir - for _, l := range langConfigs { + for _, l := range langConfigMap { l.CommonDirs.CacheDir = bcfg.CacheDir } cm := &Configs{ - Base: all, - LanguageConfigMap: langConfigMap, - LanguageConfigSlice: langConfigs, - LoadingInfo: res, - IsMultihost: isMultiHost, - Languages: languages, - LanguagesDefaultFirst: languagesDefaultFirst, + Base: all, + LanguageConfigMap: langConfigMap, + LoadingInfo: res, + IsMultihost: isMultiHost, } return cm, nil } func decodeConfigFromParams(fs afero.Fs, logger loggers.Logger, bcfg config.BaseConfig, p config.Provider, target *Config, keys []string) error { - var decoderSetups []decodeWeight if len(keys) == 0 { diff --git a/config/allconfig/alldecoders.go b/config/allconfig/alldecoders.go index dc58882f3..f96c19cfc 100644 --- a/config/allconfig/alldecoders.go +++ b/config/allconfig/alldecoders.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/config/allconfig/configlanguage.go b/config/allconfig/configlanguage.go index 2c5a116f4..71bd232de 100644 --- a/config/allconfig/configlanguage.go +++ b/config/allconfig/configlanguage.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -16,6 +16,7 @@ package allconfig import ( "time" + "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/common/urls" "github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/langs" @@ -41,10 +42,15 @@ func (c ConfigLanguage) LanguagesDefaultFirst() langs.Languages { return c.m.LanguagesDefaultFirst } +func (c ConfigLanguage) PathParser() paths.PathParser { + return c.m.ContentPathParser +} + func (c ConfigLanguage) LanguagePrefix() string { if c.DefaultContentLanguageInSubdir() && c.DefaultContentLanguage() == c.Language().Lang { return c.Language().Lang } + if !c.IsMultiLingual() || c.DefaultContentLanguage() == c.Language().Lang { return "" } @@ -119,6 +125,10 @@ func (c ConfigLanguage) Quiet() bool { return c.m.Base.Internal.Quiet } +func (c ConfigLanguage) Watching() bool { + return c.m.Base.Internal.Watch +} + // GetConfigSection is mostly used in tests. The switch statement isn't complete, but what's in use. func (c ConfigLanguage) GetConfigSection(s string) any { switch s { diff --git a/config/allconfig/docshelper.go b/config/allconfig/docshelper.go index 48a09de51..1a5fb6153 100644 --- a/config/allconfig/docshelper.go +++ b/config/allconfig/docshelper.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -22,7 +22,6 @@ import ( // This is is just some helpers used to create some JSON used in the Hugo docs. func init() { docsProvider := func() docshelper.DocProvider { - cfg := config.New() for configRoot, v := range allDecoderSetups { if v.internalOrDeprecated { diff --git a/config/allconfig/integration_test.go b/config/allconfig/integration_test.go index fcb92e71d..4f2f1a06e 100644 --- a/config/allconfig/integration_test.go +++ b/config/allconfig/integration_test.go @@ -10,7 +10,6 @@ import ( ) func TestDirsMount(t *testing.T) { - files := ` -- hugo.toml -- baseURL = "https://example.com" @@ -44,7 +43,7 @@ Title: {{ .Title }} hugolib.IntegrationTestConfig{T: t, TxtarString: files}, ).Build() - //b.AssertFileContent("public/p1/index.html", "Title: p1") + // b.AssertFileContent("public/p1/index.html", "Title: p1") sites := b.H.Sites b.Assert(len(sites), qt.Equals, 2) @@ -58,7 +57,7 @@ Title: {{ .Title }} enConcp := sites[0].Conf enConf := enConcp.GetConfig().(*allconfig.Config) - b.Assert(enConcp.BaseURL().String(), qt.Equals, "https://example.com") + b.Assert(enConcp.BaseURL().String(), qt.Equals, "https://example.com/") modConf := enConf.Module b.Assert(modConf.Mounts, qt.HasLen, 8) b.Assert(modConf.Mounts[0].Source, qt.Equals, filepath.FromSlash("content/en")) @@ -67,11 +66,9 @@ Title: {{ .Title }} b.Assert(modConf.Mounts[1].Source, qt.Equals, filepath.FromSlash("content/sv")) b.Assert(modConf.Mounts[1].Target, qt.Equals, "content") b.Assert(modConf.Mounts[1].Lang, qt.Equals, "sv") - } func TestConfigAliases(t *testing.T) { - files := ` -- hugo.toml -- baseURL = "https://example.com" diff --git a/config/allconfig/load.go b/config/allconfig/load.go index 7d706c7e3..eceed31f4 100644 --- a/config/allconfig/load.go +++ b/config/allconfig/load.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. 
// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -37,6 +37,7 @@ import ( "github.com/spf13/afero" ) +//lint:ignore ST1005 end user message. var ErrNoConfigFile = errors.New("Unable to locate config file or config directory. Perhaps you need to create a new site.\n Run `hugo help new` for details.\n") func LoadConfig(d ConfigSourceDescriptor) (*Configs, error) { @@ -566,15 +567,6 @@ func (l configLoader) deleteMergeStrategies() { }) } -func (l configLoader) loadModulesConfig() (modules.Config, error) { - modConfig, err := modules.DecodeConfig(l.cfg) - if err != nil { - return modules.Config{}, err - } - - return modConfig, nil -} - func (l configLoader) wrapFileError(err error, filename string) error { fe := herrors.UnwrapFileError(err) if fe != nil { diff --git a/config/commonConfig.go b/config/commonConfig.go index ef9d47553..6ca061093 100644 --- a/config/commonConfig.go +++ b/config/commonConfig.go @@ -86,28 +86,21 @@ var defaultBuild = BuildConfig{ CacheBusters: []CacheBuster{ { - Source: `assets/.*\.(js|ts|jsx|tsx)`, - Target: `(js|scripts|javascript)`, - }, - { - Source: `assets/.*\.(css|sass|scss)$`, - Target: cssTargetCachebusterRe, - }, - { Source: `(postcss|tailwind)\.config\.js`, Target: cssTargetCachebusterRe, }, - // This is deliberately coarse grained; it will cache bust resources with "json" in the cache key when js files changes, which is good. - { - Source: `assets/.*\.(.*)$`, - Target: `$1`, - }, }, } // BuildConfig holds some build related configuration. type BuildConfig struct { - UseResourceCacheWhen string // never, fallback, always. Default is fallback + // When to use the resource file cache. + // One of never, fallback, always. Default is fallback + UseResourceCacheWhen string + + // When enabled, will duplicate bundled resource files across languages that + // doesn't have a translated version. 
+ DuplicateResourceFiles bool // When enabled, will collect and write a hugo_stats.json with some build // related aggregated data (e.g. CSS class names). @@ -373,7 +366,6 @@ func (c *CacheBuster) CompileConfig(logger loggers.Logger) error { return match } - } return compileErr } @@ -416,7 +408,6 @@ func DecodeServer(cfg Provider) (Server, error) { Status: 404, }, } - } return *s, nil diff --git a/config/commonConfig_test.go b/config/commonConfig_test.go index 8aa1318dd..425d3e970 100644 --- a/config/commonConfig_test.go +++ b/config/commonConfig_test.go @@ -148,21 +148,13 @@ func TestBuildConfigCacheBusters(t *testing.T) { l := loggers.NewDefault() c.Assert(conf.CompileConfig(l), qt.IsNil) - m, err := conf.MatchCacheBuster(l, "assets/foo/main.js") - c.Assert(err, qt.IsNil) + m, _ := conf.MatchCacheBuster(l, "tailwind.config.js") c.Assert(m, qt.IsNotNil) - c.Assert(m("scripts"), qt.IsTrue) - c.Assert(m("asdf"), qt.IsFalse) - - m, _ = conf.MatchCacheBuster(l, "tailwind.config.js") c.Assert(m("css"), qt.IsTrue) c.Assert(m("js"), qt.IsFalse) - m, err = conf.MatchCacheBuster(l, "assets/foo.json") - c.Assert(err, qt.IsNil) - c.Assert(m, qt.IsNotNil) - c.Assert(m("json"), qt.IsTrue) - + m, _ = conf.MatchCacheBuster(l, "foo.bar") + c.Assert(m, qt.IsNil) } func TestBuildConfigCacheBusterstTailwindSetup(t *testing.T) { diff --git a/config/configProvider.go b/config/configProvider.go index 11099e407..2536639ea 100644 --- a/config/configProvider.go +++ b/config/configProvider.go @@ -17,6 +17,7 @@ import ( "time" "github.com/gohugoio/hugo/common/maps" + "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/common/types" "github.com/gohugoio/hugo/common/urls" "github.com/gohugoio/hugo/langs" @@ -30,6 +31,7 @@ type AllProvider interface { LanguagePrefix() string BaseURL() urls.BaseURL BaseURLLiveReload() urls.BaseURL + PathParser() paths.PathParser Environment() string IsMultihost() bool IsMultiLingual() bool @@ -54,6 +56,7 @@ type AllProvider interface { 
BuildFuture() bool BuildDrafts() bool Running() bool + Watching() bool PrintUnusedTemplates() bool EnableMissingTranslationPlaceholders() bool TemplateMetrics() bool diff --git a/config/env.go b/config/env.go index 1e9266b17..0ad5ecaea 100644 --- a/config/env.go +++ b/config/env.go @@ -18,6 +18,12 @@ import ( "runtime" "strconv" "strings" + + "github.com/pbnjay/memory" +) + +const ( + gigabyte = 1 << 30 ) // GetNumWorkerMultiplier returns the base value used to calculate the number @@ -33,6 +39,37 @@ func GetNumWorkerMultiplier() int { return runtime.NumCPU() } +// GetMemoryLimit returns the upper memory limit in bytes for Hugo's in-memory caches. +// Note that this does not represent "all of the memory" that Hugo will use, +// so it needs to be set to a lower number than the available system memory. +// It will read from the HUGO_MEMORYLIMIT (in Gigabytes) environment variable. +// If that is not set, it will set aside a quarter of the total system memory. +func GetMemoryLimit() uint64 { + if mem := os.Getenv("HUGO_MEMORYLIMIT"); mem != "" { + if v := stringToGibabyte(mem); v > 0 { + return v + } + + } + + // There is a FreeMemory function, but as the kernel in most situations + // will take whatever memory that is left and use for caching etc., + // that value is not something that we can use. + m := memory.TotalMemory() + if m != 0 { + return uint64(m / 4) + } + + return 2 * gigabyte +} + +func stringToGibabyte(f string) uint64 { + if v, err := strconv.ParseFloat(f, 32); err == nil && v > 0 { + return uint64(v * gigabyte) + } + return 0 +} + // SetEnvVars sets vars on the form key=value in the oldVars slice. func SetEnvVars(oldVars *[]string, keyValues ...string) { for i := 0; i < len(keyValues); i += 2 { diff --git a/config/namespace.go b/config/namespace.go index 3ecd01014..b518c6c01 100644 --- a/config/namespace.go +++ b/config/namespace.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. 
All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -20,7 +20,6 @@ import ( ) func DecodeNamespace[S, C any](configSource any, buildConfig func(any) (C, any, error)) (*ConfigNamespace[S, C], error) { - // Calculate the hash of the input (not including any defaults applied later). // This allows us to introduce new config options without breaking the hash. h := identity.HashString(configSource) diff --git a/config/namespace_test.go b/config/namespace_test.go index 008237c13..9bd23e08e 100644 --- a/config/namespace_test.go +++ b/config/namespace_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -26,7 +26,7 @@ func TestNamespace(t *testing.T) { c := qt.New(t) c.Assert(true, qt.Equals, true) - //ns, err := config.DecodeNamespace[map[string]DocsMediaTypeConfig](in, defaultMediaTypesConfig, buildConfig) + // ns, err := config.DecodeNamespace[map[string]DocsMediaTypeConfig](in, defaultMediaTypesConfig, buildConfig) ns, err := DecodeNamespace[[]*tstNsExt]( map[string]interface{}{"foo": "bar"}, @@ -46,23 +46,15 @@ func TestNamespace(t *testing.T) { c.Assert(ns.SourceHash, qt.Equals, "14368731254619220105") c.Assert(ns.Config, qt.DeepEquals, &tstNsExt{Foo: "bar"}) c.Assert(ns.Signature(), qt.DeepEquals, []*tstNsExt(nil)) - } type ( tstNsExt struct { Foo string } - tstNsInt struct { - Foo string - } ) func (t *tstNsExt) Init() error { t.Foo = strings.ToUpper(t.Foo) return nil } -func (t *tstNsInt) Compile(ext *tstNsExt) error { - t.Foo = ext.Foo + " qux" - return nil -} diff --git a/config/testconfig/testconfig.go b/config/testconfig/testconfig.go index 4aafd69f0..8f70e6cb7 100644 --- a/config/testconfig/testconfig.go +++ 
b/config/testconfig/testconfig.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -36,7 +36,7 @@ func GetTestConfigs(fs afero.Fs, cfg config.Provider) *allconfig.Configs { // Make sure that the workingDir exists. workingDir := cfg.GetString("workingDir") if workingDir != "" { - if err := fs.MkdirAll(workingDir, 0777); err != nil { + if err := fs.MkdirAll(workingDir, 0o777); err != nil { panic(err) } } @@ -46,7 +46,6 @@ func GetTestConfigs(fs afero.Fs, cfg config.Provider) *allconfig.Configs { panic(err) } return configs - } func GetTestConfig(fs afero.Fs, cfg config.Provider) config.AllProvider { diff --git a/create/content.go b/create/content.go index 10442c396..5c2327532 100644 --- a/create/content.go +++ b/create/content.go @@ -16,6 +16,7 @@ package create import ( "bytes" + "errors" "fmt" "io" "os" @@ -25,10 +26,9 @@ import ( "github.com/gohugoio/hugo/hugofs/glob" "github.com/gohugoio/hugo/common/hexec" + "github.com/gohugoio/hugo/common/hstrings" "github.com/gohugoio/hugo/common/paths" - "errors" - "github.com/gohugoio/hugo/hugofs/files" "github.com/gohugoio/hugo/hugofs" @@ -53,7 +53,7 @@ draft: true // NewContent creates a new content file in h (or a full bundle if the archetype is a directory) // in targetPath. 
func NewContent(h *hugolib.HugoSites, kind, targetPath string, force bool) error { - if h.BaseFs.Content.Dirs == nil { + if _, err := h.BaseFs.Content.Fs.Stat(""); err != nil { return errors.New("no existing content directory configured for this project") } @@ -103,7 +103,6 @@ func NewContent(h *hugolib.HugoSites, kind, targetPath string, force bool) error } return b.buildFile() - } filename, err := withBuildLock() @@ -116,7 +115,6 @@ func NewContent(h *hugolib.HugoSites, kind, targetPath string, force bool) error } return nil - } type contentBuilder struct { @@ -128,12 +126,12 @@ type contentBuilder struct { cf hugolib.ContentFactory // Builder state - archetypeFilename string - targetPath string - kind string - isDir bool - dirMap archetypeMap - force bool + archetypeFi hugofs.FileMetaInfo + targetPath string + kind string + isDir bool + dirMap archetypeMap + force bool } func (b *contentBuilder) buildDir() error { @@ -146,7 +144,10 @@ func (b *contentBuilder) buildDir() error { var baseDir string for _, fi := range b.dirMap.contentFiles { - targetFilename := filepath.Join(b.targetPath, strings.TrimPrefix(fi.Meta().Path, b.archetypeFilename)) + + targetFilename := filepath.Join(b.targetPath, strings.TrimPrefix(fi.Meta().PathInfo.Path(), b.archetypeFi.Meta().PathInfo.Path())) + + // ===> post/my-post/pages/bio.md abs, err := b.cf.CreateContentPlaceHolder(targetFilename, b.force) if err != nil { return err @@ -170,7 +171,6 @@ func (b *contentBuilder) buildDir() error { } return false }) - } if err := b.h.Build(hugolib.BuildCfg{NoBuildLock: true, SkipRender: true, ContentInclusionFilter: contentInclusionFilter}); err != nil { @@ -178,22 +178,20 @@ func (b *contentBuilder) buildDir() error { } for i, filename := range contentTargetFilenames { - if err := b.applyArcheType(filename, b.dirMap.contentFiles[i].Meta().Path); err != nil { + if err := b.applyArcheType(filename, b.dirMap.contentFiles[i]); err != nil { return err } } // Copy the rest as is. 
- for _, f := range b.dirMap.otherFiles { - meta := f.Meta() - filename := meta.Path + for _, fi := range b.dirMap.otherFiles { + meta := fi.Meta() in, err := meta.Open() if err != nil { return fmt.Errorf("failed to open non-content file: %w", err) } - - targetFilename := filepath.Join(baseDir, b.targetPath, strings.TrimPrefix(filename, b.archetypeFilename)) + targetFilename := filepath.Join(baseDir, b.targetPath, strings.TrimPrefix(fi.Meta().Filename, b.archetypeFi.Meta().Filename)) targetDir := filepath.Dir(targetFilename) if err := b.sourceFs.MkdirAll(targetDir, 0o777); err != nil && !os.IsExist(err) { @@ -225,7 +223,7 @@ func (b *contentBuilder) buildFile() (string, error) { return "", err } - usesSite, err := b.usesSiteVar(b.archetypeFilename) + usesSite, err := b.usesSiteVar(b.archetypeFi) if err != nil { return "", err } @@ -243,7 +241,7 @@ func (b *contentBuilder) buildFile() (string, error) { return "", err } - if err := b.applyArcheType(contentPlaceholderAbsFilename, b.archetypeFilename); err != nil { + if err := b.applyArcheType(contentPlaceholderAbsFilename, b.archetypeFi); err != nil { return "", err } @@ -264,15 +262,14 @@ func (b *contentBuilder) setArcheTypeFilenameToUse(ext string) { for _, p := range pathsToCheck { fi, err := b.archeTypeFs.Stat(p) if err == nil { - b.archetypeFilename = p + b.archetypeFi = fi.(hugofs.FileMetaInfo) b.isDir = fi.IsDir() return } } - } -func (b *contentBuilder) applyArcheType(contentFilename, archetypeFilename string) error { +func (b *contentBuilder) applyArcheType(contentFilename string, archetypeFi hugofs.FileMetaInfo) error { p := b.h.GetContentPage(contentFilename) if p == nil { panic(fmt.Sprintf("[BUG] no Page found for %q", contentFilename)) @@ -284,32 +281,39 @@ func (b *contentBuilder) applyArcheType(contentFilename, archetypeFilename strin } defer f.Close() - if archetypeFilename == "" { + if archetypeFi == nil { return b.cf.ApplyArchetypeTemplate(f, p, b.kind, DefaultArchetypeTemplateTemplate) } - return 
b.cf.ApplyArchetypeFilename(f, p, b.kind, archetypeFilename) - + return b.cf.ApplyArchetypeFi(f, p, b.kind, archetypeFi) } func (b *contentBuilder) mapArcheTypeDir() error { var m archetypeMap - walkFn := func(path string, fi hugofs.FileMetaInfo, err error) error { - if err != nil { - return err - } + seen := map[hstrings.Tuple]bool{} - if fi.IsDir() { + walkFn := func(path string, fim hugofs.FileMetaInfo) error { + if fim.IsDir() { return nil } - fil := fi.(hugofs.FileMetaInfo) + pi := fim.Meta().PathInfo - if files.IsContentFile(path) { - m.contentFiles = append(m.contentFiles, fil) + if pi.IsContent() { + pathLang := hstrings.Tuple{First: pi.PathNoIdentifier(), Second: fim.Meta().Lang} + if seen[pathLang] { + // Duplicate content file, e.g. page.md and page.html. + // In the regular build, we will filter out the duplicates, but + // for archetype folders these are ambiguous and we need to + // fail. + return fmt.Errorf("duplicate content file found in archetype folder: %q; having both e.g. 
%s.md and %s.html is ambiguous", path, pi.BaseNameNoIdentifier(), pi.BaseNameNoIdentifier()) + } + seen[pathLang] = true + m.contentFiles = append(m.contentFiles, fim) if !m.siteUsed { - m.siteUsed, err = b.usesSiteVar(path) + var err error + m.siteUsed, err = b.usesSiteVar(fim) if err != nil { return err } @@ -317,7 +321,7 @@ func (b *contentBuilder) mapArcheTypeDir() error { return nil } - m.otherFiles = append(m.otherFiles, fil) + m.otherFiles = append(m.otherFiles, fim) return nil } @@ -325,13 +329,13 @@ func (b *contentBuilder) mapArcheTypeDir() error { walkCfg := hugofs.WalkwayConfig{ WalkFn: walkFn, Fs: b.archeTypeFs, - Root: b.archetypeFilename, + Root: filepath.FromSlash(b.archetypeFi.Meta().PathInfo.Path()), } w := hugofs.NewWalkway(walkCfg) if err := w.Walk(); err != nil { - return fmt.Errorf("failed to walk archetype dir %q: %w", b.archetypeFilename, err) + return fmt.Errorf("failed to walk archetype dir %q: %w", b.archetypeFi.Meta().Filename, err) } b.dirMap = m @@ -370,17 +374,21 @@ func (b *contentBuilder) openInEditorIfConfigured(filename string) error { return cmd.Run() } -func (b *contentBuilder) usesSiteVar(filename string) (bool, error) { - if filename == "" { +func (b *contentBuilder) usesSiteVar(fi hugofs.FileMetaInfo) (bool, error) { + if fi == nil { return false, nil } - bb, err := afero.ReadFile(b.archeTypeFs, filename) + f, err := fi.Meta().Open() if err != nil { - return false, fmt.Errorf("failed to open archetype file: %w", err) + return false, err + } + defer f.Close() + bb, err := io.ReadAll(f) + if err != nil { + return false, fmt.Errorf("failed to read archetype file: %w", err) } return bytes.Contains(bb, []byte(".Site")) || bytes.Contains(bb, []byte("site.")), nil - } type archetypeMap struct { diff --git a/create/content_test.go b/create/content_test.go index 77c6ca6c9..63045cbea 100644 --- a/create/content_test.go +++ b/create/content_test.go @@ -114,58 +114,6 @@ func TestNewContentFromFile(t *testing.T) { } } -func 
TestNewContentFromDir(t *testing.T) { - mm := afero.NewMemMapFs() - c := qt.New(t) - - archetypeDir := filepath.Join("archetypes", "my-bundle") - c.Assert(mm.MkdirAll(archetypeDir, 0o755), qt.IsNil) - - archetypeThemeDir := filepath.Join("themes", "mytheme", "archetypes", "my-theme-bundle") - c.Assert(mm.MkdirAll(archetypeThemeDir, 0o755), qt.IsNil) - - contentFile := ` -File: %s -Site Lang: {{ .Site.Language.Lang }} -Name: {{ replace .Name "-" " " | title }} -i18n: {{ T "hugo" }} -` - - c.Assert(afero.WriteFile(mm, filepath.Join(archetypeDir, "index.md"), []byte(fmt.Sprintf(contentFile, "index.md")), 0o755), qt.IsNil) - c.Assert(afero.WriteFile(mm, filepath.Join(archetypeDir, "index.nn.md"), []byte(fmt.Sprintf(contentFile, "index.nn.md")), 0o755), qt.IsNil) - - c.Assert(afero.WriteFile(mm, filepath.Join(archetypeDir, "pages", "bio.md"), []byte(fmt.Sprintf(contentFile, "bio.md")), 0o755), qt.IsNil) - c.Assert(afero.WriteFile(mm, filepath.Join(archetypeDir, "resources", "hugo1.json"), []byte(`hugo1: {{ printf "no template handling in here" }}`), 0o755), qt.IsNil) - c.Assert(afero.WriteFile(mm, filepath.Join(archetypeDir, "resources", "hugo2.xml"), []byte(`hugo2: {{ printf "no template handling in here" }}`), 0o755), qt.IsNil) - - c.Assert(afero.WriteFile(mm, filepath.Join(archetypeThemeDir, "index.md"), []byte(fmt.Sprintf(contentFile, "index.md")), 0o755), qt.IsNil) - c.Assert(afero.WriteFile(mm, filepath.Join(archetypeThemeDir, "resources", "hugo1.json"), []byte(`hugo1: {{ printf "no template handling in here" }}`), 0o755), qt.IsNil) - - c.Assert(initFs(mm), qt.IsNil) - cfg, fs := newTestCfg(c, mm) - - conf := testconfig.GetTestConfigs(fs.Source, cfg) - h, err := hugolib.NewHugoSites(deps.DepsCfg{Configs: conf, Fs: fs}) - c.Assert(err, qt.IsNil) - c.Assert(len(h.Sites), qt.Equals, 2) - - c.Assert(create.NewContent(h, "my-bundle", "post/my-post", false), qt.IsNil) - - cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", 
"post/my-post/resources/hugo1.json")), `hugo1: {{ printf "no template handling in here" }}`) - cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-post/resources/hugo2.xml")), `hugo2: {{ printf "no template handling in here" }}`) - - // Content files should get the correct site context. - // TODO(bep) archetype check i18n - cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-post/index.md")), `File: index.md`, `Site Lang: en`, `Name: My Post`, `i18n: Hugo Rocks!`) - cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-post/index.nn.md")), `File: index.nn.md`, `Site Lang: nn`, `Name: My Post`, `i18n: Hugo Rokkar!`) - - cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-post/pages/bio.md")), `File: bio.md`, `Site Lang: en`, `Name: Bio`) - - c.Assert(create.NewContent(h, "my-theme-bundle", "post/my-theme-post", false), qt.IsNil) - cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-theme-post/index.md")), `File: index.md`, `Site Lang: en`, `Name: My Theme Post`, `i18n: Hugo Rocks!`) - cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-theme-post/resources/hugo1.json")), `hugo1: {{ printf "no template handling in here" }}`) -} - func TestNewContentFromDirSiteFunction(t *testing.T) { mm := afero.NewMemMapFs() c := qt.New(t) @@ -206,83 +154,6 @@ site RegularPages: {{ len site.RegularPages }} // Regular files should fall back to the default archetype (we have no regular file archetype). 
c.Assert(create.NewContent(h, "my-bundle", "mypage.md", false), qt.IsNil) cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "mypage.md")), `draft: true`) - -} - -func TestNewContentFromDirNoSite(t *testing.T) { - mm := afero.NewMemMapFs() - c := qt.New(t) - - archetypeDir := filepath.Join("archetypes", "my-bundle") - c.Assert(mm.MkdirAll(archetypeDir, 0o755), qt.IsNil) - - archetypeThemeDir := filepath.Join("themes", "mytheme", "archetypes", "my-theme-bundle") - c.Assert(mm.MkdirAll(archetypeThemeDir, 0o755), qt.IsNil) - - contentFile := ` -File: %s -Name: {{ replace .Name "-" " " | title }} -i18n: {{ T "hugo" }} -` - - c.Assert(afero.WriteFile(mm, filepath.Join(archetypeDir, "index.md"), []byte(fmt.Sprintf(contentFile, "index.md")), 0o755), qt.IsNil) - c.Assert(afero.WriteFile(mm, filepath.Join(archetypeDir, "index.nn.md"), []byte(fmt.Sprintf(contentFile, "index.nn.md")), 0o755), qt.IsNil) - - c.Assert(afero.WriteFile(mm, filepath.Join(archetypeDir, "pages", "bio.md"), []byte(fmt.Sprintf(contentFile, "bio.md")), 0o755), qt.IsNil) - c.Assert(afero.WriteFile(mm, filepath.Join(archetypeDir, "resources", "hugo1.json"), []byte(`hugo1: {{ printf "no template handling in here" }}`), 0o755), qt.IsNil) - c.Assert(afero.WriteFile(mm, filepath.Join(archetypeDir, "resources", "hugo2.xml"), []byte(`hugo2: {{ printf "no template handling in here" }}`), 0o755), qt.IsNil) - - c.Assert(afero.WriteFile(mm, filepath.Join(archetypeThemeDir, "index.md"), []byte(fmt.Sprintf(contentFile, "index.md")), 0o755), qt.IsNil) - c.Assert(afero.WriteFile(mm, filepath.Join(archetypeThemeDir, "resources", "hugo1.json"), []byte(`hugo1: {{ printf "no template handling in here" }}`), 0o755), qt.IsNil) - - c.Assert(initFs(mm), qt.IsNil) - cfg, fs := newTestCfg(c, mm) - conf := testconfig.GetTestConfigs(fs.Source, cfg) - h, err := hugolib.NewHugoSites(deps.DepsCfg{Configs: conf, Fs: fs}) - c.Assert(err, qt.IsNil) - c.Assert(len(h.Sites), qt.Equals, 2) - - c.Assert(create.NewContent(h, 
"my-bundle", "post/my-post", false), qt.IsNil) - - cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-post/resources/hugo1.json")), `hugo1: {{ printf "no template handling in here" }}`) - cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-post/resources/hugo2.xml")), `hugo2: {{ printf "no template handling in here" }}`) - - cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-post/index.md")), `File: index.md`, `Name: My Post`, `i18n: Hugo Rocks!`) - cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-post/index.nn.md")), `File: index.nn.md`, `Name: My Post`, `i18n: Hugo Rokkar!`) - - cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-post/pages/bio.md")), `File: bio.md`, `Name: Bio`) - - c.Assert(create.NewContent(h, "my-theme-bundle", "post/my-theme-post", false), qt.IsNil) - cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-theme-post/index.md")), `File: index.md`, `Name: My Theme Post`, `i18n: Hugo Rocks!`) - cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-theme-post/resources/hugo1.json")), `hugo1: {{ printf "no template handling in here" }}`) -} - -func TestNewContentForce(t *testing.T) { - mm := afero.NewMemMapFs() - c := qt.New(t) - - archetypeDir := filepath.Join("archetypes", "my-bundle") - c.Assert(mm.MkdirAll(archetypeDir, 0o755), qt.IsNil) - c.Assert(afero.WriteFile(mm, filepath.Join(archetypeDir, "index.md"), []byte(""), 0o755), qt.IsNil) - c.Assert(afero.WriteFile(mm, filepath.Join(archetypeDir, "index.nn.md"), []byte(""), 0o755), qt.IsNil) - - c.Assert(initFs(mm), qt.IsNil) - cfg, fs := newTestCfg(c, mm) - - conf := testconfig.GetTestConfigs(fs.Source, cfg) - h, err := hugolib.NewHugoSites(deps.DepsCfg{Configs: conf, Fs: fs}) - c.Assert(err, qt.IsNil) - c.Assert(len(h.Sites), qt.Equals, 2) - - // from file - c.Assert(create.NewContent(h, "post", "post/my-post.md", false), 
qt.IsNil) - c.Assert(create.NewContent(h, "post", "post/my-post.md", false), qt.IsNotNil) - c.Assert(create.NewContent(h, "post", "post/my-post.md", true), qt.IsNil) - - // from dir - c.Assert(create.NewContent(h, "my-bundle", "post/my-post", false), qt.IsNil) - c.Assert(create.NewContent(h, "my-bundle", "post/my-post", false), qt.IsNotNil) - c.Assert(create.NewContent(h, "my-bundle", "post/my-post", true), qt.IsNil) } func initFs(fs afero.Fs) error { @@ -308,7 +179,7 @@ func initFs(fs afero.Fs) error { afero.WriteFile(fs, filename, []byte(`--- title: Test --- -`), 0666) +`), 0o666) } // create archetype files diff --git a/create/skeletons/skeletons.go b/create/skeletons/skeletons.go index 7f7fb1bb7..aec79c149 100644 --- a/create/skeletons/skeletons.go +++ b/create/skeletons/skeletons.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -83,7 +83,7 @@ func copyFiles(createpath string, sourceFs afero.Fs, skeleton embed.FS) error { return fs.WalkDir(skeleton, ".", func(path string, d fs.DirEntry, err error) error { _, slug, _ := strings.Cut(path, "/") if d.IsDir() { - return sourceFs.MkdirAll(filepath.Join(createpath, slug), 0777) + return sourceFs.MkdirAll(filepath.Join(createpath, slug), 0o777) } else { if filepath.Base(path) != ".gitkeep" { data, _ := fs.ReadFile(skeleton, path) diff --git a/deploy/deploy.go b/deploy/deploy.go index 26fac8975..7c496c72d 100644 --- a/deploy/deploy.go +++ b/deploy/deploy.go @@ -22,6 +22,7 @@ import ( "context" "crypto/md5" "encoding/hex" + "errors" "fmt" "io" "mime" @@ -33,8 +34,6 @@ import ( "strings" "sync" - "errors" - "github.com/dustin/go-humanize" "github.com/gobwas/glob" "github.com/gohugoio/hugo/common/loggers" @@ -75,7 +74,6 @@ const metaMD5Hash = "md5chksum" // the meta key to store md5hash in // New constructs a new *Deployer. func New(cfg config.AllProvider, logger loggers.Logger, localFs afero.Fs) (*Deployer, error) { - dcfg := cfg.GetConfigSection(deploymentConfigKey).(DeployConfig) targetName := dcfg.Target @@ -675,8 +673,6 @@ func (d *Deployer) findDiffs(localFiles map[string]*localFile, remoteFiles map[s } else if !bytes.Equal(lf.MD5(), remoteFile.MD5) { upload = true reason = reasonMD5Differs - } else { - // Nope! Leave uploaded = false. 
} found[path] = true } else { diff --git a/deploy/deploy_test.go b/deploy/deploy_test.go index 66eece10b..d220ab7c2 100644 --- a/deploy/deploy_test.go +++ b/deploy/deploy_test.go @@ -31,6 +31,7 @@ import ( "testing" "github.com/gohugoio/hugo/common/loggers" + "github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/media" "github.com/google/go-cmp/cmp" "github.com/google/go-cmp/cmp/cmpopts" @@ -241,7 +242,7 @@ func TestWalkLocal(t *testing.T) { for _, name := range tc.Given { dir, _ := path.Split(name) if dir != "" { - if err := fs.MkdirAll(dir, 0755); err != nil { + if err := fs.MkdirAll(dir, 0o755); err != nil { t.Fatal(err) } } @@ -530,7 +531,7 @@ func initFsTests(t *testing.T) []*fsTest { membucket := memblob.OpenBucket(nil) t.Cleanup(func() { membucket.Close() }) - filefs := afero.NewBasePathFs(afero.NewOsFs(), tmpfsdir) + filefs := hugofs.NewBasePathFs(afero.NewOsFs(), tmpfsdir) filebucket, err := fileblob.OpenBucket(tmpbucketdir, nil) if err != nil { t.Fatal(err) diff --git a/deps/deps.go b/deps/deps.go index 4d1812015..8778bff80 100644 --- a/deps/deps.go +++ b/deps/deps.go @@ -11,6 +11,8 @@ import ( "sync/atomic" "github.com/bep/logg" + "github.com/gohugoio/hugo/cache/dynacache" + "github.com/gohugoio/hugo/cache/filecache" "github.com/gohugoio/hugo/common/hexec" "github.com/gohugoio/hugo/common/loggers" "github.com/gohugoio/hugo/config" @@ -59,6 +61,9 @@ type Deps struct { // The configuration to use Conf config.AllProvider `json:"-"` + // The memory cache to use. 
+ MemCache *dynacache.Cache + // The translation func to use Translate func(ctx context.Context, translationID string, templateData any) string `json:"-"` @@ -149,6 +154,10 @@ func (d *Deps) Init() error { d.ExecHelper = hexec.New(d.Conf.GetConfigSection("security").(security.Config)) } + if d.MemCache == nil { + d.MemCache = dynacache.New(dynacache.Options{Running: d.Conf.Running(), Log: d.Log}) + } + if d.PathSpec == nil { hashBytesReceiverFunc := func(name string, match bool) { if !match { @@ -190,13 +199,16 @@ func (d *Deps) Init() error { } var common *resources.SpecCommon - var imageCache *resources.ImageCache if d.ResourceSpec != nil { common = d.ResourceSpec.SpecCommon - imageCache = d.ResourceSpec.ImageCache } - resourceSpec, err := resources.NewSpec(d.PathSpec, common, imageCache, d.BuildState, d.Log, d, d.ExecHelper) + fileCaches, err := filecache.NewCaches(d.PathSpec) + if err != nil { + return fmt.Errorf("failed to create file caches from configuration: %w", err) + } + + resourceSpec, err := resources.NewSpec(d.PathSpec, common, fileCaches, d.MemCache, d.BuildState, d.Log, d, d.ExecHelper) if err != nil { return fmt.Errorf("failed to create resource spec: %w", err) } @@ -307,6 +319,9 @@ func (d *Deps) TextTmpl() tpl.TemplateParseFinder { } func (d *Deps) Close() error { + if d.MemCache != nil { + d.MemCache.Stop() + } return d.BuildClosers.Close() } @@ -2,9 +2,8 @@ module github.com/gohugoio/hugo require ( github.com/BurntSushi/locker v0.0.0-20171006230638-a6e239ea1c69 - github.com/PuerkitoBio/purell v1.1.1 github.com/alecthomas/chroma/v2 v2.12.0 - github.com/armon/go-radix v1.0.0 + github.com/armon/go-radix v1.0.1-0.20221118154546-54df44f2176c github.com/aws/aws-sdk-go v1.48.6 github.com/bep/clocks v0.5.0 github.com/bep/debounce v1.2.0 @@ -15,10 +14,10 @@ require ( github.com/bep/golibsass v1.1.1 github.com/bep/gowebp v0.3.0 github.com/bep/helpers v0.4.0 - github.com/bep/lazycache v0.2.0 + github.com/bep/lazycache v0.4.0 github.com/bep/logg v0.4.0 
github.com/bep/mclib v1.20400.20402 - github.com/bep/overlayfs v0.6.0 + github.com/bep/overlayfs v0.9.1 github.com/bep/simplecobra v0.4.0 github.com/bep/tmc v0.5.1 github.com/clbanning/mxj/v2 v2.7.0 @@ -53,6 +52,7 @@ require ( github.com/muesli/smartcrop v0.3.0 github.com/niklasfasching/go-org v1.7.0 github.com/olekukonko/tablewriter v0.0.5 + github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58 github.com/pelletier/go-toml/v2 v2.1.1 github.com/rogpeppe/go-internal v1.12.0 github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd @@ -60,17 +60,17 @@ require ( github.com/spf13/afero v1.11.0 github.com/spf13/cast v1.6.0 github.com/spf13/cobra v1.8.0 - github.com/spf13/fsync v0.9.0 + github.com/spf13/fsync v0.10.0 github.com/spf13/pflag v1.0.5 github.com/tdewolff/minify/v2 v2.20.13 github.com/tdewolff/parse/v2 v2.7.8 github.com/yuin/goldmark v1.6.0 github.com/yuin/goldmark-emoji v1.0.2 - go.uber.org/atomic v1.11.0 go.uber.org/automaxprocs v1.5.3 gocloud.dev v0.34.0 golang.org/x/exp v0.0.0-20221031165847-c99f073a8326 golang.org/x/image v0.14.0 + golang.org/x/mod v0.14.0 golang.org/x/net v0.20.0 golang.org/x/sync v0.6.0 golang.org/x/text v0.14.0 @@ -92,7 +92,6 @@ require ( github.com/Azure/go-autorest v14.2.0+incompatible // indirect github.com/Azure/go-autorest/autorest/to v0.4.0 // indirect github.com/AzureAD/microsoft-authentication-library-for-go v1.0.0 // indirect - github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 // indirect github.com/aws/aws-sdk-go-v2 v1.20.0 // indirect github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.11 // indirect github.com/aws/aws-sdk-go-v2/config v1.18.32 // indirect @@ -124,7 +123,7 @@ require ( github.com/google/wire v0.5.0 // indirect github.com/googleapis/enterprise-certificate-proxy v0.3.2 // indirect github.com/googleapis/gax-go/v2 v2.12.0 // indirect - github.com/hashicorp/golang-lru/v2 v2.0.1 // indirect + github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect github.com/inconshreveable/mousetrap 
v1.1.0 // indirect github.com/invopop/yaml v0.2.0 // indirect github.com/jmespath/go-jmespath v0.4.0 // indirect @@ -142,7 +141,6 @@ require ( github.com/russross/blackfriday/v2 v2.1.0 // indirect go.opencensus.io v0.24.0 // indirect golang.org/x/crypto v0.18.0 // indirect - golang.org/x/mod v0.14.0 // indirect golang.org/x/oauth2 v0.15.0 // indirect golang.org/x/sys v0.16.0 // indirect golang.org/x/time v0.5.0 // indirect @@ -158,4 +156,4 @@ require ( software.sslmate.com/src/go-pkcs12 v0.2.0 // indirect ) -go 1.18 +go 1.20 @@ -1,14 +1,51 @@ cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= +cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= +cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.44.3/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= +cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= +cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= +cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= +cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= +cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= +cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= +cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= +cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= +cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= +cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= +cloud.google.com/go 
v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= +cloud.google.com/go v0.75.0/go.mod h1:VGuuCn7PG0dwsd5XPVm2Mm3wlh3EL55/79EKB6hlPTY= cloud.google.com/go v0.110.10 h1:LXy9GEO+timppncPIAZoOj3l58LIU9k+kn48AN7IO3Y= cloud.google.com/go v0.110.10/go.mod h1:v1OoFqYxiBkUrruItNM3eT4lLByNjxmJSV/xDKJNnic= +cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= +cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= +cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= +cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= +cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= +cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= cloud.google.com/go/compute v1.23.3 h1:6sVlXXBmbd7jNX0Ipq0trII3e4n1/MsADLK6a+aiVlk= cloud.google.com/go/compute v1.23.3/go.mod h1:VCgBUoMnIVIR0CscqQiPJLAG25E3ZRZMzcFZeQ+h8CI= cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY= cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA= +cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= +cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= cloud.google.com/go/iam v1.1.5 h1:1jTsCu4bcsNsE4iiqNT5SHwrDRCfRmIaaaVFhRveTJI= cloud.google.com/go/iam v1.1.5/go.mod h1:rB6P/Ic3mykPbFio+vo7403drjlgvoWfYpJhMXEbzv8= +cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= +cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= +cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= +cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= +cloud.google.com/go/storage v1.0.0/go.mod 
h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= +cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= +cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= +cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= +cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= +cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo= cloud.google.com/go/storage v1.35.1 h1:B59ahL//eDfx2IIKFBeT5Atm9wnNmj3+8xG/W4WB//w= cloud.google.com/go/storage v1.35.1/go.mod h1:M6M/3V/D3KpzMTJyPOR/HU6n2Si5QdaXYEsng2xgOs8= +dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= github.com/Azure/azure-sdk-for-go/sdk/azcore v1.7.0 h1:8q4SaHjFsClSvuVne0ID/5Ka8u3fcIHyqkLjcFpNRHQ= github.com/Azure/azure-sdk-for-go/sdk/azcore v1.7.0/go.mod h1:bjGvMhVMb+EEm3VRNQawDMUyMMjo+S5ewNjflkep/0Q= github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.3.0 h1:vcYCAze6p19qBW7MhZybIsqD8sMV8js0NyQM8JDnVtg= @@ -28,16 +65,13 @@ github.com/BurntSushi/locker v0.0.0-20171006230638-a6e239ea1c69 h1:+tu3HOoMXB7RX github.com/BurntSushi/locker v0.0.0-20171006230638-a6e239ea1c69/go.mod h1:L1AbZdiDllfyYH5l5OkAaZtk7VkWe89bPJFmnDBNHxg= github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/PuerkitoBio/purell v1.1.1 h1:WEQqlqaGbrPkxLJWfBwQmfEAE1Z7ONdDLqrN38tNFfI= -github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= -github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 h1:d+Bc7a5rLufV/sSk/8dngufqelfh6jnri85riMAaF/M= -github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= +github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod 
h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/alecthomas/assert/v2 v2.2.1 h1:XivOgYcduV98QCahG8T5XTezV5bylXe+lBxLG2K2ink= github.com/alecthomas/chroma/v2 v2.12.0 h1:Wh8qLEgMMsN7mgyG8/qIpegky2Hvzr4By6gEF7cmWgw= github.com/alecthomas/chroma/v2 v2.12.0/go.mod h1:4TQu7gdfuPjSh76j78ietmqh9LiurGF0EpseFXdKMBw= github.com/alecthomas/repr v0.2.0 h1:HAzS41CIzNW5syS8Mf9UwXhNH1J9aix/BvDRf1Ml2Yk= -github.com/armon/go-radix v1.0.0 h1:F4z6KzEeeQIMeLFa97iZU6vupzoecKdU5TX24SNppXI= -github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= +github.com/armon/go-radix v1.0.1-0.20221118154546-54df44f2176c h1:651/eoCRnQ7YtSjAnSzRucrJz+3iGEFt+ysraELS81M= +github.com/armon/go-radix v1.0.1-0.20221118154546-54df44f2176c/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/aws/aws-sdk-go v1.48.6 h1:hnL/TE3eRigirDLrdRE9AWE1ALZSVLAsC4wK8TGsMqk= github.com/aws/aws-sdk-go v1.48.6/go.mod h1:LF8svs817+Nz+DmiMQKTO3ubZ/6IaTpq3TjupRn3Eqk= github.com/aws/aws-sdk-go-v2 v1.20.0 h1:INUDpYLt4oiPOJl0XwZDK2OVAVf0Rzo+MGVTv9f+gy8= @@ -96,14 +130,14 @@ github.com/bep/gowebp v0.3.0 h1:MhmMrcf88pUY7/PsEhMgEP0T6fDUnRTMpN8OclDrbrY= github.com/bep/gowebp v0.3.0/go.mod h1:ZhFodwdiFp8ehGJpF4LdPl6unxZm9lLFjxD3z2h2AgI= github.com/bep/helpers v0.4.0 h1:ab9veaAiWY4ST48Oxp5usaqivDmYdB744fz+tcZ3Ifs= github.com/bep/helpers v0.4.0/go.mod h1:/QpHdmcPagDw7+RjkLFCvnlUc8lQ5kg4KDrEkb2Yyco= -github.com/bep/lazycache v0.2.0 h1:HKrlZTrDxHIrNKqmnurH42ryxkngCMYLfBpyu40VcwY= -github.com/bep/lazycache v0.2.0/go.mod h1:xUIsoRD824Vx0Q/n57+ZO7kmbEhMBOnTjM/iPixNGbg= +github.com/bep/lazycache v0.4.0 h1:X8yVyWNVupPd4e1jV7efi3zb7ZV/qcjKQgIQ5aPbkYI= +github.com/bep/lazycache v0.4.0/go.mod h1:NmRm7Dexh3pmR1EignYR8PjO2cWybFQ68+QgY3VMCSc= github.com/bep/logg v0.4.0 h1:luAo5mO4ZkhA5M1iDVDqDqnBBnlHjmtZF6VAyTp+nCQ= github.com/bep/logg v0.4.0/go.mod h1:Ccp9yP3wbR1mm++Kpxet91hAZBEQgmWgFgnXX3GkIV0= github.com/bep/mclib v1.20400.20402 h1:olpCE2WSPpOAbFE1R4hnftSEmQ34+xzy2HRzd0m69rA= 
github.com/bep/mclib v1.20400.20402/go.mod h1:pkrk9Kyfqg34Uj6XlDq9tdEFJBiL1FvCoCgVKRzw1EY= -github.com/bep/overlayfs v0.6.0 h1:sgLcq/qtIzbaQNl2TldGXOkHvqeZB025sPvHOQL+DYo= -github.com/bep/overlayfs v0.6.0/go.mod h1:NFjSmn3kCqG7KX2Lmz8qT8VhPPCwZap3UNogXawoQHM= +github.com/bep/overlayfs v0.9.1 h1:SL54SV8A3zRkmQ+83Jj4TLE88jadHd5d1L4NpfmqJJs= +github.com/bep/overlayfs v0.9.1/go.mod h1:aYY9W7aXQsGcA7V9x/pzeR8LjEgIxbtisZm8Q7zPz40= github.com/bep/simplecobra v0.4.0 h1:ufX/6WcOtEVJdCd7hsztTWURlZkOaWYOD+zCqrM8qUE= github.com/bep/simplecobra v0.4.0/go.mod h1:evSM6iQqRwqpV7W4H4DlYFfe9mZ0x6Hj5GEOnIV7dI4= github.com/bep/tmc v0.5.1 h1:CsQnSC6MsomH64gw0cT5f+EwQDcvZz4AazKunFwTpuI= @@ -111,6 +145,9 @@ github.com/bep/tmc v0.5.1/go.mod h1:tGYHN8fS85aJPhDLgXETVKp+PR382OvFi2+q2GkGsq0= github.com/bep/workers v1.0.0 h1:U+H8YmEaBCEaFZBst7GcRVEoqeRC9dzH2dWOwGmOchg= github.com/bep/workers v1.0.0/go.mod h1:7kIESOB86HfR2379pwoMWNy8B50D7r99fRLUyPSNyCs= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= +github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= github.com/clbanning/mxj/v2 v2.7.0 h1:WA/La7UGCanFe5NpHF0Q3DNtnCsVoxbPKuyBNHWRyME= github.com/clbanning/mxj/v2 v2.7.0/go.mod h1:hNiWqW14h+kc+MdF9C6/YoRfjEJoR3ou6tn/Qo+ve2s= github.com/cli/safeexec v1.0.0/go.mod h1:Z/D4tTN8Vs5gXYHDCbaM1S/anmEDnJb1iW0+EJ5zx3Q= @@ -118,6 +155,8 @@ github.com/cli/safeexec v1.0.1 h1:e/C79PbXF4yYTN/wauC4tviMxEV13BwljGj0N9j+N00= github.com/cli/safeexec v1.0.1/go.mod h1:Z/D4tTN8Vs5gXYHDCbaM1S/anmEDnJb1iW0+EJ5zx3Q= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod 
h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= +github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cpuguy83/go-md2man/v2 v2.0.3 h1:qMCsGGgs+MAzDFyp9LpAe1Lqy/fY/qCovCm0qnXZOBM= github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= @@ -135,6 +174,8 @@ github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+m github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= +github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/evanw/esbuild v0.19.12 h1:p5WGo4o6TCN+kt+uZtYSGS3ZHPa+iIZ0SX+ys8UnP10= github.com/evanw/esbuild v0.19.12/go.mod h1:D2vIQZqV/vIf/VRHtViaUtViZmG7o+kKmlBfVQuRi48= @@ -154,6 +195,9 @@ github.com/getkin/kin-openapi v0.122.0 h1:WB9Jbl0Hp/T79/JF9xlSW5Kl9uYdk/AWD0yAd9 github.com/getkin/kin-openapi v0.122.0/go.mod h1:PCWw/lfBrJY4HcdqE3jj+QFkaFK8ABoqo7PvqVhXXqw= github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= +github.com/go-gl/glfw/v3.3/glfw 
v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-openapi/jsonpointer v0.19.6 h1:eCs3fxoIi3Wh6vtgmLTOjdhSpiqphQ+DaPn38N2ZdrE= github.com/go-openapi/jsonpointer v0.19.6/go.mod h1:osyAmYz/mB/C3I+WsTTSgw1ONzaLJoLCyoi6/zppojs= github.com/go-openapi/swag v0.22.3/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14= @@ -177,29 +221,47 @@ github.com/gohugoio/testmodBuilder/mods v0.0.0-20190520184928-c56af20f2e95/go.mo github.com/golang-jwt/jwt/v4 v4.5.0 h1:7cYmW1XlMY7h7ii7UhUyChSgS5wUJEnm9uZVTGqOWzg= github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= +github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= 
+github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= +github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp 
v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= @@ -208,7 +270,22 @@ github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-replayers/grpcreplay v1.1.0 h1:S5+I3zYyZ+GQz68OfbURDdt/+cSMqCK1wrvNx7WBzTE= github.com/google/go-replayers/httpreplay v1.2.0 h1:VM1wEyyjaoU53BwrOnaf9VhAyQQEEioJvFYxYcLRKzk= +github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no= +github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= +github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= github.com/google/martian/v3 v3.3.2 h1:IqNFLAmvJOgVlpdEBiQbDc2EwKW77amAycfTuWKdfvw= +github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= 
+github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/s2a-go v0.1.7 h1:60BLSyTrOV4/haCDW4zb1guZItoSq8foHCXrAnjBo/o= github.com/google/s2a-go v0.1.7/go.mod h1:50CgR4k1jNlWBu4UfS4AcfhVe1r6pdZPygJ3R8F0Qdw= github.com/google/subcommands v1.0.1/go.mod h1:ZjhPrFU+Olkh9WazFPsl27BQ4UPiG37m3yTrtFlrHVk= @@ -219,15 +296,22 @@ github.com/google/wire v0.5.0 h1:I7ELFeVBr3yfPIcc8+MWvrjk+3VjbcSzoXm3JVa+jD8= github.com/google/wire v0.5.0/go.mod h1:ngWDr9Qvq3yZA10YrxfyGELY/AFWGVpy9c1LTRi1EoU= github.com/googleapis/enterprise-certificate-proxy v0.3.2 h1:Vie5ybvEvT75RniqhfFxPRy3Bf7vr3h0cechB90XaQs= github.com/googleapis/enterprise-certificate-proxy v0.3.2/go.mod h1:VLSiSSBs/ksPL8kq3OBOQ6WRI2QnaFynd1DCjZ62+V0= +github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= +github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= 
github.com/googleapis/gax-go/v2 v2.12.0 h1:A+gCJKdRfqXkr+BIRGtZLibNXf0m1f9E4HG56etFpas= github.com/googleapis/gax-go/v2 v2.12.0/go.mod h1:y+aIqrI5eb1YGMVJfuV3185Ts/D7qKpsEkdD5+I6QGU= +github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= github.com/gorilla/websocket v1.5.1 h1:gmztn0JnHVt9JZquRuzLw3g4wouNVzKL15iLr/zn/QY= github.com/gorilla/websocket v1.5.1/go.mod h1:x3kM2JMyaluk02fnUJpQuwD2dCS5NDG2ZHL0uE0tcaY= github.com/hairyhenderson/go-codeowners v0.4.0 h1:Wx/tRXb07sCyHeC8mXfio710Iu35uAy5KYiBdLHdv4Q= github.com/hairyhenderson/go-codeowners v0.4.0/go.mod h1:iJgZeCt+W/GzXo5uchFCqvVHZY2T4TAIpvuVlKVkLxc= -github.com/hashicorp/golang-lru/v2 v2.0.1 h1:5pv5N1lT1fjLg2VQ5KWc7kmucp2x/kvFOnxuVTqZ6x4= -github.com/hashicorp/golang-lru/v2 v2.0.1/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM= +github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k= +github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM= github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM= +github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/invopop/yaml v0.2.0 h1:7zky/qH+O0DwAyoobXUqvVBwgBFRxKoQ/3FjcVpjTMY= @@ -241,6 +325,10 @@ github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGw 
github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= +github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= +github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= @@ -283,6 +371,8 @@ github.com/niklasfasching/go-org v1.7.0 h1:vyMdcMWWTe/XmANk19F4k8XGBYg0GQ/gJGMim github.com/niklasfasching/go-org v1.7.0/go.mod h1:WuVm4d45oePiE0eX25GqTDQIt/qPW1T9DGkRscqLW5o= github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec= github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY= +github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58 h1:onHthvaw9LFnH4t2DcNVpwGmV9E1BkGknEliJkfwQj0= +github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58/go.mod h1:DXv8WO4yhMYhSNPKjeNKa5WY9YCIEBRbNzFFPJbWO6Y= github.com/pelletier/go-toml/v2 v2.1.1 h1:LWAJwfNvjQZCFIDKWYQaM62NcYeYViCmWIwmOStowAI= github.com/pelletier/go-toml/v2 v2.1.1/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc= github.com/perimeterx/marshmallow v1.1.5 h1:a2LALqQ1BlHM8PZblsDdidgv1mWi1DgC2UmX50IvK2s= @@ -292,11 +382,13 @@ github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8/go.mod h1:HKlIX3XHQyzL github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod 
h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg= github.com/pmezard/go-difflib v0.0.0-20151028094244-d8ed2627bdf0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/prashantv/gostub v1.1.0 h1:BTyx3RfQjRHnUWaGF9oQos79AlQ5k8WNktv7VGvVH4g= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8= @@ -308,21 +400,25 @@ github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd/go.mod h1:hPqNNc0 github.com/sanity-io/litter v1.5.5 h1:iE+sBxPBzoK6uaEP5Lt3fHNgpKcHXc/A2HGETy0uJQo= github.com/sanity-io/litter v1.5.5/go.mod h1:9gzJgR2i4ZpjZHsKvUXIRQVk7P+yM3e+jAF7bU2UI5U= github.com/shogo82148/go-shuffle v0.0.0-20180218125048-27e6095f230d/go.mod h1:2htx6lmL0NGLHlO8ZCf+lQBGBHIbEujyywxJArf+2Yc= +github.com/spf13/afero v1.9.2/go.mod h1:iUV7ddyEEZPO5gA3zD4fJt6iStLlL+Lg4m2cihcDf8Y= github.com/spf13/afero v1.11.0 h1:WJQKhtpdm3v2IzqG8VMqrr6Rf3UYpEF239Jy9wNepM8= github.com/spf13/afero v1.11.0/go.mod h1:GH9Y3pIexgf1MTIWtNGyogA5MwRIDXGUr+hbWNoBjkY= github.com/spf13/cast v1.6.0 h1:GEiTHELF+vaR5dhz3VqZfFSzZjYbgeKDpBxQVS4GYJ0= github.com/spf13/cast v1.6.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo= 
github.com/spf13/cobra v1.8.0 h1:7aJaZx1B85qltLMc546zn58BxxfZdR/W22ej9CFoEf0= github.com/spf13/cobra v1.8.0/go.mod h1:WXLWApfZ71AjXPya3WOlMsY9yMs7YeiHhFVlvLyhcho= -github.com/spf13/fsync v0.9.0 h1:f9CEt3DOB2mnHxZaftmEOFWjABEvKM/xpf3cUwJrGOY= -github.com/spf13/fsync v0.9.0/go.mod h1:fNtJEfG3HiltN3y4cPOz6MLjos9+2pIEqLIgszqhp/0= +github.com/spf13/fsync v0.10.0 h1:j+zUMN41zWj3sEqueD4mAsPDQwyOvMeJCcrawdmbqXk= +github.com/spf13/fsync v0.10.0/go.mod h1:y+B41vYq5i6Boa3Z+BVoPbDeOvxVkNU5OBXhoT8i4TQ= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/testify v0.0.0-20161117074351-18a02ba4a312/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= @@ -335,66 +431,174 @@ github.com/tdewolff/parse/v2 v2.7.8/go.mod h1:3FbJWZp3XT9OWVN3Hmfp0p/a08v4h8J9W1 github.com/tdewolff/test v1.0.11-0.20231101010635-f1265d231d52/go.mod h1:6DAvZliBAAnD7rhVgwaM7DE5/d9NMOAJ09SqYqeK4QE= github.com/tdewolff/test v1.0.11-0.20240106005702-7de5f7df4739 
h1:IkjBCtQOOjIn03u/dMQK9g+Iw9ewps4mCl1nB8Sscbo= github.com/ugorji/go/codec v1.2.7 h1:YPXUKf7fYbp/y8xloBqZOw2qaVggbfwMlI8WM3wZUJ0= +github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/yuin/goldmark v1.3.7/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/yuin/goldmark v1.6.0 h1:boZcn2GTjpsynOsC0iJHnBWa4Bi0qzfJjthwauItG68= github.com/yuin/goldmark v1.6.0/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= github.com/yuin/goldmark-emoji v1.0.2 h1:c/RgTShNgHTtc6xdz2KKI74jJr6rWi7FPgnP9GAsO5s= github.com/yuin/goldmark-emoji v1.0.2/go.mod h1:RhP/RWpexdp+KHs7ghKnifRoIs/Bq4nDS7tRbCkOwKY= +go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= +go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= +go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= -go.uber.org/atomic v1.11.0 h1:ZvwS0R+56ePWxUNi+Atn9dWONBPp/AUETXlHW0DxSjE= -go.uber.org/atomic v1.11.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0= go.uber.org/automaxprocs v1.5.3 h1:kWazyxZUrS3Gs4qUpbwo5kEIMGe/DAvi5Z4tl2NW4j8= go.uber.org/automaxprocs v1.5.3/go.mod h1:eRbA25aqJrxAbsLO0xy5jVwPt7FQnRgjW+efnwa1WM0= gocloud.dev v0.34.0 
h1:LzlQY+4l2cMtuNfwT2ht4+fiXwWf/NmPTnXUlLmGif4= gocloud.dev v0.34.0/go.mod h1:psKOachbnvY3DAOPbsFVmLIErwsbWPUG2H5i65D38vE= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= +golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20220331220935-ae2d96664a29/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.18.0 h1:PGVlW0xEltQnzFZ55hkuX5+KLyrMYhHld1YHO4AKcdc= golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= +golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= +golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= +golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod 
h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= +golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= golang.org/x/exp v0.0.0-20221031165847-c99f073a8326 h1:QfTh0HpN6hlw6D3vu8DAwC8pBIwikq0AI1evdm+FksE= golang.org/x/exp v0.0.0-20221031165847-c99f073a8326/go.mod h1:CxIveKay+FTh1D0yPZemJVgC/95VzuuOLq5Qi4xnoYc= +golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= +golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/image v0.0.0-20210220032944-ac19c3e999fb/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/image v0.14.0 h1:tNgSxAFe3jC4uYqvZdTr84SZoM1KfwdC9SKIFrLjFn4= golang.org/x/image v0.14.0/go.mod h1:HUYqC05R2ZcZ3ejNQsIHQDQiwWM4JBqmm6MKANTp4LE= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= +golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod 
h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= +golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.14.0 h1:dGoOF9QVLYng8IHTm7BAyWqCqSheQ5pYWGhzW00YJr0= golang.org/x/mod v0.14.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net 
v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod 
h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.20.0 h1:aCL9BSgETF1k+blQaYUBx9hJ9LOGP3gAVemcZlf1Kpo= golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 
v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.15.0 h1:s8pnnxNVzjWyrvYdFUQq5llS1PX2zhPXmccZv99h7uQ= golang.org/x/oauth2 v0.15.0/go.mod h1:q48ptWNTY5XWf+JNten23lcvHpLJ0ZSxF5ttTHKVCAM= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ= golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 
+golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210616045830-e2b7044e8c71/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -404,21 +608,68 @@ golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.16.0 
h1:xWw16ngr6ZMtmxDyKyIgsE93KNKz5HKmMa3b8ALHidU= golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk= golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod 
h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190422233926-fe54fb35175b/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod 
h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= +golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod 
h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= +golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.17.0 h1:FvmRgNOcs3kOa+T20R1uhfP9F6HgG2mfxDv1vrx1Htc= golang.org/x/tools v0.17.0/go.mod h1:xsh6VxdV005rRVaS6SSAf9oiAqljS7UZUacMZ8Bnsps= @@ -428,15 +679,71 @@ golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8T golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 h1:H2TDz8ibqkAF6YGhCdN3jS9O0/s90v0rJh3X/OLHEUk= golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= +google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= +google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= +google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api 
v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= +google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= +google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= +google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= +google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= google.golang.org/api v0.152.0 h1:t0r1vPnfMc260S2Ci+en7kfCZaLOPs5KI0sVV/6jZrY= google.golang.org/api v0.152.0/go.mod h1:3qNJX5eOmhiWYc67jRA/3GsDw97UFb5ivv7Y2PrriAY= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= +google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c= google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= 
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= +google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= +google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod 
h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= +google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto 
v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20231106174013-bbf56f31fb17 h1:wpZ8pe2x1Q3f2KyT5f8oP/fa9rHAKgFPr/HZdNuS+PQ= google.golang.org/genproto v0.0.0-20231106174013-bbf56f31fb17/go.mod h1:J7XzRzVy1+IPwWHZUzoD0IccYZIrXILAQpc+Qy9CMhY= google.golang.org/genproto/googleapis/api v0.0.0-20231106174013-bbf56f31fb17 h1:JpwMPBpFN3uKhdaekDpiNlImDdkUAyiJ6ez/uxGaUSo= @@ -444,10 +751,21 @@ google.golang.org/genproto/googleapis/api v0.0.0-20231106174013-bbf56f31fb17/go. google.golang.org/genproto/googleapis/rpc v0.0.0-20231120223509-83a465c0220f h1:ultW7fxlIvee4HYrtnaRPon9HpEgFk5zYpmfMgtKB5I= google.golang.org/genproto/googleapis/rpc v0.0.0-20231120223509-83a465c0220f/go.mod h1:L9KNLi232K1/xB6f7AlSX692koaRnKaWSR0stBki0Yc= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= +google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= +google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= 
+google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= +google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= +google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= google.golang.org/grpc v1.59.0 h1:Z5Iec2pjwb+LEOqzpB2MR12/eKFhDPhuqW91O+4bwUk= google.golang.org/grpc v1.59.0/go.mod h1:aUPDwccQo6OTjy7Hct4AfBPD1GptF4fyUjIkQ9YtF98= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= @@ -458,6 +776,7 @@ google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzi google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= @@ -480,8 +799,16 @@ gopkg.in/yaml.v3 v3.0.0/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools 
v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= +honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= howett.net/plist v1.0.0 h1:7CrbWYbPPO/PyNy38b2EB/+gYbjCe2DXBxgtOOZbSQM= howett.net/plist v1.0.0/go.mod h1:lqaXoTrLY4hg8tnEzNru53gicrbv7rrk+2xJA/7hw9g= +rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= +rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= +rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= software.sslmate.com/src/go-pkcs12 v0.2.0 h1:nlFkj7bTysH6VkC4fGphtjXRbezREPgrHuJG20hBGPE= software.sslmate.com/src/go-pkcs12 v0.2.0/go.mod h1:23rNcYsMabIc1otwLpTkCCPwUq6kQsTyowttG/as0kQ= diff --git a/helpers/content.go b/helpers/content.go index a3abb334d..889294382 100644 --- a/helpers/content.go +++ b/helpers/content.go @@ -30,7 +30,6 @@ import ( "github.com/spf13/afero" "github.com/gohugoio/hugo/markup/converter" - "github.com/gohugoio/hugo/markup/converter/hooks" "github.com/gohugoio/hugo/markup" @@ -38,19 +37,15 @@ import ( ) var ( - openingPTag = []byte("<p>") - closingPTag = []byte("</p>") - paragraphIndicator = []byte("<p") - closingIndicator = []byte("</") + openingPTag = []byte("<p>") + closingPTag = []byte("</p>") ) // ContentSpec provides functionality to render markdown content. 
type ContentSpec struct { Converters markup.ConverterProvider anchorNameSanitizer converter.AnchorNameSanitizer - getRenderer func(t hooks.RendererType, id any) any - - Cfg config.AllProvider + Cfg config.AllProvider } // NewContentSpec returns a ContentSpec initialized diff --git a/helpers/content_test.go b/helpers/content_test.go index 72e3eeb49..e2bf501d2 100644 --- a/helpers/content_test.go +++ b/helpers/content_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -24,8 +24,6 @@ import ( "github.com/gohugoio/hugo/helpers" ) -const tstHTMLContent = "<!DOCTYPE html><html><head><script src=\"http://two/foobar.js\"></script></head><body><nav><ul><li hugo-nav=\"section_0\"></li><li hugo-nav=\"section_1\"></li></ul></nav><article>content <a href=\"http://two/foobar\">foobar</a>. Follow up</article><p>This is some text.<br>And some more.</p></body></html>" - func TestTrimShortHTML(t *testing.T) { tests := []struct { input, output []byte @@ -68,7 +66,6 @@ func BenchmarkTestTruncateWordsToWholeSentence(b *testing.B) { } func TestTruncateWordsToWholeSentence(t *testing.T) { - type test struct { input, expected string max int @@ -101,7 +98,6 @@ func TestTruncateWordsToWholeSentence(t *testing.T) { } func TestTruncateWordsByRune(t *testing.T) { - type test struct { input, expected string max int diff --git a/helpers/general.go b/helpers/general.go index b16aec0b0..35e35a7e0 100644 --- a/helpers/general.go +++ b/helpers/general.go @@ -196,6 +196,7 @@ func ReaderContains(r io.Reader, subslice []byte) bool { func GetTitleFunc(style string) func(s string) string { switch strings.ToLower(style) { case "go": + //lint:ignore SA1019 keep for now. 
return strings.Title case "chicago": tc := transform.NewTitleConverter(transform.ChicagoStyle) @@ -263,10 +264,11 @@ func MD5String(f string) string { return hex.EncodeToString(h.Sum([]byte{})) } -// MD5FromFileFast creates a MD5 hash from the given file. It only reads parts of +// MD5FromReaderFast creates a MD5 hash from the given file. It only reads parts of // the file for speed, so don't use it if the files are very subtly different. // It will not close the file. -func MD5FromFileFast(r io.ReadSeeker) (string, error) { +// It will return the MD5 hash and the size of r in bytes. +func MD5FromReaderFast(r io.ReadSeeker) (string, int64, error) { const ( // Do not change once set in stone! maxChunks = 8 @@ -284,7 +286,7 @@ func MD5FromFileFast(r io.ReadSeeker) (string, error) { if err == io.EOF { break } - return "", err + return "", 0, err } } @@ -294,12 +296,14 @@ func MD5FromFileFast(r io.ReadSeeker) (string, error) { h.Write(buff) break } - return "", err + return "", 0, err } h.Write(buff) } - return hex.EncodeToString(h.Sum(nil)), nil + size, _ := r.Seek(0, io.SeekEnd) + + return hex.EncodeToString(h.Sum(nil)), size, nil } // MD5FromReader creates a MD5 hash from the given reader. @@ -328,3 +332,21 @@ func PrintFs(fs afero.Fs, path string, w io.Writer) { return nil }) } + +// FormatByteCount pretty formats b. 
+func FormatByteCount(bc uint64) string { + const ( + Gigabyte = 1 << 30 + Megabyte = 1 << 20 + Kilobyte = 1 << 10 + ) + switch { + case bc > Gigabyte || -bc > Gigabyte: + return fmt.Sprintf("%.2f GB", float64(bc)/Gigabyte) + case bc > Megabyte || -bc > Megabyte: + return fmt.Sprintf("%.2f MB", float64(bc)/Megabyte) + case bc > Kilobyte || -bc > Kilobyte: + return fmt.Sprintf("%.2f KB", float64(bc)/Kilobyte) + } + return fmt.Sprintf("%d B", bc) +} diff --git a/helpers/general_test.go b/helpers/general_test.go index 1463458fa..54607d699 100644 --- a/helpers/general_test.go +++ b/helpers/general_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -259,19 +259,19 @@ func TestUniqueStringsSorted(t *testing.T) { func TestFastMD5FromFile(t *testing.T) { fs := afero.NewMemMapFs() - if err := afero.WriteFile(fs, "small.txt", []byte("abc"), 0777); err != nil { + if err := afero.WriteFile(fs, "small.txt", []byte("abc"), 0o777); err != nil { t.Fatal(err) } - if err := afero.WriteFile(fs, "small2.txt", []byte("abd"), 0777); err != nil { + if err := afero.WriteFile(fs, "small2.txt", []byte("abd"), 0o777); err != nil { t.Fatal(err) } - if err := afero.WriteFile(fs, "bigger.txt", []byte(strings.Repeat("a bc d e", 100)), 0777); err != nil { + if err := afero.WriteFile(fs, "bigger.txt", []byte(strings.Repeat("a bc d e", 100)), 0o777); err != nil { t.Fatal(err) } - if err := afero.WriteFile(fs, "bigger2.txt", []byte(strings.Repeat("c d e f g", 100)), 0777); err != nil { + if err := afero.WriteFile(fs, "bigger2.txt", []byte(strings.Repeat("c d e f g", 100)), 0o777); err != nil { t.Fatal(err) } @@ -292,19 +292,19 @@ func TestFastMD5FromFile(t *testing.T) { defer bf1.Close() defer bf2.Close() - m1, err := helpers.MD5FromFileFast(sf1) + m1, _, err := 
helpers.MD5FromReaderFast(sf1) c.Assert(err, qt.IsNil) c.Assert(m1, qt.Equals, "e9c8989b64b71a88b4efb66ad05eea96") - m2, err := helpers.MD5FromFileFast(sf2) + m2, _, err := helpers.MD5FromReaderFast(sf2) c.Assert(err, qt.IsNil) c.Assert(m2, qt.Not(qt.Equals), m1) - m3, err := helpers.MD5FromFileFast(bf1) + m3, _, err := helpers.MD5FromReaderFast(bf1) c.Assert(err, qt.IsNil) c.Assert(m3, qt.Not(qt.Equals), m2) - m4, err := helpers.MD5FromFileFast(bf2) + m4, _, err := helpers.MD5FromReaderFast(bf2) c.Assert(err, qt.IsNil) c.Assert(m4, qt.Not(qt.Equals), m3) @@ -320,7 +320,7 @@ func BenchmarkMD5FromFileFast(b *testing.B) { b.Run(fmt.Sprintf("full=%t", full), func(b *testing.B) { for i := 0; i < b.N; i++ { b.StopTimer() - if err := afero.WriteFile(fs, "file.txt", []byte(strings.Repeat("1234567890", 2000)), 0777); err != nil { + if err := afero.WriteFile(fs, "file.txt", []byte(strings.Repeat("1234567890", 2000)), 0o777); err != nil { b.Fatal(err) } f, err := fs.Open("file.txt") @@ -333,7 +333,7 @@ func BenchmarkMD5FromFileFast(b *testing.B) { b.Fatal(err) } } else { - if _, err := helpers.MD5FromFileFast(f); err != nil { + if _, _, err := helpers.MD5FromReaderFast(f); err != nil { b.Fatal(err) } } @@ -350,7 +350,7 @@ func BenchmarkUniqueStrings(b *testing.B) { for i := 0; i < b.N; i++ { result := helpers.UniqueStrings(input) if len(result) != 6 { - b.Fatal(fmt.Sprintf("invalid count: %d", len(result))) + b.Fatalf("invalid count: %d", len(result)) } } }) @@ -369,7 +369,7 @@ func BenchmarkUniqueStrings(b *testing.B) { result := helpers.UniqueStringsReuse(inputc) if len(result) != 6 { - b.Fatal(fmt.Sprintf("invalid count: %d", len(result))) + b.Fatalf("invalid count: %d", len(result)) } } }) @@ -388,7 +388,7 @@ func BenchmarkUniqueStrings(b *testing.B) { result := helpers.UniqueStringsSorted(inputc) if len(result) != 6 { - b.Fatal(fmt.Sprintf("invalid count: %d", len(result))) + b.Fatalf("invalid count: %d", len(result)) } } }) diff --git a/helpers/path.go 
b/helpers/path.go index 3172d3452..4a6c9a688 100644 --- a/helpers/path.go +++ b/helpers/path.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -23,12 +23,12 @@ import ( "regexp" "sort" "strings" - "unicode" "github.com/gohugoio/hugo/common/herrors" "github.com/gohugoio/hugo/common/text" "github.com/gohugoio/hugo/htesting" + "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/common/hugio" @@ -41,7 +41,11 @@ import ( // whilst preserving the original casing of the string. // E.g. Social Media -> Social-Media func (p *PathSpec) MakePath(s string) string { - return p.UnicodeSanitize(s) + s = paths.Sanitize(s) + if p.Cfg.RemovePathAccents() { + s = text.RemoveAccentsString(s) + } + return s } // MakePathsSanitized applies MakePathSanitized on every item in the slice @@ -59,74 +63,13 @@ func (p *PathSpec) MakePathSanitized(s string) string { return strings.ToLower(p.MakePath(s)) } -// ToSlashTrimLeading is just a filepath.ToSlaas with an added / prefix trimmer. -func ToSlashTrimLeading(s string) string { - return strings.TrimPrefix(filepath.ToSlash(s), "/") -} - // MakeTitle converts the path given to a suitable title, trimming whitespace // and replacing hyphens with whitespace. func MakeTitle(inpath string) string { return strings.Replace(strings.TrimSpace(inpath), "-", " ", -1) } -// From https://golang.org/src/net/url/url.go -func ishex(c rune) bool { - switch { - case '0' <= c && c <= '9': - return true - case 'a' <= c && c <= 'f': - return true - case 'A' <= c && c <= 'F': - return true - } - return false -} - -// UnicodeSanitize sanitizes string to be used in Hugo URL's, allowing only -// a predefined set of special Unicode characters. 
-// If RemovePathAccents configuration flag is enabled, Unicode accents -// are also removed. -// Hyphens in the original input are maintained. -// Spaces will be replaced with a single hyphen, and sequential replacement hyphens will be reduced to one. -func (p *PathSpec) UnicodeSanitize(s string) string { - if p.Cfg.RemovePathAccents() { - s = text.RemoveAccentsString(s) - } - - source := []rune(s) - target := make([]rune, 0, len(source)) - var ( - prependHyphen bool - wasHyphen bool - ) - - for i, r := range source { - isAllowed := r == '.' || r == '/' || r == '\\' || r == '_' || r == '#' || r == '+' || r == '~' || r == '-' || r == '@' - isAllowed = isAllowed || unicode.IsLetter(r) || unicode.IsDigit(r) || unicode.IsMark(r) - isAllowed = isAllowed || (r == '%' && i+2 < len(source) && ishex(source[i+1]) && ishex(source[i+2])) - - if isAllowed { - // track explicit hyphen in input; no need to add a new hyphen if - // we just saw one. - wasHyphen = r == '-' - - if prependHyphen { - // if currently have a hyphen, don't prepend an extra one - if !wasHyphen { - target = append(target, '-') - } - prependHyphen = false - } - target = append(target, r) - } else if len(target) > 0 && !wasHyphen && unicode.IsSpace(r) { - prependHyphen = true - } - } - - return string(target) -} - +// MakeTitleInPath converts the path given to a suitable title, trimming whitespace func MakePathRelative(inPath string, possibleDirectories ...string) (string, error) { for _, currentPath := range possibleDirectories { if strings.HasPrefix(inPath, currentPath) { @@ -317,13 +260,12 @@ func FindCWD() (string, error) { return path, nil } -// SymbolicWalk is like filepath.Walk, but it follows symbolic links. -func SymbolicWalk(fs afero.Fs, root string, walker hugofs.WalkFunc) error { +// Walk walks the file tree rooted at root, calling walkFn for each file or +// directory in the tree, including root. 
+func Walk(fs afero.Fs, root string, walker hugofs.WalkFunc) error { if _, isOs := fs.(*afero.OsFs); isOs { - // Mainly to track symlinks. fs = hugofs.NewBaseFileDecorator(fs) } - w := hugofs.NewWalkway(hugofs.WalkwayConfig{ Fs: fs, Root: root, @@ -333,16 +275,6 @@ func SymbolicWalk(fs afero.Fs, root string, walker hugofs.WalkFunc) error { return w.Walk() } -// LstatIfPossible can be used to call Lstat if possible, else Stat. -func LstatIfPossible(fs afero.Fs, path string) (os.FileInfo, error) { - if lstater, ok := fs.(afero.Lstater); ok { - fi, _, err := lstater.LstatIfPossible(path) - return fi, err - } - - return fs.Stat(path) -} - // SafeWriteToDisk is the same as WriteToDisk // but it also checks to see if file/directory already exists. func SafeWriteToDisk(inpath string, r io.Reader, fs afero.Fs) (err error) { @@ -382,7 +314,7 @@ func OpenFileForWriting(fs afero.Fs, filename string) (afero.File, error) { if !herrors.IsNotExist(err) { return nil, err } - if err = fs.MkdirAll(filepath.Dir(filename), 0777); err != nil { // before umask + if err = fs.MkdirAll(filepath.Dir(filename), 0o777); err != nil { // before umask return nil, err } f, err = fs.Create(filename) @@ -402,7 +334,7 @@ func GetCacheDir(fs afero.Fs, cacheDir string) (string, error) { return "", err } if !exists { - err := fs.MkdirAll(cacheDir, 0777) // Before umask + err := fs.MkdirAll(cacheDir, 0o777) // Before umask if err != nil { return "", fmt.Errorf("failed to create cache dir: %w", err) } @@ -417,7 +349,7 @@ func GetCacheDir(fs afero.Fs, cacheDir string) (string, error) { userCacheDir, err := os.UserCacheDir() if err == nil { cacheDir := filepath.Join(userCacheDir, hugoCacheBase) - if err := fs.Mkdir(cacheDir, 0777); err == nil || os.IsExist(err) { + if err := fs.Mkdir(cacheDir, 0o777); err == nil || os.IsExist(err) { return cacheDir, nil } } @@ -494,12 +426,3 @@ func IsEmpty(path string, fs afero.Fs) (bool, error) { func Exists(path string, fs afero.Fs) (bool, error) { return 
afero.Exists(fs, path) } - -// AddTrailingSlash adds a trailing Unix styled slash (/) if not already -// there. -func AddTrailingSlash(path string) string { - if !strings.HasSuffix(path, "/") { - path += "/" - } - return path -} diff --git a/helpers/path_test.go b/helpers/path_test.go index 45b692923..6f3699589 100644 --- a/helpers/path_test.go +++ b/helpers/path_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -134,7 +134,6 @@ func TestMakePathRelative(t *testing.T) { } func TestGetDottedRelativePath(t *testing.T) { - // on Windows this will receive both kinds, both country and western ... for _, f := range []func(string) string{filepath.FromSlash, func(s string) string { return s }} { doTestGetDottedRelativePath(f, t) } @@ -258,7 +257,7 @@ func createNonZeroSizedFileInTempDir(t *testing.T) *os.File { f := createZeroSizedFileInTempDir(t) byteString := []byte("byteString") - err := os.WriteFile(f.Name(), byteString, 0644) + err := os.WriteFile(f.Name(), byteString, 0o644) if err != nil { t.Error(err) } diff --git a/helpers/pathspec.go b/helpers/pathspec.go index c9bb49038..88571b93c 100644 --- a/helpers/pathspec.go +++ b/helpers/pathspec.go @@ -74,9 +74,5 @@ func NewPathSpecWithBaseBaseFsProvided(fs *hugofs.Fs, cfg config.AllProvider, lo // PermalinkForBaseURL creates a permalink from the given link and baseURL. 
func (p *PathSpec) PermalinkForBaseURL(link, baseURL string) string { - link = strings.TrimPrefix(link, "/") - if !strings.HasSuffix(baseURL, "/") { - baseURL += "/" - } - return baseURL + link + return baseURL + strings.TrimPrefix(link, "/") } diff --git a/helpers/processing_stats.go b/helpers/processing_stats.go index 3e3e9a3ca..540060aa2 100644 --- a/helpers/processing_stats.go +++ b/helpers/processing_stats.go @@ -31,7 +31,6 @@ type ProcessingStats struct { ProcessedImages uint64 Files uint64 Aliases uint64 - Sitemaps uint64 Cleaned uint64 } @@ -48,7 +47,6 @@ func (s *ProcessingStats) toVals() []processingStatsTitleVal { {"Static files", s.Static}, {"Processed images", s.ProcessedImages}, {"Aliases", s.Aliases}, - {"Sitemaps", s.Sitemaps}, {"Cleaned", s.Cleaned}, } } diff --git a/helpers/url.go b/helpers/url.go index 7d86c529c..d5a613029 100644 --- a/helpers/url.go +++ b/helpers/url.go @@ -20,55 +20,8 @@ import ( "strings" "github.com/gohugoio/hugo/common/paths" - - "github.com/PuerkitoBio/purell" ) -func sanitizeURLWithFlags(in string, f purell.NormalizationFlags) string { - s, err := purell.NormalizeURLString(in, f) - if err != nil { - return in - } - - // Temporary workaround for the bug fix and resulting - // behavioral change in purell.NormalizeURLString(): - // a leading '/' was inadvertently added to relative links, - // but no longer, see #878. - // - // I think the real solution is to allow Hugo to - // make relative URL with relative path, - // e.g. "../../post/hello-again/", as wished by users - // in issues #157, #622, etc., without forcing - // relative URLs to begin with '/'. - // Once the fixes are in, let's remove this kludge - // and restore SanitizeURL() to the way it was. 
- // -- @anthonyfok, 2015-02-16 - // - // Begin temporary kludge - u, err := url.Parse(s) - if err != nil { - panic(err) - } - if len(u.Path) > 0 && !strings.HasPrefix(u.Path, "/") { - u.Path = "/" + u.Path - } - return u.String() - // End temporary kludge - - // return s - -} - -// SanitizeURL sanitizes the input URL string. -func SanitizeURL(in string) string { - return sanitizeURLWithFlags(in, purell.FlagsSafe|purell.FlagRemoveTrailingSlash|purell.FlagRemoveDotSegments|purell.FlagRemoveDuplicateSlashes|purell.FlagRemoveUnnecessaryHostDots|purell.FlagRemoveEmptyPortSeparator) -} - -// SanitizeURLKeepTrailingSlash is the same as SanitizeURL, but will keep any trailing slash. -func SanitizeURLKeepTrailingSlash(in string) string { - return sanitizeURLWithFlags(in, purell.FlagsSafe|purell.FlagRemoveDotSegments|purell.FlagRemoveDuplicateSlashes|purell.FlagRemoveUnnecessaryHostDots|purell.FlagRemoveEmptyPortSeparator) -} - // URLize is similar to MakePath, but with Unicode handling // Example: // diff --git a/helpers/url_test.go b/helpers/url_test.go index 448756b5b..ce1b24487 100644 --- a/helpers/url_test.go +++ b/helpers/url_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -20,7 +20,6 @@ import ( qt "github.com/frankban/quicktest" "github.com/gohugoio/hugo/config" - "github.com/gohugoio/hugo/helpers" ) func TestURLize(t *testing.T) { @@ -193,7 +192,6 @@ func doTestRelURL(t testing.TB, defaultInSubDir, addLanguage, multilingual bool, canonify bool expected string }{ - // Issue 9994 {"/foo/bar", "https://example.org/foo/", false, "MULTI/foo/bar"}, {"foo/bar", "https://example.org/foo/", false, "/fooMULTI/foo/bar"}, @@ -211,7 +209,7 @@ func doTestRelURL(t testing.TB, defaultInSubDir, addLanguage, multilingual bool, {"test/", "http://base/sub/", false, "/subMULTI/test/"}, {"/test/", "http://base/sub/", true, "MULTI/test/"}, {"", "http://base/ace/", false, "/aceMULTI/"}, - {"", "http://base/ace", false, "/aceMULTI"}, + {"", "http://base/ace", false, "/aceMULTI/"}, {"http://abs", "http://base/", false, "http://abs"}, {"//schemaless", "http://base/", false, "//schemaless"}, } @@ -231,7 +229,6 @@ func doTestRelURL(t testing.TB, defaultInSubDir, addLanguage, multilingual bool, for i, test := range tests { c.Run(fmt.Sprintf("%v/defaultInSubDir=%t;addLanguage=%t;multilingual=%t/%s", test, defaultInSubDir, addLanguage, multilingual, lang), func(c *qt.C) { - v.Set("baseURL", test.baseURL) v.Set("canonifyURLs", test.canonify) defaultContentLanguage := lang @@ -255,36 +252,6 @@ func doTestRelURL(t testing.TB, defaultInSubDir, addLanguage, multilingual bool, c.Assert(output, qt.Equals, expected, qt.Commentf("[%d] %s", i, test.input)) }) - - } -} - -func TestSanitizeURL(t *testing.T) { - tests := []struct { - input string - expected string - }{ - {"http://foo.bar/", "http://foo.bar"}, - {"http://foo.bar", "http://foo.bar"}, // issue #1105 - {"http://foo.bar/zoo/", "http://foo.bar/zoo"}, // issue #931 - } - - for i, test := range tests { - o1 := helpers.SanitizeURL(test.input) - o2 := helpers.SanitizeURLKeepTrailingSlash(test.input) - - expected2 := test.expected - - if strings.HasSuffix(test.input, "/") && !strings.HasSuffix(expected2, "/") { - 
expected2 += "/" - } - - if o1 != test.expected { - t.Errorf("[%d] 1: Expected %#v, got %#v\n", i, test.expected, o1) - } - if o2 != expected2 { - t.Errorf("[%d] 2: Expected %#v, got %#v\n", i, expected2, o2) - } } } diff --git a/htesting/test_helpers.go b/htesting/test_helpers.go index 21b4b831e..ff14de58d 100644 --- a/htesting/test_helpers.go +++ b/htesting/test_helpers.go @@ -20,8 +20,11 @@ import ( "runtime" "strconv" "strings" + "testing" "time" + qt "github.com/frankban/quicktest" + "github.com/spf13/afero" ) @@ -124,6 +127,11 @@ func GoMinorVersion() int { return extractMinorVersionFromGoTag(runtime.Version()) } +// IsWindows reports whether this runs on Windows. +func IsWindows() bool { + return runtime.GOOS == "windows" +} + var goMinorVersionRe = regexp.MustCompile(`go1.(\d*)`) func extractMinorVersionFromGoTag(tag string) int { @@ -140,5 +148,33 @@ func extractMinorVersionFromGoTag(tag string) int { // a commit hash, not useful. return -1 +} + +// NewPinnedRunner creates a new runner that will only Run tests matching the given regexp. +// This is added mostly to use in combination with https://marketplace.visualstudio.com/items?itemName=windmilleng.vscode-go-autotest +func NewPinnedRunner(t testing.TB, pinnedTestRe string) *PinnedRunner { + if pinnedTestRe == "" { + pinnedTestRe = ".*" + } + pinnedTestRe = strings.ReplaceAll(pinnedTestRe, "_", " ") + re := regexp.MustCompile("(?i)" + pinnedTestRe) + return &PinnedRunner{ + c: qt.New(t), + re: re, + } +} + +type PinnedRunner struct { + c *qt.C + re *regexp.Regexp +} +func (r *PinnedRunner) Run(name string, f func(c *qt.C)) bool { + if !r.re.MatchString(name) { + if IsGitHubAction() { + r.c.Fatal("found pinned test when running in CI") + } + return true + } + return r.c.Run(name, f) } diff --git a/hugofs/component_fs.go b/hugofs/component_fs.go new file mode 100644 index 000000000..c55f15957 --- /dev/null +++ b/hugofs/component_fs.go @@ -0,0 +1,284 @@ +// Copyright 2024 The Hugo Authors. 
All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package hugofs + +import ( + iofs "io/fs" + "os" + "path" + "runtime" + "sort" + + "github.com/gohugoio/hugo/common/herrors" + "github.com/gohugoio/hugo/common/hstrings" + "github.com/gohugoio/hugo/common/paths" + "github.com/gohugoio/hugo/hugofs/files" + "github.com/spf13/afero" + "golang.org/x/text/unicode/norm" +) + +// NewComponentFs creates a new component filesystem. +func NewComponentFs(opts ComponentFsOptions) *componentFs { + if opts.Component == "" { + panic("ComponentFsOptions.PathParser.Component must be set") + } + if opts.Fs == nil { + panic("ComponentFsOptions.Fs must be set") + } + bfs := NewBasePathFs(opts.Fs, opts.Component) + return &componentFs{Fs: bfs, opts: opts} +} + +var _ FilesystemUnwrapper = (*componentFs)(nil) + +// componentFs is a filesystem that holds one of the Hugo components, e.g. content, layouts etc. +type componentFs struct { + afero.Fs + + opts ComponentFsOptions +} + +func (fs *componentFs) UnwrapFilesystem() afero.Fs { + return fs.Fs +} + +type componentFsDir struct { + *noOpRegularFileOps + DirOnlyOps + name string // the name passed to Open + fs *componentFs +} + +// ReadDir reads count entries from this virtual directorie and +// sorts the entries according to the component filesystem rules. 
+func (f *componentFsDir) ReadDir(count int) ([]iofs.DirEntry, error) { + fis, err := f.DirOnlyOps.(iofs.ReadDirFile).ReadDir(-1) + if err != nil { + return nil, err + } + + // Filter out any symlinks. + n := 0 + for _, fi := range fis { + // IsDir will always be false for symlinks. + keep := fi.IsDir() + if !keep { + // This is unfortunate, but is the only way to determine if it is a symlink. + info, err := fi.Info() + if err != nil { + if herrors.IsNotExist(err) { + continue + } + return nil, err + } + if info.Mode()&os.ModeSymlink == 0 { + keep = true + } + } + if keep { + fis[n] = fi + n++ + } + } + + fis = fis[:n] + + for _, fi := range fis { + s := path.Join(f.name, fi.Name()) + _ = f.fs.applyMeta(fi, s) + + } + + sort.Slice(fis, func(i, j int) bool { + fimi, fimj := fis[i].(FileMetaInfo), fis[j].(FileMetaInfo) + if fimi.IsDir() != fimj.IsDir() { + return fimi.IsDir() + } + fimim, fimjm := fimi.Meta(), fimj.Meta() + + if fimim.ModuleOrdinal != fimjm.ModuleOrdinal { + switch f.fs.opts.Component { + case files.ComponentFolderI18n: + // The way the language files gets loaded means that + // we need to provide the least important files first (e.g. the theme files). + return fimim.ModuleOrdinal > fimjm.ModuleOrdinal + default: + return fimim.ModuleOrdinal < fimjm.ModuleOrdinal + } + } + + pii, pij := fimim.PathInfo, fimjm.PathInfo + if pii != nil { + basei, basej := pii.Base(), pij.Base() + exti, extj := pii.Ext(), pij.Ext() + if f.fs.opts.Component == files.ComponentFolderContent { + // Pull bundles to the top. + if pii.IsBundle() != pij.IsBundle() { + return pii.IsBundle() + } + } + + if exti != extj { + // This pulls .md above .html. + return exti > extj + } + + if basei != basej { + return basei < basej + } + } + + if fimim.Weight != fimjm.Weight { + return fimim.Weight > fimjm.Weight + } + + return fimi.Name() < fimj.Name() + }) + + if f.fs.opts.Component == files.ComponentFolderContent { + // Finally filter out any duplicate content files, e.g. 
page.md and page.html. + n := 0 + seen := map[hstrings.Tuple]bool{} + for _, fi := range fis { + fim := fi.(FileMetaInfo) + pi := fim.Meta().PathInfo + keep := fim.IsDir() || !pi.IsContent() + + if !keep { + baseLang := hstrings.Tuple{First: pi.Base(), Second: fim.Meta().Lang} + if !seen[baseLang] { + keep = true + seen[baseLang] = true + } + } + + if keep { + fis[n] = fi + n++ + } + } + + fis = fis[:n] + } + + return fis, nil +} + +func (f *componentFsDir) Stat() (iofs.FileInfo, error) { + fi, err := f.DirOnlyOps.Stat() + if err != nil { + return nil, err + } + return f.fs.applyMeta(fi, f.name), nil +} + +func (fs *componentFs) Stat(name string) (os.FileInfo, error) { + fi, err := fs.Fs.Stat(name) + if err != nil { + return nil, err + } + return fs.applyMeta(fi, name), nil +} + +func (fs *componentFs) applyMeta(fi FileNameIsDir, name string) FileMetaInfo { + if runtime.GOOS == "darwin" { + name = norm.NFC.String(name) + } + fim := fi.(FileMetaInfo) + meta := fim.Meta() + meta.PathInfo = fs.opts.PathParser.Parse(fs.opts.Component, name) + if !fim.IsDir() { + if fileLang := meta.PathInfo.Lang(); fileLang != "" { + // A valid lang set in filename. + // Give priority to myfile.sv.txt inside the sv filesystem. 
+ meta.Weight++ + meta.Lang = fileLang + } + } + + if meta.Lang == "" { + meta.Lang = fs.opts.DefaultContentLanguage + } + + langIdx, found := fs.opts.PathParser.LanguageIndex[meta.Lang] + if !found { + panic("no language found for " + meta.Lang) + } + meta.LangIndex = langIdx + + if fi.IsDir() { + meta.OpenFunc = func() (afero.File, error) { + return fs.Open(name) + } + } + + return fim +} + +func (f *componentFsDir) Readdir(count int) ([]os.FileInfo, error) { + panic("not supported: Use ReadDir") +} + +func (f *componentFsDir) Readdirnames(count int) ([]string, error) { + dirsi, err := f.DirOnlyOps.(iofs.ReadDirFile).ReadDir(count) + if err != nil { + return nil, err + } + + dirs := make([]string, len(dirsi)) + for i, d := range dirsi { + dirs[i] = d.Name() + } + return dirs, nil +} + +type ComponentFsOptions struct { + // The filesystem where one or more components are mounted. + Fs afero.Fs + + // The component name, e.g. "content", "layouts" etc. + Component string + + DefaultContentLanguage string + + // The parser used to parse paths provided by this filesystem. 
+ PathParser paths.PathParser +} + +func (fs *componentFs) Open(name string) (afero.File, error) { + f, err := fs.Fs.Open(name) + if err != nil { + return nil, err + } + + fi, err := f.Stat() + if err != nil { + if err != errIsDir { + f.Close() + return nil, err + } + } else if !fi.IsDir() { + return f, nil + } + + return &componentFsDir{ + DirOnlyOps: f, + name: name, + fs: fs, + }, nil +} + +func (fs *componentFs) ReadDir(name string) ([]os.FileInfo, error) { + panic("not implemented") +} diff --git a/hugofs/decorators.go b/hugofs/decorators.go index 47b4266df..405c81ce4 100644 --- a/hugofs/decorators.go +++ b/hugofs/decorators.go @@ -15,63 +15,25 @@ package hugofs import ( "fmt" + "io/fs" "os" "path/filepath" - "strings" - "github.com/gohugoio/hugo/common/herrors" "github.com/spf13/afero" ) -var ( - _ FilesystemUnwrapper = (*baseFileDecoratorFs)(nil) -) +var _ FilesystemUnwrapper = (*baseFileDecoratorFs)(nil) func decorateDirs(fs afero.Fs, meta *FileMeta) afero.Fs { ffs := &baseFileDecoratorFs{Fs: fs} - decorator := func(fi os.FileInfo, name string) (os.FileInfo, error) { + decorator := func(fi FileNameIsDir, name string) (FileNameIsDir, error) { if !fi.IsDir() { // Leave regular files as they are. return fi, nil } - return decorateFileInfo(fi, fs, nil, "", "", meta), nil - } - - ffs.decorate = decorator - - return ffs -} - -func decoratePath(fs afero.Fs, createPath func(name string) string) afero.Fs { - ffs := &baseFileDecoratorFs{Fs: fs} - - decorator := func(fi os.FileInfo, name string) (os.FileInfo, error) { - path := createPath(name) - - return decorateFileInfo(fi, fs, nil, "", path, nil), nil - } - - ffs.decorate = decorator - - return ffs -} - -// DecorateBasePathFs adds Path info to files and directories in the -// provided BasePathFs, using the base as base. 
-func DecorateBasePathFs(base *afero.BasePathFs) afero.Fs { - basePath, _ := base.RealPath("") - if !strings.HasSuffix(basePath, filepathSeparator) { - basePath += filepathSeparator - } - - ffs := &baseFileDecoratorFs{Fs: base} - - decorator := func(fi os.FileInfo, name string) (os.FileInfo, error) { - path := strings.TrimPrefix(name, basePath) - - return decorateFileInfo(fi, base, nil, "", path, nil), nil + return decorateFileInfo(fi, nil, "", meta), nil } ffs.decorate = decorator @@ -84,7 +46,7 @@ func DecorateBasePathFs(base *afero.BasePathFs) afero.Fs { func NewBaseFileDecorator(fs afero.Fs, callbacks ...func(fi FileMetaInfo)) afero.Fs { ffs := &baseFileDecoratorFs{Fs: fs} - decorator := func(fi os.FileInfo, filename string) (os.FileInfo, error) { + decorator := func(fi FileNameIsDir, filename string) (FileNameIsDir, error) { // Store away the original in case it's a symlink. meta := NewFileMeta() meta.Name = fi.Name() @@ -92,38 +54,24 @@ func NewBaseFileDecorator(fs afero.Fs, callbacks ...func(fi FileMetaInfo)) afero if fi.IsDir() { meta.JoinStatFunc = func(name string) (FileMetaInfo, error) { joinedFilename := filepath.Join(filename, name) - fi, _, err := lstatIfPossible(fs, joinedFilename) + fi, err := fs.Stat(joinedFilename) if err != nil { return nil, err } - - fi, err = ffs.decorate(fi, joinedFilename) + fim, err := ffs.decorate(fi, joinedFilename) if err != nil { return nil, err } - return fi.(FileMetaInfo), nil - } - } - - isSymlink := isSymlink(fi) - if isSymlink { - meta.OriginalFilename = filename - var link string - var err error - link, fi, err = evalSymlinks(fs, filename) - if err != nil { - return nil, err + return fim.(FileMetaInfo), nil } - filename = link - meta.IsSymlink = true } opener := func() (afero.File, error) { return ffs.open(filename) } - fim := decorateFileInfo(fi, ffs, opener, filename, "", meta) + fim := decorateFileInfo(fi, opener, filename, meta) for _, cb := range callbacks { cb(fim) @@ -136,23 +84,9 @@ func 
NewBaseFileDecorator(fs afero.Fs, callbacks ...func(fi FileMetaInfo)) afero return ffs } -func evalSymlinks(fs afero.Fs, filename string) (string, os.FileInfo, error) { - link, err := filepath.EvalSymlinks(filename) - if err != nil { - return "", nil, err - } - - fi, err := fs.Stat(link) - if err != nil { - return "", nil, err - } - - return link, fi, nil -} - type baseFileDecoratorFs struct { afero.Fs - decorate func(fi os.FileInfo, filename string) (os.FileInfo, error) + decorate func(fi FileNameIsDir, name string) (FileNameIsDir, error) } func (fs *baseFileDecoratorFs) UnwrapFilesystem() afero.Fs { @@ -165,29 +99,11 @@ func (fs *baseFileDecoratorFs) Stat(name string) (os.FileInfo, error) { return nil, err } - return fs.decorate(fi, name) -} - -func (fs *baseFileDecoratorFs) LstatIfPossible(name string) (os.FileInfo, bool, error) { - var ( - fi os.FileInfo - err error - ok bool - ) - - if lstater, isLstater := fs.Fs.(afero.Lstater); isLstater { - fi, ok, err = lstater.LstatIfPossible(name) - } else { - fi, err = fs.Fs.Stat(name) - } - + fim, err := fs.decorate(fi, name) if err != nil { - return nil, false, err + return nil, err } - - fi, err = fs.decorate(fi, name) - - return fi, ok, err + return fim.(os.FileInfo), nil } func (fs *baseFileDecoratorFs) Open(name string) (afero.File, error) { @@ -207,35 +123,32 @@ type baseFileDecoratorFile struct { fs *baseFileDecoratorFs } -func (l *baseFileDecoratorFile) Readdir(c int) (ofi []os.FileInfo, err error) { - dirnames, err := l.File.Readdirnames(c) +func (l *baseFileDecoratorFile) ReadDir(n int) ([]fs.DirEntry, error) { + fis, err := l.File.(fs.ReadDirFile).ReadDir(-1) if err != nil { return nil, err } - fisp := make([]os.FileInfo, 0, len(dirnames)) + fisp := make([]fs.DirEntry, len(fis)) - for _, dirname := range dirnames { - filename := dirname - - if l.Name() != "" && l.Name() != filepathSeparator { - filename = filepath.Join(l.Name(), dirname) + for i, fi := range fis { + filename := fi.Name() + if l.Name() != "" 
{ + filename = filepath.Join(l.Name(), fi.Name()) } - // We need to resolve any symlink info. - fi, _, err := lstatIfPossible(l.fs.Fs, filename) - if err != nil { - if herrors.IsNotExist(err) { - continue - } - return nil, err - } - fi, err = l.fs.decorate(fi, filename) + fid, err := l.fs.decorate(fi, filename) if err != nil { return nil, fmt.Errorf("decorate: %w", err) } - fisp = append(fisp, fi) + + fisp[i] = fid.(fs.DirEntry) + } return fisp, err } + +func (l *baseFileDecoratorFile) Readdir(c int) (ofi []os.FileInfo, err error) { + panic("not supported: Use ReadDir") +} diff --git a/hugofs/language_merge.go b/hugofs/dirsmerger.go index a2fa411a9..392353e27 100644 --- a/hugofs/language_merge.go +++ b/hugofs/dirsmerger.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -14,12 +14,14 @@ package hugofs import ( - "os" + "io/fs" + + "github.com/bep/overlayfs" ) // LanguageDirsMerger implements the overlayfs.DirsMerger func, which is used // to merge two directories. -var LanguageDirsMerger = func(lofi, bofi []os.FileInfo) []os.FileInfo { +var LanguageDirsMerger overlayfs.DirsMerger = func(lofi, bofi []fs.DirEntry) []fs.DirEntry { for _, fi1 := range bofi { fim1 := fi1.(FileMetaInfo) var found bool @@ -37,3 +39,27 @@ var LanguageDirsMerger = func(lofi, bofi []os.FileInfo) []os.FileInfo { return lofi } + +// AppendDirsMerger merges two directories keeping all regular files +// with the first slice as the base. +// Duplicate directories in the secnond slice will be ignored. +// This strategy is used for the i18n and data fs where we need all entries. +var AppendDirsMerger overlayfs.DirsMerger = func(lofi, bofi []fs.DirEntry) []fs.DirEntry { + for _, fi1 := range bofi { + var found bool + // Remove duplicate directories. 
+ if fi1.IsDir() { + for _, fi2 := range lofi { + if fi2.IsDir() && fi2.Name() == fi1.Name() { + found = true + break + } + } + } + if !found { + lofi = append(lofi, fi1) + } + } + + return lofi +} diff --git a/hugofs/fileinfo.go b/hugofs/fileinfo.go index 773352ea8..6d6122c0c 100644 --- a/hugofs/fileinfo.go +++ b/hugofs/fileinfo.go @@ -16,21 +16,25 @@ package hugofs import ( "errors" + "fmt" + "io" + "io/fs" "os" "path/filepath" "reflect" "runtime" "sort" - "strings" + "sync" "time" "github.com/gohugoio/hugo/hugofs/glob" - "github.com/gohugoio/hugo/hugofs/files" "golang.org/x/text/unicode/norm" + "github.com/gohugoio/hugo/common/herrors" "github.com/gohugoio/hugo/common/hreflect" "github.com/gohugoio/hugo/common/htime" + "github.com/gohugoio/hugo/common/paths" "github.com/spf13/afero" ) @@ -39,48 +43,37 @@ func NewFileMeta() *FileMeta { return &FileMeta{} } -// PathFile returns the relative file path for the file source. -func (f *FileMeta) PathFile() string { - if f.BaseDir == "" { - return "" - } - return strings.TrimPrefix(strings.TrimPrefix(f.Filename, f.BaseDir), filepathSeparator) -} - type FileMeta struct { - Name string - Filename string - Path string - PathWalk string - OriginalFilename string - BaseDir string - - SourceRoot string - MountRoot string - Module string - - Weight int - IsOrdered bool - IsSymlink bool - IsRootFile bool - IsProject bool - Watch bool - - Classifier files.ContentClass - - SkipDir bool - - Lang string - TranslationBaseName string - TranslationBaseNameWithExt string - Translations []string - - Fs afero.Fs + PathInfo *paths.Path + Name string + Filename string + + BaseDir string + SourceRoot string + Module string + ModuleOrdinal int + Component string + + Weight int + IsProject bool + Watch bool + + // The lang associated with this file. This may be + // either the language set in the filename or + // the language defined in the source mount configuration. + Lang string + // The language index for the above lang. 
This is the index + // in the sorted list of languages/sites. + LangIndex int + OpenFunc func() (afero.File, error) JoinStatFunc func(name string) (FileMetaInfo, error) // Include only files or directories that match. InclusionFilter *glob.FilenameFilter + + // Rename the name part of the file (not the directory). + Rename func(name string, toFrom bool) string } func (m *FileMeta) Copy() *FileMeta { @@ -120,6 +113,15 @@ func (f *FileMeta) Open() (afero.File, error) { return f.OpenFunc() } +func (f *FileMeta) ReadAll() ([]byte, error) { + file, err := f.Open() + if err != nil { + return nil, err + } + defer file.Close() + return io.ReadAll(file) +} + func (f *FileMeta) JoinStat(name string) (FileMetaInfo, error) { if f.JoinStatFunc == nil { return nil, os.ErrNotExist @@ -128,50 +130,123 @@ func (f *FileMeta) JoinStat(name string) (FileMetaInfo, error) { } type FileMetaInfo interface { - os.FileInfo - // Meta is for internal use. + fs.DirEntry + MetaProvider + + // This is a real hybrid as it also implements the fs.FileInfo interface. + FileInfoOptionals +} + +type MetaProvider interface { Meta() *FileMeta } -type fileInfoMeta struct { - os.FileInfo +type FileInfoOptionals interface { + Size() int64 + Mode() fs.FileMode + ModTime() time.Time + Sys() any +} - m *FileMeta +type FileNameIsDir interface { + Name() string + IsDir() bool } -type filenameProvider interface { - Filename() string +type FileInfoProvider interface { + FileInfo() FileMetaInfo } -var _ filenameProvider = (*fileInfoMeta)(nil) +// DirOnlyOps is a subset of the afero.File interface covering +// the methods needed for directory operations. 
+type DirOnlyOps interface { + io.Closer + Name() string + Readdir(count int) ([]os.FileInfo, error) + Readdirnames(n int) ([]string, error) + Stat() (os.FileInfo, error) +} + +type dirEntryMeta struct { + fs.DirEntry + m *FileMeta + + fi fs.FileInfo + fiInit sync.Once +} + +func (fi *dirEntryMeta) Meta() *FileMeta { + return fi.m +} // Filename returns the full filename. -func (fi *fileInfoMeta) Filename() string { +func (fi *dirEntryMeta) Filename() string { return fi.m.Filename } -// Name returns the file's name. Note that we follow symlinks, -// if supported by the file system, and the Name given here will be the -// name of the symlink, which is what Hugo needs in all situations. -func (fi *fileInfoMeta) Name() string { +func (fi *dirEntryMeta) fileInfo() fs.FileInfo { + var err error + fi.fiInit.Do(func() { + fi.fi, err = fi.DirEntry.Info() + }) + if err != nil { + panic(err) + } + return fi.fi +} + +func (fi *dirEntryMeta) Size() int64 { + return fi.fileInfo().Size() +} + +func (fi *dirEntryMeta) Mode() fs.FileMode { + return fi.fileInfo().Mode() +} + +func (fi *dirEntryMeta) ModTime() time.Time { + return fi.fileInfo().ModTime() +} + +func (fi *dirEntryMeta) Sys() any { + return fi.fileInfo().Sys() +} + +// Name returns the file's name. 
+func (fi *dirEntryMeta) Name() string { if name := fi.m.Name; name != "" { return name } - return fi.FileInfo.Name() + return fi.DirEntry.Name() } -func (fi *fileInfoMeta) Meta() *FileMeta { - return fi.m +// dirEntry is an adapter from os.FileInfo to fs.DirEntry +type dirEntry struct { + fs.FileInfo } -func NewFileMetaInfo(fi os.FileInfo, m *FileMeta) FileMetaInfo { +var _ fs.DirEntry = dirEntry{} + +func (d dirEntry) Type() fs.FileMode { return d.FileInfo.Mode().Type() } + +func (d dirEntry) Info() (fs.FileInfo, error) { return d.FileInfo, nil } + +func NewFileMetaInfo(fi FileNameIsDir, m *FileMeta) FileMetaInfo { if m == nil { panic("FileMeta must be set") } - if fim, ok := fi.(FileMetaInfo); ok { + if fim, ok := fi.(MetaProvider); ok { m.Merge(fim.Meta()) } - return &fileInfoMeta{FileInfo: fi, m: m} + switch v := fi.(type) { + case fs.DirEntry: + return &dirEntryMeta{DirEntry: v, m: m} + case fs.FileInfo: + return &dirEntryMeta{DirEntry: dirEntry{v}, m: m} + case nil: + return &dirEntryMeta{DirEntry: dirEntry{}, m: m} + default: + panic(fmt.Sprintf("Unsupported type: %T", fi)) + } } type dirNameOnlyFileInfo struct { @@ -212,7 +287,6 @@ func newDirNameOnlyFileInfo(name string, meta *FileMeta, fileOpener func() (afer m.Filename = name } m.OpenFunc = fileOpener - m.IsOrdered = false return NewFileMetaInfo( &dirNameOnlyFileInfo{name: base, modTime: htime.Now()}, @@ -220,16 +294,10 @@ func newDirNameOnlyFileInfo(name string, meta *FileMeta, fileOpener func() (afer ) } -func decorateFileInfo( - fi os.FileInfo, - fs afero.Fs, opener func() (afero.File, error), - filename, filepath string, inMeta *FileMeta, -) FileMetaInfo { +func decorateFileInfo(fi FileNameIsDir, opener func() (afero.File, error), filename string, inMeta *FileMeta) FileMetaInfo { var meta *FileMeta var fim FileMetaInfo - filepath = strings.TrimPrefix(filepath, filepathSeparator) - var ok bool if fim, ok = fi.(FileMetaInfo); ok { meta = fim.Meta() @@ -241,14 +309,8 @@ func decorateFileInfo( if opener 
!= nil { meta.OpenFunc = opener } - if fs != nil { - meta.Fs = fs - } - nfilepath := normalizeFilename(filepath) + nfilename := normalizeFilename(filename) - if nfilepath != "" { - meta.Path = nfilepath - } if nfilename != "" { meta.Filename = nfilename } @@ -258,14 +320,11 @@ func decorateFileInfo( return fim } -func isSymlink(fi os.FileInfo) bool { - return fi != nil && fi.Mode()&os.ModeSymlink == os.ModeSymlink -} - -func fileInfosToFileMetaInfos(fis []os.FileInfo) []FileMetaInfo { +func DirEntriesToFileMetaInfos(fis []fs.DirEntry) []FileMetaInfo { fims := make([]FileMetaInfo, len(fis)) for i, v := range fis { - fims[i] = v.(FileMetaInfo) + fim := v.(FileMetaInfo) + fims[i] = fim } return fims } @@ -281,17 +340,49 @@ func normalizeFilename(filename string) string { return filename } -func fileInfosToNames(fis []os.FileInfo) []string { - names := make([]string, len(fis)) - for i, d := range fis { - names[i] = d.Name() - } - return names -} - -func sortFileInfos(fis []os.FileInfo) { +func sortDirEntries(fis []fs.DirEntry) { sort.Slice(fis, func(i, j int) bool { fimi, fimj := fis[i].(FileMetaInfo), fis[j].(FileMetaInfo) return fimi.Meta().Filename < fimj.Meta().Filename }) } + +// AddFileInfoToError adds file info to the given error. +func AddFileInfoToError(err error, fi FileMetaInfo, fs afero.Fs) error { + if err == nil { + return nil + } + + meta := fi.Meta() + filename := meta.Filename + + // Check if it's already added. 
+ for _, ferr := range herrors.UnwrapFileErrors(err) { + pos := ferr.Position() + errfilename := pos.Filename + if errfilename == "" { + pos.Filename = filename + ferr.UpdatePosition(pos) + } + + if errfilename == "" || errfilename == filename { + if filename != "" && ferr.ErrorContext() == nil { + f, ioerr := fs.Open(filename) + if ioerr != nil { + return err + } + defer f.Close() + ferr.UpdateContent(f, nil) + } + return err + } + } + + lineMatcher := herrors.NopLineMatcher + + if textSegmentErr, ok := err.(*herrors.TextSegmentError); ok { + lineMatcher = herrors.ContainsMatcher(textSegmentErr.Segment) + } + + return herrors.NewFileErrorFromFile(err, filename, fs, lineMatcher) +} diff --git a/hugofs/fileinfo_test.go b/hugofs/fileinfo_test.go index 8d6a2ff7a..715798b34 100644 --- a/hugofs/fileinfo_test.go +++ b/hugofs/fileinfo_test.go @@ -25,7 +25,6 @@ func TestFileMeta(t *testing.T) { c.Run("Merge", func(c *qt.C) { src := &FileMeta{ Filename: "fs1", - Path: "ps1", } dst := &FileMeta{ Filename: "fd1", @@ -33,19 +32,16 @@ func TestFileMeta(t *testing.T) { dst.Merge(src) - c.Assert(dst.Path, qt.Equals, "ps1") c.Assert(dst.Filename, qt.Equals, "fd1") }) c.Run("Copy", func(c *qt.C) { src := &FileMeta{ Filename: "fs1", - Path: "ps1", } dst := src.Copy() c.Assert(dst, qt.Not(qt.Equals), src) c.Assert(dst, qt.DeepEquals, src) }) - } diff --git a/hugofs/filename_filter_fs.go b/hugofs/filename_filter_fs.go index c101309c2..5bae4b876 100644 --- a/hugofs/filename_filter_fs.go +++ b/hugofs/filename_filter_fs.go @@ -14,6 +14,7 @@ package hugofs import ( + "io/fs" "os" "strings" "syscall" @@ -45,17 +46,6 @@ func (fs *filenameFilterFs) UnwrapFilesystem() afero.Fs { return fs.fs } -func (fs *filenameFilterFs) LstatIfPossible(name string) (os.FileInfo, bool, error) { - fi, b, err := fs.fs.(afero.Lstater).LstatIfPossible(name) - if err != nil { - return nil, false, err - } - if !fs.filter.Match(name, fi.IsDir()) { - return nil, false, os.ErrNotExist - } - return fi, b, nil -} - 
func (fs *filenameFilterFs) Open(name string) (afero.File, error) { fi, err := fs.fs.Stat(name) if err != nil { @@ -87,8 +77,14 @@ func (fs *filenameFilterFs) OpenFile(name string, flag int, perm os.FileMode) (a } func (fs *filenameFilterFs) Stat(name string) (os.FileInfo, error) { - fi, _, err := fs.LstatIfPossible(name) - return fi, err + fi, err := fs.fs.Stat(name) + if err != nil { + return nil, err + } + if !fs.filter.Match(name, fi.IsDir()) { + return nil, os.ErrNotExist + } + return fi, nil } type filenameFilterDir struct { @@ -97,31 +93,35 @@ type filenameFilterDir struct { filter *glob.FilenameFilter } -func (f *filenameFilterDir) Readdir(count int) ([]os.FileInfo, error) { - fis, err := f.File.Readdir(-1) +func (f *filenameFilterDir) ReadDir(n int) ([]fs.DirEntry, error) { + des, err := f.File.(fs.ReadDirFile).ReadDir(n) if err != nil { return nil, err } - - var result []os.FileInfo - for _, fi := range fis { - fim := fi.(FileMetaInfo) - if f.filter.Match(strings.TrimPrefix(fim.Meta().Filename, f.base), fim.IsDir()) { - result = append(result, fi) + i := 0 + for _, de := range des { + fim := de.(FileMetaInfo) + rel := strings.TrimPrefix(fim.Meta().Filename, f.base) + if f.filter.Match(rel, de.IsDir()) { + des[i] = de + i++ } } + return des[:i], nil +} - return result, nil +func (f *filenameFilterDir) Readdir(count int) ([]os.FileInfo, error) { + panic("not supported: Use ReadDir") } func (f *filenameFilterDir) Readdirnames(count int) ([]string, error) { - dirsi, err := f.Readdir(count) + des, err := f.ReadDir(count) if err != nil { return nil, err } - dirs := make([]string, len(dirsi)) - for i, d := range dirsi { + dirs := make([]string, len(des)) + for i, d := range des { dirs[i] = d.Name() } return dirs, nil diff --git a/hugofs/filename_filter_fs_test.go b/hugofs/filename_filter_fs_test.go index b3e97a6a6..7b31f0f82 100644 --- a/hugofs/filename_filter_fs_test.go +++ b/hugofs/filename_filter_fs_test.go @@ -36,12 +36,12 @@ func TestFilenameFilterFs(t 
*testing.T) { for _, letter := range []string{"a", "b", "c"} { for i := 1; i <= 3; i++ { - c.Assert(afero.WriteFile(fs, filepath.Join(base, letter, fmt.Sprintf("my%d.txt", i)), []byte("some text file for"+letter), 0755), qt.IsNil) - c.Assert(afero.WriteFile(fs, filepath.Join(base, letter, fmt.Sprintf("my%d.json", i)), []byte("some json file for"+letter), 0755), qt.IsNil) + c.Assert(afero.WriteFile(fs, filepath.Join(base, letter, fmt.Sprintf("my%d.txt", i)), []byte("some text file for"+letter), 0o755), qt.IsNil) + c.Assert(afero.WriteFile(fs, filepath.Join(base, letter, fmt.Sprintf("my%d.json", i)), []byte("some json file for"+letter), 0o755), qt.IsNil) } } - fs = afero.NewBasePathFs(fs, base) + fs = NewBasePathFs(fs, base) filter, err := glob.NewFilenameFilter(nil, []string{"/b/**.txt"}) c.Assert(err, qt.IsNil) @@ -69,15 +69,16 @@ func TestFilenameFilterFs(t *testing.T) { assertExists("/b/my1.txt", false) dirB, err := fs.Open("/b") - defer dirB.Close() c.Assert(err, qt.IsNil) + defer dirB.Close() dirBEntries, err := dirB.Readdirnames(-1) + c.Assert(err, qt.IsNil) c.Assert(dirBEntries, qt.DeepEquals, []string{"my1.json", "my2.json", "my3.json"}) dirC, err := fs.Open("/c") - defer dirC.Close() c.Assert(err, qt.IsNil) + defer dirC.Close() dirCEntries, err := dirC.Readdirnames(-1) + c.Assert(err, qt.IsNil) c.Assert(dirCEntries, qt.DeepEquals, []string{"my1.json", "my1.txt", "my2.json", "my2.txt", "my3.json", "my3.txt"}) - } diff --git a/hugofs/files/classifier.go b/hugofs/files/classifier.go index bdac2d686..a8d231f73 100644 --- a/hugofs/files/classifier.go +++ b/hugofs/files/classifier.go @@ -14,16 +14,10 @@ package files import ( - "bufio" - "fmt" - "io" "os" "path/filepath" "sort" "strings" - "unicode" - - "github.com/spf13/afero" ) const ( @@ -80,99 +74,14 @@ func IsIndexContentFile(filename string) bool { return strings.HasPrefix(base, "index.") || strings.HasPrefix(base, "_index.") } -func IsHTMLFile(filename string) bool { - return 
htmlFileExtensionsSet[strings.TrimPrefix(filepath.Ext(filename), ".")] +func IsHTML(ext string) bool { + return htmlFileExtensionsSet[ext] } func IsContentExt(ext string) bool { return contentFileExtensionsSet[ext] } -type ContentClass string - -const ( - ContentClassLeaf ContentClass = "leaf" - ContentClassBranch ContentClass = "branch" - ContentClassFile ContentClass = "zfile" // Sort below - ContentClassContent ContentClass = "zcontent" -) - -func (c ContentClass) IsBundle() bool { - return c == ContentClassLeaf || c == ContentClassBranch -} - -func ClassifyContentFile(filename string, open func() (afero.File, error)) ContentClass { - if !IsContentFile(filename) { - return ContentClassFile - } - - if IsHTMLFile(filename) { - // We need to look inside the file. If the first non-whitespace - // character is a "<", then we treat it as a regular file. - // Eearlier we created pages for these files, but that had all sorts - // of troubles, and isn't what it says in the documentation. - // See https://github.com/gohugoio/hugo/issues/7030 - if open == nil { - panic(fmt.Sprintf("no file opener provided for %q", filename)) - } - - f, err := open() - if err != nil { - return ContentClassFile - } - ishtml := isHTMLContent(f) - f.Close() - if ishtml { - return ContentClassFile - } - - } - - if strings.HasPrefix(filename, "_index.") { - return ContentClassBranch - } - - if strings.HasPrefix(filename, "index.") { - return ContentClassLeaf - } - - return ContentClassContent -} - -var htmlComment = []rune{'<', '!', '-', '-'} - -func isHTMLContent(r io.Reader) bool { - br := bufio.NewReader(r) - i := 0 - for { - c, _, err := br.ReadRune() - if err != nil { - break - } - - if i > 0 { - if i >= len(htmlComment) { - return false - } - - if c != htmlComment[i] { - return true - } - - i++ - continue - } - - if !unicode.IsSpace(c) { - if i == 0 && c != '<' { - return false - } - i++ - } - } - return true -} - const ( ComponentFolderArchetypes = "archetypes" ComponentFolderStatic = 
"static" diff --git a/hugofs/files/classifier_test.go b/hugofs/files/classifier_test.go index 84036b870..f2fad56ca 100644 --- a/hugofs/files/classifier_test.go +++ b/hugofs/files/classifier_test.go @@ -15,7 +15,6 @@ package files import ( "path/filepath" - "strings" "testing" qt "github.com/frankban/quicktest" @@ -31,16 +30,6 @@ func TestIsContentFile(t *testing.T) { c.Assert(IsContentExt("json"), qt.Equals, false) } -func TestIsHTMLContent(t *testing.T) { - c := qt.New(t) - - c.Assert(isHTMLContent(strings.NewReader(" <html>")), qt.Equals, true) - c.Assert(isHTMLContent(strings.NewReader(" <!--\n---")), qt.Equals, false) - c.Assert(isHTMLContent(strings.NewReader(" <!--")), qt.Equals, true) - c.Assert(isHTMLContent(strings.NewReader(" ---<")), qt.Equals, false) - c.Assert(isHTMLContent(strings.NewReader(" foo <")), qt.Equals, false) -} - func TestComponentFolders(t *testing.T) { c := qt.New(t) diff --git a/hugofs/filter_fs.go b/hugofs/filter_fs.go deleted file mode 100644 index 1b020738a..000000000 --- a/hugofs/filter_fs.go +++ /dev/null @@ -1,344 +0,0 @@ -// Copyright 2019 The Hugo Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package hugofs - -import ( - "fmt" - "io" - "os" - "path/filepath" - "sort" - "strings" - "syscall" - "time" - - "github.com/gohugoio/hugo/hugofs/files" - - "github.com/spf13/afero" -) - -var ( - _ afero.Fs = (*FilterFs)(nil) - _ afero.Lstater = (*FilterFs)(nil) - _ afero.File = (*filterDir)(nil) -) - -func NewLanguageFs(langs map[string]int, fs afero.Fs) (afero.Fs, error) { - applyMeta := func(fs *FilterFs, name string, fis []os.FileInfo) { - for i, fi := range fis { - if fi.IsDir() { - filename := filepath.Join(name, fi.Name()) - fis[i] = decorateFileInfo(fi, fs, fs.getOpener(filename), "", "", nil) - continue - } - - meta := fi.(FileMetaInfo).Meta() - lang := meta.Lang - - fileLang, translationBaseName, translationBaseNameWithExt := langInfoFrom(langs, fi.Name()) - weight := meta.Weight - - if fileLang != "" { - if fileLang == lang { - // Give priority to myfile.sv.txt inside the sv filesystem. - weight++ - } - lang = fileLang - } - - fim := NewFileMetaInfo( - fi, - &FileMeta{ - Lang: lang, - Weight: weight, - TranslationBaseName: translationBaseName, - TranslationBaseNameWithExt: translationBaseNameWithExt, - Classifier: files.ClassifyContentFile(fi.Name(), meta.OpenFunc), - }) - - fis[i] = fim - } - } - - all := func(fis []os.FileInfo) { - // Maps translation base name to a list of language codes. 
- translations := make(map[string][]string) - trackTranslation := func(meta *FileMeta) { - name := meta.TranslationBaseNameWithExt - translations[name] = append(translations[name], meta.Lang) - } - for _, fi := range fis { - if fi.IsDir() { - continue - } - meta := fi.(FileMetaInfo).Meta() - - trackTranslation(meta) - - } - - for _, fi := range fis { - fim := fi.(FileMetaInfo) - langs := translations[fim.Meta().TranslationBaseNameWithExt] - if len(langs) > 0 { - fim.Meta().Translations = sortAndRemoveStringDuplicates(langs) - } - } - } - - return &FilterFs{ - fs: fs, - applyPerSource: applyMeta, - applyAll: all, - }, nil -} - -func NewFilterFs(fs afero.Fs) (afero.Fs, error) { - applyMeta := func(fs *FilterFs, name string, fis []os.FileInfo) { - for i, fi := range fis { - if fi.IsDir() { - fis[i] = decorateFileInfo(fi, fs, fs.getOpener(fi.(FileMetaInfo).Meta().Filename), "", "", nil) - } - } - } - - ffs := &FilterFs{ - fs: fs, - applyPerSource: applyMeta, - } - - return ffs, nil -} - -var ( - _ FilesystemUnwrapper = (*FilterFs)(nil) -) - -// FilterFs is an ordered composite filesystem. 
-type FilterFs struct { - fs afero.Fs - - applyPerSource func(fs *FilterFs, name string, fis []os.FileInfo) - applyAll func(fis []os.FileInfo) -} - -func (fs *FilterFs) Chmod(n string, m os.FileMode) error { - return syscall.EPERM -} - -func (fs *FilterFs) Chtimes(n string, a, m time.Time) error { - return syscall.EPERM -} - -func (fs *FilterFs) Chown(n string, uid, gid int) error { - return syscall.EPERM -} - -func (fs *FilterFs) UnwrapFilesystem() afero.Fs { - return fs.fs -} - -func (fs *FilterFs) LstatIfPossible(name string) (os.FileInfo, bool, error) { - fi, b, err := lstatIfPossible(fs.fs, name) - if err != nil { - return nil, false, err - } - - if fi.IsDir() { - return decorateFileInfo(fi, fs, fs.getOpener(name), "", "", nil), false, nil - } - - parent := filepath.Dir(name) - fs.applyFilters(parent, -1, fi) - - return fi, b, nil -} - -func (fs *FilterFs) Mkdir(n string, p os.FileMode) error { - return syscall.EPERM -} - -func (fs *FilterFs) MkdirAll(n string, p os.FileMode) error { - return syscall.EPERM -} - -func (fs *FilterFs) Name() string { - return "WeightedFileSystem" -} - -func (fs *FilterFs) Open(name string) (afero.File, error) { - f, err := fs.fs.Open(name) - if err != nil { - return nil, err - } - - return &filterDir{ - File: f, - ffs: fs, - }, nil -} - -func (fs *FilterFs) OpenFile(name string, flag int, perm os.FileMode) (afero.File, error) { - return fs.fs.Open(name) -} - -func (fs *FilterFs) ReadDir(name string) ([]os.FileInfo, error) { - panic("not implemented") -} - -func (fs *FilterFs) Remove(n string) error { - return syscall.EPERM -} - -func (fs *FilterFs) RemoveAll(p string) error { - return syscall.EPERM -} - -func (fs *FilterFs) Rename(o, n string) error { - return syscall.EPERM -} - -func (fs *FilterFs) Stat(name string) (os.FileInfo, error) { - fi, _, err := fs.LstatIfPossible(name) - return fi, err -} - -func (fs *FilterFs) Create(n string) (afero.File, error) { - return nil, syscall.EPERM -} - -func (fs *FilterFs) getOpener(name 
string) func() (afero.File, error) { - return func() (afero.File, error) { - return fs.Open(name) - } -} - -func (fs *FilterFs) applyFilters(name string, count int, fis ...os.FileInfo) ([]os.FileInfo, error) { - if fs.applyPerSource != nil { - fs.applyPerSource(fs, name, fis) - } - - seen := make(map[string]bool) - var duplicates []int - for i, dir := range fis { - if !dir.IsDir() { - continue - } - if seen[dir.Name()] { - duplicates = append(duplicates, i) - } else { - seen[dir.Name()] = true - } - } - - // Remove duplicate directories, keep first. - if len(duplicates) > 0 { - for i := len(duplicates) - 1; i >= 0; i-- { - idx := duplicates[i] - fis = append(fis[:idx], fis[idx+1:]...) - } - } - - if fs.applyAll != nil { - fs.applyAll(fis) - } - - if count > 0 && len(fis) >= count { - return fis[:count], nil - } - - return fis, nil -} - -type filterDir struct { - afero.File - ffs *FilterFs -} - -func (f *filterDir) Readdir(count int) ([]os.FileInfo, error) { - fis, err := f.File.Readdir(-1) - if err != nil { - return nil, err - } - return f.ffs.applyFilters(f.Name(), count, fis...) -} - -func (f *filterDir) Readdirnames(count int) ([]string, error) { - dirsi, err := f.Readdir(count) - if err != nil { - return nil, err - } - - dirs := make([]string, len(dirsi)) - for i, d := range dirsi { - dirs[i] = d.Name() - } - return dirs, nil -} - -// Try to extract the language from the given filename. -// Any valid language identifier in the name will win over the -// language set on the file system, e.g. "mypost.en.md". 
-func langInfoFrom(languages map[string]int, name string) (string, string, string) { - var lang string - - baseName := filepath.Base(name) - ext := filepath.Ext(baseName) - translationBaseName := baseName - - if ext != "" { - translationBaseName = strings.TrimSuffix(translationBaseName, ext) - } - - fileLangExt := filepath.Ext(translationBaseName) - fileLang := strings.TrimPrefix(fileLangExt, ".") - - if _, found := languages[fileLang]; found { - lang = fileLang - translationBaseName = strings.TrimSuffix(translationBaseName, fileLangExt) - } - - translationBaseNameWithExt := translationBaseName - - if ext != "" { - translationBaseNameWithExt += ext - } - - return lang, translationBaseName, translationBaseNameWithExt -} - -func printFs(fs afero.Fs, path string, w io.Writer) { - if fs == nil { - return - } - afero.Walk(fs, path, func(path string, info os.FileInfo, err error) error { - fmt.Println("p:::", path) - return nil - }) -} - -func sortAndRemoveStringDuplicates(s []string) []string { - ss := sort.StringSlice(s) - ss.Sort() - i := 0 - for j := 1; j < len(s); j++ { - if !ss.Less(i, j) { - continue - } - i++ - s[i] = s[j] - } - - return s[:i+1] -} diff --git a/hugofs/filter_fs_test.go b/hugofs/filter_fs_test.go deleted file mode 100644 index 524d957d6..000000000 --- a/hugofs/filter_fs_test.go +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright 2019 The Hugo Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package hugofs - -import ( - "path/filepath" - "testing" - - qt "github.com/frankban/quicktest" -) - -func TestLangInfoFrom(t *testing.T) { - langs := map[string]int{ - "sv": 10, - "en": 20, - } - - c := qt.New(t) - - tests := []struct { - input string - expected []string - }{ - {"page.sv.md", []string{"sv", "page", "page.md"}}, - {"page.en.md", []string{"en", "page", "page.md"}}, - {"page.no.md", []string{"", "page.no", "page.no.md"}}, - {filepath.FromSlash("tc-lib-color/class-Com.Tecnick.Color.Css"), []string{"", "class-Com.Tecnick.Color", "class-Com.Tecnick.Color.Css"}}, - {filepath.FromSlash("class-Com.Tecnick.Color.sv.Css"), []string{"sv", "class-Com.Tecnick.Color", "class-Com.Tecnick.Color.Css"}}, - } - - for _, test := range tests { - v1, v2, v3 := langInfoFrom(langs, test.input) - c.Assert([]string{v1, v2, v3}, qt.DeepEquals, test.expected) - } -} diff --git a/hugofs/fs.go b/hugofs/fs.go index 5b8a3adb2..fc0ea71c6 100644 --- a/hugofs/fs.go +++ b/hugofs/fs.go @@ -111,7 +111,7 @@ func newFs(source, destination afero.Fs, workingDir, publishDir string) *Fs { // If this does not exist, it will be created later. 
absPublishDir := paths.AbsPathify(workingDir, publishDir) - pubFs := afero.NewBasePathFs(destination, absPublishDir) + pubFs := NewBasePathFs(destination, absPublishDir) return &Fs{ Source: source, @@ -126,16 +126,16 @@ func newFs(source, destination afero.Fs, workingDir, publishDir string) *Fs { func getWorkingDirFsReadOnly(base afero.Fs, workingDir string) afero.Fs { if workingDir == "" { - return afero.NewReadOnlyFs(base) + return NewReadOnlyFs(base) } - return afero.NewBasePathFs(afero.NewReadOnlyFs(base), workingDir) + return NewBasePathFs(NewReadOnlyFs(base), workingDir) } func getWorkingDirFsWritable(base afero.Fs, workingDir string) afero.Fs { if workingDir == "" { return base } - return afero.NewBasePathFs(base, workingDir) + return NewBasePathFs(base, workingDir) } func isWrite(flag int) bool { @@ -171,14 +171,11 @@ func MakeReadableAndRemoveAllModulePkgDir(fs afero.Fs, dir string) (int, error) func IsOsFs(fs afero.Fs) bool { var isOsFs bool WalkFilesystems(fs, func(fs afero.Fs) bool { - switch base := fs.(type) { + switch fs.(type) { case *afero.MemMapFs: isOsFs = false case *afero.OsFs: isOsFs = true - case *afero.BasePathFs: - _, supportsLstat, _ := base.LstatIfPossible("asdfasdfasdf") - isOsFs = supportsLstat } return isOsFs }) @@ -225,3 +222,30 @@ func WalkFilesystems(fs afero.Fs, fn WalkFn) bool { return false } + +var _ FilesystemUnwrapper = (*filesystemsWrapper)(nil) + +// NewBasePathFs creates a new BasePathFs. +func NewBasePathFs(source afero.Fs, path string) afero.Fs { + return WrapFilesystem(afero.NewBasePathFs(source, path), source) +} + +// NewReadOnlyFs creates a new ReadOnlyFs. +func NewReadOnlyFs(source afero.Fs) afero.Fs { + return WrapFilesystem(afero.NewReadOnlyFs(source), source) +} + +// WrapFilesystem is typically used to wrap a afero.BasePathFs to allow +// access to the underlying filesystem if needed. 
+func WrapFilesystem(container, content afero.Fs) afero.Fs { + return filesystemsWrapper{Fs: container, content: content} +} + +type filesystemsWrapper struct { + afero.Fs + content afero.Fs +} + +func (w filesystemsWrapper) UnwrapFilesystem() afero.Fs { + return w.content +} diff --git a/hugofs/fs_test.go b/hugofs/fs_test.go index b2ed2e86e..660ddd14c 100644 --- a/hugofs/fs_test.go +++ b/hugofs/fs_test.go @@ -28,8 +28,8 @@ func TestIsOsFs(t *testing.T) { c.Assert(IsOsFs(Os), qt.Equals, true) c.Assert(IsOsFs(&afero.MemMapFs{}), qt.Equals, false) - c.Assert(IsOsFs(afero.NewBasePathFs(&afero.MemMapFs{}, "/public")), qt.Equals, false) - c.Assert(IsOsFs(afero.NewBasePathFs(Os, t.TempDir())), qt.Equals, true) + c.Assert(IsOsFs(NewBasePathFs(&afero.MemMapFs{}, "/public")), qt.Equals, false) + c.Assert(IsOsFs(NewBasePathFs(Os, t.TempDir())), qt.Equals, true) } func TestNewDefault(t *testing.T) { @@ -43,9 +43,8 @@ func TestNewDefault(t *testing.T) { c.Assert(f.Source, hqt.IsSameType, new(afero.OsFs)) c.Assert(f.Os, qt.IsNotNil) c.Assert(f.WorkingDirReadOnly, qt.IsNotNil) - c.Assert(f.WorkingDirReadOnly, hqt.IsSameType, new(afero.BasePathFs)) - c.Assert(IsOsFs(f.Source), qt.IsTrue) c.Assert(IsOsFs(f.WorkingDirReadOnly), qt.IsTrue) + c.Assert(IsOsFs(f.Source), qt.IsTrue) c.Assert(IsOsFs(f.PublishDir), qt.IsTrue) c.Assert(IsOsFs(f.Os), qt.IsTrue) } diff --git a/hugofs/glob.go b/hugofs/glob.go index 1b649a283..6a6d999ce 100644 --- a/hugofs/glob.go +++ b/hugofs/glob.go @@ -31,6 +31,9 @@ func Glob(fs afero.Fs, pattern string, handle func(fi FileMetaInfo) (bool, error return nil } root := glob.ResolveRootDir(pattern) + if !strings.HasPrefix(root, "/") { + root = "/" + root + } pattern = strings.ToLower(pattern) g, err := glob.GetGlob(pattern) @@ -44,7 +47,7 @@ func Glob(fs afero.Fs, pattern string, handle func(fi FileMetaInfo) (bool, error // Signals that we're done. 
done := errors.New("done") - wfn := func(p string, info FileMetaInfo, err error) error { + wfn := func(p string, info FileMetaInfo) error { p = glob.NormalizePath(p) if info.IsDir() { if !hasSuperAsterisk { @@ -69,11 +72,13 @@ func Glob(fs afero.Fs, pattern string, handle func(fi FileMetaInfo) (bool, error return nil } - w := NewWalkway(WalkwayConfig{ - Root: root, - Fs: fs, - WalkFn: wfn, - }) + w := NewWalkway( + WalkwayConfig{ + Root: root, + Fs: fs, + WalkFn: wfn, + FailOnNotExist: true, + }) err = w.Walk() diff --git a/hugofs/glob/filename_filter.go b/hugofs/glob/filename_filter.go index 8e8af554b..6f283de48 100644 --- a/hugofs/glob/filename_filter.go +++ b/hugofs/glob/filename_filter.go @@ -27,6 +27,8 @@ type FilenameFilter struct { dirInclusions []glob.Glob exclusions []glob.Glob isWindows bool + + nested []*FilenameFilter } func normalizeFilenameGlobPattern(s string) string { @@ -101,11 +103,32 @@ func (f *FilenameFilter) Match(filename string, isDir bool) bool { if f == nil { return true } - return f.doMatch(filename, isDir) - /*if f.shouldInclude == nil { - fmt.Printf("Match: %q (%t) => %t\n", filename, isDir, isMatch) + if !f.doMatch(filename, isDir) { + return false + } + + for _, nested := range f.nested { + if !nested.Match(filename, isDir) { + return false + } + } + + return true +} + +// Append appends a filter to the chain. The receiver will be copied if needed. 
+func (f *FilenameFilter) Append(other *FilenameFilter) *FilenameFilter { + if f == nil { + return other } - return isMatch*/ + + clone := *f + nested := make([]*FilenameFilter, len(clone.nested)+1) + copy(nested, clone.nested) + nested[len(nested)-1] = other + clone.nested = nested + + return &clone } func (f *FilenameFilter) doMatch(filename string, isDir bool) bool { diff --git a/hugofs/glob/filename_filter_test.go b/hugofs/glob/filename_filter_test.go index 8437af858..6398e8a1e 100644 --- a/hugofs/glob/filename_filter_test.go +++ b/hugofs/glob/filename_filter_test.go @@ -36,6 +36,7 @@ func TestFilenameFilter(t *testing.T) { c.Assert(excludeAlmostAllJSON.Match("", true), qt.Equals, true) excludeAllButFooJSON, err := NewFilenameFilter([]string{"/a/**/foo.json"}, []string{"**.json"}) + c.Assert(err, qt.IsNil) c.Assert(excludeAllButFooJSON.Match(filepath.FromSlash("/data/my.json"), false), qt.Equals, false) c.Assert(excludeAllButFooJSON.Match(filepath.FromSlash("/a/b/c/d/e/foo.json"), false), qt.Equals, true) c.Assert(excludeAllButFooJSON.Match(filepath.FromSlash("/a/b/c"), true), qt.Equals, true) @@ -71,5 +72,4 @@ func TestFilenameFilter(t *testing.T) { funcFilter := NewFilenameFilterForInclusionFunc(func(s string) bool { return strings.HasSuffix(s, ".json") }) c.Assert(funcFilter.Match("ab.json", false), qt.Equals, true) c.Assert(funcFilter.Match("ab.bson", false), qt.Equals, false) - } diff --git a/hugofs/glob/glob.go b/hugofs/glob/glob.go index dc9b4fb5b..42aa1fa3b 100644 --- a/hugofs/glob/glob.go +++ b/hugofs/glob/glob.go @@ -69,7 +69,8 @@ func (gc *globCache) GetGlob(pattern string) (glob.Glob, error) { eg = globErr{ globDecorator{ g: g, - isWindows: gc.isWindows}, + isWindows: gc.isWindows, + }, err, } @@ -121,15 +122,6 @@ func (g globDecorator) Match(s string) bool { return g.g.Match(s) } -type globDecoratorDouble struct { - lowerCase glob.Glob - originalCase glob.Glob -} - -func (g globDecoratorDouble) Match(s string) bool { - return g.lowerCase.Match(s) 
|| g.originalCase.Match(s) -} - func GetGlob(pattern string) (glob.Glob, error) { return defaultGlobCache.GetGlob(pattern) } diff --git a/hugofs/glob_test.go b/hugofs/glob_test.go index a6ae85fc8..722e0b441 100644 --- a/hugofs/glob_test.go +++ b/hugofs/glob_test.go @@ -14,6 +14,7 @@ package hugofs import ( + "os" "path/filepath" "testing" @@ -28,14 +29,21 @@ func TestGlob(t *testing.T) { fs := NewBaseFileDecorator(afero.NewMemMapFs()) create := func(filename string) { - err := afero.WriteFile(fs, filepath.FromSlash(filename), []byte("content "+filename), 0777) + filename = filepath.FromSlash(filename) + dir := filepath.Dir(filename) + if dir != "." { + err := fs.MkdirAll(dir, 0o777) + c.Assert(err, qt.IsNil) + } + err := afero.WriteFile(fs, filename, []byte("content "+filename), 0o777) c.Assert(err, qt.IsNil) } collect := func(pattern string) []string { var paths []string h := func(fi FileMetaInfo) (bool, error) { - paths = append(paths, fi.Meta().Path) + p := fi.Meta().PathInfo.Path() + paths = append(paths, p) return false, nil } err := Glob(fs, pattern, h) @@ -43,17 +51,22 @@ func TestGlob(t *testing.T) { return paths } - create("root.json") - create("jsonfiles/d1.json") - create("jsonfiles/d2.json") - create("jsonfiles/sub/d3.json") - create("jsonfiles/d1.xml") - create("a/b/c/e/f.json") - create("UPPER/sub/style.css") - create("root/UPPER/sub/style.css") + create("/root.json") + create("/jsonfiles/d1.json") + create("/jsonfiles/d2.json") + create("/jsonfiles/sub/d3.json") + create("/jsonfiles/d1.xml") + create("/a/b/c/e/f.json") + create("/UPPER/sub/style.css") + create("/root/UPPER/sub/style.css") - c.Assert(collect(filepath.FromSlash("/jsonfiles/*.json")), qt.HasLen, 2) + afero.Walk(fs, "/", func(path string, info os.FileInfo, err error) error { + c.Assert(err, qt.IsNil) + return nil + }) + c.Assert(collect(filepath.FromSlash("/jsonfiles/*.json")), qt.HasLen, 2) + c.Assert(collect("/*.json"), qt.HasLen, 1) c.Assert(collect("**.json"), qt.HasLen, 5) 
c.Assert(collect("**"), qt.HasLen, 8) c.Assert(collect(""), qt.HasLen, 0) @@ -63,5 +76,4 @@ func TestGlob(t *testing.T) { c.Assert(collect("root/UPPER/sub/style.css"), qt.HasLen, 1) c.Assert(collect("UPPER/sub/style.css"), qt.HasLen, 1) - } diff --git a/hugofs/hasbytes_fs.go b/hugofs/hasbytes_fs.go index 3d32a828f..238fbc9c4 100644 --- a/hugofs/hasbytes_fs.go +++ b/hugofs/hasbytes_fs.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -67,7 +67,6 @@ func (fs *hasBytesFs) wrapFile(f afero.File) afero.File { }, hasBytesCallback: fs.hasBytesCallback, } - } func (fs *hasBytesFs) Name() string { diff --git a/hugofs/noop_fs.go b/hugofs/noop_fs.go index 87f2cc9ff..e9def7c99 100644 --- a/hugofs/noop_fs.go +++ b/hugofs/noop_fs.go @@ -22,7 +22,7 @@ import ( ) var ( - errNoOp = errors.New("this is a filesystem that does nothing and this operation is not supported") + errNoOp = errors.New("this operation is not supported") _ afero.Fs = (*noOpFs)(nil) // NoOpFs provides a no-op filesystem that implements the afero.Fs @@ -30,8 +30,7 @@ var ( NoOpFs = &noOpFs{} ) -type noOpFs struct { -} +type noOpFs struct{} func (fs noOpFs) Create(name string) (afero.File, error) { panic(errNoOp) @@ -84,3 +83,47 @@ func (fs noOpFs) Chtimes(name string, atime time.Time, mtime time.Time) error { func (fs *noOpFs) Chown(name string, uid int, gid int) error { panic(errNoOp) } + +// noOpRegularFileOps implements the non-directory operations of a afero.File +// panicking for all operations. 
+type noOpRegularFileOps struct{} + +func (f *noOpRegularFileOps) Read(p []byte) (n int, err error) { + panic(errNoOp) +} + +func (f *noOpRegularFileOps) ReadAt(p []byte, off int64) (n int, err error) { + panic(errNoOp) +} + +func (f *noOpRegularFileOps) Seek(offset int64, whence int) (int64, error) { + panic(errNoOp) +} + +func (f *noOpRegularFileOps) Write(p []byte) (n int, err error) { + panic(errNoOp) +} + +func (f *noOpRegularFileOps) WriteAt(p []byte, off int64) (n int, err error) { + panic(errNoOp) +} + +func (f *noOpRegularFileOps) Readdir(count int) ([]os.FileInfo, error) { + panic(errNoOp) +} + +func (f *noOpRegularFileOps) Readdirnames(n int) ([]string, error) { + panic(errNoOp) +} + +func (f *noOpRegularFileOps) Sync() error { + panic(errNoOp) +} + +func (f *noOpRegularFileOps) Truncate(size int64) error { + panic(errNoOp) +} + +func (f *noOpRegularFileOps) WriteString(s string) (ret int, err error) { + panic(errNoOp) +} diff --git a/hugofs/nosymlink_fs.go b/hugofs/nosymlink_fs.go deleted file mode 100644 index af559844f..000000000 --- a/hugofs/nosymlink_fs.go +++ /dev/null @@ -1,160 +0,0 @@ -// Copyright 2018 The Hugo Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package hugofs - -import ( - "errors" - "os" - "path/filepath" - - "github.com/gohugoio/hugo/common/loggers" - "github.com/spf13/afero" -) - -var ErrPermissionSymlink = errors.New("symlinks not allowed in this filesystem") - -// NewNoSymlinkFs creates a new filesystem that prevents symlinks. -func NewNoSymlinkFs(fs afero.Fs, logger loggers.Logger, allowFiles bool) afero.Fs { - return &noSymlinkFs{Fs: fs, logger: logger, allowFiles: allowFiles} -} - -var ( - _ FilesystemUnwrapper = (*noSymlinkFs)(nil) -) - -// noSymlinkFs is a filesystem that prevents symlinking. -type noSymlinkFs struct { - allowFiles bool // block dirs only - logger loggers.Logger - afero.Fs -} - -type noSymlinkFile struct { - fs *noSymlinkFs - afero.File -} - -func (f *noSymlinkFile) Readdir(count int) ([]os.FileInfo, error) { - fis, err := f.File.Readdir(count) - - filtered := fis[:0] - for _, x := range fis { - filename := filepath.Join(f.Name(), x.Name()) - if _, err := f.fs.checkSymlinkStatus(filename, x); err != nil { - // Log a warning and drop the file from the list - logUnsupportedSymlink(filename, f.fs.logger) - } else { - filtered = append(filtered, x) - } - } - - return filtered, err -} - -func (f *noSymlinkFile) Readdirnames(count int) ([]string, error) { - dirs, err := f.Readdir(count) - if err != nil { - return nil, err - } - return fileInfosToNames(dirs), nil -} - -func (fs *noSymlinkFs) UnwrapFilesystem() afero.Fs { - return fs.Fs -} - -func (fs *noSymlinkFs) LstatIfPossible(name string) (os.FileInfo, bool, error) { - return fs.stat(name) -} - -func (fs *noSymlinkFs) Stat(name string) (os.FileInfo, error) { - fi, _, err := fs.stat(name) - return fi, err -} - -func (fs *noSymlinkFs) stat(name string) (os.FileInfo, bool, error) { - var ( - fi os.FileInfo - wasLstat bool - err error - ) - - if lstater, ok := fs.Fs.(afero.Lstater); ok { - fi, wasLstat, err = lstater.LstatIfPossible(name) - } else { - fi, err = fs.Fs.Stat(name) - } - - if err != nil { - return nil, false, err - } - 
- fi, err = fs.checkSymlinkStatus(name, fi) - - return fi, wasLstat, err -} - -func (fs *noSymlinkFs) checkSymlinkStatus(name string, fi os.FileInfo) (os.FileInfo, error) { - var metaIsSymlink bool - - if fim, ok := fi.(FileMetaInfo); ok { - meta := fim.Meta() - metaIsSymlink = meta.IsSymlink - } - - if metaIsSymlink { - if fs.allowFiles && !fi.IsDir() { - return fi, nil - } - return nil, ErrPermissionSymlink - } - - // Also support non-decorated filesystems, e.g. the Os fs. - if isSymlink(fi) { - // Need to determine if this is a directory or not. - _, sfi, err := evalSymlinks(fs.Fs, name) - if err != nil { - return nil, err - } - if fs.allowFiles && !sfi.IsDir() { - // Return the original FileInfo to get the expected Name. - return fi, nil - } - return nil, ErrPermissionSymlink - } - - return fi, nil -} - -func (fs *noSymlinkFs) Open(name string) (afero.File, error) { - if _, _, err := fs.stat(name); err != nil { - return nil, err - } - return fs.wrapFile(fs.Fs.Open(name)) -} - -func (fs *noSymlinkFs) OpenFile(name string, flag int, perm os.FileMode) (afero.File, error) { - if _, _, err := fs.stat(name); err != nil { - return nil, err - } - return fs.wrapFile(fs.Fs.OpenFile(name, flag, perm)) -} - -func (fs *noSymlinkFs) wrapFile(f afero.File, err error) (afero.File, error) { - if err != nil { - return nil, err - } - - return &noSymlinkFile{File: f, fs: fs}, nil -} diff --git a/hugofs/nosymlink_test.go b/hugofs/nosymlink_test.go deleted file mode 100644 index d0a8baaaa..000000000 --- a/hugofs/nosymlink_test.go +++ /dev/null @@ -1,146 +0,0 @@ -// Copyright 2019 The Hugo Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package hugofs - -import ( - "os" - "path/filepath" - "testing" - - "github.com/bep/logg" - "github.com/gohugoio/hugo/common/loggers" - "github.com/gohugoio/hugo/htesting" - - "github.com/spf13/afero" - - qt "github.com/frankban/quicktest" -) - -func prepareSymlinks(t *testing.T) (string, func()) { - c := qt.New(t) - - workDir, clean, err := htesting.CreateTempDir(Os, "hugo-symlink-test") - c.Assert(err, qt.IsNil) - wd, _ := os.Getwd() - - blogDir := filepath.Join(workDir, "blog") - blogSubDir := filepath.Join(blogDir, "sub") - c.Assert(os.MkdirAll(blogSubDir, 0777), qt.IsNil) - blogFile1 := filepath.Join(blogDir, "a.txt") - blogFile2 := filepath.Join(blogSubDir, "b.txt") - afero.WriteFile(Os, filepath.Join(blogFile1), []byte("content1"), 0777) - afero.WriteFile(Os, filepath.Join(blogFile2), []byte("content2"), 0777) - os.Chdir(workDir) - c.Assert(os.Symlink("blog", "symlinkdedir"), qt.IsNil) - os.Chdir(blogDir) - c.Assert(os.Symlink("sub", "symsub"), qt.IsNil) - c.Assert(os.Symlink("a.txt", "symlinkdedfile.txt"), qt.IsNil) - - return workDir, func() { - clean() - os.Chdir(wd) - } -} - -func TestNoSymlinkFs(t *testing.T) { - if skipSymlink() { - t.Skip("Skip; os.Symlink needs administrator rights on Windows") - } - c := qt.New(t) - workDir, clean := prepareSymlinks(t) - defer clean() - - blogDir := filepath.Join(workDir, "blog") - blogFile1 := filepath.Join(blogDir, "a.txt") - - logger := loggers.NewDefault() - - for _, bfs := range []afero.Fs{NewBaseFileDecorator(Os), Os} { - for _, allowFiles := range []bool{false, true} { - logger.Reset() - fs := 
NewNoSymlinkFs(bfs, logger, allowFiles) - ls := fs.(afero.Lstater) - symlinkedDir := filepath.Join(workDir, "symlinkdedir") - symlinkedFilename := "symlinkdedfile.txt" - symlinkedFile := filepath.Join(blogDir, symlinkedFilename) - - assertFileErr := func(err error) { - if allowFiles { - c.Assert(err, qt.IsNil) - } else { - c.Assert(err, qt.Equals, ErrPermissionSymlink) - } - } - - assertFileStat := func(name string, fi os.FileInfo, err error) { - t.Helper() - assertFileErr(err) - if err == nil { - c.Assert(fi, qt.Not(qt.IsNil)) - c.Assert(fi.Name(), qt.Equals, name) - } - } - - // Check Stat and Lstat - for _, stat := range []func(name string) (os.FileInfo, error){ - func(name string) (os.FileInfo, error) { - return fs.Stat(name) - }, - func(name string) (os.FileInfo, error) { - fi, _, err := ls.LstatIfPossible(name) - return fi, err - }, - } { - _, err := stat(symlinkedDir) - c.Assert(err, qt.Equals, ErrPermissionSymlink) - fi, err := stat(symlinkedFile) - assertFileStat(symlinkedFilename, fi, err) - - fi, err = stat(filepath.Join(workDir, "blog")) - c.Assert(err, qt.IsNil) - c.Assert(fi, qt.Not(qt.IsNil)) - - fi, err = stat(blogFile1) - c.Assert(err, qt.IsNil) - c.Assert(fi, qt.Not(qt.IsNil)) - } - - // Check Open - _, err := fs.Open(symlinkedDir) - c.Assert(err, qt.Equals, ErrPermissionSymlink) - _, err = fs.OpenFile(symlinkedDir, os.O_RDWR|os.O_APPEND|os.O_CREATE, 0666) - c.Assert(err, qt.Equals, ErrPermissionSymlink) - _, err = fs.OpenFile(symlinkedFile, os.O_RDWR|os.O_APPEND|os.O_CREATE, 0666) - assertFileErr(err) - _, err = fs.Open(symlinkedFile) - assertFileErr(err) - f, err := fs.Open(blogDir) - c.Assert(err, qt.IsNil) - f.Close() - f, err = fs.Open(blogFile1) - c.Assert(err, qt.IsNil) - f.Close() - - // Check readdir - f, err = fs.Open(workDir) - c.Assert(err, qt.IsNil) - // There is at least one unsupported symlink inside workDir - _, err = f.Readdir(-1) - c.Assert(err, qt.IsNil) - f.Close() - c.Assert(logger.LoggCount(logg.LevelWarn), qt.Equals, 1) - - 
} - } -} diff --git a/hugofs/openfiles_fs.go b/hugofs/openfiles_fs.go new file mode 100644 index 000000000..f363c95f6 --- /dev/null +++ b/hugofs/openfiles_fs.go @@ -0,0 +1,110 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package hugofs + +import ( + "io/fs" + "os" + "sync" + + "github.com/spf13/afero" +) + +var _ FilesystemUnwrapper = (*OpenFilesFs)(nil) + +// OpenFilesFs is a wrapper around afero.Fs that keeps track of open files. 
+type OpenFilesFs struct { + afero.Fs + + mu sync.Mutex + openFiles map[string]int +} + +func (fs *OpenFilesFs) UnwrapFilesystem() afero.Fs { + return fs.Fs +} + +func (fs *OpenFilesFs) Create(name string) (afero.File, error) { + f, err := fs.Fs.Create(name) + if err != nil { + return nil, err + } + return fs.trackAndWrapFile(f), nil +} + +func (fs *OpenFilesFs) Open(name string) (afero.File, error) { + f, err := fs.Fs.Open(name) + if err != nil { + return nil, err + } + return fs.trackAndWrapFile(f), nil +} + +func (fs *OpenFilesFs) OpenFile(name string, flag int, perm os.FileMode) (afero.File, error) { + f, err := fs.Fs.OpenFile(name, flag, perm) + if err != nil { + return nil, err + } + return fs.trackAndWrapFile(f), nil +} + +func (fs *OpenFilesFs) trackAndWrapFile(f afero.File) afero.File { + fs.mu.Lock() + defer fs.mu.Unlock() + + if fs.openFiles == nil { + fs.openFiles = make(map[string]int) + } + + fs.openFiles[f.Name()]++ + + return &openFilesFsFile{fs: fs, File: f} +} + +type openFilesFsFile struct { + fs *OpenFilesFs + afero.File +} + +func (f *openFilesFsFile) ReadDir(count int) ([]fs.DirEntry, error) { + return f.File.(fs.ReadDirFile).ReadDir(count) +} + +func (f *openFilesFsFile) Close() (err error) { + f.fs.mu.Lock() + defer f.fs.mu.Unlock() + + err = f.File.Close() + + if f.fs.openFiles == nil { + return + } + + name := f.Name() + + f.fs.openFiles[name]-- + + if f.fs.openFiles[name] <= 0 { + delete(f.fs.openFiles, name) + } + + return +} + +func (fs *OpenFilesFs) OpenFiles() map[string]int { + fs.mu.Lock() + defer fs.mu.Unlock() + + return fs.openFiles +} diff --git a/hugofs/rootmapping_fs.go b/hugofs/rootmapping_fs.go index a37e21a8b..1efb8ee5f 100644 --- a/hugofs/rootmapping_fs.go +++ b/hugofs/rootmapping_fs.go @@ -14,13 +14,20 @@ package hugofs import ( + "errors" "fmt" + iofs "io/fs" "os" + "path" "path/filepath" "strings" "github.com/gohugoio/hugo/common/herrors" + "github.com/gohugoio/hugo/common/paths" + + "github.com/bep/overlayfs" 
"github.com/gohugoio/hugo/hugofs/files" + "github.com/gohugoio/hugo/hugofs/glob" radix "github.com/armon/go-radix" "github.com/spf13/afero" @@ -28,17 +35,31 @@ import ( var filepathSeparator = string(filepath.Separator) +var _ ReverseLookupProvder = (*RootMappingFs)(nil) + // NewRootMappingFs creates a new RootMappingFs on top of the provided with // root mappings with some optional metadata about the root. // Note that From represents a virtual root that maps to the actual filename in To. func NewRootMappingFs(fs afero.Fs, rms ...RootMapping) (*RootMappingFs, error) { rootMapToReal := radix.New() + realMapToRoot := radix.New() var virtualRoots []RootMapping + addMapping := func(key string, rm RootMapping, to *radix.Tree) { + var mappings []RootMapping + v, found := to.Get(key) + if found { + // There may be more than one language pointing to the same root. + mappings = v.([]RootMapping) + } + mappings = append(mappings, rm) + to.Insert(key, mappings) + } + for _, rm := range rms { (&rm).clean() - fromBase := files.ResolveComponentFolder(rm.From) + rm.FromBase = files.ResolveComponentFolder(rm.From) if len(rm.To) < 2 { panic(fmt.Sprintf("invalid root mapping; from/to: %s/%s", rm.From, rm.To)) @@ -46,21 +67,80 @@ func NewRootMappingFs(fs afero.Fs, rms ...RootMapping) (*RootMappingFs, error) { fi, err := fs.Stat(rm.To) if err != nil { - if herrors.IsNotExist(err) { + if os.IsNotExist(err) { continue } return nil, err } - // Extract "blog" from "content/blog" - rm.path = strings.TrimPrefix(strings.TrimPrefix(rm.From, fromBase), filepathSeparator) + if rm.Meta == nil { rm.Meta = NewFileMeta() } - rm.Meta.SourceRoot = rm.To - rm.Meta.BaseDir = rm.ToBasedir - rm.Meta.MountRoot = rm.path + if !fi.IsDir() { + // We do allow single file mounts. + // However, the file system logic will be much simpler with just directories. + // So, convert this mount into a directory mount with a nameTo filter and renamer. 
+ dirFrom, nameFrom := filepath.Split(rm.From) + dirTo, nameTo := filepath.Split(rm.To) + dirFrom, dirTo = strings.TrimSuffix(dirFrom, filepathSeparator), strings.TrimSuffix(dirTo, filepathSeparator) + rm.From = dirFrom + + fi, err = fs.Stat(rm.To) + if err != nil { + if herrors.IsNotExist(err) { + continue + } + return nil, err + } + + rm.fiSingleFile = NewFileMetaInfo(fi, rm.Meta.Copy()) + rm.To = dirTo + + rm.Meta.Rename = func(name string, toFrom bool) string { + if toFrom { + if name == nameTo { + return nameFrom + } + return name + } + + if name == nameFrom { + return nameTo + } + + return name + } + nameToFilename := filepathSeparator + nameTo + + rm.Meta.InclusionFilter = rm.Meta.InclusionFilter.Append(glob.NewFilenameFilterForInclusionFunc( + func(filename string) bool { + return strings.HasPrefix(nameToFilename, filename) + }, + )) + + // Refresh the FileInfo object. + fi, err = fs.Stat(rm.To) + if err != nil { + if herrors.IsNotExist(err) { + continue + } + return nil, err + } + } + + if rm.FromBase == "" { + panic(" rm.FromBase is empty") + } + + // Extract "blog" from "content/blog" + rm.path = strings.TrimPrefix(strings.TrimPrefix(rm.From, rm.FromBase), filepathSeparator) + + rm.Meta.SourceRoot = fi.(MetaProvider).Meta().Filename + rm.Meta.BaseDir = rm.ToBase rm.Meta.Module = rm.Module + rm.Meta.ModuleOrdinal = rm.ModuleOrdinal + rm.Meta.Component = rm.FromBase rm.Meta.IsProject = rm.IsProject meta := rm.Meta.Copy() @@ -72,15 +152,13 @@ func NewRootMappingFs(fs afero.Fs, rms ...RootMapping) (*RootMappingFs, error) { rm.fi = NewFileMetaInfo(fi, meta) - key := filepathSeparator + rm.From - var mappings []RootMapping - v, found := rootMapToReal.Get(key) - if found { - // There may be more than one language pointing to the same root. 
- mappings = v.([]RootMapping) + addMapping(filepathSeparator+rm.From, rm, rootMapToReal) + rev := rm.To + if !strings.HasPrefix(rev, filepathSeparator) { + rev = filepathSeparator + rev } - mappings = append(mappings, rm) - rootMapToReal.Insert(key, mappings) + + addMapping(rev, rm, realMapToRoot) virtualRoots = append(virtualRoots, rm) } @@ -90,6 +168,7 @@ func NewRootMappingFs(fs afero.Fs, rms ...RootMapping) (*RootMappingFs, error) { rfs := &RootMappingFs{ Fs: fs, rootMapToReal: rootMapToReal, + realMapToRoot: realMapToRoot, } return rfs, nil @@ -103,9 +182,9 @@ func newRootMappingFsFromFromTo( rms := make([]RootMapping, len(fromTo)/2) for i, j := 0, 0; j < len(fromTo); i, j = i+1, j+2 { rms[i] = RootMapping{ - From: fromTo[j], - To: fromTo[j+1], - ToBasedir: baseDir, + From: fromTo[j], + To: fromTo[j+1], + ToBase: baseDir, } } @@ -114,16 +193,18 @@ func newRootMappingFsFromFromTo( // RootMapping describes a virtual file or directory mount. type RootMapping struct { - From string // The virtual mount. - To string // The source directory or file. - ToBasedir string // The base of To. May be empty if an absolute path was provided. - Module string // The module path/ID. - IsProject bool // Whether this is a mount in the main project. - Meta *FileMeta // File metadata (lang etc.) - - fi FileMetaInfo - path string // The virtual mount point, e.g. "blog". + From string // The virtual mount. + FromBase string // The base directory of the virtual mount. + To string // The source directory or file. + ToBase string // The base of To. May be empty if an absolute path was provided. + Module string // The module path/ID. + ModuleOrdinal int // The module ordinal starting with 0 which is the project. + IsProject bool // Whether this is a mount in the main project. + Meta *FileMeta // File metadata (lang etc.) + fi FileMetaInfo + fiSingleFile FileMetaInfo // Also set when this mounts represents a single file with a rename func. + path string // The virtual mount point, e.g. 
"blog". } type keyRootMappings struct { @@ -150,9 +231,7 @@ func (r RootMapping) trimFrom(name string) string { return strings.TrimPrefix(name, r.From) } -var ( - _ FilesystemUnwrapper = (*RootMappingFs)(nil) -) +var _ FilesystemUnwrapper = (*RootMappingFs)(nil) // A RootMappingFs maps several roots into one. Note that the root of this filesystem // is directories only, and they will be returned in Readdir and Readdirnames @@ -160,9 +239,10 @@ var ( type RootMappingFs struct { afero.Fs rootMapToReal *radix.Tree + realMapToRoot *radix.Tree } -func (fs *RootMappingFs) Dirs(base string) ([]FileMetaInfo, error) { +func (fs *RootMappingFs) Mounts(base string) ([]FileMetaInfo, error) { base = filepathSeparator + fs.cleanName(base) roots := fs.getRootsWithPrefix(base) @@ -172,17 +252,14 @@ func (fs *RootMappingFs) Dirs(base string) ([]FileMetaInfo, error) { fss := make([]FileMetaInfo, len(roots)) for i, r := range roots { - bfs := afero.NewBasePathFs(fs.Fs, r.To) - bfs = decoratePath(bfs, func(name string) string { - p := strings.TrimPrefix(name, r.To) - if r.path != "" { - // Make sure it's mounted to a any sub path, e.g. blog - p = filepath.Join(r.path, p) - } - p = strings.TrimLeft(p, filepathSeparator) - return p - }) + if r.fiSingleFile != nil { + // A single file mount. + fss[i] = r.fiSingleFile + continue + } + + bfs := NewBasePathFs(fs.Fs, r.To) fs := bfs if r.Meta.InclusionFilter != nil { fs = newFilenameFilterFs(fs, r.To, r.Meta.InclusionFilter) @@ -229,18 +306,9 @@ func (fs RootMappingFs) Filter(f func(m RootMapping) bool) *RootMappingFs { return &fs } -// LstatIfPossible returns the os.FileInfo structure describing a given file. -func (fs *RootMappingFs) LstatIfPossible(name string) (os.FileInfo, bool, error) { - fis, err := fs.doLstat(name) - if err != nil { - return nil, false, err - } - return fis[0], false, nil -} - // Open opens the named file for reading. 
func (fs *RootMappingFs) Open(name string) (afero.File, error) { - fis, err := fs.doLstat(name) + fis, err := fs.doStat(name) if err != nil { return nil, err } @@ -251,8 +319,68 @@ func (fs *RootMappingFs) Open(name string) (afero.File, error) { // Stat returns the os.FileInfo structure describing a given file. If there is // an error, it will be of type *os.PathError. func (fs *RootMappingFs) Stat(name string) (os.FileInfo, error) { - fi, _, err := fs.LstatIfPossible(name) - return fi, err + fis, err := fs.doStat(name) + if err != nil { + return nil, err + } + + return fis[0], nil +} + +type ComponentPath struct { + Component string + Path string + Lang string +} + +func (c ComponentPath) ComponentPathJoined() string { + return path.Join(c.Component, c.Path) +} + +type ReverseLookupProvder interface { + ReverseLookup(filename string, checkExists bool) ([]ComponentPath, error) +} + +// func (fs *RootMappingFs) ReverseStat(filename string) ([]FileMetaInfo, error) +func (fs *RootMappingFs) ReverseLookup(in string, checkExists bool) ([]ComponentPath, error) { + in = fs.cleanName(in) + key := filepathSeparator + in + + s, roots := fs.getRootsReverse(key) + + if len(roots) == 0 { + return nil, nil + } + + var cps []ComponentPath + + base := strings.TrimPrefix(key, s) + dir, name := filepath.Split(base) + + for _, first := range roots { + if first.Meta.Rename != nil { + name = first.Meta.Rename(name, true) + } + + // Now we know that this file _could_ be in this fs. + filename := filepathSeparator + filepath.Join(first.path, dir, name) + + if checkExists { + // Confirm that it exists. 
+ _, err := fs.Stat(first.FromBase + filename) + if err != nil { + continue + } + } + + cps = append(cps, ComponentPath{ + Component: first.FromBase, + Path: paths.ToSlashTrimLeading(filename), + Lang: first.Meta.Lang, + }) + } + + return cps, nil } func (fs *RootMappingFs) hasPrefix(prefix string) bool { @@ -275,21 +403,22 @@ func (fs *RootMappingFs) getRoot(key string) []RootMapping { } func (fs *RootMappingFs) getRoots(key string) (string, []RootMapping) { - s, v, found := fs.rootMapToReal.LongestPrefix(key) - if !found || (s == filepathSeparator && key != filepathSeparator) { + return fs.getRootsIn(key, fs.rootMapToReal) +} + +func (fs *RootMappingFs) getRootsReverse(key string) (string, []RootMapping) { + return fs.getRootsIn(key, fs.realMapToRoot) +} + +func (fs *RootMappingFs) getRootsIn(key string, tree *radix.Tree) (string, []RootMapping) { + s, v, found := tree.LongestPrefix(key) + + if !found { return "", nil } return s, v.([]RootMapping) } -func (fs *RootMappingFs) debug() { - fmt.Println("debug():") - fs.rootMapToReal.Walk(func(s string, v any) bool { - fmt.Println("Key", s) - return false - }) -} - func (fs *RootMappingFs) getRootsWithPrefix(prefix string) []RootMapping { var roots []RootMapping fs.rootMapToReal.WalkPrefix(prefix, func(b string, v any) bool { @@ -316,61 +445,63 @@ func (fs *RootMappingFs) getAncestors(prefix string) []keyRootMappings { } func (fs *RootMappingFs) newUnionFile(fis ...FileMetaInfo) (afero.File, error) { - meta := fis[0].Meta() - f, err := meta.Open() - if err != nil { - return nil, err - } if len(fis) == 1 { - return f, nil + return fis[0].Meta().Open() } - rf := &rootMappingFile{File: f, fs: fs, name: meta.Name, meta: meta} - if len(fis) == 1 { - return rf, err - } - - next, err := fs.newUnionFile(fis[1:]...) 
- if err != nil { - return nil, err + openers := make([]func() (afero.File, error), len(fis)) + for i := len(fis) - 1; i >= 0; i-- { + fi := fis[i] + openers[i] = func() (afero.File, error) { + meta := fi.Meta() + f, err := meta.Open() + if err != nil { + return nil, err + } + return &rootMappingDir{DirOnlyOps: f, fs: fs, name: meta.Name, meta: meta}, nil + } } - uf := &afero.UnionFile{Base: rf, Layer: next} - - uf.Merger = func(lofi, bofi []os.FileInfo) ([]os.FileInfo, error) { + merge := func(lofi, bofi []iofs.DirEntry) []iofs.DirEntry { // Ignore duplicate directory entries - seen := make(map[string]bool) - var result []os.FileInfo - - for _, fis := range [][]os.FileInfo{bofi, lofi} { - for _, fi := range fis { - - if fi.IsDir() && seen[fi.Name()] { + for _, fi1 := range bofi { + var found bool + for _, fi2 := range lofi { + if !fi2.IsDir() { continue } - - if fi.IsDir() { - seen[fi.Name()] = true + if fi1.Name() == fi2.Name() { + found = true + break } - - result = append(result, fi) + } + if !found { + lofi = append(lofi, fi1) } } - return result, nil + return lofi } - return uf, nil + info := func() (os.FileInfo, error) { + return fis[0], nil + } + + return overlayfs.OpenDir(merge, info, openers...) } func (fs *RootMappingFs) cleanName(name string) string { - return strings.Trim(filepath.Clean(name), filepathSeparator) + name = strings.Trim(filepath.Clean(name), filepathSeparator) + if name == "." 
{ + name = "" + } + return name } -func (fs *RootMappingFs) collectDirEntries(prefix string) ([]os.FileInfo, error) { - prefix = filepathSeparator + fs.cleanName(prefix) +func (rfs *RootMappingFs) collectDirEntries(prefix string) ([]iofs.DirEntry, error) { + prefix = filepathSeparator + rfs.cleanName(prefix) - var fis []os.FileInfo + var fis []iofs.DirEntry seen := make(map[string]bool) // Prevent duplicate directories level := strings.Count(prefix, filepathSeparator) @@ -380,15 +511,17 @@ func (fs *RootMappingFs) collectDirEntries(prefix string) ([]os.FileInfo, error) if err != nil { return err } - direntries, err := f.Readdir(-1) + direntries, err := f.(iofs.ReadDirFile).ReadDir(-1) if err != nil { f.Close() return err } for _, fi := range direntries { + meta := fi.(FileMetaInfo).Meta() meta.Merge(rm.Meta) + if !rm.Meta.InclusionFilter.Match(strings.TrimPrefix(meta.Filename, meta.SourceRoot), fi.IsDir()) { continue } @@ -400,11 +533,14 @@ func (fs *RootMappingFs) collectDirEntries(prefix string) ([]os.FileInfo, error) } seen[name] = true opener := func() (afero.File, error) { - return fs.Open(filepath.Join(rm.From, name)) + return rfs.Open(filepath.Join(rm.From, name)) } fi = newDirNameOnlyFileInfo(name, meta, opener) + } else if rm.Meta.Rename != nil { + if n := rm.Meta.Rename(fi.Name(), true); n != fi.Name() { + fi.(MetaProvider).Meta().Name = n + } } - fis = append(fis, fi) } @@ -414,7 +550,7 @@ func (fs *RootMappingFs) collectDirEntries(prefix string) ([]os.FileInfo, error) } // First add any real files/directories. - rms := fs.getRoot(prefix) + rms := rfs.getRoot(prefix) for _, rm := range rms { if err := collectDir(rm, rm.fi); err != nil { return nil, err @@ -423,7 +559,7 @@ func (fs *RootMappingFs) collectDirEntries(prefix string) ([]os.FileInfo, error) // Next add any file mounts inside the given directory. 
prefixInside := prefix + filepathSeparator - fs.rootMapToReal.WalkPrefix(prefixInside, func(s string, v any) bool { + rfs.rootMapToReal.WalkPrefix(prefixInside, func(s string, v any) bool { if (strings.Count(s, filepathSeparator) - level) != 1 { // This directory is not part of the current, but we // need to include the first name part to make it @@ -437,7 +573,7 @@ func (fs *RootMappingFs) collectDirEntries(prefix string) ([]os.FileInfo, error) } seen[name] = true opener := func() (afero.File, error) { - return fs.Open(path) + return rfs.Open(path) } fi := newDirNameOnlyFileInfo(name, nil, opener) @@ -460,7 +596,7 @@ func (fs *RootMappingFs) collectDirEntries(prefix string) ([]os.FileInfo, error) seen[name] = true opener := func() (afero.File, error) { - return fs.Open(rm.From) + return rfs.Open(rm.From) } fi := newDirNameOnlyFileInfo(name, rm.Meta, opener) @@ -473,7 +609,7 @@ func (fs *RootMappingFs) collectDirEntries(prefix string) ([]os.FileInfo, error) }) // Finally add any ancestor dirs with files in this directory. - ancestors := fs.getAncestors(prefix) + ancestors := rfs.getAncestors(prefix) for _, root := range ancestors { subdir := strings.TrimPrefix(prefix, root.key) for _, rm := range root.roots { @@ -491,7 +627,7 @@ func (fs *RootMappingFs) collectDirEntries(prefix string) ([]os.FileInfo, error) return fis, nil } -func (fs *RootMappingFs) doLstat(name string) ([]FileMetaInfo, error) { +func (fs *RootMappingFs) doStat(name string) ([]FileMetaInfo, error) { name = fs.cleanName(name) key := filepathSeparator + name @@ -504,7 +640,7 @@ func (fs *RootMappingFs) doLstat(name string) ([]FileMetaInfo, error) { return []FileMetaInfo{newDirNameOnlyFileInfo(name, nil, fs.virtualDirOpener(name))}, nil } - // Find any real files or directories with this key. + // Find any real directories with this key. 
_, roots := fs.getRoots(key) if roots == nil { return nil, &os.PathError{Op: "LStat", Path: name, Err: os.ErrNotExist} @@ -515,7 +651,7 @@ func (fs *RootMappingFs) doLstat(name string) ([]FileMetaInfo, error) { for _, rm := range roots { var fi FileMetaInfo - fi, _, err = fs.statRoot(rm, name) + fi, err = fs.statRoot(rm, name) if err == nil { fis = append(fis, fi) } @@ -565,33 +701,52 @@ func (fs *RootMappingFs) doLstat(name string) ([]FileMetaInfo, error) { return []FileMetaInfo{roots[0].fi}, nil } -func (fs *RootMappingFs) statRoot(root RootMapping, name string) (FileMetaInfo, bool, error) { - if !root.Meta.InclusionFilter.Match(root.trimFrom(name), root.fi.IsDir()) { - return nil, false, os.ErrNotExist +func (fs *RootMappingFs) statRoot(root RootMapping, filename string) (FileMetaInfo, error) { + dir, name := filepath.Split(filename) + if root.Meta.Rename != nil { + if n := root.Meta.Rename(name, false); n != name { + filename = filepath.Join(dir, n) + } + } + + if !root.Meta.InclusionFilter.Match(root.trimFrom(filename), root.fi.IsDir()) { + return nil, os.ErrNotExist } - filename := root.filename(name) - fi, b, err := lstatIfPossible(fs.Fs, filename) + filename = root.filename(filename) + fi, err := fs.Fs.Stat(filename) if err != nil { - return nil, b, err + return nil, err } var opener func() (afero.File, error) if fi.IsDir() { - // Make sure metadata gets applied in Readdir. + // Make sure metadata gets applied in ReadDir. opener = fs.realDirOpener(filename, root.Meta) } else { + if root.Meta.Rename != nil { + if n := root.Meta.Rename(fi.Name(), true); n != fi.Name() { + meta := fi.(MetaProvider).Meta() + + meta.Name = n + + } + } + // Opens the real file directly. 
opener = func() (afero.File, error) { return fs.Fs.Open(filename) } + } - return decorateFileInfo(fi, fs.Fs, opener, "", "", root.Meta), b, nil + fim := decorateFileInfo(fi, opener, "", root.Meta) + + return fim, nil } func (fs *RootMappingFs) virtualDirOpener(name string) func() (afero.File, error) { - return func() (afero.File, error) { return &rootMappingFile{name: name, fs: fs}, nil } + return func() (afero.File, error) { return &rootMappingDir{name: name, fs: fs}, nil } } func (fs *RootMappingFs) realDirOpener(name string, meta *FileMeta) func() (afero.File, error) { @@ -600,39 +755,41 @@ func (fs *RootMappingFs) realDirOpener(name string, meta *FileMeta) func() (afer if err != nil { return nil, err } - return &rootMappingFile{name: name, meta: meta, fs: fs, File: f}, nil + return &rootMappingDir{name: name, meta: meta, fs: fs, DirOnlyOps: f}, nil } } -type rootMappingFile struct { - afero.File +var _ iofs.ReadDirFile = (*rootMappingDir)(nil) + +type rootMappingDir struct { + *noOpRegularFileOps + DirOnlyOps fs *RootMappingFs name string meta *FileMeta } -func (f *rootMappingFile) Close() error { - if f.File == nil { +func (f *rootMappingDir) Close() error { + if f.DirOnlyOps == nil { return nil } - return f.File.Close() + return f.DirOnlyOps.Close() } -func (f *rootMappingFile) Name() string { +func (f *rootMappingDir) Name() string { return f.name } -func (f *rootMappingFile) Readdir(count int) ([]os.FileInfo, error) { - if f.File != nil { - - fis, err := f.File.Readdir(count) +func (f *rootMappingDir) ReadDir(count int) ([]iofs.DirEntry, error) { + if f.DirOnlyOps != nil { + fis, err := f.DirOnlyOps.(iofs.ReadDirFile).ReadDir(count) if err != nil { return nil, err } - var result []os.FileInfo + var result []iofs.DirEntry for _, fi := range fis { - fim := decorateFileInfo(fi, f.fs, nil, "", "", f.meta) + fim := decorateFileInfo(fi, nil, "", f.meta) meta := fim.Meta() if f.meta.InclusionFilter.Match(strings.TrimPrefix(meta.Filename, meta.SourceRoot), 
fim.IsDir()) { result = append(result, fim) @@ -644,10 +801,31 @@ func (f *rootMappingFile) Readdir(count int) ([]os.FileInfo, error) { return f.fs.collectDirEntries(f.name) } -func (f *rootMappingFile) Readdirnames(count int) ([]string, error) { - dirs, err := f.Readdir(count) +// Sentinal error to signal that a file is a directory. +var errIsDir = errors.New("isDir") + +func (f *rootMappingDir) Stat() (iofs.FileInfo, error) { + return nil, errIsDir +} + +func (f *rootMappingDir) Readdir(count int) ([]os.FileInfo, error) { + panic("not supported: use ReadDir") +} + +// Note that Readdirnames preserves the order of the underlying filesystem(s), +// which is usually directory order. +func (f *rootMappingDir) Readdirnames(count int) ([]string, error) { + dirs, err := f.ReadDir(count) if err != nil { return nil, err } - return fileInfosToNames(dirs), nil + return dirEntriesToNames(dirs), nil +} + +func dirEntriesToNames(fis []iofs.DirEntry) []string { + names := make([]string, len(fis)) + for i, d := range fis { + names[i] = d.Name() + } + return names } diff --git a/hugofs/rootmapping_fs_test.go b/hugofs/rootmapping_fs_test.go index b71462a8d..982e6dfaf 100644 --- a/hugofs/rootmapping_fs_test.go +++ b/hugofs/rootmapping_fs_test.go @@ -15,11 +15,12 @@ package hugofs import ( "fmt" - "io" "path/filepath" "sort" "testing" + iofs "io/fs" + "github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/hugofs/glob" @@ -35,16 +36,16 @@ func TestLanguageRootMapping(t *testing.T) { fs := NewBaseFileDecorator(afero.NewMemMapFs()) - c.Assert(afero.WriteFile(fs, filepath.Join("content/sv/svdir", "main.txt"), []byte("main sv"), 0755), qt.IsNil) + c.Assert(afero.WriteFile(fs, filepath.Join("content/sv/svdir", "main.txt"), []byte("main sv"), 0o755), qt.IsNil) - c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/mysvblogcontent", "sv-f.txt"), []byte("some sv blog content"), 0755), qt.IsNil) - c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/myenblogcontent", "en-f.txt"), 
[]byte("some en blog content in a"), 0755), qt.IsNil) - c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/mysvblogcontent/d1", "sv-d1-f.txt"), []byte("some sv blog content"), 0755), qt.IsNil) - c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/myenblogcontent/d1", "en-d1-f.txt"), []byte("some en blog content in a"), 0755), qt.IsNil) + c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/mysvblogcontent", "sv-f.txt"), []byte("some sv blog content"), 0o755), qt.IsNil) + c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/myenblogcontent", "en-f.txt"), []byte("some en blog content in a"), 0o755), qt.IsNil) + c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/mysvblogcontent/d1", "sv-d1-f.txt"), []byte("some sv blog content"), 0o755), qt.IsNil) + c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/myenblogcontent/d1", "en-d1-f.txt"), []byte("some en blog content in a"), 0o755), qt.IsNil) - c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/myotherenblogcontent", "en-f2.txt"), []byte("some en content"), 0755), qt.IsNil) - c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/mysvdocs", "sv-docs.txt"), []byte("some sv docs content"), 0755), qt.IsNil) - c.Assert(afero.WriteFile(fs, filepath.Join("themes/b/myenblogcontent", "en-b-f.txt"), []byte("some en content"), 0755), qt.IsNil) + c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/myotherenblogcontent", "en-f2.txt"), []byte("some en content"), 0o755), qt.IsNil) + c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/mysvdocs", "sv-docs.txt"), []byte("some sv docs content"), 0o755), qt.IsNil) + c.Assert(afero.WriteFile(fs, filepath.Join("themes/b/myenblogcontent", "en-b-f.txt"), []byte("some en content"), 0o755), qt.IsNil) rfs, err := NewRootMappingFs(fs, RootMapping{ @@ -76,12 +77,12 @@ func TestLanguageRootMapping(t *testing.T) { c.Assert(err, qt.IsNil) - collected, err := collectFilenames(rfs, "content", "content") + collected, err := collectPaths(rfs, "content") c.Assert(err, qt.IsNil) c.Assert(collected, 
qt.DeepEquals, - []string{"blog/d1/en-d1-f.txt", "blog/d1/sv-d1-f.txt", "blog/en-f.txt", "blog/en-f2.txt", "blog/sv-f.txt", "blog/svdir/main.txt", "docs/sv-docs.txt"}, qt.Commentf("%#v", collected)) + []string{"/blog/d1/en-d1-f.txt", "/blog/d1/sv-d1-f.txt", "/blog/en-f.txt", "/blog/en-f2.txt", "/blog/sv-f.txt", "/blog/svdir/main.txt", "/docs/sv-docs.txt"}, qt.Commentf("%#v", collected)) - dirs, err := rfs.Dirs(filepath.FromSlash("content/blog")) + dirs, err := rfs.Mounts(filepath.FromSlash("content/blog")) c.Assert(err, qt.IsNil) c.Assert(len(dirs), qt.Equals, 4) for _, dir := range dirs { @@ -92,7 +93,8 @@ func TestLanguageRootMapping(t *testing.T) { blog, err := rfs.Open(filepath.FromSlash("content/blog")) c.Assert(err, qt.IsNil) - fis, err := blog.Readdir(-1) + fis, err := blog.(iofs.ReadDirFile).ReadDir(-1) + c.Assert(err, qt.IsNil) for _, fi := range fis { f, err := fi.(FileMetaInfo).Meta().Open() c.Assert(err, qt.IsNil) @@ -146,10 +148,10 @@ func TestRootMappingFsDirnames(t *testing.T) { fs := NewBaseFileDecorator(afero.NewMemMapFs()) testfile := "myfile.txt" - c.Assert(fs.Mkdir("f1t", 0755), qt.IsNil) - c.Assert(fs.Mkdir("f2t", 0755), qt.IsNil) - c.Assert(fs.Mkdir("f3t", 0755), qt.IsNil) - c.Assert(afero.WriteFile(fs, filepath.Join("f2t", testfile), []byte("some content"), 0755), qt.IsNil) + c.Assert(fs.Mkdir("f1t", 0o755), qt.IsNil) + c.Assert(fs.Mkdir("f2t", 0o755), qt.IsNil) + c.Assert(fs.Mkdir("f3t", 0o755), qt.IsNil) + c.Assert(afero.WriteFile(fs, filepath.Join("f2t", testfile), []byte("some content"), 0o755), qt.IsNil) rfs, err := newRootMappingFsFromFromTo("", fs, "static/bf1", "f1t", "static/cf2", "f2t", "static/af3", "f3t") c.Assert(err, qt.IsNil) @@ -177,8 +179,8 @@ func TestRootMappingFsFilename(t *testing.T) { testfilename := filepath.Join(workDir, "f1t/foo/file.txt") - c.Assert(fs.MkdirAll(filepath.Join(workDir, "f1t/foo"), 0777), qt.IsNil) - c.Assert(afero.WriteFile(fs, testfilename, []byte("content"), 0666), qt.IsNil) + 
c.Assert(fs.MkdirAll(filepath.Join(workDir, "f1t/foo"), 0o777), qt.IsNil) + c.Assert(afero.WriteFile(fs, testfilename, []byte("content"), 0o666), qt.IsNil) rfs, err := newRootMappingFsFromFromTo(workDir, fs, "static/f1", filepath.Join(workDir, "f1t"), "static/f2", filepath.Join(workDir, "f2t")) c.Assert(err, qt.IsNil) @@ -197,14 +199,14 @@ func TestRootMappingFsMount(t *testing.T) { testfile := "test.txt" - c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/mynoblogcontent", testfile), []byte("some no content"), 0755), qt.IsNil) - c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/myenblogcontent", testfile), []byte("some en content"), 0755), qt.IsNil) - c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/mysvblogcontent", testfile), []byte("some sv content"), 0755), qt.IsNil) - c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/mysvblogcontent", "other.txt"), []byte("some sv content"), 0755), qt.IsNil) - c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/singlefiles", "no.txt"), []byte("no text"), 0755), qt.IsNil) - c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/singlefiles", "sv.txt"), []byte("sv text"), 0755), qt.IsNil) + c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/mynoblogcontent", testfile), []byte("some no content"), 0o755), qt.IsNil) + c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/myenblogcontent", testfile), []byte("some en content"), 0o755), qt.IsNil) + c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/mysvblogcontent", testfile), []byte("some sv content"), 0o755), qt.IsNil) + c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/mysvblogcontent", "other.txt"), []byte("some sv content"), 0o755), qt.IsNil) + c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/singlefiles", "no.txt"), []byte("no text"), 0o755), qt.IsNil) + c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/singlefiles", "sv.txt"), []byte("sv text"), 0o755), qt.IsNil) - bfs := afero.NewBasePathFs(fs, "themes/a").(*afero.BasePathFs) + bfs := 
NewBasePathFs(fs, "themes/a") rm := []RootMapping{ // Directories { @@ -224,16 +226,16 @@ func TestRootMappingFsMount(t *testing.T) { }, // Files { - From: "content/singles/p1.md", - To: "singlefiles/no.txt", - ToBasedir: "singlefiles", - Meta: &FileMeta{Lang: "no"}, + From: "content/singles/p1.md", + To: "singlefiles/no.txt", + ToBase: "singlefiles", + Meta: &FileMeta{Lang: "no"}, }, { - From: "content/singles/p1.md", - To: "singlefiles/sv.txt", - ToBasedir: "singlefiles", - Meta: &FileMeta{Lang: "sv"}, + From: "content/singles/p1.md", + To: "singlefiles/sv.txt", + ToBase: "singlefiles", + Meta: &FileMeta{Lang: "sv"}, }, } @@ -254,49 +256,49 @@ func TestRootMappingFsMount(t *testing.T) { // Union with duplicate dir names filtered. c.Assert(dirs1, qt.DeepEquals, []string{"test.txt", "test.txt", "other.txt", "test.txt"}) - files, err := afero.ReadDir(rfs, filepath.FromSlash("content/blog")) - c.Assert(err, qt.IsNil) - c.Assert(len(files), qt.Equals, 4) - - testfilefi := files[1] - c.Assert(testfilefi.Name(), qt.Equals, testfile) - - testfilem := testfilefi.(FileMetaInfo).Meta() - c.Assert(testfilem.Filename, qt.Equals, filepath.FromSlash("themes/a/mynoblogcontent/test.txt")) - - tf, err := testfilem.Open() + d, err := rfs.Open(filepath.FromSlash("content/blog")) c.Assert(err, qt.IsNil) - defer tf.Close() - b, err := io.ReadAll(tf) + files, err := d.(iofs.ReadDirFile).ReadDir(-1) c.Assert(err, qt.IsNil) - c.Assert(string(b), qt.Equals, "some no content") - - // Ambiguous - _, err = rfs.Stat(filepath.FromSlash("content/singles/p1.md")) - c.Assert(err, qt.Not(qt.IsNil)) + c.Assert(len(files), qt.Equals, 4) singlesDir, err := rfs.Open(filepath.FromSlash("content/singles")) c.Assert(err, qt.IsNil) defer singlesDir.Close() - singles, err := singlesDir.Readdir(-1) + singles, err := singlesDir.(iofs.ReadDirFile).ReadDir(-1) c.Assert(err, qt.IsNil) c.Assert(singles, qt.HasLen, 2) for i, lang := range []string{"no", "sv"} { fi := singles[i].(FileMetaInfo) - 
c.Assert(fi.Meta().PathFile(), qt.Equals, filepath.FromSlash("themes/a/singlefiles/"+lang+".txt")) c.Assert(fi.Meta().Lang, qt.Equals, lang) c.Assert(fi.Name(), qt.Equals, "p1.md") } + + // Test ReverseLookup. + // Single file mounts. + cps, err := rfs.ReverseLookup(filepath.FromSlash("singlefiles/no.txt"), true) + c.Assert(err, qt.IsNil) + c.Assert(cps, qt.DeepEquals, []ComponentPath{ + {Component: "content", Path: "singles/p1.md", Lang: "no"}, + {Component: "content", Path: "singles/p1.md", Lang: "sv"}, + }) + + // File inside directory mount. + cps, err = rfs.ReverseLookup(filepath.FromSlash("mynoblogcontent/test.txt"), true) + c.Assert(err, qt.IsNil) + c.Assert(cps, qt.DeepEquals, []ComponentPath{ + {Component: "content", Path: "blog/test.txt", Lang: "no"}, + }) } func TestRootMappingFsMountOverlap(t *testing.T) { c := qt.New(t) fs := NewBaseFileDecorator(afero.NewMemMapFs()) - c.Assert(afero.WriteFile(fs, filepath.FromSlash("da/a.txt"), []byte("some no content"), 0755), qt.IsNil) - c.Assert(afero.WriteFile(fs, filepath.FromSlash("db/b.txt"), []byte("some no content"), 0755), qt.IsNil) - c.Assert(afero.WriteFile(fs, filepath.FromSlash("dc/c.txt"), []byte("some no content"), 0755), qt.IsNil) - c.Assert(afero.WriteFile(fs, filepath.FromSlash("de/e.txt"), []byte("some no content"), 0755), qt.IsNil) + c.Assert(afero.WriteFile(fs, filepath.FromSlash("da/a.txt"), []byte("some no content"), 0o755), qt.IsNil) + c.Assert(afero.WriteFile(fs, filepath.FromSlash("db/b.txt"), []byte("some no content"), 0o755), qt.IsNil) + c.Assert(afero.WriteFile(fs, filepath.FromSlash("dc/c.txt"), []byte("some no content"), 0o755), qt.IsNil) + c.Assert(afero.WriteFile(fs, filepath.FromSlash("de/e.txt"), []byte("some no content"), 0o755), qt.IsNil) rm := []RootMapping{ { @@ -349,24 +351,24 @@ func TestRootMappingFsOs(t *testing.T) { defer clean() testfile := "myfile.txt" - c.Assert(fs.Mkdir(filepath.Join(d, "f1t"), 0755), qt.IsNil) - c.Assert(fs.Mkdir(filepath.Join(d, "f2t"), 0755), 
qt.IsNil) - c.Assert(fs.Mkdir(filepath.Join(d, "f3t"), 0755), qt.IsNil) + c.Assert(fs.Mkdir(filepath.Join(d, "f1t"), 0o755), qt.IsNil) + c.Assert(fs.Mkdir(filepath.Join(d, "f2t"), 0o755), qt.IsNil) + c.Assert(fs.Mkdir(filepath.Join(d, "f3t"), 0o755), qt.IsNil) // Deep structure deepDir := filepath.Join(d, "d1", "d2", "d3", "d4", "d5") - c.Assert(fs.MkdirAll(deepDir, 0755), qt.IsNil) + c.Assert(fs.MkdirAll(deepDir, 0o755), qt.IsNil) for i := 1; i <= 3; i++ { - c.Assert(fs.MkdirAll(filepath.Join(d, "d1", "d2", "d3", "d4", fmt.Sprintf("d4-%d", i)), 0755), qt.IsNil) - c.Assert(afero.WriteFile(fs, filepath.Join(d, "d1", "d2", "d3", fmt.Sprintf("f-%d.txt", i)), []byte("some content"), 0755), qt.IsNil) + c.Assert(fs.MkdirAll(filepath.Join(d, "d1", "d2", "d3", "d4", fmt.Sprintf("d4-%d", i)), 0o755), qt.IsNil) + c.Assert(afero.WriteFile(fs, filepath.Join(d, "d1", "d2", "d3", fmt.Sprintf("f-%d.txt", i)), []byte("some content"), 0o755), qt.IsNil) } - c.Assert(afero.WriteFile(fs, filepath.Join(d, "f2t", testfile), []byte("some content"), 0755), qt.IsNil) + c.Assert(afero.WriteFile(fs, filepath.Join(d, "f2t", testfile), []byte("some content"), 0o755), qt.IsNil) // https://github.com/gohugoio/hugo/issues/6854 mystaticDir := filepath.Join(d, "mystatic", "a", "b", "c") - c.Assert(fs.MkdirAll(mystaticDir, 0755), qt.IsNil) - c.Assert(afero.WriteFile(fs, filepath.Join(mystaticDir, "ms-1.txt"), []byte("some content"), 0755), qt.IsNil) + c.Assert(fs.MkdirAll(mystaticDir, 0o755), qt.IsNil) + c.Assert(afero.WriteFile(fs, filepath.Join(mystaticDir, "ms-1.txt"), []byte("some content"), 0o755), qt.IsNil) rfs, err := newRootMappingFsFromFromTo( d, @@ -407,33 +409,30 @@ func TestRootMappingFsOs(t *testing.T) { c.Assert(getDirnames("static/a/b/c"), qt.DeepEquals, []string{"d4", "f-1.txt", "f-2.txt", "f-3.txt", "ms-1.txt"}) c.Assert(getDirnames("static/a/b/c/d4"), qt.DeepEquals, []string{"d4-1", "d4-2", "d4-3", "d5"}) - all, err := collectFilenames(rfs, "static", "static") + all, err := 
collectPaths(rfs, "static") c.Assert(err, qt.IsNil) - c.Assert(all, qt.DeepEquals, []string{"a/b/c/f-1.txt", "a/b/c/f-2.txt", "a/b/c/f-3.txt", "a/b/c/ms-1.txt", "cf2/myfile.txt"}) + c.Assert(all, qt.DeepEquals, []string{"/a/b/c/f-1.txt", "/a/b/c/f-2.txt", "/a/b/c/f-3.txt", "/a/b/c/ms-1.txt", "/cf2/myfile.txt"}) - fis, err := collectFileinfos(rfs, "static", "static") + fis, err := collectFileinfos(rfs, "static") c.Assert(err, qt.IsNil) - c.Assert(fis[9].Meta().PathFile(), qt.Equals, filepath.FromSlash("d1/d2/d3/f-1.txt")) - dirc := fis[3].Meta() f, err := dirc.Open() c.Assert(err, qt.IsNil) defer f.Close() - fileInfos, err := f.Readdir(-1) + dirEntries, err := f.(iofs.ReadDirFile).ReadDir(-1) c.Assert(err, qt.IsNil) - sortFileInfos(fileInfos) + sortDirEntries(dirEntries) i := 0 - for _, fi := range fileInfos { + for _, fi := range dirEntries { if fi.IsDir() || fi.Name() == "ms-1.txt" { continue } i++ meta := fi.(FileMetaInfo).Meta() c.Assert(meta.Filename, qt.Equals, filepath.Join(d, fmt.Sprintf("/d1/d2/d3/f-%d.txt", i))) - c.Assert(meta.PathFile(), qt.Equals, filepath.FromSlash(fmt.Sprintf("d1/d2/d3/f-%d.txt", i))) } _, err = rfs.Stat(filepath.FromSlash("layouts/d2/d3/f-1.txt")) @@ -452,17 +451,17 @@ func TestRootMappingFsOsBase(t *testing.T) { // Deep structure deepDir := filepath.Join(d, "d1", "d2", "d3", "d4", "d5") - c.Assert(fs.MkdirAll(deepDir, 0755), qt.IsNil) + c.Assert(fs.MkdirAll(deepDir, 0o755), qt.IsNil) for i := 1; i <= 3; i++ { - c.Assert(fs.MkdirAll(filepath.Join(d, "d1", "d2", "d3", "d4", fmt.Sprintf("d4-%d", i)), 0755), qt.IsNil) - c.Assert(afero.WriteFile(fs, filepath.Join(d, "d1", "d2", "d3", fmt.Sprintf("f-%d.txt", i)), []byte("some content"), 0755), qt.IsNil) + c.Assert(fs.MkdirAll(filepath.Join(d, "d1", "d2", "d3", "d4", fmt.Sprintf("d4-%d", i)), 0o755), qt.IsNil) + c.Assert(afero.WriteFile(fs, filepath.Join(d, "d1", "d2", "d3", fmt.Sprintf("f-%d.txt", i)), []byte("some content"), 0o755), qt.IsNil) } mystaticDir := filepath.Join(d, "mystatic", 
"a", "b", "c") - c.Assert(fs.MkdirAll(mystaticDir, 0755), qt.IsNil) - c.Assert(afero.WriteFile(fs, filepath.Join(mystaticDir, "ms-1.txt"), []byte("some content"), 0755), qt.IsNil) + c.Assert(fs.MkdirAll(mystaticDir, 0o755), qt.IsNil) + c.Assert(afero.WriteFile(fs, filepath.Join(mystaticDir, "ms-1.txt"), []byte("some content"), 0o755), qt.IsNil) - bfs := afero.NewBasePathFs(fs, d) + bfs := NewBasePathFs(fs, d) rfs, err := newRootMappingFsFromFromTo( "", @@ -470,6 +469,7 @@ func TestRootMappingFsOsBase(t *testing.T) { "static", "mystatic", "static/a/b/c", filepath.Join("d1", "d2", "d3"), ) + c.Assert(err, qt.IsNil) getDirnames := func(dirname string) []string { dirname = filepath.FromSlash(dirname) @@ -491,13 +491,13 @@ func TestRootMappingFileFilter(t *testing.T) { for _, lang := range []string{"no", "en", "fr"} { for i := 1; i <= 3; i++ { - c.Assert(afero.WriteFile(fs, filepath.Join(lang, fmt.Sprintf("my%s%d.txt", lang, i)), []byte("some text file for"+lang), 0755), qt.IsNil) + c.Assert(afero.WriteFile(fs, filepath.Join(lang, fmt.Sprintf("my%s%d.txt", lang, i)), []byte("some text file for"+lang), 0o755), qt.IsNil) } } for _, lang := range []string{"no", "en", "fr"} { for i := 1; i <= 3; i++ { - c.Assert(afero.WriteFile(fs, filepath.Join(lang, "sub", fmt.Sprintf("mysub%s%d.txt", lang, i)), []byte("some text file for"+lang), 0755), qt.IsNil) + c.Assert(afero.WriteFile(fs, filepath.Join(lang, "sub", fmt.Sprintf("mysub%s%d.txt", lang, i)), []byte("some text file for"+lang), 0o755), qt.IsNil) } } @@ -545,9 +545,11 @@ func TestRootMappingFileFilter(t *testing.T) { c.Assert(err, qt.IsNil) c.Assert(len(dirEntriesSub), qt.Equals, 3) - dirEntries, err := afero.ReadDir(rfs, "content") + f, err := rfs.Open("content") + c.Assert(err, qt.IsNil) + defer f.Close() + dirEntries, err := f.(iofs.ReadDirFile).ReadDir(-1) c.Assert(err, qt.IsNil) c.Assert(len(dirEntries), qt.Equals, 4) - } diff --git a/hugofs/slice_fs.go b/hugofs/slice_fs.go deleted file mode 100644 index 
574a5cb5f..000000000 --- a/hugofs/slice_fs.go +++ /dev/null @@ -1,303 +0,0 @@ -// Copyright 2019 The Hugo Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package hugofs - -import ( - "fmt" - "os" - "syscall" - "time" - - "errors" - - "github.com/gohugoio/hugo/common/herrors" - "github.com/spf13/afero" -) - -var ( - _ afero.Fs = (*SliceFs)(nil) - _ afero.Lstater = (*SliceFs)(nil) - _ FilesystemsUnwrapper = (*SliceFs)(nil) - _ afero.File = (*sliceDir)(nil) -) - -func NewSliceFs(dirs ...FileMetaInfo) (afero.Fs, error) { - if len(dirs) == 0 { - return NoOpFs, nil - } - - for _, dir := range dirs { - if !dir.IsDir() { - return nil, errors.New("this fs supports directories only") - } - } - - fs := &SliceFs{ - dirs: dirs, - } - - return fs, nil -} - -// SliceFs is an ordered composite filesystem. 
-type SliceFs struct { - dirs []FileMetaInfo -} - -func (fs *SliceFs) UnwrapFilesystems() []afero.Fs { - var fss []afero.Fs - for _, dir := range fs.dirs { - fss = append(fss, dir.Meta().Fs) - } - return fss -} - -func (fs *SliceFs) Chmod(n string, m os.FileMode) error { - return syscall.EPERM -} - -func (fs *SliceFs) Chtimes(n string, a, m time.Time) error { - return syscall.EPERM -} - -func (fs *SliceFs) Chown(n string, uid, gid int) error { - return syscall.EPERM -} - -func (fs *SliceFs) LstatIfPossible(name string) (os.FileInfo, bool, error) { - fi, _, err := fs.pickFirst(name) - if err != nil { - return nil, false, err - } - - if fi.IsDir() { - return decorateFileInfo(fi, fs, fs.getOpener(name), "", "", nil), false, nil - } - - return nil, false, fmt.Errorf("lstat: files not supported: %q", name) -} - -func (fs *SliceFs) Mkdir(n string, p os.FileMode) error { - return syscall.EPERM -} - -func (fs *SliceFs) MkdirAll(n string, p os.FileMode) error { - return syscall.EPERM -} - -func (fs *SliceFs) Name() string { - return "SliceFs" -} - -func (fs *SliceFs) Open(name string) (afero.File, error) { - fi, idx, err := fs.pickFirst(name) - if err != nil { - return nil, err - } - - if !fi.IsDir() { - panic("currently only dirs in here") - } - - return &sliceDir{ - lfs: fs, - idx: idx, - dirname: name, - }, nil -} - -func (fs *SliceFs) OpenFile(name string, flag int, perm os.FileMode) (afero.File, error) { - panic("not implemented") -} - -func (fs *SliceFs) ReadDir(name string) ([]os.FileInfo, error) { - panic("not implemented") -} - -func (fs *SliceFs) Remove(n string) error { - return syscall.EPERM -} - -func (fs *SliceFs) RemoveAll(p string) error { - return syscall.EPERM -} - -func (fs *SliceFs) Rename(o, n string) error { - return syscall.EPERM -} - -func (fs *SliceFs) Stat(name string) (os.FileInfo, error) { - fi, _, err := fs.LstatIfPossible(name) - return fi, err -} - -func (fs *SliceFs) Create(n string) (afero.File, error) { - return nil, syscall.EPERM -} - 
-func (fs *SliceFs) getOpener(name string) func() (afero.File, error) { - return func() (afero.File, error) { - return fs.Open(name) - } -} - -func (fs *SliceFs) pickFirst(name string) (os.FileInfo, int, error) { - for i, mfs := range fs.dirs { - meta := mfs.Meta() - fs := meta.Fs - fi, _, err := lstatIfPossible(fs, name) - if err == nil { - // Gotta match! - return fi, i, nil - } - - if !herrors.IsNotExist(err) { - // Real error - return nil, -1, err - } - } - - // Not found - return nil, -1, os.ErrNotExist -} - -func (fs *SliceFs) readDirs(name string, startIdx, count int) ([]os.FileInfo, error) { - collect := func(lfs *FileMeta) ([]os.FileInfo, error) { - d, err := lfs.Fs.Open(name) - if err != nil { - if !herrors.IsNotExist(err) { - return nil, err - } - return nil, nil - } else { - defer d.Close() - dirs, err := d.Readdir(-1) - if err != nil { - return nil, err - } - return dirs, nil - } - } - - var dirs []os.FileInfo - - for i := startIdx; i < len(fs.dirs); i++ { - mfs := fs.dirs[i] - - fis, err := collect(mfs.Meta()) - if err != nil { - return nil, err - } - - dirs = append(dirs, fis...) - - } - - seen := make(map[string]bool) - var duplicates []int - for i, fi := range dirs { - if !fi.IsDir() { - continue - } - - if seen[fi.Name()] { - duplicates = append(duplicates, i) - } else { - // Make sure it's opened by this filesystem. - dirs[i] = decorateFileInfo(fi, fs, fs.getOpener(fi.(FileMetaInfo).Meta().Filename), "", "", nil) - seen[fi.Name()] = true - } - } - - // Remove duplicate directories, keep first. - if len(duplicates) > 0 { - for i := len(duplicates) - 1; i >= 0; i-- { - idx := duplicates[i] - dirs = append(dirs[:idx], dirs[idx+1:]...) 
- } - } - - if count > 0 && len(dirs) >= count { - return dirs[:count], nil - } - - return dirs, nil -} - -type sliceDir struct { - lfs *SliceFs - idx int - dirname string -} - -func (f *sliceDir) Close() error { - return nil -} - -func (f *sliceDir) Name() string { - return f.dirname -} - -func (f *sliceDir) Read(p []byte) (n int, err error) { - panic("not implemented") -} - -func (f *sliceDir) ReadAt(p []byte, off int64) (n int, err error) { - panic("not implemented") -} - -func (f *sliceDir) Readdir(count int) ([]os.FileInfo, error) { - return f.lfs.readDirs(f.dirname, f.idx, count) -} - -func (f *sliceDir) Readdirnames(count int) ([]string, error) { - dirsi, err := f.Readdir(count) - if err != nil { - return nil, err - } - - dirs := make([]string, len(dirsi)) - for i, d := range dirsi { - dirs[i] = d.Name() - } - return dirs, nil -} - -func (f *sliceDir) Seek(offset int64, whence int) (int64, error) { - panic("not implemented") -} - -func (f *sliceDir) Stat() (os.FileInfo, error) { - panic("not implemented") -} - -func (f *sliceDir) Sync() error { - panic("not implemented") -} - -func (f *sliceDir) Truncate(size int64) error { - panic("not implemented") -} - -func (f *sliceDir) Write(p []byte) (n int, err error) { - panic("not implemented") -} - -func (f *sliceDir) WriteAt(p []byte, off int64) (n int, err error) { - panic("not implemented") -} - -func (f *sliceDir) WriteString(s string) (ret int, err error) { - panic("not implemented") -} diff --git a/hugofs/walk.go b/hugofs/walk.go index e883f892e..18667a5fc 100644 --- a/hugofs/walk.go +++ b/hugofs/walk.go @@ -15,73 +15,60 @@ package hugofs import ( "fmt" - "os" + "io/fs" "path/filepath" "sort" "strings" "github.com/gohugoio/hugo/common/herrors" "github.com/gohugoio/hugo/common/loggers" - - "errors" + "github.com/gohugoio/hugo/common/paths" "github.com/spf13/afero" ) type ( - WalkFunc func(path string, info FileMetaInfo, err error) error + WalkFunc func(path string, info FileMetaInfo) error WalkHook func(dir 
FileMetaInfo, path string, readdir []FileMetaInfo) ([]FileMetaInfo, error) ) type Walkway struct { - fs afero.Fs - root string - basePath string - logger loggers.Logger - // May be pre-set - fi FileMetaInfo - dirEntries []FileMetaInfo - - walkFn WalkFunc + // Prevent a walkway to be walked more than once. walked bool - // We may traverse symbolic links and bite ourself. - seen map[string]bool - - // Optional hooks - hookPre WalkHook - hookPost WalkHook + // Config from client. + cfg WalkwayConfig } type WalkwayConfig struct { - Fs afero.Fs - Root string - BasePath string + // The filesystem to walk. + Fs afero.Fs + // The root to start from in Fs. + Root string + + // The logger to use. Logger loggers.Logger // One or both of these may be pre-set. - Info FileMetaInfo - DirEntries []FileMetaInfo + Info FileMetaInfo // The start info. + DirEntries []FileMetaInfo // The start info's dir entries. + // Will be called in order. + HookPre WalkHook // Optional. WalkFn WalkFunc - HookPre WalkHook - HookPost WalkHook + HookPost WalkHook // Optional. + + // Some optional flags. + FailOnNotExist bool // If set, return an error if a directory is not found. + SortDirEntries bool // If set, sort the dir entries by Name before calling the WalkFn, default is ReaDir order. 
} func NewWalkway(cfg WalkwayConfig) *Walkway { - var fs afero.Fs - if cfg.Info != nil { - fs = cfg.Info.Meta().Fs - } else { - fs = cfg.Fs - } - - basePath := cfg.BasePath - if basePath != "" && !strings.HasSuffix(basePath, filepathSeparator) { - basePath += filepathSeparator + if cfg.Fs == nil { + panic("fs must be set") } logger := cfg.Logger @@ -90,16 +77,8 @@ func NewWalkway(cfg WalkwayConfig) *Walkway { } return &Walkway{ - fs: fs, - root: cfg.Root, - basePath: basePath, - fi: cfg.Info, - dirEntries: cfg.DirEntries, - walkFn: cfg.WalkFn, - hookPre: cfg.HookPre, - hookPost: cfg.HookPost, - logger: logger, - seen: make(map[string]bool), + cfg: cfg, + logger: logger, } } @@ -109,53 +88,16 @@ func (w *Walkway) Walk() error { } w.walked = true - if w.fs == NoOpFs { + if w.cfg.Fs == NoOpFs { return nil } - var fi FileMetaInfo - if w.fi != nil { - fi = w.fi - } else { - info, _, err := lstatIfPossible(w.fs, w.root) - if err != nil { - if herrors.IsNotExist(err) { - return nil - } - - if w.checkErr(w.root, err) { - return nil - } - return w.walkFn(w.root, nil, fmt.Errorf("walk: %q: %w", w.root, err)) - } - fi = info.(FileMetaInfo) - } - - if !fi.IsDir() { - return w.walkFn(w.root, nil, errors.New("file to walk must be a directory")) - } - - return w.walk(w.root, fi, w.dirEntries, w.walkFn) -} - -// if the filesystem supports it, use Lstat, else use fs.Stat -func lstatIfPossible(fs afero.Fs, path string) (os.FileInfo, bool, error) { - if lfs, ok := fs.(afero.Lstater); ok { - fi, b, err := lfs.LstatIfPossible(path) - return fi, b, err - } - fi, err := fs.Stat(path) - return fi, false, err + return w.walk(w.cfg.Root, w.cfg.Info, w.cfg.DirEntries) } // checkErr returns true if the error is handled. 
func (w *Walkway) checkErr(filename string, err error) bool { - if err == ErrPermissionSymlink { - logUnsupportedSymlink(filename, w.logger) - return true - } - - if herrors.IsNotExist(err) { + if herrors.IsNotExist(err) && !w.cfg.FailOnNotExist { // The file may be removed in process. // This may be a ERROR situation, but it is not possible // to determine as a general case. @@ -166,115 +108,73 @@ func (w *Walkway) checkErr(filename string, err error) bool { return false } -func logUnsupportedSymlink(filename string, logger loggers.Logger) { - logger.Warnf("Unsupported symlink found in %q, skipping.", filename) -} - // walk recursively descends path, calling walkFn. -// It follow symlinks if supported by the filesystem, but only the same path once. -func (w *Walkway) walk(path string, info FileMetaInfo, dirEntries []FileMetaInfo, walkFn WalkFunc) error { - err := walkFn(path, info, nil) +func (w *Walkway) walk(path string, info FileMetaInfo, dirEntries []FileMetaInfo) error { + pathRel := strings.TrimPrefix(path, w.cfg.Root) + + if info == nil { + var err error + fi, err := w.cfg.Fs.Stat(path) + if err != nil { + if path == w.cfg.Root && herrors.IsNotExist(err) { + return nil + } + if w.checkErr(path, err) { + return nil + } + return fmt.Errorf("walk: stat: %s", err) + } + info = fi.(FileMetaInfo) + } + + err := w.cfg.WalkFn(path, info) if err != nil { if info.IsDir() && err == filepath.SkipDir { return nil } return err } + if !info.IsDir() { return nil } - meta := info.Meta() - filename := meta.Filename - if dirEntries == nil { - f, err := w.fs.Open(path) + f, err := w.cfg.Fs.Open(path) if err != nil { if w.checkErr(path, err) { return nil } - return walkFn(path, info, fmt.Errorf("walk: open %q (%q): %w", path, w.root, err)) + return fmt.Errorf("walk: open: path: %q filename: %q: %s", path, info.Meta().Filename, err) } + fis, err := f.(fs.ReadDirFile).ReadDir(-1) - fis, err := f.Readdir(-1) f.Close() if err != nil { - if w.checkErr(filename, err) { + if 
w.checkErr(path, err) { return nil } - return walkFn(path, info, fmt.Errorf("walk: Readdir: %w", err)) + return fmt.Errorf("walk: Readdir: %w", err) } - dirEntries = fileInfosToFileMetaInfos(fis) + dirEntries = DirEntriesToFileMetaInfos(fis) + for _, fi := range dirEntries { + if fi.Meta().PathInfo == nil { + fi.Meta().PathInfo = paths.Parse("", filepath.Join(pathRel, fi.Name())) + } + } - if !meta.IsOrdered { + if w.cfg.SortDirEntries { sort.Slice(dirEntries, func(i, j int) bool { - fii := dirEntries[i] - fij := dirEntries[j] - - fim, fjm := fii.Meta(), fij.Meta() - - // Pull bundle headers to the top. - ficlass, fjclass := fim.Classifier, fjm.Classifier - if ficlass != fjclass { - return ficlass < fjclass - } - - // With multiple content dirs with different languages, - // there can be duplicate files, and a weight will be added - // to the closest one. - fiw, fjw := fim.Weight, fjm.Weight - if fiw != fjw { - - return fiw > fjw - } - - // When we walk into a symlink, we keep the reference to - // the original name. - fin, fjn := fim.Name, fjm.Name - if fin != "" && fjn != "" { - return fin < fjn - } - - return fii.Name() < fij.Name() + return dirEntries[i].Name() < dirEntries[j].Name() }) } - } - - // First add some metadata to the dir entries - for _, fi := range dirEntries { - fim := fi.(FileMetaInfo) - - meta := fim.Meta() - - // Note that we use the original Name even if it's a symlink. 
- name := meta.Name - if name == "" { - name = fim.Name() - } - - if name == "" { - panic(fmt.Sprintf("[%s] no name set in %v", path, meta)) - } - pathn := filepath.Join(path, name) - - pathMeta := pathn - if w.basePath != "" { - pathMeta = strings.TrimPrefix(pathn, w.basePath) - } - - meta.Path = normalizeFilename(pathMeta) - meta.PathWalk = pathn - if fim.IsDir() && meta.IsSymlink && w.isSeen(meta.Filename) { - // Prevent infinite recursion - // Possible cyclic reference - meta.SkipDir = true - } } - if w.hookPre != nil { - dirEntries, err = w.hookPre(info, path, dirEntries) + if w.cfg.HookPre != nil { + var err error + dirEntries, err = w.cfg.HookPre(info, path, dirEntries) if err != nil { if err == filepath.SkipDir { return nil @@ -283,24 +183,19 @@ func (w *Walkway) walk(path string, info FileMetaInfo, dirEntries []FileMetaInfo } } - for _, fi := range dirEntries { - fim := fi.(FileMetaInfo) - meta := fim.Meta() - - if meta.SkipDir { - continue - } - - err := w.walk(meta.PathWalk, fim, nil, walkFn) + for _, fim := range dirEntries { + nextPath := filepath.Join(path, fim.Name()) + err := w.walk(nextPath, fim, nil) if err != nil { - if !fi.IsDir() || err != filepath.SkipDir { + if !fim.IsDir() || err != filepath.SkipDir { return err } } } - if w.hookPost != nil { - dirEntries, err = w.hookPost(info, path, dirEntries) + if w.cfg.HookPost != nil { + var err error + dirEntries, err = w.cfg.HookPost(info, path, dirEntries) if err != nil { if err == filepath.SkipDir { return nil @@ -310,16 +205,3 @@ func (w *Walkway) walk(path string, info FileMetaInfo, dirEntries []FileMetaInfo } return nil } - -func (w *Walkway) isSeen(filename string) bool { - if filename == "" { - return false - } - - if w.seen[filename] { - return true - } - - w.seen[filename] = true - return false -} diff --git a/hugofs/walk_test.go b/hugofs/walk_test.go index 2e162fa72..7366d008d 100644 --- a/hugofs/walk_test.go +++ b/hugofs/walk_test.go @@ -15,17 +15,13 @@ package hugofs import ( "context" + 
"errors" "fmt" - "os" "path/filepath" - "runtime" "strings" "testing" - "errors" - "github.com/gohugoio/hugo/common/para" - "github.com/gohugoio/hugo/htesting" "github.com/spf13/afero" @@ -37,14 +33,14 @@ func TestWalk(t *testing.T) { fs := NewBaseFileDecorator(afero.NewMemMapFs()) - afero.WriteFile(fs, "b.txt", []byte("content"), 0777) - afero.WriteFile(fs, "c.txt", []byte("content"), 0777) - afero.WriteFile(fs, "a.txt", []byte("content"), 0777) + afero.WriteFile(fs, "b.txt", []byte("content"), 0o777) + afero.WriteFile(fs, "c.txt", []byte("content"), 0o777) + afero.WriteFile(fs, "a.txt", []byte("content"), 0o777) - names, err := collectFilenames(fs, "", "") + names, err := collectPaths(fs, "") c.Assert(err, qt.IsNil) - c.Assert(names, qt.DeepEquals, []string{"a.txt", "b.txt", "c.txt"}) + c.Assert(names, qt.DeepEquals, []string{"/a.txt", "/b.txt", "/c.txt"}) } func TestWalkRootMappingFs(t *testing.T) { @@ -55,9 +51,9 @@ func TestWalkRootMappingFs(t *testing.T) { testfile := "test.txt" - c.Assert(afero.WriteFile(fs, filepath.Join("a/b", testfile), []byte("some content"), 0755), qt.IsNil) - c.Assert(afero.WriteFile(fs, filepath.Join("c/d", testfile), []byte("some content"), 0755), qt.IsNil) - c.Assert(afero.WriteFile(fs, filepath.Join("e/f", testfile), []byte("some content"), 0755), qt.IsNil) + c.Assert(afero.WriteFile(fs, filepath.Join("a/b", testfile), []byte("some content"), 0o755), qt.IsNil) + c.Assert(afero.WriteFile(fs, filepath.Join("c/d", testfile), []byte("some content"), 0o755), qt.IsNil) + c.Assert(afero.WriteFile(fs, filepath.Join("e/f", testfile), []byte("some content"), 0o755), qt.IsNil) rm := []RootMapping{ { @@ -77,16 +73,16 @@ func TestWalkRootMappingFs(t *testing.T) { rfs, err := NewRootMappingFs(fs, rm...) 
c.Assert(err, qt.IsNil) - return afero.NewBasePathFs(rfs, "static") + return NewBasePathFs(rfs, "static") } c.Run("Basic", func(c *qt.C) { bfs := prepare(c) - names, err := collectFilenames(bfs, "", "") + names, err := collectPaths(bfs, "") c.Assert(err, qt.IsNil) - c.Assert(names, qt.DeepEquals, []string{"a/test.txt", "b/test.txt", "c/test.txt"}) + c.Assert(names, qt.DeepEquals, []string{"/a/test.txt", "/b/test.txt", "/c/test.txt"}) }) c.Run("Para", func(c *qt.C) { @@ -97,7 +93,7 @@ func TestWalkRootMappingFs(t *testing.T) { for i := 0; i < 8; i++ { r.Run(func() error { - _, err := collectFilenames(bfs, "", "") + _, err := collectPaths(bfs, "") if err != nil { return err } @@ -117,111 +113,35 @@ func TestWalkRootMappingFs(t *testing.T) { }) } -func skipSymlink() bool { - if runtime.GOOS != "windows" { - return false - } - if os.Getenv("GITHUB_ACTION") != "" { - // TODO(bep) figure out why this fails on GitHub Actions. - return true - } - return os.Getenv("CI") == "" -} - -func TestWalkSymbolicLink(t *testing.T) { - if skipSymlink() { - t.Skip("Skip; os.Symlink needs administrator rights on Windows") - } - c := qt.New(t) - workDir, clean, err := htesting.CreateTempDir(Os, "hugo-walk-sym") - c.Assert(err, qt.IsNil) - defer clean() - wd, _ := os.Getwd() - defer func() { - os.Chdir(wd) - }() - - fs := NewBaseFileDecorator(Os) - - blogDir := filepath.Join(workDir, "blog") - docsDir := filepath.Join(workDir, "docs") - blogReal := filepath.Join(blogDir, "real") - blogRealSub := filepath.Join(blogReal, "sub") - c.Assert(os.MkdirAll(blogRealSub, 0777), qt.IsNil) - c.Assert(os.MkdirAll(docsDir, 0777), qt.IsNil) - afero.WriteFile(fs, filepath.Join(blogRealSub, "a.txt"), []byte("content"), 0777) - afero.WriteFile(fs, filepath.Join(docsDir, "b.txt"), []byte("content"), 0777) - - os.Chdir(blogDir) - c.Assert(os.Symlink("real", "symlinked"), qt.IsNil) - os.Chdir(blogReal) - c.Assert(os.Symlink("../real", "cyclic"), qt.IsNil) - os.Chdir(docsDir) - 
c.Assert(os.Symlink("../blog/real/cyclic", "docsreal"), qt.IsNil) - - t.Run("OS Fs", func(t *testing.T) { - c := qt.New(t) - - names, err := collectFilenames(fs, workDir, workDir) - c.Assert(err, qt.IsNil) - - c.Assert(names, qt.DeepEquals, []string{"blog/real/sub/a.txt", "blog/symlinked/sub/a.txt", "docs/b.txt"}) - }) - - t.Run("BasePath Fs", func(t *testing.T) { - c := qt.New(t) - - docsFs := afero.NewBasePathFs(fs, docsDir) - - names, err := collectFilenames(docsFs, "", "") - c.Assert(err, qt.IsNil) - - // Note: the docsreal folder is considered cyclic when walking from the root, but this works. - c.Assert(names, qt.DeepEquals, []string{"b.txt", "docsreal/sub/a.txt"}) - }) -} - -func collectFilenames(fs afero.Fs, base, root string) ([]string, error) { +func collectPaths(fs afero.Fs, root string) ([]string, error) { var names []string - walkFn := func(path string, info FileMetaInfo, err error) error { - if err != nil { - return err - } - + walkFn := func(path string, info FileMetaInfo) error { if info.IsDir() { return nil } - - filename := info.Meta().Path - filename = filepath.ToSlash(filename) - - names = append(names, filename) + names = append(names, info.Meta().PathInfo.Path()) return nil } - w := NewWalkway(WalkwayConfig{Fs: fs, BasePath: base, Root: root, WalkFn: walkFn}) + w := NewWalkway(WalkwayConfig{Fs: fs, Root: root, WalkFn: walkFn, SortDirEntries: true, FailOnNotExist: true}) err := w.Walk() return names, err } -func collectFileinfos(fs afero.Fs, base, root string) ([]FileMetaInfo, error) { +func collectFileinfos(fs afero.Fs, root string) ([]FileMetaInfo, error) { var fis []FileMetaInfo - walkFn := func(path string, info FileMetaInfo, err error) error { - if err != nil { - return err - } - + walkFn := func(path string, info FileMetaInfo) error { fis = append(fis, info) return nil } - w := NewWalkway(WalkwayConfig{Fs: fs, BasePath: base, Root: root, WalkFn: walkFn}) + w := NewWalkway(WalkwayConfig{Fs: fs, Root: root, WalkFn: walkFn, SortDirEntries: 
true, FailOnNotExist: true}) err := w.Walk() @@ -235,7 +155,7 @@ func BenchmarkWalk(b *testing.B) { writeFiles := func(dir string, numfiles int) { for i := 0; i < numfiles; i++ { filename := filepath.Join(dir, fmt.Sprintf("file%d.txt", i)) - c.Assert(afero.WriteFile(fs, filename, []byte("content"), 0777), qt.IsNil) + c.Assert(afero.WriteFile(fs, filename, []byte("content"), 0o777), qt.IsNil) } } @@ -249,10 +169,7 @@ func BenchmarkWalk(b *testing.B) { writeFiles("root/l1_2/l2_1", numFilesPerDir) writeFiles("root/l1_3", numFilesPerDir) - walkFn := func(path string, info FileMetaInfo, err error) error { - if err != nil { - return err - } + walkFn := func(path string, info FileMetaInfo) error { if info.IsDir() { return nil } diff --git a/hugolib/404_test.go b/hugolib/404_test.go index 383302e0b..3c6a06045 100644 --- a/hugolib/404_test.go +++ b/hugolib/404_test.go @@ -20,10 +20,10 @@ import ( func Test404(t *testing.T) { t.Parallel() - b := newTestSitesBuilder(t) - b.WithSimpleConfigFile().WithTemplatesAdded( - "404.html", - ` + files := ` +-- hugo.toml -- +baseURL = "http://example.com/" +-- layouts/404.html -- {{ $home := site.Home }} 404: Parent: {{ .Parent.Kind }} @@ -35,16 +35,21 @@ InSection: {{ .InSection $home.Section }}|{{ $home.InSection . }} Sections: {{ len .Sections }}| Page: {{ .Page.RelPermalink }}| Data: {{ len .Data }}| +` -`, - ) - b.Build(BuildCfg{}) + b := NewIntegrationTestBuilder( + IntegrationTestConfig{ + T: t, + TxtarString: files, + //LogLevel: logg.LevelTrace, + //Verbose: true, + }, + ).Build() // Note: We currently have only 1 404 page. One might think that we should have // multiple, to follow the Custom Output scheme, but I don't see how that would work // right now. 
b.AssertFileContent("public/404.html", ` - 404: Parent: home IsAncestor: false/true diff --git a/hugolib/alias.go b/hugolib/alias.go index 5165edb04..08d57a8bc 100644 --- a/hugolib/alias.go +++ b/hugolib/alias.go @@ -64,7 +64,7 @@ func (a aliasHandler) renderAlias(permalink string, p page.Page) (io.Reader, err p, } - ctx := tpl.SetPageInContext(context.Background(), p) + ctx := tpl.Context.Page.Set(context.Background(), p) buffer := new(bytes.Buffer) err := a.t.ExecuteWithContext(ctx, templ, buffer, data) diff --git a/hugolib/alias_test.go b/hugolib/alias_test.go index e03107ada..c017050c6 100644 --- a/hugolib/alias_test.go +++ b/hugolib/alias_test.go @@ -14,6 +14,7 @@ package hugolib import ( + "os" "path/filepath" "runtime" "testing" @@ -90,6 +91,8 @@ func TestAliasMultipleOutputFormats(t *testing.T) { b.CreateSites().Build(BuildCfg{}) + b.H.Sites[0].pageMap.debugPrint("", 999, os.Stdout) + // the real pages b.AssertFileContent("public/blog/page/index.html", "For some moments the old man") b.AssertFileContent("public/amp/blog/page/index.html", "For some moments the old man") diff --git a/hugolib/breaking_changes_test.go b/hugolib/breaking_changes_test.go index 533205deb..fbbf27aed 100644 --- a/hugolib/breaking_changes_test.go +++ b/hugolib/breaking_changes_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/hugolib/cascade_test.go b/hugolib/cascade_test.go index 0f607ecb5..194faeb0a 100644 --- a/hugolib/cascade_test.go +++ b/hugolib/cascade_test.go @@ -126,7 +126,7 @@ cascade: b.Build(BuildCfg{}) - p1 := b.H.Sites[0].getPage("p1") + p1 := b.H.Sites[0].getPageOldVersion("p1") if withHomeContent { b.Assert(p1.Params(), qt.DeepEquals, maps.Params{ @@ -158,34 +158,38 @@ func TestCascade(t *testing.T) { b := newCascadeTestBuilder(t, langs) b.Build(BuildCfg{}) + // b.H.Sites[0].pageMap.debugPrint("", 999, os.Stdout) + + // 12|term|/categories/cool|Cascade Category|cat.png|page|html-|\ + b.AssertFileContent("public/index.html", ` -12|term|categories/cool/_index.md|Cascade Category|cat.png|categories|html-| -12|term|categories/catsect1|catsect1|cat.png|categories|html-| -12|term|categories/funny|funny|cat.png|categories|html-| -12|taxonomy|categories/_index.md|My Categories|cat.png|categories|html-| -32|term|categories/sad/_index.md|Cascade Category|sad.png|categories|html-| -42|term|tags/blue|blue|home.png|tags|html-| -42|taxonomy|tags|Cascade Home|home.png|tags|html-| -42|section|sectnocontent|Cascade Home|home.png|sectnocontent|html-| -42|section|sect3|Cascade Home|home.png|sect3|html-| -42|page|bundle1/index.md|Cascade Home|home.png|page|html-| -42|page|p2.md|Cascade Home|home.png|page|html-| -42|page|sect2/p2.md|Cascade Home|home.png|sect2|html-| -42|page|sect3/nofrontmatter.md|Cascade Home|home.png|sect3|html-| -42|page|sect3/p1.md|Cascade Home|home.png|sect3|html-| -42|page|sectnocontent/p1.md|Cascade Home|home.png|sectnocontent|html-| -42|section|sectnofrontmatter/_index.md|Cascade Home|home.png|sectnofrontmatter|html-| -42|term|tags/green|green|home.png|tags|html-| -42|home|_index.md|Home|home.png|page|html-| -42|page|p1.md|p1|home.png|page|html-| -42|section|sect1/_index.md|Sect1|sect1.png|stype|html-| -42|section|sect1/s1_2/_index.md|Sect1_2|sect1.png|stype|html-| -42|page|sect1/s1_2/p1.md|Sect1_2_p1|sect1.png|stype|html-| 
-42|page|sect1/s1_2/p2.md|Sect1_2_p2|sect1.png|stype|html-| -42|section|sect2/_index.md|Sect2|home.png|sect2|html-| -42|page|sect2/p1.md|Sect2_p1|home.png|sect2|html-| -52|page|sect4/p1.md|Cascade Home|home.png|sect4|rss-| -52|section|sect4/_index.md|Sect4|home.png|sect4|rss-| +12|term|/categories/cool|Cascade Category|cat.png|categories|html-| +12|term|/categories/catsect1|Cascade Category|cat.png|categories|html-| +12|term|/categories/funny|Cascade Category|cat.png|categories|html-| +12|taxonomy|/categories|My Categories|cat.png|categories|html-| +32|term|/categories/sad|Cascade Category|sad.png|categories|html-| +42|term|/tags/blue|Cascade Home|home.png|tags|html-| +42|taxonomy|/tags|Cascade Home|home.png|tags|html-| +42|section|/sectnocontent|Cascade Home|home.png|sectnocontent|html-| +42|section|/sect3|Cascade Home|home.png|sect3|html-| +42|page|/bundle1|Cascade Home|home.png|page|html-| +42|page|/p2|Cascade Home|home.png|page|html-| +42|page|/sect2/p2|Cascade Home|home.png|sect2|html-| +42|page|/sect3/nofrontmatter|Cascade Home|home.png|sect3|html-| +42|page|/sect3/p1|Cascade Home|home.png|sect3|html-| +42|page|/sectnocontent/p1|Cascade Home|home.png|sectnocontent|html-| +42|section|/sectnofrontmatter|Cascade Home|home.png|sectnofrontmatter|html-| +42|term|/tags/green|Cascade Home|home.png|tags|html-| +42|home|/|Home|home.png|page|html-| +42|page|/p1|p1|home.png|page|html-| +42|section|/sect1|Sect1|sect1.png|stype|html-| +42|section|/sect1/s1_2|Sect1_2|sect1.png|stype|html-| +42|page|/sect1/s1_2/p1|Sect1_2_p1|sect1.png|stype|html-| +42|page|/sect1/s1_2/p2|Sect1_2_p2|sect1.png|stype|html-| +42|section|/sect2|Sect2|home.png|sect2|html-| +42|page|/sect2/p1|Sect2_p1|home.png|sect2|html-| +52|page|/sect4/p1|Cascade Home|home.png|sect4|rss-| +52|section|/sect4|Sect4|home.png|sect4|rss-| `) // Check that type set in cascade gets the correct layout. 
@@ -263,7 +267,7 @@ cascade: assert() b.AssertFileContent("public/post/dir/p1/index.html", `content edit -Banner: post.jpg`, + Banner: post.jpg`, ) }) @@ -323,17 +327,56 @@ Banner: post.jpg`, b.EditFiles("content/post/_index.md", indexContentCascade+"\ncontent edit") - counters := &testCounters{} + counters := &buildCounters{} b.Build(BuildCfg{testCounters: counters}) - // As we only changed the content, not the cascade front matter, - // only the home page is re-rendered. - b.Assert(int(counters.contentRenderCounter), qt.Equals, 1) + b.Assert(int(counters.contentRenderCounter.Load()), qt.Equals, 2) b.AssertFileContent("public/post/index.html", `Banner: post.jpg|Layout: postlayout|Type: posttype|Content: <p>content edit</p>`) b.AssertFileContent("public/post/dir/p1/index.html", `Banner: post.jpg|Layout: postlayout|`) }) } +func TestCascadeBuildOptionsTaxonomies(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +baseURL="https://example.org" +[taxonomies] +tag = "tags" + +[[cascade]] + +[cascade._build] +render = "never" +list = "never" +publishResources = false + +[cascade._target] +path = '/hidden/**' +-- content/p1.md -- +--- +title: P1 +--- +-- content/hidden/p2.md -- +--- +title: P2 +tags: [t1, t2] +--- +-- layouts/_default/list.html -- +List: {{ len .Pages }}| +-- layouts/_default/single.html -- +Single: Tags: {{ site.Taxonomies.tags }}| +` + + b := Test(t, files) + + b.AssertFileContent("public/p1/index.html", "Single: Tags: map[]|") + b.AssertFileContent("public/tags/index.html", "List: 0|") + b.AssertFileExists("public/hidden/p2/index.html", false) + b.AssertFileExists("public/tags/t2/index.html", false) +} + func newCascadeTestBuilder(t testing.TB, langs []string) *sitesBuilder { p := func(m map[string]any) string { var yamlStr string @@ -474,7 +517,7 @@ defaultContentLanguageInSubDir = false b.WithTemplates("index.html", ` {{ range .Site.Pages }} -{{- .Weight }}|{{ .Kind }}|{{ path.Join .Path }}|{{ .Title }}|{{ .Params.icon }}|{{ .Type }}|{{ 
range .OutputFormats }}{{ .Name }}-{{ end }}| +{{- .Weight }}|{{ .Kind }}|{{ .Path }}|{{ .Title }}|{{ .Params.icon }}|{{ .Type }}|{{ range .OutputFormats }}{{ .Name }}-{{ end }}| {{ end }} `, diff --git a/hugolib/collections_test.go b/hugolib/collections_test.go index 6925d41cd..fff57337f 100644 --- a/hugolib/collections_test.go +++ b/hugolib/collections_test.go @@ -82,8 +82,8 @@ tags_weight: %d c.Assert(len(b.H.Sites[0].RegularPages()), qt.Equals, 2) b.AssertFileContent("public/index.html", - "pages:2:page.Pages:Page(/page1.md)/Page(/page2.md)", - "pageGroups:2:page.PagesGroup:Page(/page1.md)/Page(/page2.md)", + "pages:2:page.Pages:Page(/page1)/Page(/page2)", + "pageGroups:2:page.PagesGroup:Page(/page1)/Page(/page2)", `weightedPages:2::page.WeightedPages:[WeightedPage(10,"Page") WeightedPage(20,"Page")]`) } @@ -207,7 +207,7 @@ tags_weight: %d c.Assert(len(b.H.Sites[0].RegularPages()), qt.Equals, 2) b.AssertFileContent("public/index.html", - "pages:2:page.Pages:Page(/page2.md)/Page(/page1.md)", + "pages:2:page.Pages:Page(/page2)/Page(/page1)", "appendPages:9:page.Pages:home/page", "appendStrings:[]string:[a b c d e]", "appendStringsSlice:[]string:[a b c c d]", diff --git a/hugolib/config_test.go b/hugolib/config_test.go index e912ff721..2cc3255fe 100644 --- a/hugolib/config_test.go +++ b/hugolib/config_test.go @@ -68,11 +68,9 @@ title = "English Comments Title" }, }, ) - } func TestLoadConfig(t *testing.T) { - t.Run("2 languages", func(t *testing.T) { t.Parallel() @@ -122,7 +120,6 @@ myparam = "svParamValue" b.Assert(svSite.Home().Title(), qt.Equals, "Svensk Title") b.Assert(svSite.Params()["myparam"], qt.Equals, "svParamValue") b.Assert(svSite.conf.StaticDir[0], qt.Equals, "mysvstatic") - }) t.Run("disable default language", func(t *testing.T) { @@ -149,7 +146,6 @@ weight = 2 b.Assert(err, qt.IsNotNil) b.Assert(err.Error(), qt.Contains, "cannot disable default content language") - }) t.Run("no internal config from outside", func(t *testing.T) { @@ -169,7 +165,6 @@ 
running = true ).Build() b.Assert(b.H.Conf.Running(), qt.Equals, false) - }) t.Run("env overrides", func(t *testing.T) { @@ -205,9 +200,7 @@ pm31: {{ .Site.Params.pm3.pm31 }} ).Build() b.AssertFileContent("public/index.html", "p1: p1base\np2: p2env\npm21: pm21env\npm22: pm22base\npm31: pm31env") - }) - } func TestLoadConfigThemeLanguage(t *testing.T) { @@ -256,7 +249,6 @@ p1: p1base p2: p2en sub: map[sub1:sub1en] `) - } func TestDisableRootSlicesFromEnv(t *testing.T) { @@ -297,7 +289,6 @@ Home. b.Assert(conf.DisableLanguages, qt.DeepEquals, []string{"sv", "no"}) b.Assert(conf.DisableKinds, qt.DeepEquals, []string{"taxonomy", "term"}) } - } func TestLoadMultiConfig(t *testing.T) { @@ -326,7 +317,6 @@ func TestLoadMultiConfig(t *testing.T) { c.Assert(cfg.PaginatePath, qt.Equals, "top") c.Assert(cfg.Paginate, qt.Equals, 32) - } func TestLoadConfigFromThemes(t *testing.T) { @@ -509,7 +499,6 @@ name = "menu-theme" // Issue #8724 for _, mergeStrategy := range []string{"none", "shallow"} { c.Run(fmt.Sprintf("Merge with sitemap config in theme, mergestrategy %s", mergeStrategy), func(c *qt.C) { - smapConfigTempl := `[sitemap] changefreq = %q filename = "sitemap.xml" @@ -531,10 +520,8 @@ name = "menu-theme" b.Assert(got.Sitemap, qt.DeepEquals, config.SitemapConfig{ChangeFreq: "monthly", Priority: -1, Filename: "sitemap.xml"}) b.AssertFileContent("public/sitemap.xml", "<changefreq>monthly</changefreq>") } - }) } - } func TestLoadConfigFromThemeDir(t *testing.T) { @@ -561,9 +548,9 @@ t2 = "tv2" b := newTestSitesBuilder(t) b.WithConfigFile("toml", mainConfig).WithThemeConfigFile("toml", themeConfig) - b.Assert(b.Fs.Source.MkdirAll(themeConfigDirDefault, 0777), qt.IsNil) - b.Assert(b.Fs.Source.MkdirAll(themeConfigDirProduction, 0777), qt.IsNil) - b.Assert(b.Fs.Source.MkdirAll(projectConfigDir, 0777), qt.IsNil) + b.Assert(b.Fs.Source.MkdirAll(themeConfigDirDefault, 0o777), qt.IsNil) + b.Assert(b.Fs.Source.MkdirAll(themeConfigDirProduction, 0o777), qt.IsNil) + 
b.Assert(b.Fs.Source.MkdirAll(projectConfigDir, 0o777), qt.IsNil) b.WithSourceFile(filepath.Join(projectConfigDir, "config.toml"), `[params] m2 = "mv2" @@ -587,7 +574,6 @@ t3 = "tv3p" "t1": "tv1", "t2": "tv2d", }) - } func TestPrivacyConfig(t *testing.T) { @@ -729,7 +715,6 @@ defaultMarkdownHandler = 'blackfriday' b.Assert(err, qt.IsNotNil) b.Assert(err.Error(), qt.Contains, "Configured defaultMarkdownHandler \"blackfriday\" not found. Did you mean to use goldmark? Blackfriday was removed in Hugo v0.100.0.") - } // Issue 8979 @@ -786,10 +771,8 @@ author: {{ site.Author }} "social: map[twitter:bepsays]", "author: map[name:bep]", ) - }) } - } // Issue #11089 @@ -835,7 +818,6 @@ Single. b.Assert(err, qt.IsNil) b.AssertFileContent("public/index.html", "Home: en|2|") b.AssertFileContent("public/sv/index.html", "Home: sv|0|") - }) t.Run("Sv first", func(t *testing.T) { @@ -857,7 +839,6 @@ Single. b.AssertFileContent("public/sv/index.html", "Home: sv|0|") } }) - } func TestConfigOutputFormatDefinedInTheme(t *testing.T) { @@ -893,7 +874,6 @@ Home. b.Assert(err, qt.IsNil) b.AssertFileContent("public/myindex.html", "Home.") - } func TestConfigParamSetOnLanguageLevel(t *testing.T) { @@ -971,6 +951,7 @@ LanguageCode: {{ eq site.LanguageCode site.Language.LanguageCode }}|{{ site.Lang IntegrationTestConfig{ T: t, TxtarString: files, + LogLevel: logg.LevelWarn, }, ).Build() @@ -991,7 +972,6 @@ Param: svParamValue LanguageCode: true|sv| `) - } func TestConfigEmptyMainSections(t *testing.T) { @@ -1017,7 +997,6 @@ mainSections: {{ site.Params.mainSections }} b.AssertFileContent("public/index.html", ` mainSections: [] `) - } func TestConfigHugoWorkingDir(t *testing.T) { @@ -1040,7 +1019,6 @@ WorkingDir: {{ hugo.WorkingDir }}| b.AssertFileContent("public/index.html", ` WorkingDir: myworkingdir| `) - } func TestConfigMergeLanguageDeepEmptyLefSide(t *testing.T) { @@ -1093,7 +1071,6 @@ Ein "Zitat" auf Deutsch. 
b.AssertFileContent("public/index.html", "p1: p1base", "<p>A “quote” in English.</p>") b.AssertFileContent("public/de/index.html", "p1: p1de", "<p>Ein «Zitat» auf Deutsch.</p>") - } func TestConfigLegacyValues(t *testing.T) { @@ -1160,7 +1137,6 @@ HTACCESS. ).Build() b.AssertFileContent("public/.htaccess", "HTACCESS") - } func TestConfigLanguageCodeTopLevel(t *testing.T) { @@ -1182,7 +1158,6 @@ LanguageCode: {{ .Site.LanguageCode }}|{{ site.Language.LanguageCode }}| ).Build() b.AssertFileContent("public/index.html", "LanguageCode: en-US|en-US|") - } // See #11159 @@ -1224,7 +1199,6 @@ Home. b.Assert(f.Path, qt.Equals, "foo") m, _ = svConfig.MediaTypes.Config.GetByType("text/html") b.Assert(m.Suffixes(), qt.DeepEquals, []string{"bar"}) - } func TestConfigMiscPanics(t *testing.T) { @@ -1232,7 +1206,6 @@ func TestConfigMiscPanics(t *testing.T) { // Issue 11047, t.Run("empty params", func(t *testing.T) { - files := ` -- hugo.yaml -- params: @@ -1253,7 +1226,6 @@ Foo: {{ site.Params.foo }}| // Issue 11046 t.Run("invalid language setup", func(t *testing.T) { - files := ` -- hugo.toml -- baseURL = "https://example.org" @@ -1284,7 +1256,6 @@ Foo: {{ site.Params.foo }}| // Issue 11044 t.Run("invalid defaultContentLanguage", func(t *testing.T) { - files := ` -- hugo.toml -- baseURL = "https://example.org" @@ -1309,7 +1280,6 @@ weight = 1 b.Assert(err, qt.IsNotNil) b.Assert(err.Error(), qt.Contains, "defaultContentLanguage does not match any language definition") }) - } // Issue #11040 @@ -1334,11 +1304,9 @@ Home. b.Assert(b.H.Configs.Base.Module.Mounts, qt.HasLen, 7) b.Assert(b.H.Configs.LanguageConfigSlice[0].Module.Mounts, qt.HasLen, 7) - } func TestDefaultContentLanguageInSubdirOnlyOneLanguage(t *testing.T) { - t.Run("One language, default in sub dir", func(t *testing.T) { t.Parallel() @@ -1431,7 +1399,6 @@ Home. 
b.AssertFileContent("public/sitemap.xml", "sitemapindex") b.AssertFileContent("public/en/sitemap.xml", "urlset") }) - } func TestLanguagesDisabled(t *testing.T) { @@ -1458,11 +1425,9 @@ Home. ).Build() b.Assert(len(b.H.Sites), qt.Equals, 1) - } func TestLoadConfigYamlEnvVar(t *testing.T) { - defaultEnv := []string{`HUGO_OUTPUTS=home: ['json']`} runVariant := func(t testing.TB, files string, env []string) *IntegrationTestBuilder { @@ -1487,7 +1452,6 @@ func TestLoadConfigYamlEnvVar(t *testing.T) { } return b - } t.Run("with empty slice", func(t *testing.T) { @@ -1511,7 +1475,6 @@ home = ["html"] "taxonomy": {"html", "rss"}, "term": {"html", "rss"}, }) - }) t.Run("with existing outputs", func(t *testing.T) { @@ -1527,7 +1490,6 @@ home = ["html"] ` runVariant(t, files, nil) - }) { @@ -1543,7 +1505,6 @@ home = ["html"] ` runVariant(t, files, []string{"HUGO_OUTPUTS_HOME=json"}) - }) } @@ -1558,7 +1519,6 @@ disableKinds = ["taxonomy", "term", "RSS", "sitemap", "robotsTXT", "page", "sect ` runVariant(t, files, nil) - }) t.Run("without existing outputs direct", func(t *testing.T) { @@ -1571,9 +1531,7 @@ disableKinds = ["taxonomy", "term", "RSS", "sitemap", "robotsTXT", "page", "sect ` runVariant(t, files, []string{"HUGO_OUTPUTS_HOME=json"}) - }) - } // Issue #11257 @@ -1606,10 +1564,9 @@ List. }, ).Build() - b.AssertDestinationExists("index.html", true) - b.AssertDestinationExists("categories/c1/index.html", true) - b.AssertDestinationExists("categories/index.html", false) - + b.AssertFileExists("public/index.html", true) + b.AssertFileExists("public/categories/c1/index.html", true) + b.AssertFileExists("public/categories/index.html", false) } func TestKindsUnknown(t *testing.T) { @@ -1636,7 +1593,6 @@ List. b.AssertLogContains("WARN Unknown kind \"foo\" in disableKinds configuration.\n") b.AssertLogContains("WARN Unknown kind \"foo\" in outputs configuration.\n") - } func TestDeprecateTaxonomyTerm(t *testing.T) { @@ -1664,5 +1620,4 @@ List. 
b.AssertLogContains("WARN DEPRECATED: Kind \"taxonomyterm\" used in disableKinds is deprecated, use \"taxonomy\" instead.\n") b.AssertLogContains("WARN DEPRECATED: Kind \"taxonomyterm\" used in outputs configuration is deprecated, use \"taxonomy\" instead.\n") - } diff --git a/hugolib/configdir_test.go b/hugolib/configdir_test.go index 559de6b22..032cb4e26 100644 --- a/hugolib/configdir_test.go +++ b/hugolib/configdir_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -16,7 +16,6 @@ package hugolib import "testing" func TestConfigDir(t *testing.T) { - t.Parallel() files := ` @@ -51,5 +50,4 @@ Params: map[a:acp1 b:bc1 c:c1 d:dcp1] `) - } diff --git a/hugolib/content_factory.go b/hugolib/content_factory.go index e22f46513..e6b0fb506 100644 --- a/hugolib/content_factory.go +++ b/hugolib/content_factory.go @@ -22,7 +22,8 @@ import ( "time" "github.com/gohugoio/hugo/common/htime" - "github.com/gohugoio/hugo/helpers" + "github.com/gohugoio/hugo/common/paths" + "github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/source" @@ -42,25 +43,17 @@ type ContentFactory struct { } // ApplyArchetypeFilename archetypeFilename to w as a template using the given Page p as the foundation for the data context. 
-func (f ContentFactory) ApplyArchetypeFilename(w io.Writer, p page.Page, archetypeKind, archetypeFilename string) error { - - fi, err := f.h.SourceFilesystems.Archetypes.Fs.Stat(archetypeFilename) - if err != nil { - return err - } - +func (f ContentFactory) ApplyArchetypeFi(w io.Writer, p page.Page, archetypeKind string, fi hugofs.FileMetaInfo) error { if fi.IsDir() { - return fmt.Errorf("archetype directory (%q) not supported", archetypeFilename) + return fmt.Errorf("archetype directory (%q) not supported", fi.Meta().Filename) } - templateSource, err := afero.ReadFile(f.h.SourceFilesystems.Archetypes.Fs, archetypeFilename) + templateSource, err := fi.Meta().ReadAll() if err != nil { - return fmt.Errorf("failed to read archetype file %q: %s: %w", archetypeFilename, err, err) - + return fmt.Errorf("failed to read archetype file %q: %s: %w", fi.Meta().Filename, err, err) } return f.ApplyArchetypeTemplate(w, p, archetypeKind, string(templateSource)) - } // ApplyArchetypeTemplate templateSource to w as a template using the given Page p as the foundation for the data context. 
@@ -84,7 +77,7 @@ func (f ContentFactory) ApplyArchetypeTemplate(w io.Writer, p page.Page, archety return fmt.Errorf("failed to parse archetype template: %s: %w", err, err) } - result, err := executeToString(context.TODO(), ps.s.Tmpl(), templ, d) + result, err := executeToString(context.Background(), ps.s.Tmpl(), templ, d) if err != nil { return fmt.Errorf("failed to execute archetype template: %s: %w", err, err) } @@ -92,7 +85,6 @@ func (f ContentFactory) ApplyArchetypeTemplate(w io.Writer, p page.Page, archety _, err = io.WriteString(w, f.shortcodeReplacerPost.Replace(result)) return err - } func (f ContentFactory) SectionFromFilename(filename string) (string, error) { @@ -102,7 +94,7 @@ func (f ContentFactory) SectionFromFilename(filename string) (string, error) { return "", err } - parts := strings.Split(helpers.ToSlashTrimLeading(rel), "/") + parts := strings.Split(paths.ToSlashTrimLeading(rel), "/") if len(parts) < 2 { return "", nil } @@ -114,7 +106,6 @@ func (f ContentFactory) SectionFromFilename(filename string) (string, error) { func (f ContentFactory) CreateContentPlaceHolder(filename string, force bool) (string, error) { filename = filepath.Clean(filename) _, abs, err := f.h.AbsProjectContentDir(filename) - if err != nil { return "", err } @@ -169,7 +160,7 @@ type archetypeFileData struct { // File is the same as Page.File, embedded here for historic reasons. // TODO(bep) make this a method. - source.File + *source.File } func (f *archetypeFileData) Site() page.Site { diff --git a/hugolib/content_factory_test.go b/hugolib/content_factory_test.go index 2c4b843a9..9181cda68 100644 --- a/hugolib/content_factory_test.go +++ b/hugolib/content_factory_test.go @@ -6,6 +6,7 @@ import ( "testing" qt "github.com/frankban/quicktest" + "github.com/gohugoio/hugo/hugofs" ) func TestContentFactory(t *testing.T) { @@ -52,7 +53,9 @@ Hello World. 
b.Assert(p, qt.Not(qt.IsNil)) var buf bytes.Buffer - b.Assert(cf.ApplyArchetypeFilename(&buf, p, "", "post.md"), qt.IsNil) + fi, err := b.H.BaseFs.Archetypes.Fs.Stat("post.md") + b.Assert(err, qt.IsNil) + b.Assert(cf.ApplyArchetypeFi(&buf, p, "", fi.(hugofs.FileMetaInfo)), qt.IsNil) b.Assert(buf.String(), qt.Contains, `title: "Mypage"`) }) @@ -72,7 +75,5 @@ theme = 'ipsum' abs, err := cf.CreateContentPlaceHolder(filepath.FromSlash("posts/test.md"), false) b.Assert(err, qt.IsNil) b.Assert(abs, qt.Equals, filepath.FromSlash("content/posts/test.md")) - }) - } diff --git a/hugolib/content_map.go b/hugolib/content_map.go index 8cb307691..fefa90bf1 100644 --- a/hugolib/content_map.go +++ b/hugolib/content_map.go @@ -18,1052 +18,253 @@ import ( "path" "path/filepath" "strings" - "sync" + "unicode" - "github.com/gohugoio/hugo/helpers" + "github.com/bep/logg" + "github.com/gohugoio/hugo/common/hugio" + "github.com/gohugoio/hugo/common/paths" + "github.com/gohugoio/hugo/identity" + "github.com/gohugoio/hugo/source" - "github.com/gohugoio/hugo/resources/kinds" "github.com/gohugoio/hugo/resources/page" - - "github.com/gohugoio/hugo/hugofs/files" + "github.com/gohugoio/hugo/resources/resource" "github.com/gohugoio/hugo/hugofs" - - radix "github.com/armon/go-radix" -) - -// We store the branch nodes in either the `sections` or `taxonomies` tree -// with their path as a key; Unix style slashes, a leading and trailing slash. -// -// E.g. "/blog/" or "/categories/funny/" -// -// Pages that belongs to a section are stored in the `pages` tree below -// the section name and a branch separator, e.g. "/blog/__hb_". A page is -// given a key using the path below the section and the base filename with no extension -// with a leaf separator added. -// -// For bundled pages (/mybundle/index.md), we use the folder name. 
-// -// An example of a full page key would be "/blog/__hb_page1__hl_" -// -// Bundled resources are stored in the `resources` having their path prefixed -// with the bundle they belong to, e.g. -// "/blog/__hb_bundle__hl_data.json". -// -// The weighted taxonomy entries extracted from page front matter are stored in -// the `taxonomyEntries` tree below /plural/term/page-key, e.g. -// "/categories/funny/blog/__hb_bundle__hl_". -const ( - cmBranchSeparator = "__hb_" - cmLeafSeparator = "__hl_" ) // Used to mark ambiguous keys in reverse index lookups. -var ambiguousContentNode = &contentNode{} - -func newContentMap(cfg contentMapConfig) *contentMap { - m := &contentMap{ - cfg: &cfg, - pages: &contentTree{Name: "pages", Tree: radix.New()}, - sections: &contentTree{Name: "sections", Tree: radix.New()}, - taxonomies: &contentTree{Name: "taxonomies", Tree: radix.New()}, - taxonomyEntries: &contentTree{Name: "taxonomyEntries", Tree: radix.New()}, - resources: &contentTree{Name: "resources", Tree: radix.New()}, - } - - m.pageTrees = []*contentTree{ - m.pages, m.sections, m.taxonomies, - } - - m.bundleTrees = []*contentTree{ - m.pages, m.sections, m.taxonomies, m.resources, - } - - m.branchTrees = []*contentTree{ - m.sections, m.taxonomies, - } - - addToReverseMap := func(k string, n *contentNode, m map[any]*contentNode) { - k = strings.ToLower(k) - existing, found := m[k] - if found && existing != ambiguousContentNode { - m[k] = ambiguousContentNode - } else if !found { - m[k] = n - } - } +var ambiguousContentNode = &pageState{} - m.pageReverseIndex = &contentTreeReverseIndex{ - t: []*contentTree{m.pages, m.sections, m.taxonomies}, - contentTreeReverseIndexMap: &contentTreeReverseIndexMap{ - initFn: func(t *contentTree, m map[any]*contentNode) { - t.Walk(func(s string, v any) bool { - n := v.(*contentNode) - if n.p != nil && !n.p.File().IsZero() { - meta := n.p.File().FileInfo().Meta() - if meta.Path != meta.PathFile() { - // Keep track of the original mount source. 
- mountKey := filepath.ToSlash(filepath.Join(meta.Module, meta.PathFile())) - addToReverseMap(mountKey, n, m) - } - } - k := strings.TrimPrefix(strings.TrimSuffix(path.Base(s), cmLeafSeparator), cmBranchSeparator) - addToReverseMap(k, n, m) - return false - }) - }, - }, - } - - return m +var trimCutsetDotSlashSpace = func(r rune) bool { + return r == '.' || r == '/' || unicode.IsSpace(r) } -type cmInsertKeyBuilder struct { - m *contentMap - - err error - - // Builder state - tree *contentTree - baseKey string // Section or page key - key string +type contentMapConfig struct { + lang string + taxonomyConfig taxonomiesConfigValues + taxonomyDisabled bool + taxonomyTermDisabled bool + pageDisabled bool + isRebuild bool } -func (b cmInsertKeyBuilder) ForPage(s string) *cmInsertKeyBuilder { - //fmt.Println("ForPage:", s, "baseKey:", b.baseKey, "key:", b.key, "tree:", b.tree.Name) - baseKey := b.baseKey - b.baseKey = s - - if baseKey != "/" { - // Don't repeat the section path in the key. - s = strings.TrimPrefix(s, baseKey) - } - s = strings.TrimPrefix(s, "/") +var _ contentNodeI = (*resourceSource)(nil) - switch b.tree { - case b.m.sections: - b.tree = b.m.pages - b.key = baseKey + cmBranchSeparator + s + cmLeafSeparator - case b.m.taxonomies: - b.key = path.Join(baseKey, s) - default: - panic("invalid state") - } +type resourceSource struct { + path *paths.Path + opener hugio.OpenReadSeekCloser + fi hugofs.FileMetaInfo - return &b + r resource.Resource } -func (b cmInsertKeyBuilder) ForResource(s string) *cmInsertKeyBuilder { - // fmt.Println("ForResource:", s, "baseKey:", b.baseKey, "key:", b.key) - - baseKey := helpers.AddTrailingSlash(b.baseKey) - s = strings.TrimPrefix(s, baseKey) - - switch b.tree { - case b.m.pages: - b.key = b.key + s - case b.m.sections, b.m.taxonomies: - b.key = b.key + cmLeafSeparator + s - default: - panic(fmt.Sprintf("invalid state: %#v", b.tree)) - } - b.tree = b.m.resources - return &b +func (r resourceSource) clone() *resourceSource { + 
r.r = nil + return &r } -func (b *cmInsertKeyBuilder) Insert(n *contentNode) *cmInsertKeyBuilder { - if b.err == nil { - b.tree.Insert(b.Key(), n) +func (r *resourceSource) LangIndex() int { + if r.r != nil && r.isPage() { + return r.r.(*pageState).s.languagei } - return b -} -func (b *cmInsertKeyBuilder) Key() string { - switch b.tree { - case b.m.sections, b.m.taxonomies: - return cleanSectionTreeKey(b.key) - default: - return cleanTreeKey(b.key) - } + return r.fi.Meta().LangIndex } -func (b *cmInsertKeyBuilder) DeleteAll() *cmInsertKeyBuilder { - if b.err == nil { - b.tree.DeletePrefix(b.Key()) - } - return b +func (r *resourceSource) MarkStale() { + resource.MarkStale(r.r) } -func (b *cmInsertKeyBuilder) WithFile(fi hugofs.FileMetaInfo) *cmInsertKeyBuilder { - b.newTopLevel() - m := b.m - meta := fi.Meta() - p := cleanTreeKey(meta.Path) - bundlePath := m.getBundleDir(meta) - isBundle := meta.Classifier.IsBundle() - if isBundle { - panic("not implemented") - } - - p, k := b.getBundle(p) - if k == "" { - b.err = fmt.Errorf("no bundle header found for %q", bundlePath) - return b +func (r *resourceSource) resetBuildState() { + if rr, ok := r.r.(buildStateReseter); ok { + rr.resetBuildState() } - - id := k + m.reduceKeyPart(p, fi.Meta().Path) - b.tree = b.m.resources - b.key = id - b.baseKey = p - - return b -} - -func (b *cmInsertKeyBuilder) WithSection(s string) *cmInsertKeyBuilder { - s = cleanSectionTreeKey(s) - b.newTopLevel() - b.tree = b.m.sections - b.baseKey = s - b.key = s - return b } -func (b *cmInsertKeyBuilder) WithTaxonomy(s string) *cmInsertKeyBuilder { - s = cleanSectionTreeKey(s) - b.newTopLevel() - b.tree = b.m.taxonomies - b.baseKey = s - b.key = s - return b +func (r *resourceSource) isPage() bool { + _, ok := r.r.(page.Page) + return ok } -// getBundle gets both the key to the section and the prefix to where to store -// this page bundle and its resources. 
-func (b *cmInsertKeyBuilder) getBundle(s string) (string, string) { - m := b.m - section, _ := m.getSection(s) - - p := strings.TrimPrefix(s, section) - - bundlePathParts := strings.Split(p, "/") - basePath := section + cmBranchSeparator - - // Put it into an existing bundle if found. - for i := len(bundlePathParts) - 2; i >= 0; i-- { - bundlePath := path.Join(bundlePathParts[:i]...) - searchKey := basePath + bundlePath + cmLeafSeparator - if _, found := m.pages.Get(searchKey); found { - return section + bundlePath, searchKey - } +func (r *resourceSource) GetIdentity() identity.Identity { + if r.r != nil { + return r.r.(identity.IdentityProvider).GetIdentity() } - - // Put it into the section bundle. - return section, section + cmLeafSeparator + return r.path } -func (b *cmInsertKeyBuilder) newTopLevel() { - b.key = "" -} - -type contentBundleViewInfo struct { - ordinal int - name viewName - termKey string - termOrigin string - weight int - ref *contentNode -} - -func (c *contentBundleViewInfo) kind() string { - if c.termKey != "" { - return kinds.KindTerm - } - return kinds.KindTaxonomy +func (r *resourceSource) ForEeachIdentity(f func(identity.Identity) bool) { + f(r.GetIdentity()) } -func (c *contentBundleViewInfo) sections() []string { - if c.kind() == kinds.KindTaxonomy { - return []string{c.name.plural} - } - - return []string{c.name.plural, c.termKey} +func (r *resourceSource) Path() string { + return r.path.Path() } -func (c *contentBundleViewInfo) term() string { - if c.termOrigin != "" { - return c.termOrigin - } - - return c.termKey +func (r *resourceSource) isContentNodeBranch() bool { + return false } -type contentMap struct { - cfg *contentMapConfig - - // View of regular pages, sections, and taxonomies. - pageTrees contentTrees - - // View of pages, sections, taxonomies, and resources. - bundleTrees contentTrees - - // View of sections and taxonomies. 
- branchTrees contentTrees - - // Stores page bundles keyed by its path's directory or the base filename, - // e.g. "blog/post.md" => "/blog/post", "blog/post/index.md" => "/blog/post" - // These are the "regular pages" and all of them are bundles. - pages *contentTree - - // A reverse index used as a fallback in GetPage. - // There are currently two cases where this is used: - // 1. Short name lookups in ref/relRef, e.g. using only "mypage.md" without a path. - // 2. Links resolved from a remounted content directory. These are restricted to the same module. - // Both of the above cases can result in ambiguous lookup errors. - pageReverseIndex *contentTreeReverseIndex - - // Section nodes. - sections *contentTree +var _ contentNodeI = (*resourceSources)(nil) - // Taxonomy nodes. - taxonomies *contentTree +type resourceSources []*resourceSource - // Pages in a taxonomy. - taxonomyEntries *contentTree - - // Resources stored per bundle below a common prefix, e.g. "/blog/post__hb_". - resources *contentTree -} - -func (m *contentMap) AddFiles(fis ...hugofs.FileMetaInfo) error { - for _, fi := range fis { - if err := m.addFile(fi); err != nil { - return err +func (n resourceSources) MarkStale() { + for _, r := range n { + if r != nil { + r.MarkStale() } } - - return nil } -func (m *contentMap) AddFilesBundle(header hugofs.FileMetaInfo, resources ...hugofs.FileMetaInfo) error { - var ( - meta = header.Meta() - classifier = meta.Classifier - isBranch = classifier == files.ContentClassBranch - bundlePath = m.getBundleDir(meta) - - n = m.newContentNodeFromFi(header) - b = m.newKeyBuilder() - - section string - ) - - if isBranch { - // Either a section or a taxonomy node. 
- section = bundlePath - if tc := m.cfg.getTaxonomyConfig(section); !tc.IsZero() { - term := strings.TrimPrefix(strings.TrimPrefix(section, "/"+tc.plural), "/") - - n.viewInfo = &contentBundleViewInfo{ - name: tc, - termKey: term, - termOrigin: term, - } - - n.viewInfo.ref = n - b.WithTaxonomy(section).Insert(n) - } else { - b.WithSection(section).Insert(n) - } - } else { - // A regular page. Attach it to its section. - section, _ = m.getOrCreateSection(n, bundlePath) - b = b.WithSection(section).ForPage(bundlePath).Insert(n) - } - - if m.cfg.isRebuild { - // The resource owner will be either deleted or overwritten on rebuilds, - // but make sure we handle deletion of resources (images etc.) as well. - b.ForResource("").DeleteAll() - } - - for _, r := range resources { - rb := b.ForResource(cleanTreeKey(r.Meta().Path)) - rb.Insert(&contentNode{fi: r}) - } - - return nil -} - -func (m *contentMap) CreateMissingNodes() error { - // Create missing home and root sections - rootSections := make(map[string]any) - trackRootSection := func(s string, b *contentNode) { - parts := strings.Split(s, "/") - if len(parts) > 2 { - root := strings.TrimSuffix(parts[1], cmBranchSeparator) - if root != "" { - if _, found := rootSections[root]; !found { - rootSections[root] = b - } - } - } - } - - m.sections.Walk(func(s string, v any) bool { - n := v.(*contentNode) - - if s == "/" { - return false - } - - trackRootSection(s, n) - return false - }) - - m.pages.Walk(func(s string, v any) bool { - trackRootSection(s, v.(*contentNode)) - return false - }) - - if _, found := rootSections["/"]; !found { - rootSections["/"] = true - } - - for sect, v := range rootSections { - var sectionPath string - if n, ok := v.(*contentNode); ok && n.path != "" { - sectionPath = n.path - firstSlash := strings.Index(sectionPath, "/") - if firstSlash != -1 { - sectionPath = sectionPath[:firstSlash] - } - } - sect = cleanSectionTreeKey(sect) - _, found := m.sections.Get(sect) - if !found { - 
m.sections.Insert(sect, &contentNode{path: sectionPath}) - } - } - - for _, view := range m.cfg.taxonomyConfig { - s := cleanSectionTreeKey(view.plural) - _, found := m.taxonomies.Get(s) - if !found { - b := &contentNode{ - viewInfo: &contentBundleViewInfo{ - name: view, - }, - } - b.viewInfo.ref = b - m.taxonomies.Insert(s, b) - } - } - - return nil -} - -func (m *contentMap) getBundleDir(meta *hugofs.FileMeta) string { - dir := cleanTreeKey(filepath.Dir(meta.Path)) - - switch meta.Classifier { - case files.ContentClassContent: - return path.Join(dir, meta.TranslationBaseName) - default: - return dir - } +func (n resourceSources) Path() string { + panic("not supported") } -func (m *contentMap) newContentNodeFromFi(fi hugofs.FileMetaInfo) *contentNode { - return &contentNode{ - fi: fi, - path: strings.TrimPrefix(filepath.ToSlash(fi.Meta().Path), "/"), - } +func (n resourceSources) isContentNodeBranch() bool { + return false } -func (m *contentMap) getFirstSection(s string) (string, *contentNode) { - s = helpers.AddTrailingSlash(s) - for { - k, v, found := m.sections.LongestPrefix(s) - - if !found { - return "", nil +func (n resourceSources) resetBuildState() { + for _, r := range n { + if r != nil { + r.resetBuildState() } - - if strings.Count(k, "/") <= 2 { - return k, v.(*contentNode) - } - - s = helpers.AddTrailingSlash(path.Dir(strings.TrimSuffix(s, "/"))) - } } -func (m *contentMap) newKeyBuilder() *cmInsertKeyBuilder { - return &cmInsertKeyBuilder{m: m} -} - -func (m *contentMap) getOrCreateSection(n *contentNode, s string) (string, *contentNode) { - level := strings.Count(s, "/") - k, b := m.getSection(s) - - mustCreate := false - - if k == "" { - mustCreate = true - } else if level > 1 && k == "/" { - // We found the home section, but this page needs to be placed in - // the root, e.g. "/blog", section. 
- mustCreate = true - } - - if mustCreate { - k = cleanSectionTreeKey(s[:strings.Index(s[1:], "/")+1]) - - b = &contentNode{ - path: n.rootSection(), +func (n resourceSources) GetIdentity() identity.Identity { + for _, r := range n { + if r != nil { + return r.GetIdentity() } - - m.sections.Insert(k, b) - } - - return k, b -} - -func (m *contentMap) getPage(section, name string) *contentNode { - section = helpers.AddTrailingSlash(section) - key := section + cmBranchSeparator + name + cmLeafSeparator - - v, found := m.pages.Get(key) - if found { - return v.(*contentNode) } return nil } -func (m *contentMap) getSection(s string) (string, *contentNode) { - s = helpers.AddTrailingSlash(path.Dir(strings.TrimSuffix(s, "/"))) - - k, v, found := m.sections.LongestPrefix(s) - - if found { - return k, v.(*contentNode) - } - return "", nil -} - -func (m *contentMap) getTaxonomyParent(s string) (string, *contentNode) { - s = helpers.AddTrailingSlash(path.Dir(strings.TrimSuffix(s, "/"))) - k, v, found := m.taxonomies.LongestPrefix(s) - - if found { - return k, v.(*contentNode) - } - - v, found = m.sections.Get("/") - if found { - return s, v.(*contentNode) - } - - return "", nil -} - -func (m *contentMap) addFile(fi hugofs.FileMetaInfo) error { - b := m.newKeyBuilder() - return b.WithFile(fi).Insert(m.newContentNodeFromFi(fi)).err -} - -func cleanTreeKey(k string) string { - k = "/" + strings.ToLower(strings.Trim(path.Clean(filepath.ToSlash(k)), "./")) - return k -} - -func cleanSectionTreeKey(k string) string { - k = cleanTreeKey(k) - if k != "/" { - k += "/" - } - - return k -} - -func (m *contentMap) onSameLevel(s1, s2 string) bool { - return strings.Count(s1, "/") == strings.Count(s2, "/") -} - -func (m *contentMap) deleteBundleMatching(matches func(b *contentNode) bool) { - // Check sections first - s := m.sections.getMatch(matches) - if s != "" { - m.deleteSectionByPath(s) - return - } - - s = m.pages.getMatch(matches) - if s != "" { - m.deletePage(s) - return - } - - s = 
m.resources.getMatch(matches) - if s != "" { - m.resources.Delete(s) - } -} - -// Deletes any empty root section that's not backed by a content file. -func (m *contentMap) deleteOrphanSections() { - var sectionsToDelete []string - - m.sections.Walk(func(s string, v any) bool { - n := v.(*contentNode) - - if n.fi != nil { - // Section may be empty, but is backed by a content file. - return false - } - - if s == "/" || strings.Count(s, "/") > 2 { - return false - } - - prefixBundle := s + cmBranchSeparator - - if !(m.sections.hasBelow(s) || m.pages.hasBelow(prefixBundle) || m.resources.hasBelow(prefixBundle)) { - sectionsToDelete = append(sectionsToDelete, s) - } - - return false - }) - - for _, s := range sectionsToDelete { - m.sections.Delete(s) - } -} - -func (m *contentMap) deletePage(s string) { - m.pages.DeletePrefix(s) - m.resources.DeletePrefix(s) -} - -func (m *contentMap) deleteSectionByPath(s string) { - if !strings.HasSuffix(s, "/") { - panic("section must end with a slash") - } - if !strings.HasPrefix(s, "/") { - panic("section must start with a slash") - } - m.sections.DeletePrefix(s) - m.pages.DeletePrefix(s) - m.resources.DeletePrefix(s) -} - -func (m *contentMap) deletePageByPath(s string) { - m.pages.Walk(func(s string, v any) bool { - fmt.Println("S", s) - - return false - }) -} - -func (m *contentMap) deleteTaxonomy(s string) { - m.taxonomies.DeletePrefix(s) -} - -func (m *contentMap) reduceKeyPart(dir, filename string) string { - dir, filename = filepath.ToSlash(dir), filepath.ToSlash(filename) - dir, filename = strings.TrimPrefix(dir, "/"), strings.TrimPrefix(filename, "/") - - return strings.TrimPrefix(strings.TrimPrefix(filename, dir), "/") -} - -func (m *contentMap) splitKey(k string) []string { - if k == "" || k == "/" { - return nil - } - - parts := strings.Split(k, "/")[1:] - if len(parts) == 0 { - return nil - } - if parts[len(parts)-1] == "" { - parts = parts[:len(parts)-1] - } - return parts -} - -func (m *contentMap) testDump() string 
{ - var sb strings.Builder - - for i, r := range []*contentTree{m.pages, m.sections, m.resources} { - sb.WriteString(fmt.Sprintf("Tree %d:\n", i)) - r.Walk(func(s string, v any) bool { - sb.WriteString("\t" + s + "\n") - return false - }) - } - - for i, r := range []*contentTree{m.pages, m.sections} { - r.Walk(func(s string, v any) bool { - c := v.(*contentNode) - cpToString := func(c *contentNode) string { - var sb strings.Builder - if c.p != nil { - sb.WriteString("|p:" + c.p.Title()) - } - if c.fi != nil { - sb.WriteString("|f:" + filepath.ToSlash(c.fi.Meta().Path)) - } - return sb.String() +func (n resourceSources) ForEeachIdentity(f func(identity.Identity) bool) { + for _, r := range n { + if r != nil { + if f(r.GetIdentity()) { + return } - sb.WriteString(path.Join(m.cfg.lang, r.Name) + s + cpToString(c) + "\n") - - resourcesPrefix := s - - if i == 1 { - resourcesPrefix += cmLeafSeparator - - m.pages.WalkPrefix(s+cmBranchSeparator, func(s string, v any) bool { - sb.WriteString("\t - P: " + filepath.ToSlash((v.(*contentNode).fi.(hugofs.FileMetaInfo)).Meta().Filename) + "\n") - return false - }) - } - - m.resources.WalkPrefix(resourcesPrefix, func(s string, v any) bool { - sb.WriteString("\t - R: " + filepath.ToSlash((v.(*contentNode).fi.(hugofs.FileMetaInfo)).Meta().Filename) + "\n") - return false - }) - - return false - }) + } } - - return sb.String() -} - -type contentMapConfig struct { - lang string - taxonomyConfig []viewName - taxonomyDisabled bool - taxonomyTermDisabled bool - pageDisabled bool - isRebuild bool } func (cfg contentMapConfig) getTaxonomyConfig(s string) (v viewName) { - s = strings.TrimPrefix(s, "/") - if s == "" { - return - } - for _, n := range cfg.taxonomyConfig { - if strings.HasPrefix(s, n.plural) { + for _, n := range cfg.taxonomyConfig.views { + if strings.HasPrefix(s, n.pluralTreeKey) { return n } } - return } -type contentNode struct { - p *pageState - - // Set for taxonomy nodes. 
- viewInfo *contentBundleViewInfo - - // Set if source is a file. - // We will soon get other sources. - fi hugofs.FileMetaInfo - - // The source path. Unix slashes. No leading slash. - path string -} - -func (b *contentNode) rootSection() string { - if b.path == "" { - return "" - } - firstSlash := strings.Index(b.path, "/") - if firstSlash == -1 { - return b.path - } - return b.path[:firstSlash] -} - -type contentTree struct { - Name string - *radix.Tree -} - -type contentTrees []*contentTree - -func (t contentTrees) DeletePrefix(prefix string) int { - var count int - for _, tree := range t { - tree.Walk(func(s string, v any) bool { - return false - }) - count += tree.DeletePrefix(prefix) +func (m *pageMap) AddFi(fi hugofs.FileMetaInfo) error { + if fi.IsDir() { + return nil } - return count -} -type contentTreeNodeCallback func(s string, n *contentNode) bool - -func newContentTreeFilter(fn func(n *contentNode) bool) contentTreeNodeCallback { - return func(s string, n *contentNode) bool { - return fn(n) + meta := fi.Meta() + if m.s.conf.IsLangDisabled(meta.Lang) { + return nil } -} -var ( - contentTreeNoListAlwaysFilter = func(s string, n *contentNode) bool { - if n.p == nil { - return true - } - return n.p.m.noListAlways() - } + insertResource := func(fim hugofs.FileMetaInfo) error { + pi := fi.Meta().PathInfo + key := pi.Base() + tree := m.treeResources - contentTreeNoRenderFilter = func(s string, n *contentNode) bool { - if n.p == nil { - return true - } - return n.p.m.noRender() - } + commit := tree.Lock(true) + defer commit() - contentTreeNoLinkFilter = func(s string, n *contentNode) bool { - if n.p == nil { - return true + r := func() (hugio.ReadSeekCloser, error) { + return fim.Meta().Open() } - return n.p.m.noLink() - } -) -func (c *contentTree) WalkQuery(query pageMapQuery, walkFn contentTreeNodeCallback) { - filter := query.Filter - if filter == nil { - filter = contentTreeNoListAlwaysFilter - } - if query.Prefix != "" { - c.WalkBelow(query.Prefix, 
func(s string, v any) bool { - n := v.(*contentNode) - if filter != nil && filter(s, n) { - return false + var rs *resourceSource + if pi.IsContent() { + // Create the page now as we need it at assemembly time. + // The other resources are created if needed. + pageResource, err := m.s.h.newPage( + &pageMeta{ + f: source.NewFileInfo(fim), + pathInfo: pi, + bundled: true, + }, + ) + if err != nil { + return err } - return walkFn(s, n) - }) - - return - } - - c.Walk(func(s string, v any) bool { - n := v.(*contentNode) - if filter != nil && filter(s, n) { - return false + rs = &resourceSource{r: pageResource} + } else { + rs = &resourceSource{path: pi, opener: r, fi: fim} } - return walkFn(s, n) - }) -} -func (c contentTrees) WalkRenderable(fn contentTreeNodeCallback) { - query := pageMapQuery{Filter: contentTreeNoRenderFilter} - for _, tree := range c { - tree.WalkQuery(query, fn) - } -} - -func (c contentTrees) WalkLinkable(fn contentTreeNodeCallback) { - query := pageMapQuery{Filter: contentTreeNoLinkFilter} - for _, tree := range c { - tree.WalkQuery(query, fn) - } -} + tree.InsertIntoValuesDimension(key, rs) -func (c contentTrees) Walk(fn contentTreeNodeCallback) { - for _, tree := range c { - tree.Walk(func(s string, v any) bool { - n := v.(*contentNode) - return fn(s, n) - }) + return nil } -} -func (c contentTrees) WalkPrefix(prefix string, fn contentTreeNodeCallback) { - for _, tree := range c { - tree.WalkPrefix(prefix, func(s string, v any) bool { - n := v.(*contentNode) - return fn(s, n) - }) - } -} + pi := meta.PathInfo -// WalkBelow walks the tree below the given prefix, i.e. it skips the -// node with the given prefix as key. 
-func (c *contentTree) WalkBelow(prefix string, fn radix.WalkFn) { - c.Tree.WalkPrefix(prefix, func(s string, v any) bool { - if s == prefix { - return false + switch pi.BundleType() { + case paths.PathTypeFile, paths.PathTypeContentResource: + m.s.Log.Trace(logg.StringFunc( + func() string { + return fmt.Sprintf("insert resource: %q", fi.Meta().Filename) + }, + )) + if err := insertResource(fi); err != nil { + return err } - return fn(s, v) - }) -} - -func (c *contentTree) getMatch(matches func(b *contentNode) bool) string { - var match string - c.Walk(func(s string, v any) bool { - n, ok := v.(*contentNode) - if !ok { - return false + default: + m.s.Log.Trace(logg.StringFunc( + func() string { + return fmt.Sprintf("insert bundle: %q", fi.Meta().Filename) + }, + )) + // A content file. + p, err := m.s.h.newPage( + &pageMeta{ + f: source.NewFileInfo(fi), + pathInfo: pi, + bundled: false, + }, + ) + if err != nil { + return err } - - if matches(n) { - match = s - return true + if p == nil { + // Disabled page. + return nil } - return false - }) - - return match -} - -func (c *contentTree) hasBelow(s1 string) bool { - var t bool - c.WalkBelow(s1, func(s2 string, v any) bool { - t = true - return true - }) - return t -} - -func (c *contentTree) printKeys() { - c.Walk(func(s string, v any) bool { - fmt.Println(s) - return false - }) -} - -func (c *contentTree) printKeysPrefix(prefix string) { - c.WalkPrefix(prefix, func(s string, v any) bool { - fmt.Println(s) - return false - }) -} - -// contentTreeRef points to a node in the given tree. 
-type contentTreeRef struct { - m *pageMap - t *contentTree - n *contentNode - key string -} - -func (c *contentTreeRef) getCurrentSection() (string, *contentNode) { - if c.isSection() { - return c.key, c.n - } - return c.getSection() -} - -func (c *contentTreeRef) isSection() bool { - return c.t == c.m.sections -} - -func (c *contentTreeRef) getSection() (string, *contentNode) { - if c.t == c.m.taxonomies { - return c.m.getTaxonomyParent(c.key) - } - return c.m.getSection(c.key) -} - -func (c *contentTreeRef) getPages() page.Pages { - var pas page.Pages - c.m.collectPages( - pageMapQuery{ - Prefix: c.key + cmBranchSeparator, - Filter: c.n.p.m.getListFilter(true), - }, - func(c *contentNode) { - pas = append(pas, c.p) - }, - ) - page.SortByDefault(pas) - - return pas -} - -func (c *contentTreeRef) getPagesRecursive() page.Pages { - var pas page.Pages + m.treePages.InsertWithLock(pi.Base(), p) - query := pageMapQuery{ - Filter: c.n.p.m.getListFilter(true), } - - query.Prefix = c.key - c.m.collectPages(query, func(c *contentNode) { - pas = append(pas, c.p) - }) - - page.SortByDefault(pas) - - return pas + return nil } -func (c *contentTreeRef) getPagesAndSections() page.Pages { - var pas page.Pages - - query := pageMapQuery{ - Filter: c.n.p.m.getListFilter(true), - Prefix: c.key, +// The home page is represented with the zero string. +// All other keys starts with a leading slash. No trailing slash. +// Slashes are Unix-style. +func cleanTreeKey(elem ...string) string { + var s string + if len(elem) > 0 { + s = elem[0] + if len(elem) > 1 { + s = path.Join(elem...) 
+ } } - - c.m.collectPagesAndSections(query, func(c *contentNode) { - pas = append(pas, c.p) - }) - - page.SortByDefault(pas) - - return pas -} - -func (c *contentTreeRef) getSections() page.Pages { - var pas page.Pages - - query := pageMapQuery{ - Filter: c.n.p.m.getListFilter(true), - Prefix: c.key, + s = strings.TrimFunc(s, trimCutsetDotSlashSpace) + s = filepath.ToSlash(strings.ToLower(paths.Sanitize(s))) + if s == "" || s == "/" { + return "" } - - c.m.collectSections(query, func(c *contentNode) { - pas = append(pas, c.p) - }) - - page.SortByDefault(pas) - - return pas -} - -type contentTreeReverseIndex struct { - t []*contentTree - *contentTreeReverseIndexMap -} - -type contentTreeReverseIndexMap struct { - m map[any]*contentNode - init sync.Once - initFn func(*contentTree, map[any]*contentNode) -} - -func (c *contentTreeReverseIndex) Reset() { - c.contentTreeReverseIndexMap = &contentTreeReverseIndexMap{ - initFn: c.initFn, + if s[0] != '/' { + s = "/" + s } -} - -func (c *contentTreeReverseIndex) Get(key any) *contentNode { - c.init.Do(func() { - c.m = make(map[any]*contentNode) - for _, tree := range c.t { - c.initFn(tree, c.m) - } - }) - return c.m[key] + return s } diff --git a/hugolib/content_map_page.go b/hugolib/content_map_page.go index 2c14ffa59..536f23ccd 100644 --- a/hugolib/content_map_page.go +++ b/hugolib/content_map_page.go @@ -1,4 +1,4 @@ -// Copyright 2019 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -16,1025 +16,1874 @@ package hugolib import ( "context" "fmt" + "io" "path" - "path/filepath" + "sort" + "strconv" "strings" - "sync" - - "github.com/gohugoio/hugo/common/maps" - + "sync/atomic" + "time" + + "github.com/bep/logg" + "github.com/gohugoio/hugo/cache/dynacache" + "github.com/gohugoio/hugo/common/loggers" + "github.com/gohugoio/hugo/common/paths" + "github.com/gohugoio/hugo/common/predicate" + "github.com/gohugoio/hugo/common/rungroup" "github.com/gohugoio/hugo/common/types" + "github.com/gohugoio/hugo/hugofs/files" + "github.com/gohugoio/hugo/hugolib/doctree" + "github.com/gohugoio/hugo/identity" + "github.com/gohugoio/hugo/output" "github.com/gohugoio/hugo/resources" + "github.com/spf13/cast" + + "github.com/gohugoio/hugo/common/maps" - "github.com/gohugoio/hugo/common/hugio" - "github.com/gohugoio/hugo/hugofs" - "github.com/gohugoio/hugo/hugofs/files" - "github.com/gohugoio/hugo/parser/pageparser" "github.com/gohugoio/hugo/resources/kinds" "github.com/gohugoio/hugo/resources/page" "github.com/gohugoio/hugo/resources/resource" - "github.com/spf13/cast" - - "github.com/gohugoio/hugo/common/para" ) -func newPageMaps(h *HugoSites) *pageMaps { - mps := make([]*pageMap, len(h.Sites)) - for i, s := range h.Sites { - mps[i] = s.pageMap - } - return &pageMaps{ - workers: para.New(h.numWorkers), - pmaps: mps, - } +var pagePredicates = struct { + KindPage predicate.P[*pageState] + KindSection predicate.P[*pageState] + KindHome predicate.P[*pageState] + KindTerm predicate.P[*pageState] + ShouldListLocal predicate.P[*pageState] + ShouldListGlobal predicate.P[*pageState] + ShouldListAny predicate.P[*pageState] + ShouldLink predicate.P[page.Page] +}{ + KindPage: func(p *pageState) bool { + return p.Kind() == kinds.KindPage + }, + KindSection: func(p *pageState) bool { + return p.Kind() == kinds.KindSection + }, + KindHome: func(p *pageState) bool { + return p.Kind() == kinds.KindHome + }, + KindTerm: func(p *pageState) bool { + return p.Kind() == kinds.KindTerm 
+ }, + ShouldListLocal: func(p *pageState) bool { + return p.m.shouldList(false) + }, + ShouldListGlobal: func(p *pageState) bool { + return p.m.shouldList(true) + }, + ShouldListAny: func(p *pageState) bool { + return p.m.shouldListAny() + }, + ShouldLink: func(p page.Page) bool { + return !p.(*pageState).m.noLink() + }, } type pageMap struct { + i int s *Site - *contentMap -} -func (m *pageMap) Len() int { - l := 0 - for _, t := range m.contentMap.pageTrees { - l += t.Len() - } - return l -} + // Main storage for all pages. + *pageTrees -func (m *pageMap) createMissingTaxonomyNodes() error { - if m.cfg.taxonomyDisabled { - return nil - } - m.taxonomyEntries.Walk(func(s string, v any) bool { - n := v.(*contentNode) - vi := n.viewInfo - k := cleanSectionTreeKey(vi.name.plural + "/" + vi.termKey) + // Used for simple page lookups by name, e.g. "mypage.md" or "mypage". + pageReverseIndex *contentTreeReverseIndex - if _, found := m.taxonomies.Get(k); !found { - vic := &contentBundleViewInfo{ - name: vi.name, - termKey: vi.termKey, - termOrigin: vi.termOrigin, - } - m.taxonomies.Insert(k, &contentNode{viewInfo: vic}) - } - return false - }) + cachePages *dynacache.Partition[string, page.Pages] + cacheResources *dynacache.Partition[string, resource.Resources] + cacheContentRendered *dynacache.Partition[string, *resources.StaleValue[contentSummary]] + cacheContentPlain *dynacache.Partition[string, *resources.StaleValue[contentPlainPlainWords]] + contentTableOfContents *dynacache.Partition[string, *resources.StaleValue[contentTableOfContents]] + cacheContentSource *dynacache.Partition[string, *resources.StaleValue[[]byte]] - return nil + cfg contentMapConfig } -func (m *pageMap) newPageFromContentNode(n *contentNode, parentBucket *pagesMapBucket, owner *pageState) (*pageState, error) { - if n.fi == nil { - panic("FileInfo must (currently) be set") - } +// pageTrees holds pages and resources in a tree structure for all sites/languages. 
+// Eeach site gets its own tree set via the Shape method. +type pageTrees struct { + // This tree contains all Pages. + // This include regular pages, sections, taxonimies and so on. + // Note that all of these trees share the same key structure, + // so you can take a leaf Page key and do a prefix search + // with key + "/" to get all of its resources. + treePages *doctree.NodeShiftTree[contentNodeI] - f, err := newFileInfo(m.s.SourceSpec, n.fi) - if err != nil { - return nil, err - } + // This tree contains Resoures bundled in pages. + treeResources *doctree.NodeShiftTree[contentNodeI] - meta := n.fi.Meta() - content := func() (hugio.ReadSeekCloser, error) { - return meta.Open() - } + // All pages and resources. + treePagesResources doctree.WalkableTrees[contentNodeI] - bundled := owner != nil - s := m.s + // This tree contains all taxonomy entries, e.g "/tags/blue/page1" + treeTaxonomyEntries *doctree.TreeShiftTree[*weightedContentNode] - sections := s.sectionsFromFile(f) + // A slice of the resource trees. + resourceTrees doctree.MutableTrees +} - kind := s.kindFromFileInfoOrSections(f, sections) - if kind == kinds.KindTerm { - s.PathSpec.MakePathsSanitized(sections) +// collectIdentities collects all identities from in all trees matching the given key. +// This will at most match in one tree, but may give identies from multiple dimensions (e.g. language). 
+func (t *pageTrees) collectIdentities(key string) []identity.Identity { + var ids []identity.Identity + if n := t.treePages.Get(key); n != nil { + n.ForEeachIdentity(func(id identity.Identity) bool { + ids = append(ids, id) + return false + }) } - - metaProvider := &pageMeta{kind: kind, sections: sections, bundled: bundled, s: s, f: f} - - ps, err := newPageBase(metaProvider) - if err != nil { - return nil, err + if n := t.treeResources.Get(key); n != nil { + n.ForEeachIdentity(func(id identity.Identity) bool { + ids = append(ids, id) + return false + }) } - if n.fi.Meta().IsRootFile { - // Make sure that the bundle/section we start walking from is always - // rendered. - // This is only relevant in server fast render mode. - ps.forceRender = true - } + return ids +} - n.p = ps - if ps.IsNode() { - ps.bucket = newPageBucket(ps) - } +// collectIdentitiesSurrounding collects all identities surrounding the given key. +func (t *pageTrees) collectIdentitiesSurrounding(key string, maxSamplesPerTree int) []identity.Identity { + // TODO1 test language coverage from this. + ids := t.collectIdentitiesSurroundingIn(key, maxSamplesPerTree, t.treePages) + ids = append(ids, t.collectIdentitiesSurroundingIn(key, maxSamplesPerTree, t.treeResources)...) 
+ return ids +} - gi, err := s.h.gitInfoForPage(ps) - if err != nil { - return nil, fmt.Errorf("failed to load Git data: %w", err) +func (t *pageTrees) collectIdentitiesSurroundingIn(key string, maxSamples int, tree *doctree.NodeShiftTree[contentNodeI]) []identity.Identity { + var ids []identity.Identity + section, ok := tree.LongestPrefixAll(path.Dir(key)) + if ok { + count := 0 + prefix := section + "/" + level := strings.Count(prefix, "/") + tree.WalkPrefixRaw(prefix, func(s string, n contentNodeI) bool { + if level != strings.Count(s, "/") { + return true + } + n.ForEeachIdentity(func(id identity.Identity) bool { + ids = append(ids, id) + return false + }) + count++ + return count > maxSamples + }) } - ps.gitInfo = gi - owners, err := s.h.codeownersForPage(ps) - if err != nil { - return nil, fmt.Errorf("failed to load CODEOWNERS: %w", err) - } - ps.codeowners = owners + return ids +} - r, err := content() - if err != nil { - return nil, err +func (t *pageTrees) DeletePageAndResourcesBelow(ss ...string) { + commit1 := t.resourceTrees.Lock(true) + defer commit1() + commit2 := t.treePages.Lock(true) + defer commit2() + for _, s := range ss { + t.resourceTrees.DeletePrefix(paths.AddTrailingSlash(s)) + t.treePages.Delete(s) } - defer r.Close() +} - parseResult, err := pageparser.Parse( - r, - pageparser.Config{}, - ) - if err != nil { - return nil, err - } +// Shape shapes all trees in t to the given dimension. 
+func (t pageTrees) Shape(d, v int) *pageTrees { + t.treePages = t.treePages.Shape(d, v) + t.treeResources = t.treeResources.Shape(d, v) + t.treeTaxonomyEntries = t.treeTaxonomyEntries.Shape(d, v) - ps.pageContent = pageContent{ - source: rawPageContent{ - parsed: parseResult, - posMainContent: -1, - posSummaryEnd: -1, - posBodyStart: -1, - }, - } + return &t +} - if err := ps.mapContent(parentBucket, metaProvider); err != nil { - return nil, ps.wrapError(err) - } +var ( + _ resource.Identifier = pageMapQueryPagesInSection{} + _ resource.Identifier = pageMapQueryPagesBelowPath{} +) - if err := metaProvider.applyDefaultValues(n); err != nil { - return nil, err - } +type pageMapQueryPagesInSection struct { + pageMapQueryPagesBelowPath - ps.init.Add(func(context.Context) (any, error) { - pp, err := newPagePaths(s, ps, metaProvider) - if err != nil { - return nil, err - } + Recursive bool + IncludeSelf bool +} - outputFormatsForPage := ps.m.outputFormats() +func (q pageMapQueryPagesInSection) Key() string { + return "gagesInSection" + "/" + q.pageMapQueryPagesBelowPath.Key() + "/" + strconv.FormatBool(q.Recursive) + "/" + strconv.FormatBool(q.IncludeSelf) +} - // Prepare output formats for all sites. - // We do this even if this page does not get rendered on - // its own. It may be referenced via .Site.GetPage and - // it will then need an output format. - ps.pageOutputs = make([]*pageOutput, len(ps.s.h.renderFormats)) - created := make(map[string]*pageOutput) - shouldRenderPage := !ps.m.noRender() +// This needs to be hashable. +type pageMapQueryPagesBelowPath struct { + Path string - for i, f := range ps.s.h.renderFormats { - if po, found := created[f.Name]; found { - ps.pageOutputs[i] = po - continue - } + // Additional identifier for this query. + // Used as part of the cache key. + KeyPart string - render := shouldRenderPage - if render { - _, render = outputFormatsForPage.GetByName(f.Name) - } + // Page inclusion filter. + // May be nil. 
+ Include predicate.P[*pageState] +} - po := newPageOutput(ps, pp, f, render) +func (q pageMapQueryPagesBelowPath) Key() string { + return q.Path + "/" + q.KeyPart +} - // Create a content provider for the first, - // we may be able to reuse it. - if i == 0 { - contentProvider, err := newPageContentOutput(ps, po) - if err != nil { - return nil, err +// Apply fn to all pages in m matching the given predicate. +// fn may return true to stop the walk. +func (m *pageMap) forEachPage(include predicate.P[*pageState], fn func(p *pageState) (bool, error)) error { + if include == nil { + include = func(p *pageState) bool { + return true + } + } + w := &doctree.NodeShiftTreeWalker[contentNodeI]{ + Tree: m.treePages, + LockType: doctree.LockTypeRead, + Handle: func(key string, n contentNodeI, match doctree.DimensionFlag) (bool, error) { + if p, ok := n.(*pageState); ok && include(p) { + if terminate, err := fn(p); terminate || err != nil { + return terminate, err } - po.initContentProvider(contentProvider) } + return false, nil + }, + } - ps.pageOutputs[i] = po - created[f.Name] = po + return w.Walk(context.Background()) +} +func (m *pageMap) forEeachPageIncludingBundledPages(include predicate.P[*pageState], fn func(p *pageState) (bool, error)) error { + if include == nil { + include = func(p *pageState) bool { + return true } + } - if err := ps.initCommonProviders(pp); err != nil { - return nil, err - } + if err := m.forEachPage(include, fn); err != nil { + return err + } - return nil, nil - }) + w := &doctree.NodeShiftTreeWalker[contentNodeI]{ + Tree: m.treeResources, + LockType: doctree.LockTypeRead, + Handle: func(key string, n contentNodeI, match doctree.DimensionFlag) (bool, error) { + if rs, ok := n.(*resourceSource); ok { + if p, ok := rs.r.(*pageState); ok && include(p) { + if terminate, err := fn(p); terminate || err != nil { + return terminate, err + } + } + } + return false, nil + }, + } - ps.parent = owner + return w.Walk(context.Background()) +} - return ps, nil 
+func (m *pageMap) getOrCreatePagesFromCache( + key string, create func(string) (page.Pages, error), +) (page.Pages, error) { + return m.cachePages.GetOrCreate(key, create) } -func (m *pageMap) newResource(fim hugofs.FileMetaInfo, owner *pageState) (resource.Resource, error) { - if owner == nil { - panic("owner is nil") - } - // TODO(bep) consolidate with multihost logic + clean up - outputFormats := owner.m.outputFormats() - seen := make(map[string]bool) - var targetBasePaths []string - // Make sure bundled resources are published to all of the output formats' - // sub paths. - for _, f := range outputFormats { - p := f.Path - if seen[p] { - continue - } - seen[p] = true - targetBasePaths = append(targetBasePaths, p) +func (m *pageMap) getPagesInSection(q pageMapQueryPagesInSection) page.Pages { + cacheKey := q.Key() - } + pages, err := m.getOrCreatePagesFromCache(cacheKey, func(string) (page.Pages, error) { + prefix := paths.AddTrailingSlash(q.Path) - meta := fim.Meta() - r := func() (hugio.ReadSeekCloser, error) { - return meta.Open() - } + var ( + pas page.Pages + otherBranch string + ) - target := strings.TrimPrefix(meta.Path, owner.File().Dir()) + include := q.Include + if include == nil { + include = pagePredicates.ShouldListLocal + } - return owner.s.ResourceSpec.New( - resources.ResourceSourceDescriptor{ - TargetPaths: owner.getTargetPaths, - OpenReadSeekCloser: r, - FileInfo: fim, - RelTargetFilename: target, - TargetBasePaths: targetBasePaths, - LazyPublish: !owner.m.buildConfig.PublishResources, - }) -} + w := &doctree.NodeShiftTreeWalker[contentNodeI]{ + Tree: m.treePages, + Prefix: prefix, + Handle: func(key string, n contentNodeI, match doctree.DimensionFlag) (bool, error) { + if q.Recursive { + if p, ok := n.(*pageState); ok && include(p) { + pas = append(pas, p) + } + return false, nil + } -func (m *pageMap) createSiteTaxonomies() error { - m.s.taxonomies = make(page.TaxonomyList) - var walkErr error - m.taxonomies.Walk(func(s string, v any) bool { 
- n := v.(*contentNode) - t := n.viewInfo + // We store both leafs and branches in the same tree, so for non-recursive walks, + // we need to walk until the end, but can skip + // any not belonging to child branches. + if otherBranch != "" && strings.HasPrefix(key, otherBranch) { + return false, nil + } - viewName := t.name + if p, ok := n.(*pageState); ok && include(p) { + pas = append(pas, p) + } - if t.termKey == "" { - m.s.taxonomies[viewName.plural] = make(page.Taxonomy) - } else { - taxonomy := m.s.taxonomies[viewName.plural] - if taxonomy == nil { - walkErr = fmt.Errorf("missing taxonomy: %s", viewName.plural) - return true - } - m.taxonomyEntries.WalkPrefix(s, func(ss string, v any) bool { - b2 := v.(*contentNode) - info := b2.viewInfo - taxonomy[info.termKey] = append(taxonomy[info.termKey], page.NewWeightedPage(info.weight, info.ref.p, n.p)) + if n.isContentNodeBranch() { + otherBranch = key + "/" + } - return false - }) + return false, nil + }, } - return false - }) + err := w.Walk(context.Background()) - for _, taxonomy := range m.s.taxonomies { - for _, v := range taxonomy { - v.Sort() + if err == nil { + if q.IncludeSelf { + if n := m.treePages.Get(q.Path); n != nil { + if p, ok := n.(*pageState); ok && include(p) { + pas = append(pas, p) + } + } + } + page.SortByDefault(pas) } + + return pas, err + }) + if err != nil { + panic(err) } - return walkErr + return pages } -func (m *pageMap) createListAllPages() page.Pages { - pages := make(page.Pages, 0) +func (m *pageMap) getPagesWithTerm(q pageMapQueryPagesBelowPath) page.Pages { + key := q.Key() - m.contentMap.pageTrees.Walk(func(s string, n *contentNode) bool { - if n.p == nil { - panic(fmt.Sprintf("BUG: page not set for %q", s)) + v, err := m.cachePages.GetOrCreate(key, func(string) (page.Pages, error) { + var pas page.Pages + include := q.Include + if include == nil { + include = pagePredicates.ShouldListLocal } - if contentTreeNoListAlwaysFilter(s, n) { - return false + + err := 
m.treeTaxonomyEntries.WalkPrefix( + doctree.LockTypeNone, + paths.AddTrailingSlash(q.Path), + func(s string, n *weightedContentNode) (bool, error) { + p := n.n.(*pageState) + if !include(p) { + return false, nil + } + pas = append(pas, pageWithWeight0{n.weight, p}) + return false, nil + }, + ) + if err != nil { + return nil, err } - pages = append(pages, n.p) - return false + + page.SortByDefault(pas) + + return pas, nil }) + if err != nil { + panic(err) + } - page.SortByDefault(pages) - return pages + return v } -func (m *pageMap) assemblePages() error { - m.taxonomyEntries.DeletePrefix("/") +func (m *pageMap) getTermsForPageInTaxonomy(path, taxonomy string) page.Pages { + prefix := paths.AddLeadingSlash(taxonomy) - if err := m.assembleSections(); err != nil { - return err - } + v, err := m.cachePages.GetOrCreate(prefix+path, func(string) (page.Pages, error) { + var pas page.Pages - var err error + err := m.treeTaxonomyEntries.WalkPrefix( + doctree.LockTypeNone, + paths.AddTrailingSlash(prefix), + func(s string, n *weightedContentNode) (bool, error) { + if strings.HasSuffix(s, path) { + pas = append(pas, n.term) + } + return false, nil + }, + ) + if err != nil { + return nil, err + } + page.SortByDefault(pas) + + return pas, nil + }) if err != nil { - return err + panic(err) } - m.pages.Walk(func(s string, v any) bool { - n := v.(*contentNode) + return v +} - var shouldBuild bool +func (m *pageMap) forEachResourceInPage( + ps *pageState, + lockType doctree.LockType, + exact bool, + handle func(resourceKey string, n contentNodeI, match doctree.DimensionFlag) (bool, error), +) error { + keyPage := ps.Path() + if keyPage == "/" { + keyPage = "" + } + prefix := paths.AddTrailingSlash(ps.Path()) + isBranch := ps.IsNode() + + rw := &doctree.NodeShiftTreeWalker[contentNodeI]{ + Tree: m.treeResources, + Prefix: prefix, + LockType: lockType, + Exact: exact, + } - defer func() { - // Make sure we always rebuild the view cache. 
- if shouldBuild && err == nil && n.p != nil { - m.attachPageToViews(s, n) + rw.Handle = func(resourceKey string, n contentNodeI, match doctree.DimensionFlag) (bool, error) { + if isBranch { + ownerKey, _ := m.treePages.LongestPrefixAll(resourceKey) + if ownerKey != keyPage { + // Stop walking downwards, someone else owns this resource. + rw.SkipPrefix(ownerKey + "/") + return false, nil } - }() - - if n.p != nil { - // A rebuild - shouldBuild = true - return false } + return handle(resourceKey, n, match) + } - var parent *contentNode - var parentBucket *pagesMapBucket + return rw.Walk(context.Background()) +} - _, parent = m.getSection(s) - if parent == nil { - panic(fmt.Sprintf("BUG: parent not set for %q", s)) +func (m *pageMap) getResourcesForPage(ps *pageState) (resource.Resources, error) { + var res resource.Resources + m.forEachResourceInPage(ps, doctree.LockTypeNone, false, func(resourceKey string, n contentNodeI, match doctree.DimensionFlag) (bool, error) { + rs := n.(*resourceSource) + if rs.r != nil { + res = append(res, rs.r) } - parentBucket = parent.p.bucket + return false, nil + }) + return res, nil +} - n.p, err = m.newPageFromContentNode(n, parentBucket, nil) +func (m *pageMap) getOrCreateResourcesForPage(ps *pageState) resource.Resources { + keyPage := ps.Path() + if keyPage == "/" { + keyPage = "" + } + key := keyPage + "/get-resources-for-page" + v, err := m.cacheResources.GetOrCreate(key, func(string) (resource.Resources, error) { + res, err := m.getResourcesForPage(ps) if err != nil { - return true - } - - shouldBuild = !(n.p.Kind() == kinds.KindPage && m.cfg.pageDisabled) && m.s.shouldBuild(n.p) - if !shouldBuild { - m.deletePage(s) - return false + return nil, err } - n.p.treeRef = &contentTreeRef{ - m: m, - t: m.pages, - n: n, - key: s, + if translationKey := ps.m.translationKey; translationKey != "" { + // This this should not be a very common case. + // Merge in resources from the other languages. 
+ translatedPages, _ := m.s.h.translationKeyPages.Get(translationKey) + for _, tp := range translatedPages { + if tp == ps { + continue + } + tps := tp.(*pageState) + // Make sure we query from the correct language root. + res2, err := tps.s.pageMap.getResourcesForPage(tps) + if err != nil { + return nil, err + } + // Add if Name not already in res. + for _, r := range res2 { + var found bool + for _, r2 := range res { + if r2.Name() == r.Name() { + found = true + break + } + } + if !found { + res = append(res, r) + } + } + } } - if err = m.assembleResources(s, n.p, parentBucket); err != nil { - return true - } + lessFunc := func(i, j int) bool { + ri, rj := res[i], res[j] + if ri.ResourceType() < rj.ResourceType() { + return true + } - return false - }) + p1, ok1 := ri.(page.Page) + p2, ok2 := rj.(page.Page) - m.deleteOrphanSections() + if ok1 != ok2 { + // Pull pages behind other resources. - return err -} + return ok2 + } -func (m *pageMap) assembleResources(s string, p *pageState, parentBucket *pagesMapBucket) error { - var err error - - m.resources.WalkPrefix(s, func(s string, v any) bool { - n := v.(*contentNode) - meta := n.fi.Meta() - classifier := meta.Classifier - var r resource.Resource - switch classifier { - case files.ContentClassContent: - var rp *pageState - rp, err = m.newPageFromContentNode(n, parentBucket, p) - if err != nil { - return true + if ok1 { + return page.DefaultPageSort(p1, p2) } - rp.m.resourcePath = filepath.ToSlash(strings.TrimPrefix(rp.File().Path(), p.File().Dir())) - r = rp - case files.ContentClassFile: - r, err = m.newResource(n.fi, p) - if err != nil { - return true + // Make sure not to use RelPermalink or any of the other methods that + // trigger lazy publishing. 
+ return ri.Name() < rj.Name() + } + + sort.SliceStable(res, lessFunc) + + if len(ps.m.resourcesMetadata) > 0 { + for i, r := range res { + res[i] = resources.CloneWithMetadataIfNeeded(ps.m.resourcesMetadata, r) } - default: - panic(fmt.Sprintf("invalid classifier: %q", classifier)) + sort.SliceStable(res, lessFunc) } - p.resources = append(p.resources, r) - return false + return res, nil }) + if err != nil { + panic(err) + } - return err + return v } -func (m *pageMap) assembleSections() error { - var sectionsToDelete []string - var err error +type weightedContentNode struct { + n contentNodeI + weight int + term *pageWithOrdinal +} - m.sections.Walk(func(s string, v any) bool { - n := v.(*contentNode) - var shouldBuild bool +type buildStateReseter interface { + resetBuildState() +} - defer func() { - // Make sure we always rebuild the view cache. - if shouldBuild && err == nil && n.p != nil { - m.attachPageToViews(s, n) - if n.p.IsHome() { - m.s.home = n.p - } - } - }() +type contentNodeI interface { + identity.IdentityProvider + identity.ForEeachIdentityProvider + Path() string + isContentNodeBranch() bool + buildStateReseter + resource.StaleMarker +} - sections := m.splitKey(s) +var _ contentNodeI = (*contentNodeIs)(nil) - if n.p != nil { - if n.p.IsHome() { - m.s.home = n.p - } - shouldBuild = true - return false - } +type contentNodeIs []contentNodeI - var parent *contentNode - var parentBucket *pagesMapBucket +func (n contentNodeIs) Path() string { + return n[0].Path() +} - if s != "/" { - _, parent = m.getSection(s) - if parent == nil || parent.p == nil { - panic(fmt.Sprintf("BUG: parent not set for %q", s)) - } +func (n contentNodeIs) isContentNodeBranch() bool { + return n[0].isContentNodeBranch() +} + +func (n contentNodeIs) GetIdentity() identity.Identity { + return n[0].GetIdentity() +} + +func (n contentNodeIs) ForEeachIdentity(f func(identity.Identity) bool) { + for _, nn := range n { + if nn != nil { + nn.ForEeachIdentity(f) } + } +} - if parent != 
nil { - parentBucket = parent.p.bucket - } else if s == "/" { - parentBucket = m.s.siteBucket +func (n contentNodeIs) resetBuildState() { + for _, nn := range n { + if nn != nil { + nn.resetBuildState() } + } +} - kind := kinds.KindSection - if s == "/" { - kind = kinds.KindHome +func (n contentNodeIs) MarkStale() { + for _, nn := range n { + if nn != nil { + nn.MarkStale() } + } +} - if n.fi != nil { - n.p, err = m.newPageFromContentNode(n, parentBucket, nil) - if err != nil { - return true +type contentNodeShifter struct { + numLanguages int +} + +func (s *contentNodeShifter) Delete(n contentNodeI, dimension doctree.Dimension) (bool, bool) { + lidx := dimension[0] + switch v := n.(type) { + case contentNodeIs: + resource.MarkStale(v[lidx]) + wasDeleted := v[lidx] != nil + v[lidx] = nil + isEmpty := true + for _, vv := range v { + if vv != nil { + isEmpty = false + break } - } else { - n.p = m.s.newPage(n, parentBucket, kind, "", sections...) } - - shouldBuild = m.s.shouldBuild(n.p) - if !shouldBuild { - sectionsToDelete = append(sectionsToDelete, s) - return false + return wasDeleted, isEmpty + case resourceSources: + resource.MarkStale(v[lidx]) + wasDeleted := v[lidx] != nil + v[lidx] = nil + isEmpty := true + for _, vv := range v { + if vv != nil { + isEmpty = false + break + } } + return wasDeleted, isEmpty + case *resourceSource: + resource.MarkStale(v) + return true, true + case *pageState: + resource.MarkStale(v) + return true, true + default: + panic(fmt.Sprintf("unknown type %T", n)) + } +} - n.p.treeRef = &contentTreeRef{ - m: m, - t: m.sections, - n: n, - key: s, +func (s *contentNodeShifter) Shift(n contentNodeI, dimension doctree.Dimension, exact bool) (contentNodeI, bool, doctree.DimensionFlag) { + lidx := dimension[0] + // How accurate is the match. 
+ accuracy := doctree.DimensionLanguage + switch v := n.(type) { + case contentNodeIs: + if len(v) == 0 { + panic("empty contentNodeIs") } - - if err = m.assembleResources(s+cmLeafSeparator, n.p, parentBucket); err != nil { - return true + vv := v[lidx] + if vv != nil { + return vv, true, accuracy } + return nil, false, 0 + case resourceSources: + vv := v[lidx] + if vv != nil { + return vv, true, doctree.DimensionLanguage + } + if exact { + return nil, false, 0 + } + // For non content resources, pick the first match. + for _, vv := range v { + if vv != nil { + if vv.isPage() { + return nil, false, 0 + } + return vv, true, 0 + } + } + case *resourceSource: + if v.LangIndex() == lidx { + return v, true, doctree.DimensionLanguage + } + if !v.isPage() && !exact { + return v, true, 0 + } + case *pageState: + if v.s.languagei == lidx { + return n, true, doctree.DimensionLanguage + } + default: + panic(fmt.Sprintf("unknown type %T", n)) + } + return nil, false, 0 +} - return false - }) - - for _, s := range sectionsToDelete { - m.deleteSectionByPath(s) +func (s *contentNodeShifter) ForEeachInDimension(n contentNodeI, d int, f func(contentNodeI) bool) { + if d != doctree.DimensionLanguage.Index() { + panic("only language dimension supported") } - return err + switch vv := n.(type) { + case contentNodeIs: + for _, v := range vv { + if v != nil { + if f(v) { + return + } + } + } + default: + f(vv) + } } -func (m *pageMap) assembleTaxonomies() error { - var taxonomiesToDelete []string - var err error +func (s *contentNodeShifter) InsertInto(old, new contentNodeI, dimension doctree.Dimension) contentNodeI { + langi := dimension[doctree.DimensionLanguage.Index()] + switch vv := old.(type) { + case *pageState: + newp, ok := new.(*pageState) + if !ok { + panic(fmt.Sprintf("unknown type %T", new)) + } + if vv.s.languagei == newp.s.languagei && newp.s.languagei == langi { + return new + } + is := make(contentNodeIs, s.numLanguages) + is[vv.s.languagei] = old + is[langi] = new + 
return is + case contentNodeIs: + vv[langi] = new + return vv + case resourceSources: + vv[langi] = new.(*resourceSource) + return vv + case *resourceSource: + newp, ok := new.(*resourceSource) + if !ok { + panic(fmt.Sprintf("unknown type %T", new)) + } + if vv.LangIndex() == newp.LangIndex() && newp.LangIndex() == langi { + return new + } + rs := make(resourceSources, s.numLanguages) + rs[vv.LangIndex()] = vv + rs[langi] = newp + return rs - m.taxonomies.Walk(func(s string, v any) bool { - n := v.(*contentNode) + default: + panic(fmt.Sprintf("unknown type %T", old)) + } +} - if n.p != nil { - return false +func (s *contentNodeShifter) Insert(old, new contentNodeI) contentNodeI { + switch vv := old.(type) { + case *pageState: + newp, ok := new.(*pageState) + if !ok { + panic(fmt.Sprintf("unknown type %T", new)) + } + if vv.s.languagei == newp.s.languagei { + return new + } + is := make(contentNodeIs, s.numLanguages) + is[newp.s.languagei] = new + is[vv.s.languagei] = old + return is + case contentNodeIs: + newp, ok := new.(*pageState) + if !ok { + panic(fmt.Sprintf("unknown type %T", new)) } + vv[newp.s.languagei] = new + return vv + case *resourceSource: + newp, ok := new.(*resourceSource) + if !ok { + panic(fmt.Sprintf("unknown type %T", new)) + } + if vv.LangIndex() == newp.LangIndex() { + return new + } + rs := make(resourceSources, s.numLanguages) + rs[newp.LangIndex()] = newp + rs[vv.LangIndex()] = vv + return rs + case resourceSources: + newp, ok := new.(*resourceSource) + if !ok { + panic(fmt.Sprintf("unknown type %T", new)) + } + vv[newp.LangIndex()] = newp + return vv + default: + panic(fmt.Sprintf("unknown type %T", old)) + } +} - kind := n.viewInfo.kind() - sections := n.viewInfo.sections() +func newPageMap(i int, s *Site, mcache *dynacache.Cache, pageTrees *pageTrees) *pageMap { + var m *pageMap + + var taxonomiesConfig taxonomiesConfig = s.conf.Taxonomies + + m = &pageMap{ + pageTrees: pageTrees.Shape(0, i), + cachePages: 
dynacache.GetOrCreatePartition[string, page.Pages](mcache, fmt.Sprintf("/pags/%d", i), dynacache.OptionsPartition{Weight: 10, ClearWhen: dynacache.ClearOnRebuild}), + cacheResources: dynacache.GetOrCreatePartition[string, resource.Resources](mcache, fmt.Sprintf("/ress/%d", i), dynacache.OptionsPartition{Weight: 60, ClearWhen: dynacache.ClearOnRebuild}), + cacheContentRendered: dynacache.GetOrCreatePartition[string, *resources.StaleValue[contentSummary]](mcache, fmt.Sprintf("/cont/ren/%d", i), dynacache.OptionsPartition{Weight: 70, ClearWhen: dynacache.ClearOnChange}), + cacheContentPlain: dynacache.GetOrCreatePartition[string, *resources.StaleValue[contentPlainPlainWords]](mcache, fmt.Sprintf("/cont/pla/%d", i), dynacache.OptionsPartition{Weight: 70, ClearWhen: dynacache.ClearOnChange}), + contentTableOfContents: dynacache.GetOrCreatePartition[string, *resources.StaleValue[contentTableOfContents]](mcache, fmt.Sprintf("/cont/toc/%d", i), dynacache.OptionsPartition{Weight: 70, ClearWhen: dynacache.ClearOnChange}), + cacheContentSource: dynacache.GetOrCreatePartition[string, *resources.StaleValue[[]byte]](mcache, fmt.Sprintf("/cont/src/%d", i), dynacache.OptionsPartition{Weight: 70, ClearWhen: dynacache.ClearOnChange}), + + cfg: contentMapConfig{ + lang: s.Lang(), + taxonomyConfig: taxonomiesConfig.Values(), + taxonomyDisabled: !s.conf.IsKindEnabled(kinds.KindTaxonomy), + taxonomyTermDisabled: !s.conf.IsKindEnabled(kinds.KindTerm), + pageDisabled: !s.conf.IsKindEnabled(kinds.KindPage), + }, + i: i, + s: s, + } - _, parent := m.getTaxonomyParent(s) - if parent == nil || parent.p == nil { - panic(fmt.Sprintf("BUG: parent not set for %q", s)) - } - parentBucket := parent.p.bucket + m.pageReverseIndex = &contentTreeReverseIndex{ + initFn: func(rm map[any]contentNodeI) { + add := func(k string, n contentNodeI) { + existing, found := rm[k] + if found && existing != ambiguousContentNode { + rm[k] = ambiguousContentNode + } else if !found { + rm[k] = n + } + } - if n.fi != 
nil { - n.p, err = m.newPageFromContentNode(n, parent.p.bucket, nil) - if err != nil { - return true + w := &doctree.NodeShiftTreeWalker[contentNodeI]{ + Tree: m.treePages, + LockType: doctree.LockTypeRead, + Handle: func(s string, n contentNodeI, match doctree.DimensionFlag) (bool, error) { + p := n.(*pageState) + if p.File() != nil { + add(p.File().FileInfo().Meta().PathInfo.BaseNameNoIdentifier(), p) + } + return false, nil + }, } - } else { - title := "" - if kind == kinds.KindTerm { - title = n.viewInfo.term() + + if err := w.Walk(context.Background()); err != nil { + panic(err) } - n.p = m.s.newPage(n, parent.p.bucket, kind, title, sections...) - } + }, + contentTreeReverseIndexMap: &contentTreeReverseIndexMap{}, + } - if !m.s.shouldBuild(n.p) { - taxonomiesToDelete = append(taxonomiesToDelete, s) - return false - } + return m +} - n.p.treeRef = &contentTreeRef{ - m: m, - t: m.taxonomies, - n: n, - key: s, - } +type contentTreeReverseIndex struct { + initFn func(rm map[any]contentNodeI) + *contentTreeReverseIndexMap +} - if err = m.assembleResources(s+cmLeafSeparator, n.p, parentBucket); err != nil { - return true - } +func (c *contentTreeReverseIndex) Reset() { + c.contentTreeReverseIndexMap = &contentTreeReverseIndexMap{ + m: make(map[any]contentNodeI), + } +} - return false +func (c *contentTreeReverseIndex) Get(key any) contentNodeI { + c.init.Do(func() { + c.m = make(map[any]contentNodeI) + c.initFn(c.contentTreeReverseIndexMap.m) }) + return c.m[key] +} - for _, s := range taxonomiesToDelete { - m.deleteTaxonomy(s) - } +type contentTreeReverseIndexMap struct { + init sync.Once + m map[any]contentNodeI +} - return err +type sitePagesAssembler struct { + *Site + watching bool + incomingChanges *whatChanged + assembleChanges *whatChanged + ctx context.Context } -func (m *pageMap) attachPageToViews(s string, b *contentNode) { - if m.cfg.taxonomyDisabled { - return +func (m *pageMap) debugPrint(prefix string, maxLevel int, w io.Writer) { + noshift := false + 
var prevKey string + + pageWalker := &doctree.NodeShiftTreeWalker[contentNodeI]{ + NoShift: noshift, + Tree: m.treePages, + Prefix: prefix, + WalkContext: &doctree.WalkContext[contentNodeI]{}, } - for _, viewName := range m.cfg.taxonomyConfig { - vals := types.ToStringSlicePreserveString(getParam(b.p, viewName.plural, false)) - if vals == nil { - continue + resourceWalker := pageWalker.Extend() + resourceWalker.Tree = m.treeResources + + pageWalker.Handle = func(keyPage string, n contentNodeI, match doctree.DimensionFlag) (bool, error) { + level := strings.Count(keyPage, "/") + if level > maxLevel { + return false, nil } - w := getParamToLower(b.p, viewName.plural+"_weight") - weight, err := cast.ToIntE(w) - if err != nil { - m.s.Log.Errorf("Unable to convert taxonomy weight %#v to int for %q", w, b.p.Pathc()) - // weight will equal zero, so let the flow continue + const indentStr = " " + p := n.(*pageState) + s := strings.TrimPrefix(keyPage, paths.CommonDir(prevKey, keyPage)) + lenIndent := len(keyPage) - len(s) + fmt.Fprint(w, strings.Repeat(indentStr, lenIndent)) + info := fmt.Sprintf("%s lm: %s (%s)", s, p.Lastmod().Format("2006-01-02"), p.Kind()) + fmt.Fprintln(w, info) + switch p.Kind() { + case kinds.KindTerm: + m.treeTaxonomyEntries.WalkPrefix( + doctree.LockTypeNone, + keyPage+"/", + func(s string, n *weightedContentNode) (bool, error) { + fmt.Fprint(w, strings.Repeat(indentStr, lenIndent+4)) + fmt.Fprintln(w, s) + return false, nil + }, + ) } - for i, v := range vals { - termKey := m.s.getTaxonomyKey(v) - - bv := &contentNode{ - viewInfo: &contentBundleViewInfo{ - ordinal: i, - name: viewName, - termKey: termKey, - termOrigin: v, - weight: weight, - ref: b, - }, + isBranch := n.isContentNodeBranch() + prevKey = keyPage + resourceWalker.Prefix = keyPage + "/" + + resourceWalker.Handle = func(ss string, n contentNodeI, match doctree.DimensionFlag) (bool, error) { + if isBranch { + ownerKey, _ := pageWalker.Tree.LongestPrefix(ss, true, nil) + if ownerKey != 
keyPage { + // Stop walking downwards, someone else owns this resource. + pageWalker.SkipPrefix(ownerKey + "/") + return false, nil + } } + fmt.Fprint(w, strings.Repeat(indentStr, lenIndent+8)) + fmt.Fprintln(w, ss+" (resource)") + return false, nil + } + + return false, resourceWalker.Walk(context.Background()) + } - var key string - if strings.HasSuffix(s, "/") { - key = cleanSectionTreeKey(path.Join(viewName.plural, termKey, s)) - } else { - key = cleanTreeKey(path.Join(viewName.plural, termKey, s)) + err := pageWalker.Walk(context.Background()) + if err != nil { + panic(err) + } +} + +func (h *HugoSites) resolveAndClearStateForIdentities( + ctx context.Context, + l logg.LevelLogger, + cachebuster func(s string) bool, changes []identity.Identity, +) error { + h.Log.Debug().Log(logg.StringFunc( + func() string { + var sb strings.Builder + for _, change := range changes { + var key string + if kp, ok := change.(resource.Identifier); ok { + key = " " + kp.Key() + } + sb.WriteString(fmt.Sprintf("Direct dependencies of %q (%T%s) =>\n", change.IdentifierBase(), change, key)) + seen := map[string]bool{ + change.IdentifierBase(): true, + } + // Print the top level dependenies. + identity.WalkIdentitiesDeep(change, func(level int, id identity.Identity) bool { + if level > 1 { + return true + } + if !seen[id.IdentifierBase()] { + sb.WriteString(fmt.Sprintf(" %s%s\n", strings.Repeat(" ", level), id.IdentifierBase())) + } + seen[id.IdentifierBase()] = true + return false + }) } - m.taxonomyEntries.Insert(key, bv) + return sb.String() + }), + ) + + for _, id := range changes { + if staler, ok := id.(resource.Staler); ok { + h.Log.Trace(logg.StringFunc(func() string { return fmt.Sprintf("Marking stale: %s (%T)\n", id, id) })) + staler.MarkStale() } } -} -type pageMapQuery struct { - Prefix string - Filter contentTreeNodeCallback -} + // The order matters here: + // 1. Handle the cache busters first, as those may produce identities for the page reset step. + // 2. 
Then reset the page outputs, which may mark some resources as stale. + // 3. Then GC the cache. + if cachebuster != nil { + if err := loggers.TimeTrackfn(func() (logg.LevelLogger, error) { + ll := l.WithField("substep", "gc dynacache cachebuster") + + shouldDelete := func(k, v any) bool { + if cachebuster == nil { + return false + } + var b bool + if s, ok := k.(string); ok { + b = cachebuster(s) + } + + if b { + identity.WalkIdentitiesShallow(v, func(level int, id identity.Identity) bool { + // Add them to the change set so we can reset any page that depends on them. + changes = append(changes, id) + return false + }) + } -func (m *pageMap) collectPages(query pageMapQuery, fn func(c *contentNode)) error { - if query.Filter == nil { - query.Filter = contentTreeNoListAlwaysFilter + return b + } + + h.MemCache.ClearMatching(shouldDelete) + + return ll, nil + }); err != nil { + return err + } } - m.pages.WalkQuery(query, func(s string, n *contentNode) bool { - fn(n) - return false - }) + // Remove duplicates + seen := make(map[identity.Identity]bool) + var n int + for _, id := range changes { + if !seen[id] { + seen[id] = true + changes[n] = id + n++ + } + } + changes = changes[:n] - return nil -} + if err := loggers.TimeTrackfn(func() (logg.LevelLogger, error) { + // changesLeft: The IDs that the pages is dependent on. + // changesRight: The IDs that the pages depend on. 
+ ll := l.WithField("substep", "resolve page output change set").WithField("changes", len(changes)) -func (m *pageMap) collectPagesAndSections(query pageMapQuery, fn func(c *contentNode)) error { - if err := m.collectSections(query, fn); err != nil { + checkedCount, matchCount, err := h.resolveAndResetDependententPageOutputs(ctx, changes) + ll = ll.WithField("checked", checkedCount).WithField("matches", matchCount) + return ll, err + }); err != nil { return err } - query.Prefix = query.Prefix + cmBranchSeparator - if err := m.collectPages(query, fn); err != nil { + if err := loggers.TimeTrackfn(func() (logg.LevelLogger, error) { + ll := l.WithField("substep", "gc dynacache") + + h.MemCache.ClearOnRebuild(changes...) + h.Log.Trace(logg.StringFunc(func() string { + var sb strings.Builder + sb.WriteString("dynacache keys:\n") + for _, key := range h.MemCache.Keys(nil) { + sb.WriteString(fmt.Sprintf(" %s\n", key)) + } + return sb.String() + })) + return ll, nil + }); err != nil { return err } return nil } -func (m *pageMap) collectSections(query pageMapQuery, fn func(c *contentNode)) error { - level := strings.Count(query.Prefix, "/") +// The left change set is the IDs that the pages is dependent on. +// The right change set is the IDs that the pages depend on. +func (h *HugoSites) resolveAndResetDependententPageOutputs(ctx context.Context, changes []identity.Identity) (int, int, error) { + if changes == nil { + return 0, 0, nil + } - return m.collectSectionsFn(query, func(s string, c *contentNode) bool { - if strings.Count(s, "/") != level+1 { - return false + // This can be shared (many of the same IDs are repeated). 
+ depsFinder := identity.NewFinder(identity.FinderConfig{}) + + h.Log.Trace(logg.StringFunc(func() string { + var sb strings.Builder + sb.WriteString("resolve page dependencies: ") + for _, id := range changes { + sb.WriteString(fmt.Sprintf(" %T: %s|", id, id.IdentifierBase())) } + return sb.String() + })) - fn(c) + var ( + resetCounter atomic.Int64 + checkedCounter atomic.Int64 + ) - return false - }) -} + resetPo := func(po *pageOutput, r identity.FinderResult) { + if po.pco != nil { + po.pco.Reset() // Will invalidate content cache. + } -func (m *pageMap) collectSectionsFn(query pageMapQuery, fn func(s string, c *contentNode) bool) error { - if !strings.HasSuffix(query.Prefix, "/") { - query.Prefix += "/" + po.renderState = 0 + po.p.resourcesPublishInit = &sync.Once{} + if r == identity.FinderFoundOneOfMany { + // Will force a re-render even in fast render mode. + po.renderOnce = false + } + resetCounter.Add(1) + h.Log.Trace(logg.StringFunc(func() string { + p := po.p + return fmt.Sprintf("Resetting page output %s for %s for output %s\n", p.Kind(), p.Path(), po.f.Name) + })) } - m.sections.WalkQuery(query, func(s string, n *contentNode) bool { - return fn(s, n) + // This can be a relativeley expensive operations, so we do it in parallel. + g := rungroup.Run[*pageState](ctx, rungroup.Config[*pageState]{ + NumWorkers: h.numWorkers, + Handle: func(ctx context.Context, p *pageState) error { + if !p.isRenderedAny() { + // This needs no reset, so no need to check it. + return nil + } + // First check the top level dependency manager. + for _, id := range changes { + checkedCounter.Add(1) + if r := depsFinder.Contains(id, p.dependencyManager, 100); r > identity.FinderFoundOneOfManyRepetition { + for _, po := range p.pageOutputs { + resetPo(po, r) + } + // Done. + return nil + } + } + // Then do a more fine grained reset for each output format. 
+ OUTPUTS: + for _, po := range p.pageOutputs { + if !po.isRendered() { + continue + } + for _, id := range changes { + checkedCounter.Add(1) + if r := depsFinder.Contains(id, po.dependencyManagerOutput, 2); r > identity.FinderFoundOneOfManyRepetition { + resetPo(po, r) + continue OUTPUTS + } + } + } + return nil + }, }) - return nil -} - -func (m *pageMap) collectSectionsRecursiveIncludingSelf(query pageMapQuery, fn func(c *contentNode)) error { - return m.collectSectionsFn(query, func(s string, c *contentNode) bool { - fn(c) + h.withPage(func(s string, p *pageState) bool { + var needToCheck bool + for _, po := range p.pageOutputs { + if po.isRendered() { + needToCheck = true + break + } + } + if needToCheck { + g.Enqueue(p) + } return false }) -} -func (m *pageMap) collectTaxonomies(prefix string, fn func(c *contentNode)) error { - m.taxonomies.WalkQuery(pageMapQuery{Prefix: prefix}, func(s string, n *contentNode) bool { - fn(n) - return false - }) - return nil -} + err := g.Wait() + resetCount := int(resetCounter.Load()) + checkedCount := int(checkedCounter.Load()) -// withEveryBundlePage applies fn to every Page, including those bundled inside -// leaf bundles. -func (m *pageMap) withEveryBundlePage(fn func(p *pageState) bool) { - m.bundleTrees.Walk(func(s string, n *contentNode) bool { - if n.p != nil { - return fn(n.p) - } - return false - }) + return checkedCount, resetCount, err } -type pageMaps struct { - workers *para.Workers - pmaps []*pageMap -} +// Calculate and apply aggregate values to the page tree (e.g. dates, cascades). +func (sa *sitePagesAssembler) applyAggregates() error { + sectionPageCount := map[string]int{} -// deleteSection deletes the entire section from s. 
-func (m *pageMaps) deleteSection(s string) { - m.withMaps(func(pm *pageMap) error { - pm.deleteSectionByPath(s) - return nil - }) -} + pw := &doctree.NodeShiftTreeWalker[contentNodeI]{ + Tree: sa.pageMap.treePages, + LockType: doctree.LockTypeRead, + WalkContext: &doctree.WalkContext[contentNodeI]{}, + } + rw := pw.Extend() + rw.Tree = sa.pageMap.treeResources + sa.lastmod = time.Time{} -func (m *pageMaps) AssemblePages() error { - return m.withMaps(func(pm *pageMap) error { - if err := pm.CreateMissingNodes(); err != nil { - return err - } + pw.Handle = func(keyPage string, n contentNodeI, match doctree.DimensionFlag) (bool, error) { + pageBundle := n.(*pageState) - if err := pm.assemblePages(); err != nil { - return err + if pageBundle.Kind() == kinds.KindTerm { + // Delay this until they're created. + return false, nil } - if err := pm.createMissingTaxonomyNodes(); err != nil { - return err + if pageBundle.IsPage() { + rootSection := pageBundle.Section() + sectionPageCount[rootSection]++ } - // Handle any new sections created in the step above. - if err := pm.assembleSections(); err != nil { - return err + // Handle cascades first to get any default dates set. + var cascade map[page.PageMatcher]maps.Params + if keyPage == "" { + // Home page gets it's cascade from the site config. + cascade = sa.conf.Cascade.Config + + if pageBundle.m.cascade == nil { + // Pass the site cascade downwards. + pw.WalkContext.Data().Insert(keyPage, cascade) + } + } else { + _, data := pw.WalkContext.Data().LongestPrefix(keyPage) + if data != nil { + cascade = data.(map[page.PageMatcher]maps.Params) + } } - if pm.s.home == nil { - // Home is disabled, everything is. - pm.bundleTrees.DeletePrefix("") - return nil + if (pageBundle.IsHome() || pageBundle.IsSection()) && pageBundle.m.setMetaPostCount > 0 { + oldDates := pageBundle.m.dates + + // We need to wait until after the walk to determine if any of the dates have changed. 
+ pw.WalkContext.AddPostHook( + func() error { + if oldDates != pageBundle.m.dates { + sa.assembleChanges.Add(pageBundle) + } + return nil + }, + ) } - if err := pm.assembleTaxonomies(); err != nil { - return err + // Combine the cascade map with front matter. + pageBundle.setMetaPost(cascade) + + // We receive cascade values from above. If this leads to a change compared + // to the previous value, we need to mark the page and its dependencies as changed. + if pageBundle.m.setMetaPostCascadeChanged { + sa.assembleChanges.Add(pageBundle) } - if err := pm.createSiteTaxonomies(); err != nil { - return err + const eventName = "dates" + if n.isContentNodeBranch() { + if pageBundle.m.cascade != nil { + // Pass it down. + pw.WalkContext.Data().Insert(keyPage, pageBundle.m.cascade) + } + wasZeroDates := resource.IsZeroDates(pageBundle.m.dates) + if wasZeroDates || pageBundle.IsHome() { + pw.WalkContext.AddEventListener(eventName, keyPage, func(e *doctree.Event[contentNodeI]) { + sp, ok := e.Source.(*pageState) + if !ok { + return + } + + if wasZeroDates { + pageBundle.m.dates.UpdateDateAndLastmodIfAfter(sp.m.dates) + } + + if pageBundle.IsHome() { + if pageBundle.m.dates.Lastmod().After(pageBundle.s.lastmod) { + pageBundle.s.lastmod = pageBundle.m.dates.Lastmod() + } + if sp.m.dates.Lastmod().After(pageBundle.s.lastmod) { + pageBundle.s.lastmod = sp.m.dates.Lastmod() + } + } + }) + } } - sw := §ionWalker{m: pm.contentMap} - a := sw.applyAggregates() + // Send the date info up the tree. + pw.WalkContext.SendEvent(&doctree.Event[contentNodeI]{Source: n, Path: keyPage, Name: eventName}) + + isBranch := n.isContentNodeBranch() + rw.Prefix = keyPage + "/" + + rw.Handle = func(resourceKey string, n contentNodeI, match doctree.DimensionFlag) (bool, error) { + if isBranch { + ownerKey, _ := pw.Tree.LongestPrefix(resourceKey, true, nil) + if ownerKey != keyPage { + // Stop walking downwards, someone else owns this resource. 
+ rw.SkipPrefix(ownerKey + "/") + return false, nil + } + } + rs := n.(*resourceSource) + if rs.isPage() { + pageResource := rs.r.(*pageState) + relPath := pageResource.m.pathInfo.BaseRel(pageBundle.m.pathInfo) + pageResource.m.resourcePath = relPath + var cascade map[page.PageMatcher]maps.Params + // Apply cascade (if set) to the page. + _, data := pw.WalkContext.Data().LongestPrefix(resourceKey) + if data != nil { + cascade = data.(map[page.PageMatcher]maps.Params) + } + pageResource.setMetaPost(cascade) + } - if a.mainSection != "" { - // Note, sites that have no custom config share a common config struct pointer. - // This means that we currently do not support setting different values per language. - // The end user can, however, configure this per language if needed. - mainSections := []string{strings.TrimRight(a.mainSection, "/")} - pm.s.s.conf.C.SetMainSectionsIfNotSet(mainSections) + return false, nil } - pm.s.lastmod = a.datesAll.Lastmod() - if resource.IsZeroDates(pm.s.home) { - pm.s.home.m.Dates = a.datesAll + return false, rw.Walk(sa.ctx) + } + + if err := pw.Walk(sa.ctx); err != nil { + return err + } + + if err := pw.WalkContext.HandleEventsAndHooks(); err != nil { + return err + } + + if !sa.s.conf.C.IsMainSectionsSet() { + var mainSection string + var maxcount int + for section, counter := range sectionPageCount { + if section != "" && counter > maxcount { + mainSection = section + maxcount = counter + } } + sa.s.conf.C.SetMainSections([]string{mainSection}) - return nil - }) -} + } -func (m *pageMaps) walkBundles(fn func(n *contentNode) bool) { - _ = m.withMaps(func(pm *pageMap) error { - pm.bundleTrees.Walk(func(s string, n *contentNode) bool { - return fn(n) - }) - return nil - }) + return nil } -func (m *pageMaps) walkBranchesPrefix(prefix string, fn func(s string, n *contentNode) bool) { - _ = m.withMaps(func(pm *pageMap) error { - pm.branchTrees.WalkPrefix(prefix, func(s string, n *contentNode) bool { - return fn(s, n) - }) +func (sa 
*sitePagesAssembler) applyAggregatesToTaxonomiesAndTerms() error { + walkContext := &doctree.WalkContext[contentNodeI]{} + + handlePlural := func(key string) error { + var pw *doctree.NodeShiftTreeWalker[contentNodeI] + pw = &doctree.NodeShiftTreeWalker[contentNodeI]{ + Tree: sa.pageMap.treePages, + Prefix: key, // We also want to include the root taxonomy nodes, so no trailing slash. + LockType: doctree.LockTypeRead, + WalkContext: walkContext, + Handle: func(s string, n contentNodeI, match doctree.DimensionFlag) (bool, error) { + p := n.(*pageState) + if p.Kind() != kinds.KindTerm { + // The other kinds were handled in applyAggregates. + if p.m.cascade != nil { + // Pass it down. + pw.WalkContext.Data().Insert(s, p.m.cascade) + } + } + + if p.Kind() != kinds.KindTerm && p.Kind() != kinds.KindTaxonomy { + // Already handled. + return false, nil + } + + const eventName = "dates" + + if p.Kind() == kinds.KindTerm { + var cascade map[page.PageMatcher]maps.Params + _, data := pw.WalkContext.Data().LongestPrefix(s) + if data != nil { + cascade = data.(map[page.PageMatcher]maps.Params) + } + p.setMetaPost(cascade) + + if err := sa.pageMap.treeTaxonomyEntries.WalkPrefix( + doctree.LockTypeRead, + paths.AddTrailingSlash(s), + func(ss string, wn *weightedContentNode) (bool, error) { + // Send the date info up the tree. + pw.WalkContext.SendEvent(&doctree.Event[contentNodeI]{Source: wn.n, Path: ss, Name: eventName}) + return false, nil + }, + ); err != nil { + return false, err + } + } + + // Send the date info up the tree. 
+ pw.WalkContext.SendEvent(&doctree.Event[contentNodeI]{Source: n, Path: s, Name: eventName}) + + if resource.IsZeroDates(p.m.dates) { + pw.WalkContext.AddEventListener(eventName, s, func(e *doctree.Event[contentNodeI]) { + sp, ok := e.Source.(*pageState) + if !ok { + return + } + + p.m.dates.UpdateDateAndLastmodIfAfter(sp.m.dates) + }) + } + + return false, nil + }, + } + + if err := pw.Walk(sa.ctx); err != nil { + return err + } return nil - }) -} + } -func (m *pageMaps) withMaps(fn func(pm *pageMap) error) error { - g, _ := m.workers.Start(context.Background()) - for _, pm := range m.pmaps { - pm := pm - g.Run(func() error { - return fn(pm) - }) + for _, viewName := range sa.pageMap.cfg.taxonomyConfig.views { + if err := handlePlural(viewName.pluralTreeKey); err != nil { + return err + } } - return g.Wait() + + if err := walkContext.HandleEventsAndHooks(); err != nil { + return err + } + + return nil } -type pagesMapBucket struct { - // Cascading front matter. - cascade map[page.PageMatcher]maps.Params +func (sa *sitePagesAssembler) assembleTermsAndTranslations() error { + var ( + pages = sa.pageMap.treePages + entries = sa.pageMap.treeTaxonomyEntries + views = sa.pageMap.cfg.taxonomyConfig.views + ) - owner *pageState // The branch node + lockType := doctree.LockTypeWrite + w := &doctree.NodeShiftTreeWalker[contentNodeI]{ + Tree: pages, + LockType: lockType, + Handle: func(s string, n contentNodeI, match doctree.DimensionFlag) (bool, error) { + ps := n.(*pageState) - *pagesMapBucketPages -} + if ps.m.noLink() { + return false, nil + } -type pagesMapBucketPages struct { - pagesInit sync.Once - pages page.Pages + // This is a little out of place, but is conveniently put here. + // Check if translationKey is set by user. + // This is to support the manual way of setting the translationKey in front matter. 
+ if ps.m.translationKey != "" { + sa.s.h.translationKeyPages.Append(ps.m.translationKey, ps) + } - pagesAndSectionsInit sync.Once - pagesAndSections page.Pages + if sa.pageMap.cfg.taxonomyTermDisabled { + return false, nil + } - sectionsInit sync.Once - sections page.Pages -} + for _, viewName := range views { + vals := types.ToStringSlicePreserveString(getParam(ps, viewName.plural, false)) + if vals == nil { + continue + } -func (b *pagesMapBucket) getPages() page.Pages { - b.pagesInit.Do(func() { - b.pages = b.owner.treeRef.getPages() - page.SortByDefault(b.pages) - }) - return b.pages -} + w := getParamToLower(ps, viewName.plural+"_weight") + weight, err := cast.ToIntE(w) + if err != nil { + sa.Log.Warnf("Unable to convert taxonomy weight %#v to int for %q", w, n.Path()) + // weight will equal zero, so let the flow continue + } -func (b *pagesMapBucket) getPagesRecursive() page.Pages { - pages := b.owner.treeRef.getPagesRecursive() - page.SortByDefault(pages) - return pages -} + for i, v := range vals { + if v == "" { + continue + } + viewTermKey := "/" + viewName.plural + "/" + v + pi := sa.Site.Conf.PathParser().Parse(files.ComponentFolderContent, viewTermKey+"/_index.md") + term := pages.Get(pi.Base()) + if term == nil { + m := &pageMeta{ + term: v, + singular: viewName.singular, + s: sa.Site, + pathInfo: pi, + kind: kinds.KindTerm, + } + n, err := sa.h.newPage(m) + if err != nil { + return false, err + } + pages.InsertIntoValuesDimension(pi.Base(), n) + term = pages.Get(pi.Base()) + } + + if s == "" { + // Consider making this the real value. 
+ s = "/" + } + + key := pi.Base() + s + + entries.Insert(key, &weightedContentNode{ + weight: weight, + n: n, + term: &pageWithOrdinal{pageState: term.(*pageState), ordinal: i}, + }) + } + } + return false, nil + }, + } -func (b *pagesMapBucket) getPagesAndSections() page.Pages { - b.pagesAndSectionsInit.Do(func() { - b.pagesAndSections = b.owner.treeRef.getPagesAndSections() - }) - return b.pagesAndSections + return w.Walk(sa.ctx) } -func (b *pagesMapBucket) getSections() page.Pages { - b.sectionsInit.Do(func() { - if b.owner.treeRef == nil { - return - } - b.sections = b.owner.treeRef.getSections() - }) +func (sa *sitePagesAssembler) assembleResources() error { + pagesTree := sa.pageMap.treePages + resourcesTree := sa.pageMap.treeResources + + lockType := doctree.LockTypeWrite + w := &doctree.NodeShiftTreeWalker[contentNodeI]{ + Tree: pagesTree, + LockType: lockType, + Handle: func(s string, n contentNodeI, match doctree.DimensionFlag) (bool, error) { + ps := n.(*pageState) + + // Prepare resources for this page. + ps.shiftToOutputFormat(true, 0) + targetPaths := ps.targetPaths() + baseTarget := targetPaths.SubResourceBaseTarget + duplicateResourceFiles := true + if ps.s.ContentSpec.Converters.IsGoldmark(ps.m.markup) { + duplicateResourceFiles = ps.s.ContentSpec.Converters.GetMarkupConfig().Goldmark.DuplicateResourceFiles + } - return b.sections -} + duplicateResourceFiles = duplicateResourceFiles || ps.s.Conf.IsMultihost() + + sa.pageMap.forEachResourceInPage( + ps, lockType, + !duplicateResourceFiles, + func(resourceKey string, n contentNodeI, match doctree.DimensionFlag) (bool, error) { + rs := n.(*resourceSource) + if !match.Has(doctree.DimensionLanguage) { + // We got an alternative language version. + // Clone this and insert it into the tree. 
+ rs = rs.clone() + resourcesTree.InsertIntoCurrentDimension(resourceKey, rs) + } + if rs.r != nil { + return false, nil + } + + relPathOriginal := rs.path.PathRel(ps.m.pathInfo) + relPath := rs.path.BaseRel(ps.m.pathInfo) + + var targetBasePaths []string + if ps.s.Conf.IsMultihost() { + baseTarget = targetPaths.SubResourceBaseLink + // In multihost we need to publish to the lang sub folder. + targetBasePaths = []string{ps.s.GetTargetLanguageBasePath()} // TODO(bep) we don't need this as a slice anymore. + + } + + rd := resources.ResourceSourceDescriptor{ + OpenReadSeekCloser: rs.opener, + Path: rs.path, + GroupIdentity: rs.path, + TargetPath: relPathOriginal, // Use the original path for the target path, so the links can be guessed. + TargetBasePaths: targetBasePaths, + BasePathRelPermalink: targetPaths.SubResourceBaseLink, + BasePathTargetPath: baseTarget, + Name: relPath, + NameOriginal: relPathOriginal, + LazyPublish: !ps.m.buildConfig.PublishResources, + } + r, err := ps.m.s.ResourceSpec.NewResource(rd) + if err != nil { + return false, err + } + rs.r = r + return false, nil + }, + ) -func (b *pagesMapBucket) getTaxonomies() page.Pages { - b.sectionsInit.Do(func() { - var pas page.Pages - ref := b.owner.treeRef - ref.m.collectTaxonomies(ref.key, func(c *contentNode) { - pas = append(pas, c.p) - }) - page.SortByDefault(pas) - b.sections = pas - }) + return false, nil + }, + } - return b.sections + return w.Walk(sa.ctx) } -func (b *pagesMapBucket) getTaxonomyEntries() page.Pages { - var pas page.Pages - ref := b.owner.treeRef - viewInfo := ref.n.viewInfo - prefix := strings.ToLower("/" + viewInfo.name.plural + "/" + viewInfo.termKey + "/") - ref.m.taxonomyEntries.WalkPrefix(prefix, func(s string, v any) bool { - n := v.(*contentNode) - pas = append(pas, n.viewInfo.ref.p) - return false - }) - page.SortByDefault(pas) - return pas +func (sa *sitePagesAssembler) assemblePagesStep1(ctx context.Context) error { + if err := sa.addMissingTaxonomies(); err != nil { + 
return err + } + if err := sa.addMissingRootSections(); err != nil { + return err + } + if err := sa.addStandalonePages(); err != nil { + return err + } + if err := sa.applyAggregates(); err != nil { + return err + } + return nil } -type sectionAggregate struct { - datesAll resource.Dates - datesSection resource.Dates - pageCount int - mainSection string - mainSectionPageCount int +func (sa *sitePagesAssembler) assemblePagesStep2() error { + if err := sa.removeShouldNotBuild(); err != nil { + return err + } + if err := sa.assembleTermsAndTranslations(); err != nil { + return err + } + if err := sa.applyAggregatesToTaxonomiesAndTerms(); err != nil { + return err + } + if err := sa.assembleResources(); err != nil { + return err + } + return nil } -type sectionAggregateHandler struct { - sectionAggregate - sectionPageCount int +// Remove any leftover node that we should not build for some reason (draft, expired, scheduled in the future). +// Note that for the home and section kinds we just disable the nodes to preserve the structure. +func (sa *sitePagesAssembler) removeShouldNotBuild() error { + s := sa.Site + var keys []string + w := &doctree.NodeShiftTreeWalker[contentNodeI]{ + LockType: doctree.LockTypeRead, + Tree: sa.pageMap.treePages, + Handle: func(key string, n contentNodeI, match doctree.DimensionFlag) (bool, error) { + p := n.(*pageState) + if !s.shouldBuild(p) { + switch p.Kind() { + case kinds.KindHome, kinds.KindSection, kinds.KindTaxonomy: + // We need to keep these for the structure, but disable + // them so they don't get listed/rendered. + (&p.m.buildConfig).Disable() + default: + keys = append(keys, key) + } + } + return false, nil + }, + } + if err := w.Walk(sa.ctx); err != nil { + return err + } + + sa.pageMap.DeletePageAndResourcesBelow(keys...) 
- // Section - b *contentNode - s string + return nil } -func (h *sectionAggregateHandler) String() string { - return fmt.Sprintf("%s/%s - %d - %s", h.sectionAggregate.datesAll, h.sectionAggregate.datesSection, h.sectionPageCount, h.s) -} +// // Create the fixed output pages, e.g. sitemap.xml, if not already there. +func (sa *sitePagesAssembler) addStandalonePages() error { + s := sa.Site + m := s.pageMap + tree := m.treePages + + commit := tree.Lock(true) + defer commit() + + addStandalone := func(key, kind string, f output.Format) { + if !s.Conf.IsMultihost() { + switch kind { + case kinds.KindSitemapIndex, kinds.KindRobotsTXT: + // Only one for all languages. + if s.languagei != 0 { + return + } + } + } -func (h *sectionAggregateHandler) isRootSection() bool { - return h.s != "/" && strings.Count(h.s, "/") == 2 -} + if !sa.Site.conf.IsKindEnabled(kind) || tree.Has(key) { + return + } -func (h *sectionAggregateHandler) handleNested(v sectionWalkHandler) error { - nested := v.(*sectionAggregateHandler) - h.sectionPageCount += nested.pageCount - h.pageCount += h.sectionPageCount - h.datesAll.UpdateDateAndLastmodIfAfter(nested.datesAll) - h.datesSection.UpdateDateAndLastmodIfAfter(nested.datesAll) - return nil -} + m := &pageMeta{ + s: s, + pathInfo: s.Conf.PathParser().Parse(files.ComponentFolderContent, key+f.MediaType.FirstSuffix.FullSuffix), + kind: kind, + standaloneOutputFormat: f, + } -func (h *sectionAggregateHandler) handlePage(s string, n *contentNode) error { - h.sectionPageCount++ + p, _ := s.h.newPage(m) - var d resource.Dated - if n.p != nil { - d = n.p - } else if n.viewInfo != nil && n.viewInfo.ref != nil { - d = n.viewInfo.ref.p - } else { - return nil + tree.InsertIntoValuesDimension(key, p) } - h.datesAll.UpdateDateAndLastmodIfAfter(d) - h.datesSection.UpdateDateAndLastmodIfAfter(d) - return nil -} + addStandalone("/404", kinds.KindStatus404, output.HTTPStatusHTMLFormat) -func (h *sectionAggregateHandler) handleSectionPost() error { - if 
h.sectionPageCount > h.mainSectionPageCount && h.isRootSection() { - h.mainSectionPageCount = h.sectionPageCount - h.mainSection = strings.TrimPrefix(h.s, "/") + if s.conf.EnableRobotsTXT { + if m.i == 0 || s.Conf.IsMultihost() { + addStandalone("/robots", kinds.KindRobotsTXT, output.RobotsTxtFormat) + } } - if resource.IsZeroDates(h.b.p) { - h.b.p.m.Dates = h.datesSection + sitemapEnabled := false + for _, s := range s.h.Sites { + if s.conf.IsKindEnabled(kinds.KindSitemap) { + sitemapEnabled = true + break + } } - h.datesSection = resource.Dates{} + if sitemapEnabled { + addStandalone("/sitemap", kinds.KindSitemap, output.SitemapFormat) + skipSitemapIndex := s.Conf.IsMultihost() || !(s.Conf.DefaultContentLanguageInSubdir() || s.Conf.IsMultiLingual()) - return nil -} + if !skipSitemapIndex { + addStandalone("/sitemapindex", kinds.KindSitemapIndex, output.SitemapIndexFormat) + } + } -func (h *sectionAggregateHandler) handleSectionPre(s string, b *contentNode) error { - h.s = s - h.b = b - h.sectionPageCount = 0 - h.datesAll.UpdateDateAndLastmodIfAfter(b.p) return nil } -type sectionWalkHandler interface { - handleNested(v sectionWalkHandler) error - handlePage(s string, b *contentNode) error - handleSectionPost() error - handleSectionPre(s string, b *contentNode) error -} +func (sa *sitePagesAssembler) addMissingRootSections() error { + var hasHome bool + + // Add missing root sections. 
+ seen := map[string]bool{} + var w *doctree.NodeShiftTreeWalker[contentNodeI] + w = &doctree.NodeShiftTreeWalker[contentNodeI]{ + LockType: doctree.LockTypeWrite, + Tree: sa.pageMap.treePages, + Handle: func(s string, n contentNodeI, match doctree.DimensionFlag) (bool, error) { + if n == nil { + panic("n is nil") + } -type sectionWalker struct { - err error - m *contentMap -} + ps := n.(*pageState) -func (w *sectionWalker) applyAggregates() *sectionAggregateHandler { - return w.walkLevel("/", func() sectionWalkHandler { - return §ionAggregateHandler{} - }).(*sectionAggregateHandler) -} - -func (w *sectionWalker) walkLevel(prefix string, createVisitor func() sectionWalkHandler) sectionWalkHandler { - level := strings.Count(prefix, "/") + if ps.Lang() != sa.Lang() { + panic(fmt.Sprintf("lang mismatch: %q: %s != %s", s, ps.Lang(), sa.Lang())) + } - visitor := createVisitor() + if s == "" { + hasHome = true + sa.home = ps + return false, nil + } - w.m.taxonomies.WalkBelow(prefix, func(s string, v any) bool { - currentLevel := strings.Count(s, "/") + p := ps.m.pathInfo + section := p.Section() + if section == "" || seen[section] { + return false, nil + } + seen[section] = true - if currentLevel > level+1 { - return false - } + // Try to preserve the original casing if possible. + sectionUnnormalized := p.Unmormalized().Section() + pth := sa.s.Conf.PathParser().Parse(files.ComponentFolderContent, "/"+sectionUnnormalized+"/_index.md") + nn := w.Tree.Get(pth.Base()) - n := v.(*contentNode) + if nn == nil { + m := &pageMeta{ + s: sa.Site, + pathInfo: pth, + } - if w.err = visitor.handleSectionPre(s, n); w.err != nil { - return true - } + ps, err := sa.h.newPage(m) + if err != nil { + return false, err + } + w.Tree.InsertIntoValuesDimension(pth.Base(), ps) + } - if currentLevel == 2 { - nested := w.walkLevel(s, createVisitor) - if w.err = visitor.handleNested(nested); w.err != nil { - return true + // /a/b, we don't need to walk deeper. 
+ if strings.Count(s, "/") > 1 { + w.SkipPrefix(s + "/") } - } else { - w.m.taxonomyEntries.WalkPrefix(s, func(ss string, v any) bool { - n := v.(*contentNode) - w.err = visitor.handlePage(ss, n) - return w.err != nil - }) - } - w.err = visitor.handleSectionPost() + return false, nil + }, + } - return w.err != nil - }) + if err := w.Walk(sa.ctx); err != nil { + return err + } - w.m.sections.WalkBelow(prefix, func(s string, v any) bool { - currentLevel := strings.Count(s, "/") - if currentLevel > level+1 { - return false + if !hasHome { + p := sa.Site.Conf.PathParser().Parse(files.ComponentFolderContent, "/_index.md") + m := &pageMeta{ + s: sa.Site, + pathInfo: p, + kind: kinds.KindHome, + } + n, err := sa.h.newPage(m) + if err != nil { + return err } + w.Tree.InsertWithLock(p.Base(), n) + sa.home = n + } - n := v.(*contentNode) + return nil +} - if w.err = visitor.handleSectionPre(s, n); w.err != nil { - return true - } +func (sa *sitePagesAssembler) addMissingTaxonomies() error { + if sa.pageMap.cfg.taxonomyDisabled && sa.pageMap.cfg.taxonomyTermDisabled { + return nil + } - w.m.pages.WalkPrefix(s+cmBranchSeparator, func(s string, v any) bool { - w.err = visitor.handlePage(s, v.(*contentNode)) - return w.err != nil - }) + tree := sa.pageMap.treePages - if w.err != nil { - return true + commit := tree.Lock(true) + defer commit() + + for _, viewName := range sa.pageMap.cfg.taxonomyConfig.views { + key := viewName.pluralTreeKey + if v := tree.Get(key); v == nil { + m := &pageMeta{ + s: sa.Site, + pathInfo: sa.Conf.PathParser().Parse(files.ComponentFolderContent, key+"/_index.md"), + kind: kinds.KindTaxonomy, + singular: viewName.singular, + } + p, _ := sa.h.newPage(m) + tree.InsertIntoValuesDimension(key, p) } + } - nested := w.walkLevel(s, createVisitor) - if w.err = visitor.handleNested(nested); w.err != nil { - return true + return nil +} + +func (m *pageMap) CreateSiteTaxonomies(ctx context.Context) error { + m.s.taxonomies = make(page.TaxonomyList) + + if 
m.cfg.taxonomyDisabled && m.cfg.taxonomyTermDisabled { + return nil + } + + for _, viewName := range m.cfg.taxonomyConfig.views { + key := viewName.pluralTreeKey + m.s.taxonomies[viewName.plural] = make(page.Taxonomy) + w := &doctree.NodeShiftTreeWalker[contentNodeI]{ + Tree: m.treePages, + Prefix: paths.AddTrailingSlash(key), + LockType: doctree.LockTypeRead, + Handle: func(s string, n contentNodeI, match doctree.DimensionFlag) (bool, error) { + p := n.(*pageState) + plural := p.Section() + + switch p.Kind() { + case kinds.KindTerm: + taxonomy := m.s.taxonomies[plural] + if taxonomy == nil { + return true, fmt.Errorf("missing taxonomy: %s", plural) + } + k := strings.ToLower(p.m.term) + err := m.treeTaxonomyEntries.WalkPrefix( + doctree.LockTypeRead, + paths.AddTrailingSlash(s), + func(s string, wn *weightedContentNode) (bool, error) { + taxonomy[k] = append(taxonomy[k], page.NewWeightedPage(wn.weight, wn.n.(page.Page), wn.term.Page())) + return false, nil + }, + ) + if err != nil { + return true, err + } + + default: + return false, nil + } + + return false, nil + }, } - w.err = visitor.handleSectionPost() + if err := w.Walk(ctx); err != nil { + return err + } + } - return w.err != nil - }) + for _, taxonomy := range m.s.taxonomies { + for _, v := range taxonomy { + v.Sort() + } + } - return visitor + return nil } type viewName struct { - singular string // e.g. "category" - plural string // e.g. "categories" + singular string // e.g. "category" + plural string // e.g. 
"categories" + pluralTreeKey string } func (v viewName) IsZero() bool { diff --git a/hugolib/content_map_test.go b/hugolib/content_map_test.go index 883587a01..a41b2aae9 100644 --- a/hugolib/content_map_test.go +++ b/hugolib/content_map_test.go @@ -16,295 +16,11 @@ package hugolib import ( "fmt" "path/filepath" - "strings" "testing" - "github.com/gohugoio/hugo/common/paths" - - "github.com/gohugoio/hugo/htesting/hqt" - - "github.com/gohugoio/hugo/hugofs/files" - - "github.com/gohugoio/hugo/hugofs" - "github.com/spf13/afero" - qt "github.com/frankban/quicktest" ) -func BenchmarkContentMap(b *testing.B) { - writeFile := func(c *qt.C, fs afero.Fs, filename, content string) hugofs.FileMetaInfo { - c.Helper() - filename = filepath.FromSlash(filename) - c.Assert(fs.MkdirAll(filepath.Dir(filename), 0777), qt.IsNil) - c.Assert(afero.WriteFile(fs, filename, []byte(content), 0777), qt.IsNil) - - fi, err := fs.Stat(filename) - c.Assert(err, qt.IsNil) - - mfi := fi.(hugofs.FileMetaInfo) - return mfi - } - - createFs := func(fs afero.Fs, lang string) afero.Fs { - return hugofs.NewBaseFileDecorator(fs, - func(fi hugofs.FileMetaInfo) { - meta := fi.Meta() - // We have a more elaborate filesystem setup in the - // real flow, so simulate this here. 
- meta.Lang = lang - meta.Path = meta.Filename - meta.Classifier = files.ClassifyContentFile(fi.Name(), meta.OpenFunc) - }) - } - - b.Run("CreateMissingNodes", func(b *testing.B) { - c := qt.New(b) - b.StopTimer() - mps := make([]*contentMap, b.N) - for i := 0; i < b.N; i++ { - m := newContentMap(contentMapConfig{lang: "en"}) - mps[i] = m - memfs := afero.NewMemMapFs() - fs := createFs(memfs, "en") - for i := 1; i <= 20; i++ { - c.Assert(m.AddFilesBundle(writeFile(c, fs, fmt.Sprintf("sect%d/a/index.md", i), "page")), qt.IsNil) - c.Assert(m.AddFilesBundle(writeFile(c, fs, fmt.Sprintf("sect2%d/%sindex.md", i, strings.Repeat("b/", i)), "page")), qt.IsNil) - } - - } - - b.StartTimer() - - for i := 0; i < b.N; i++ { - m := mps[i] - c.Assert(m.CreateMissingNodes(), qt.IsNil) - - b.StopTimer() - m.pages.DeletePrefix("/") - m.sections.DeletePrefix("/") - b.StartTimer() - } - }) -} - -func TestContentMap(t *testing.T) { - c := qt.New(t) - - writeFile := func(c *qt.C, fs afero.Fs, filename, content string) hugofs.FileMetaInfo { - c.Helper() - filename = filepath.FromSlash(filename) - c.Assert(fs.MkdirAll(filepath.Dir(filename), 0777), qt.IsNil) - c.Assert(afero.WriteFile(fs, filename, []byte(content), 0777), qt.IsNil) - - fi, err := fs.Stat(filename) - c.Assert(err, qt.IsNil) - - mfi := fi.(hugofs.FileMetaInfo) - return mfi - } - - createFs := func(fs afero.Fs, lang string) afero.Fs { - return hugofs.NewBaseFileDecorator(fs, - func(fi hugofs.FileMetaInfo) { - meta := fi.Meta() - // We have a more elaborate filesystem setup in the - // real flow, so simulate this here. 
- meta.Lang = lang - meta.Path = meta.Filename - meta.TranslationBaseName = paths.Filename(fi.Name()) - meta.Classifier = files.ClassifyContentFile(fi.Name(), meta.OpenFunc) - }) - } - - c.Run("AddFiles", func(c *qt.C) { - memfs := afero.NewMemMapFs() - - fsl := func(lang string) afero.Fs { - return createFs(memfs, lang) - } - - fs := fsl("en") - - header := writeFile(c, fs, "blog/a/index.md", "page") - - c.Assert(header.Meta().Lang, qt.Equals, "en") - - resources := []hugofs.FileMetaInfo{ - writeFile(c, fs, "blog/a/b/data.json", "data"), - writeFile(c, fs, "blog/a/logo.png", "image"), - } - - m := newContentMap(contentMapConfig{lang: "en"}) - - c.Assert(m.AddFilesBundle(header, resources...), qt.IsNil) - - c.Assert(m.AddFilesBundle(writeFile(c, fs, "blog/b/c/index.md", "page")), qt.IsNil) - - c.Assert(m.AddFilesBundle( - writeFile(c, fs, "blog/_index.md", "section page"), - writeFile(c, fs, "blog/sectiondata.json", "section resource"), - ), qt.IsNil) - - got := m.testDump() - - expect := ` - Tree 0: - /blog/__hb_a__hl_ - /blog/__hb_b/c__hl_ - Tree 1: - /blog/ - Tree 2: - /blog/__hb_a__hl_b/data.json - /blog/__hb_a__hl_logo.png - /blog/__hl_sectiondata.json - en/pages/blog/__hb_a__hl_|f:blog/a/index.md - - R: blog/a/b/data.json - - R: blog/a/logo.png - en/pages/blog/__hb_b/c__hl_|f:blog/b/c/index.md - en/sections/blog/|f:blog/_index.md - - P: blog/a/index.md - - P: blog/b/c/index.md - - R: blog/sectiondata.json - -` - - c.Assert(got, hqt.IsSameString, expect, qt.Commentf(got)) - - // Add a data file to the section bundle - c.Assert(m.AddFiles( - writeFile(c, fs, "blog/sectiondata2.json", "section resource"), - ), qt.IsNil) - - // And then one to the leaf bundles - c.Assert(m.AddFiles( - writeFile(c, fs, "blog/a/b/data2.json", "data2"), - ), qt.IsNil) - - c.Assert(m.AddFiles( - writeFile(c, fs, "blog/b/c/d/data3.json", "data3"), - ), qt.IsNil) - - got = m.testDump() - - expect = ` - Tree 0: - /blog/__hb_a__hl_ - /blog/__hb_b/c__hl_ - Tree 1: - /blog/ - Tree 2: - 
/blog/__hb_a__hl_b/data.json - /blog/__hb_a__hl_b/data2.json - /blog/__hb_a__hl_logo.png - /blog/__hb_b/c__hl_d/data3.json - /blog/__hl_sectiondata.json - /blog/__hl_sectiondata2.json - en/pages/blog/__hb_a__hl_|f:blog/a/index.md - - R: blog/a/b/data.json - - R: blog/a/b/data2.json - - R: blog/a/logo.png - en/pages/blog/__hb_b/c__hl_|f:blog/b/c/index.md - - R: blog/b/c/d/data3.json - en/sections/blog/|f:blog/_index.md - - P: blog/a/index.md - - P: blog/b/c/index.md - - R: blog/sectiondata.json - - R: blog/sectiondata2.json - -` - - c.Assert(got, hqt.IsSameString, expect, qt.Commentf(got)) - - // Add a regular page (i.e. not a bundle) - c.Assert(m.AddFilesBundle(writeFile(c, fs, "blog/b.md", "page")), qt.IsNil) - - c.Assert(m.testDump(), hqt.IsSameString, ` - Tree 0: - /blog/__hb_a__hl_ - /blog/__hb_b/c__hl_ - /blog/__hb_b__hl_ - Tree 1: - /blog/ - Tree 2: - /blog/__hb_a__hl_b/data.json - /blog/__hb_a__hl_b/data2.json - /blog/__hb_a__hl_logo.png - /blog/__hb_b/c__hl_d/data3.json - /blog/__hl_sectiondata.json - /blog/__hl_sectiondata2.json - en/pages/blog/__hb_a__hl_|f:blog/a/index.md - - R: blog/a/b/data.json - - R: blog/a/b/data2.json - - R: blog/a/logo.png - en/pages/blog/__hb_b/c__hl_|f:blog/b/c/index.md - - R: blog/b/c/d/data3.json - en/pages/blog/__hb_b__hl_|f:blog/b.md - en/sections/blog/|f:blog/_index.md - - P: blog/a/index.md - - P: blog/b/c/index.md - - P: blog/b.md - - R: blog/sectiondata.json - - R: blog/sectiondata2.json - - - `, qt.Commentf(m.testDump())) - }) - - c.Run("CreateMissingNodes", func(c *qt.C) { - memfs := afero.NewMemMapFs() - - fsl := func(lang string) afero.Fs { - return createFs(memfs, lang) - } - - fs := fsl("en") - - m := newContentMap(contentMapConfig{lang: "en"}) - - c.Assert(m.AddFilesBundle(writeFile(c, fs, "blog/page.md", "page")), qt.IsNil) - c.Assert(m.AddFilesBundle(writeFile(c, fs, "blog/a/index.md", "page")), qt.IsNil) - c.Assert(m.AddFilesBundle(writeFile(c, fs, "bundle/index.md", "page")), qt.IsNil) - - 
c.Assert(m.CreateMissingNodes(), qt.IsNil) - - got := m.testDump() - - c.Assert(got, hqt.IsSameString, ` - - Tree 0: - /__hb_bundle__hl_ - /blog/__hb_a__hl_ - /blog/__hb_page__hl_ - Tree 1: - / - /blog/ - Tree 2: - en/pages/__hb_bundle__hl_|f:bundle/index.md - en/pages/blog/__hb_a__hl_|f:blog/a/index.md - en/pages/blog/__hb_page__hl_|f:blog/page.md - en/sections/ - - P: bundle/index.md - en/sections/blog/ - - P: blog/a/index.md - - P: blog/page.md - - `, qt.Commentf(got)) - }) - - c.Run("cleanKey", func(c *qt.C) { - for _, test := range []struct { - in string - expected string - }{ - {"/a/b/", "/a/b"}, - {filepath.FromSlash("/a/b/"), "/a/b"}, - {"/a//b/", "/a/b"}, - } { - c.Assert(cleanTreeKey(test.in), qt.Equals, test.expected) - } - }) -} - func TestContentMapSite(t *testing.T) { b := newTestSitesBuilder(t) @@ -313,13 +29,17 @@ func TestContentMapSite(t *testing.T) { title: "Page %d" date: "2019-06-0%d" lastMod: "2019-06-0%d" -categories: ["funny"] +categories: [%q] --- Page content. ` createPage := func(i int) string { - return fmt.Sprintf(pageTempl, i, i, i+1) + return fmt.Sprintf(pageTempl, i, i, i+1, "funny") + } + + createPageInCategory := func(i int, category string) string { + return fmt.Sprintf(pageTempl, i, i, i+1, category) } draftTemplate := `--- @@ -358,12 +78,12 @@ Home Content. 
b.WithContent("blog/draftsection/sub/_index.md", createPage(12)) b.WithContent("blog/draftsection/sub/page.md", createPage(13)) b.WithContent("docs/page6.md", createPage(11)) - b.WithContent("tags/_index.md", createPage(32)) - b.WithContent("overlap/_index.md", createPage(33)) + b.WithContent("tags/_index.md", createPageInCategory(32, "sad")) + b.WithContent("overlap/_index.md", createPageInCategory(33, "sad")) b.WithContent("overlap2/_index.md", createPage(34)) b.WithTemplatesAdded("layouts/index.html", ` -Num Regular: {{ len .Site.RegularPages }} +Num Regular: {{ len .Site.RegularPages }}|{{ range .Site.RegularPages }}{{ .RelPermalink }}|{{ end }}$ Main Sections: {{ .Site.Params.mainSections }} Pag Num Pages: {{ len .Paginator.Pages }} {{ $home := .Site.Home }} @@ -394,13 +114,13 @@ InSection: true: {{ $page.InSection $blog }} false: {{ $page.InSection $blogSub Next: {{ $page2.Next.RelPermalink }} NextInSection: {{ $page2.NextInSection.RelPermalink }} Pages: {{ range $blog.Pages }}{{ .RelPermalink }}|{{ end }} -Sections: {{ range $home.Sections }}{{ .RelPermalink }}|{{ end }} -Categories: {{ range .Site.Taxonomies.categories }}{{ .Page.RelPermalink }}; {{ .Page.Title }}; {{ .Count }}|{{ end }} -Category Terms: {{ $categories.Kind}}: {{ range $categories.Data.Terms.Alphabetical }}{{ .Page.RelPermalink }}; {{ .Page.Title }}; {{ .Count }}|{{ end }} -Category Funny: {{ $funny.Kind}}; {{ $funny.Data.Term }}: {{ range $funny.Pages }}{{ .RelPermalink }};|{{ end }} +Sections: {{ range $home.Sections }}{{ .RelPermalink }}|{{ end }}:END +Categories: {{ range .Site.Taxonomies.categories }}{{ .Page.RelPermalink }}; {{ .Page.Title }}; {{ .Count }}|{{ end }}:END +Category Terms: {{ $categories.Kind}}: {{ range $categories.Data.Terms.Alphabetical }}{{ .Page.RelPermalink }}; {{ .Page.Title }}; {{ .Count }}|{{ end }}:END +Category Funny: {{ $funny.Kind}}; {{ $funny.Data.Term }}: {{ range $funny.Pages }}{{ .RelPermalink }};|{{ end }}:END Pag Num Pages: {{ len .Paginator.Pages }} 
Pag Blog Num Pages: {{ len $blog.Paginator.Pages }} -Blog Num RegularPages: {{ len $blog.RegularPages }} +Blog Num RegularPages: {{ len $blog.RegularPages }}|{{ range $blog.RegularPages }}P: {{ .RelPermalink }}|{{ end }} Blog Num Pages: {{ len $blog.Pages }} Draft1: {{ if (.Site.GetPage "blog/subsection/draft") }}FOUND{{ end }}| @@ -409,7 +129,7 @@ Draft3: {{ if (.Site.GetPage "blog/draftsection/page") }}FOUND{{ end }}| Draft4: {{ if (.Site.GetPage "blog/draftsection/sub") }}FOUND{{ end }}| Draft5: {{ if (.Site.GetPage "blog/draftsection/sub/page") }}FOUND{{ end }}| -{{ define "print-page" }}{{ .Title }}|{{ .RelPermalink }}|{{ .Date.Format "2006-01-02" }}|Current Section: {{ .CurrentSection.SectionsPath }}|Resources: {{ range .Resources }}{{ .ResourceType }}: {{ .RelPermalink }}|{{ end }}{{ end }} +{{ define "print-page" }}{{ .Title }}|{{ .RelPermalink }}|{{ .Date.Format "2006-01-02" }}|Current Section: {{ with .CurrentSection }}{{ .Path }}{{ else }}NIL{{ end }}|Resources: {{ range .Resources }}{{ .ResourceType }}: {{ .RelPermalink }}|{{ end }}{{ end }} `) b.Build(BuildCfg{}) @@ -417,15 +137,15 @@ Draft5: {{ if (.Site.GetPage "blog/draftsection/sub/page") }}FOUND{{ end }}| b.AssertFileContent("public/index.html", ` - Num Regular: 7 + Num Regular: 9 Main Sections: [blog] - Pag Num Pages: 7 + Pag Num Pages: 9 - Home: Hugo Home|/|2019-06-08|Current Section: |Resources: - Blog Section: Blogs|/blog/|2019-06-08|Current Section: blog|Resources: - Blog Sub Section: Page 3|/blog/subsection/|2019-06-03|Current Section: blog/subsection|Resources: application: /blog/subsection/subdata.json| - Page: Page 1|/blog/page1/|2019-06-01|Current Section: blog|Resources: - Bundle: Page 12|/blog/bundle/|0001-01-01|Current Section: blog|Resources: application: /blog/bundle/data.json|page: | + Home: Hugo Home|/|2019-06-08|Current Section: /|Resources: + Blog Section: Blogs|/blog/|2019-06-08|Current Section: /blog|Resources: + Blog Sub Section: Page 3|/blog/subsection/|2019-06-03|Current 
Section: /blog/subsection|Resources: application: /blog/subsection/subdata.json| + Page: Page 1|/blog/page1/|2019-06-01|Current Section: /blog|Resources: + Bundle: Page 12|/blog/bundle/|0001-01-01|Current Section: /blog|Resources: application: /blog/bundle/data.json|page: | IsDescendant: true: true true: true true: true true: true true: true false: false false: false IsAncestor: true: true true: true true: true true: true true: true true: true false: false false: false false: false false: false IsDescendant overlap1: false: false @@ -437,20 +157,126 @@ Draft5: {{ if (.Site.GetPage "blog/draftsection/sub/page") }}FOUND{{ end }}| Next: /blog/page3/ NextInSection: /blog/page3/ Pages: /blog/page3/|/blog/subsection/|/blog/page2/|/blog/page1/|/blog/bundle/| - Sections: /blog/|/docs/| - Categories: /categories/funny/; funny; 11| - Category Terms: taxonomy: /categories/funny/; funny; 11| - Category Funny: term; funny: /blog/subsection/page4/;|/blog/page3/;|/blog/subsection/;|/blog/page2/;|/blog/page1/;|/blog/subsection/page5/;|/docs/page6/;|/blog/bundle/;|;| - Pag Num Pages: 7 + Sections: /blog/|/docs/|/overlap/|/overlap2/|:END + Categories: /categories/funny/; Funny; 12|/categories/sad/; Sad; 2|:END + Category Terms: taxonomy: /categories/funny/; Funny; 12|/categories/sad/; Sad; 2|:END + Category Funny: term; Funny: /blog/subsection/page4/;|/blog/page3/;|/blog/subsection/;|/blog/page2/;|/blog/page1/;|/blog/subsection/page5/;|/docs/page6/;|/blog/bundle/;|/blog/draftsection/page/;|/blog/draftsection/sub/;|/blog/draftsection/sub/page/;|/overlap2/;|:END + Pag Num Pages: 9 Pag Blog Num Pages: 4 Blog Num RegularPages: 4 Blog Num Pages: 5 Draft1: | - Draft2: | - Draft3: | - Draft4: | - Draft5: | + Draft2: FOUND| + Draft3: FOUND| + Draft4: FOUND| + Draft5: FOUND| `) } + +func TestIntegrationTestTemplate(t *testing.T) { + t.Parallel() + c := qt.New(t) + + files := ` +-- config.toml -- +title = "Integration Test" +disableKinds=["page", "section", "taxonomy", "term", "sitemap", 
"robotsTXT", "RSS"] +-- layouts/index.html -- +Home: {{ .Title }}| + + ` + + b := NewIntegrationTestBuilder( + IntegrationTestConfig{ + T: c, + TxtarString: files, + }).Build() + + b.AssertFileContent("public/index.html", "Home: Integration Test|") +} + +// Issue #11840 +func TestBundleResourceLanguageBestMatch(t *testing.T) { + files := ` +-- hugo.toml -- +defaultContentLanguage = "fr" +defaultContentLanguageInSubdir = true +[languages] +[languages.en] +weight = 1 +[languages.fr] +weight = 2 +[languages.de] +weight = 3 +-- layouts/index.html -- +{{ $bundle := site.GetPage "bundle" }} +{{ $r := $bundle.Resources.GetMatch "*.txt" }} +{{ .Language.Lang }}: {{ $r.RelPermalink }}|{{ $r.Content }} +-- content/bundle/index.fr.md -- +--- +title: "Bundle Fr" +--- +-- content/bundle/index.en.md -- +--- +title: "Bundle En" +--- +-- content/bundle/index.de.md -- +--- +title: "Bundle De" +--- +-- content/bundle/data.fr.txt -- +Data fr +-- content/bundle/data.en.txt -- +Data en + +` + b := Test(t, files) + + b.AssertFileContent("public/fr/index.html", "fr: /fr/bundle/data.fr.txt|Data fr") + b.AssertFileContent("public/en/index.html", "en: /en/bundle/data.en.txt|Data en") + b.AssertFileContent("public/de/index.html", "de: /fr/bundle/data.fr.txt|Data fr") +} + +func TestBundleMultipleContentPageWithSamePath(t *testing.T) { + files := ` +-- hugo.toml -- +-- content/bundle/index.md -- +--- +title: "Bundle md" +foo: md +--- +-- content/bundle/index.html -- +--- +title: "Bundle html" +foo: html +--- +-- content/bundle/data.txt -- +Data. 
+-- content/p1.md -- +--- +title: "P1 md" +foo: md +--- +-- content/p1.html -- +--- +title: "P1 html" +foo: html +--- +-- layouts/index.html -- +{{ $bundle := site.GetPage "bundle" }} +Bundle: {{ $bundle.Title }}|{{ $bundle.Params.foo }}|{{ $bundle.File.Filename }}| +{{ $p1 := site.GetPage "p1" }} +P1: {{ $p1.Title }}|{{ $p1.Params.foo }}|{{ $p1.File.Filename }}| +` + + b := Test(t, files) + + // There's multiple content files sharing the same logical path and language. + // This is a little arbitrary, but we have to pick one and prefer the Markdown version. + b.AssertFileContent("public/index.html", + filepath.FromSlash("Bundle: Bundle md|md|/content/bundle/index.md|"), + filepath.FromSlash("P1: P1 md|md|/content/p1.md|"), + ) +} diff --git a/hugolib/content_render_hooks_test.go b/hugolib/content_render_hooks_test.go index 5b2121ef8..22a33c35a 100644 --- a/hugolib/content_render_hooks_test.go +++ b/hugolib/content_render_hooks_test.go @@ -14,361 +14,61 @@ package hugolib import ( - "fmt" "testing" - - qt "github.com/frankban/quicktest" ) -func TestRenderHookEditNestedPartial(t *testing.T) { - config := ` -baseURL="https://example.org" -workingDir="/mywork" -` - b := newTestSitesBuilder(t).WithWorkingDir("/mywork").WithConfigFile("toml", config).Running() - - b.WithTemplates("_default/single.html", "{{ .Content }}") - b.WithTemplates("partials/mypartial1.html", `PARTIAL1 {{ partial "mypartial2.html" }}`) - b.WithTemplates("partials/mypartial2.html", `PARTIAL2`) - b.WithTemplates("_default/_markup/render-link.html", `Link {{ .Text | safeHTML }}|{{ partial "mypartial1.html" . 
}}END`) - - b.WithContent("p1.md", `--- -title: P1 ---- - -[First Link](https://www.google.com "Google's Homepage") - -`) - b.Build(BuildCfg{}) - - b.AssertFileContent("public/p1/index.html", `Link First Link|PARTIAL1 PARTIAL2END`) - - b.EditFiles("layouts/partials/mypartial1.html", `PARTIAL1_EDITED {{ partial "mypartial2.html" }}`) - - b.Build(BuildCfg{}) - - b.AssertFileContent("public/p1/index.html", `Link First Link|PARTIAL1_EDITED PARTIAL2END`) - - b.EditFiles("layouts/partials/mypartial2.html", `PARTIAL2_EDITED`) - - b.Build(BuildCfg{}) - - b.AssertFileContent("public/p1/index.html", `Link First Link|PARTIAL1_EDITED PARTIAL2_EDITEDEND`) -} - -func TestRenderHooks(t *testing.T) { - config := ` -baseURL="https://example.org" -workingDir="/mywork" - -[markup] -[markup.goldmark] -[markup.goldmark.parser] -autoHeadingID = true -autoHeadingIDType = "github" -[markup.goldmark.parser.attribute] -block = true -title = true - -` - b := newTestSitesBuilder(t).WithWorkingDir("/mywork").WithConfigFile("toml", config).Running() - b.WithTemplatesAdded("_default/single.html", `{{ .Content }}`) - b.WithTemplatesAdded("shortcodes/myshortcode1.html", `{{ partial "mypartial1" }}`) - b.WithTemplatesAdded("shortcodes/myshortcode2.html", `{{ partial "mypartial2" }}`) - b.WithTemplatesAdded("shortcodes/myshortcode3.html", `SHORT3|`) - b.WithTemplatesAdded("shortcodes/myshortcode4.html", ` -<div class="foo"> -{{ .Inner | markdownify }} -</div> -`) - b.WithTemplatesAdded("shortcodes/myshortcode5.html", ` -Inner Inline: {{ .Inner | .Page.RenderString }} -Inner Block: {{ .Inner | .Page.RenderString (dict "display" "block" ) }} -`) - - b.WithTemplatesAdded("shortcodes/myshortcode6.html", `.Render: {{ .Page.Render "myrender" }}`) - b.WithTemplatesAdded("partials/mypartial1.html", `PARTIAL1`) - b.WithTemplatesAdded("partials/mypartial2.html", `PARTIAL2 {{ partial "mypartial3.html" }}`) - b.WithTemplatesAdded("partials/mypartial3.html", `PARTIAL3`) - 
b.WithTemplatesAdded("partials/mypartial4.html", `PARTIAL4`) - b.WithTemplatesAdded("customview/myrender.html", `myrender: {{ .Title }}|P4: {{ partial "mypartial4" }}`) - b.WithTemplatesAdded("_default/_markup/render-link.html", `{{ with .Page }}{{ .Title }}{{ end }}|{{ .Destination | safeURL }}|Title: {{ .Title | safeHTML }}|Text: {{ .Text | safeHTML }}|END`) - b.WithTemplatesAdded("docs/_markup/render-link.html", `Link docs section: {{ .Text | safeHTML }}|END`) - b.WithTemplatesAdded("_default/_markup/render-image.html", `IMAGE: {{ .Page.Title }}||{{ .Destination | safeURL }}|Title: {{ .Title | safeHTML }}|Text: {{ .Text | safeHTML }}|END`) - b.WithTemplatesAdded("_default/_markup/render-heading.html", `HEADING: {{ .Page.Title }}||Level: {{ .Level }}|Anchor: {{ .Anchor | safeURL }}|Text: {{ .Text | safeHTML }}|Attributes: {{ .Attributes }}|END`) - b.WithTemplatesAdded("docs/_markup/render-heading.html", `Docs Level: {{ .Level }}|END`) - - b.WithContent("customview/p1.md", `--- -title: Custom View ---- - -{{< myshortcode6 >}} - - `, "blog/p1.md", `--- -title: Cool Page ---- - -[First Link](https://www.google.com "Google's Homepage") -<https://foo.bar/> -https://bar.baz/ -<mailto:[email protected]> - -{{< myshortcode3 >}} - -[Second Link](https://www.google.com "Google's Homepage") - -Image: - -![Drag Racing](/images/Dragster.jpg "image title") - -Attributes: - -## Some Heading {.text-serif #a-heading title="Hovered"} - - -`, "blog/p2.md", `--- -title: Cool Page2 -layout: mylayout ---- - -{{< myshortcode1 >}} - -[Some Text](https://www.google.com "Google's Homepage") - -,[No Whitespace Please](https://gohugo.io), - - - -`, "blog/p3.md", `--- -title: Cool Page3 ---- - -{{< myshortcode2 >}} - - -`, "docs/docs1.md", `--- -title: Docs 1 ---- - - -[Docs 1](https://www.google.com "Google's Homepage") - - -`, "blog/p4.md", `--- -title: Cool Page With Image ---- - -Image: - -![Drag Racing](/images/Dragster.jpg "image title") - - -`, "blog/p5.md", `--- -title: Cool Page 
With Markdownify ---- - -{{< myshortcode4 >}} -Inner Link: [Inner Link](https://www.google.com "Google's Homepage") -{{< /myshortcode4 >}} - -`, "blog/p6.md", `--- -title: With RenderString ---- - -{{< myshortcode5 >}}Inner Link: [Inner Link](https://www.gohugo.io "Hugo's Homepage"){{< /myshortcode5 >}} - -`, "blog/p7.md", `--- -title: With Headings ---- - -# Heading Level 1 -some text - -## Heading Level 2 - -### Heading Level 3 -`, - "docs/p8.md", `--- -title: Doc With Heading ---- - -# Docs lvl 1 - -`, - ) - - for i := 1; i <= 30; i++ { - // Add some content with no shortcodes or links, i.e no templates needed. - b.WithContent(fmt.Sprintf("blog/notempl%d.md", i), `--- -title: No Template ---- - -## Content -`) - } - counters := &testCounters{} - b.Build(BuildCfg{testCounters: counters}) - b.Assert(int(counters.contentRenderCounter), qt.Equals, 45) - - b.AssertFileContent("public/blog/p1/index.html", ` -Cool Page|https://www.google.com|Title: Google's Homepage|Text: First Link|END -Cool Page|https://foo.bar/|Title: |Text: https://foo.bar/|END -Cool Page|https://bar.baz/|Title: |Text: https://bar.baz/|END -Cool Page|mailto:[email protected]|Title: |Text: [email protected]|END -Cool Page|mailto:[email protected]|Title: |Text: mailto:[email protected]|END -Text: Second -SHORT3| -<p>IMAGE: Cool Page||/images/Dragster.jpg|Title: image title|Text: Drag Racing|END</p> -`) - - b.AssertFileContent("public/customview/p1/index.html", `.Render: myrender: Custom View|P4: PARTIAL4`) - b.AssertFileContent("public/blog/p2/index.html", - `PARTIAL -,Cool Page2|https://gohugo.io|Title: |Text: No Whitespace Please|END,`, - ) - b.AssertFileContent("public/blog/p3/index.html", `PARTIAL3`) - // We may add type template support later, keep this for then. 
b.AssertFileContent("public/docs/docs1/index.html", `Link docs section: Docs 1|END`) - b.AssertFileContent("public/blog/p4/index.html", `<p>IMAGE: Cool Page With Image||/images/Dragster.jpg|Title: image title|Text: Drag Racing|END</p>`) - // markdownify - b.AssertFileContent("public/blog/p5/index.html", "Inner Link: |https://www.google.com|Title: Google's Homepage|Text: Inner Link|END") - - b.AssertFileContent("public/blog/p6/index.html", - "Inner Inline: Inner Link: With RenderString|https://www.gohugo.io|Title: Hugo's Homepage|Text: Inner Link|END", - "Inner Block: <p>Inner Link: With RenderString|https://www.gohugo.io|Title: Hugo's Homepage|Text: Inner Link|END</p>", - ) - - b.EditFiles( - "layouts/_default/_markup/render-link.html", `EDITED: {{ .Destination | safeURL }}|`, - "layouts/_default/_markup/render-image.html", `IMAGE EDITED: {{ .Destination | safeURL }}|`, - "layouts/docs/_markup/render-link.html", `DOCS EDITED: {{ .Destination | safeURL }}|`, - "layouts/partials/mypartial1.html", `PARTIAL1_EDITED`, - "layouts/partials/mypartial3.html", `PARTIAL3_EDITED`, - "layouts/partials/mypartial4.html", `PARTIAL4_EDITED`, - "layouts/shortcodes/myshortcode3.html", `SHORT3_EDITED|`, - ) - - counters = &testCounters{} - b.Build(BuildCfg{testCounters: counters}) - // Make sure that only content using the changed templates are re-rendered. - b.Assert(int(counters.contentRenderCounter), qt.Equals, 7) - - b.AssertFileContent("public/customview/p1/index.html", `.Render: myrender: Custom View|P4: PARTIAL4_EDITED`) - b.AssertFileContent("public/blog/p1/index.html", `<p>EDITED: https://www.google.com|</p>`, "SHORT3_EDITED|") - b.AssertFileContent("public/blog/p2/index.html", `PARTIAL1_EDITED`) - b.AssertFileContent("public/blog/p3/index.html", `PARTIAL3_EDITED`) - // We may add type template support later, keep this for then. 
b.AssertFileContent("public/docs/docs1/index.html", `DOCS EDITED: https://www.google.com|</p>`) - b.AssertFileContent("public/blog/p4/index.html", `IMAGE EDITED: /images/Dragster.jpg|`) - b.AssertFileContent("public/blog/p6/index.html", "<p>Inner Link: EDITED: https://www.gohugo.io|</p>") - b.AssertFileContent("public/blog/p7/index.html", "HEADING: With Headings||Level: 1|Anchor: heading-level-1|Text: Heading Level 1|Attributes: map[id:heading-level-1]|END<p>some text</p>\nHEADING: With Headings||Level: 2|Anchor: heading-level-2|Text: Heading Level 2|Attributes: map[id:heading-level-2]|ENDHEADING: With Headings||Level: 3|Anchor: heading-level-3|Text: Heading Level 3|Attributes: map[id:heading-level-3]|END") - - // https://github.com/gohugoio/hugo/issues/7349 - b.AssertFileContent("public/docs/p8/index.html", "Docs Level: 1") -} - -func TestRenderHooksDeleteTemplate(t *testing.T) { - config := ` -baseURL="https://example.org" -workingDir="/mywork" -` - b := newTestSitesBuilder(t).WithWorkingDir("/mywork").WithConfigFile("toml", config).Running() - b.WithTemplatesAdded("_default/single.html", `{{ .Content }}`) - b.WithTemplatesAdded("_default/_markup/render-link.html", `html-render-link`) - - b.WithContent("p1.md", `--- -title: P1 ---- -[First Link](https://www.google.com "Google's Homepage") - -`) - b.Build(BuildCfg{}) - - b.AssertFileContent("public/p1/index.html", `<p>html-render-link</p>`) - - b.RemoveFiles( - "layouts/_default/_markup/render-link.html", - ) - - b.Build(BuildCfg{}) - b.AssertFileContent("public/p1/index.html", `<p><a href="https://www.google.com" title="Google's Homepage">First Link</a></p>`) -} - -func TestRenderHookAddTemplate(t *testing.T) { - config := ` -baseURL="https://example.org" -workingDir="/mywork" -` - b := newTestSitesBuilder(t).WithWorkingDir("/mywork").WithConfigFile("toml", config).Running() - b.WithTemplatesAdded("_default/single.html", `{{ .Content }}`) - - b.WithContent("p1.md", `--- -title: P1 ---- -[First 
Link](https://www.google.com "Google's Homepage") - -`) - b.Build(BuildCfg{}) - - b.AssertFileContent("public/p1/index.html", `<p><a href="https://www.google.com" title="Google's Homepage">First Link</a></p>`) - - b.EditFiles("layouts/_default/_markup/render-link.html", `html-render-link`) - - b.Build(BuildCfg{}) - - b.AssertFileContent("public/p1/index.html", `<p>html-render-link</p>`) -} - func TestRenderHooksRSS(t *testing.T) { - b := newTestSitesBuilder(t) - - b.WithTemplates("index.html", ` + files := ` +-- hugo.toml -- +baseURL = "https://example.org" +disableKinds = ["taxonomy", "term"] +-- layouts/index.html -- {{ $p := site.GetPage "p1.md" }} {{ $p2 := site.GetPage "p2.md" }} - P1: {{ $p.Content }} P2: {{ $p2.Content }} - - `, "index.xml", ` - +-- layouts/index.xml -- {{ $p2 := site.GetPage "p2.md" }} {{ $p3 := site.GetPage "p3.md" }} - P2: {{ $p2.Content }} P3: {{ $p3.Content }} - - - `, - "_default/_markup/render-link.html", `html-link: {{ .Destination | safeURL }}|`, - "_default/_markup/render-link.rss.xml", `xml-link: {{ .Destination | safeURL }}|`, - "_default/_markup/render-heading.html", `html-heading: {{ .Text }}|`, - "_default/_markup/render-heading.rss.xml", `xml-heading: {{ .Text }}|`, - ) - - b.WithContent("p1.md", `--- +-- layouts/_default/_markup/render-link.html -- +html-link: {{ .Destination | safeURL }}| +-- layouts/_default/_markup/render-link.rss.xml -- +xml-link: {{ .Destination | safeURL }}| +-- layouts/_default/_markup/render-heading.html -- +html-heading: {{ .Text }}| +-- layouts/_default/_markup/render-heading.rss.xml -- +xml-heading: {{ .Text }}| +-- content/p1.md -- +--- title: "p1" --- P1. [I'm an inline-style link](https://www.gohugo.io) # Heading in p1 -`, "p2.md", `--- +-- content/p2.md -- +--- title: "p2" --- -P1. [I'm an inline-style link](https://www.bep.is) +P2. [I'm an inline-style link](https://www.bep.is) # Heading in p2 -`, - "p3.md", `--- -title: "p2" +-- content/p3.md -- +--- +title: "p3" outputs: ["rss"] --- P3. 
[I'm an inline-style link](https://www.example.org) - -`, - ) - - b.Build(BuildCfg{}) +` + b := NewIntegrationTestBuilder( + IntegrationTestConfig{ + T: t, + TxtarString: files, + }, + ).Build() b.AssertFileContent("public/index.html", ` P1: <p>P1. html-link: https://www.gohugo.io|</p> @@ -376,7 +76,7 @@ html-heading: Heading in p1| html-heading: Heading in p2| `) b.AssertFileContent("public/index.xml", ` -P2: <p>P1. xml-link: https://www.bep.is|</p> +P2: <p>P2. xml-link: https://www.bep.is|</p> P3: <p>P3. xml-link: https://www.example.org|</p> xml-heading: Heading in p2| `) @@ -453,8 +153,12 @@ title: "p2" -- layouts/_default/_markup/render-heading.html -- Heading: {{ .Text }}| -Self Fragments: {{ .Page.Fragments.Identifiers }}| -P1 Fragments: {{ (site.GetPage "p1.md").Fragments.Identifiers }}| +{{ with .Page }} +Self Fragments: {{ .Fragments.Identifiers }}| +{{ end }} +{{ with (site.GetPage "p1.md") }} +P1 Fragments: {{ .Fragments.Identifiers }}| +{{ end }} -- layouts/_default/single.html -- {{ .Content}} ` @@ -474,5 +178,4 @@ P1 Fragments: [b c z] Self Fragments: [d e f] P1 Fragments: [b c z] `) - } diff --git a/hugolib/dates_test.go b/hugolib/dates_test.go index f6d5d2490..376fe3c60 100644 --- a/hugolib/dates_test.go +++ b/hugolib/dates_test.go @@ -15,11 +15,10 @@ package hugolib import ( "fmt" - - qt "github.com/frankban/quicktest" - "strings" "testing" + + qt "github.com/frankban/quicktest" ) func TestDateFormatMultilingual(t *testing.T) { @@ -57,7 +56,6 @@ Date: {{ .Date | time.Format ":date_long" }} b.AssertFileContent("public/en/index.html", `Date: July 18, 2021`) b.AssertFileContent("public/nn/index.html", `Date: 18. 
juli 2021`) - } func TestTimeZones(t *testing.T) { @@ -187,7 +185,6 @@ ExpiryDate: 2099-07-13 15:28:01 +0000 UTC` b.AssertFileContent("public/nn/short-date-toml-unqouted/index.html", expectShortDateNn) b.AssertFileContent("public/en/short-date-toml-qouted/index.html", expectShortDateEn) b.AssertFileContent("public/nn/short-date-toml-qouted/index.html", expectShortDateNn) - } // Issue 8832 @@ -212,7 +209,6 @@ func TestTimeOnError(t *testing.T) { b.WithContent("p1.md", "") b.Assert(b.BuildE(BuildCfg{}), qt.Not(qt.IsNil)) - } func TestTOMLDates(t *testing.T) { @@ -254,6 +250,8 @@ Past talks: {{ len $pastTalks }} Home's Date should be greater than past: {{ gt $homeDate (index $pastTalks 0).date }} Home's Date should be less than future: {{ lt $homeDate (index $futureTalks 0).date }} Home's Date should be equal mydata date: {{ eq $homeDate site.Data.mydata.date }} +Home date: {{ $homeDate }} +mydata.date: {{ site.Data.mydata.date }} Full time: {{ $p1Date | time.Format ":time_full" }} ` diff --git a/hugolib/disableKinds_test.go b/hugolib/disableKinds_test.go index c24f6b10e..006520580 100644 --- a/hugolib/disableKinds_test.go +++ b/hugolib/disableKinds_test.go @@ -104,7 +104,7 @@ title: Headless Local Lists Sub getPage := func(b *sitesBuilder, ref string) page.Page { b.Helper() - p, err := b.H.Sites[0].getPageNew(nil, ref) + p, err := b.H.Sites[0].getPage(nil, ref) b.Assert(err, qt.IsNil) return p } @@ -113,7 +113,7 @@ title: Headless Local Lists Sub b.Helper() for _, pages := range []page.Pages{b.H.Sites[0].Pages(), b.H.Sites[0].RegularPages()} { for _, p := range pages { - if ref == p.(*pageState).sourceRef() { + if ref == p.Path() { return p } } @@ -127,7 +127,7 @@ title: Headless Local Lists Sub } for _, pages := range pageCollections { for _, p := range pages { - if ref == p.(*pageState).sourceRef() { + if ref == p.Path() { return p } } @@ -206,7 +206,7 @@ title: Headless Local Lists Sub page := getPage(b, "/sect/page.md") b.Assert(page, qt.Not(qt.IsNil)) 
b.Assert(page.CurrentSection(), qt.Equals, sect) - b.Assert(getPageInPagePages(sect, "/sect/page.md"), qt.Not(qt.IsNil)) + b.Assert(getPageInPagePages(sect, "/sect/page"), qt.Not(qt.IsNil)) b.AssertFileContent("public/sitemap.xml", "sitemap") b.AssertFileContent("public/index.xml", "rss") }) @@ -227,7 +227,7 @@ title: Headless Local Lists Sub b.Assert(b.CheckExists("public/sitemap.xml"), qt.Equals, false) }) - disableKind = kinds.Kind404 + disableKind = kinds.KindStatus404 c.Run("Disable "+disableKind, func(c *qt.C) { b := newSitesBuilder(c, disableKind) b.Build(BuildCfg{}) @@ -277,10 +277,10 @@ title: Headless Local Lists Sub b.Assert(sect, qt.Not(qt.IsNil)) b.Assert(getPageInSitePages(b, ref), qt.IsNil) - b.Assert(getPageInSitePages(b, "/headless-local/_index.md"), qt.IsNil) - b.Assert(getPageInSitePages(b, "/headless-local/headless-local-page.md"), qt.IsNil) + b.Assert(getPageInSitePages(b, "/headless-local"), qt.IsNil) + b.Assert(getPageInSitePages(b, "/headless-local/headless-local-page"), qt.IsNil) - localPageRef := ref + "/headless-local-page.md" + localPageRef := ref + "/headless-local-page" b.Assert(getPageInPagePages(sect, localPageRef, sect.RegularPages()), qt.Not(qt.IsNil)) b.Assert(getPageInPagePages(sect, localPageRef, sect.RegularPagesRecursive()), qt.Not(qt.IsNil)) @@ -291,14 +291,14 @@ title: Headless Local Lists Sub sect = getPage(b, ref) b.Assert(sect, qt.Not(qt.IsNil)) - localPageRef = ref + "/headless-local-sub-page.md" + localPageRef = ref + "/headless-local-sub-page" b.Assert(getPageInPagePages(sect, localPageRef), qt.Not(qt.IsNil)) }) c.Run("Build config, no render", func(c *qt.C) { b := newSitesBuilder(c, disableKind) b.Build(BuildCfg{}) - ref := "/sect/no-render.md" + ref := "/sect/no-render" b.Assert(b.CheckExists("public/sect/no-render/index.html"), qt.Equals, false) p := getPage(b, ref) b.Assert(p, qt.Not(qt.IsNil)) @@ -312,7 +312,7 @@ title: Headless Local Lists Sub c.Run("Build config, no render link", func(c *qt.C) { b := 
newSitesBuilder(c, disableKind) b.Build(BuildCfg{}) - ref := "/sect/no-render-link.md" + ref := "/sect/no-render-link" b.Assert(b.CheckExists("public/sect/no-render/index.html"), qt.Equals, false) p := getPage(b, ref) b.Assert(p, qt.Not(qt.IsNil)) diff --git a/hugolib/doctree/dimensions.go b/hugolib/doctree/dimensions.go new file mode 100644 index 000000000..bcc3cae00 --- /dev/null +++ b/hugolib/doctree/dimensions.go @@ -0,0 +1,43 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package doctree + +const ( + // Language is currently the only dimension in the Hugo build matrix. + DimensionLanguage DimensionFlag = 1 << iota +) + +// Dimension is a row in the Hugo build matrix which currently has one value: language. +type Dimension [1]int + +// DimensionFlag is a flag in the Hugo build matrix. +type DimensionFlag byte + +// Has returns whether the given flag is set. +func (d DimensionFlag) Has(o DimensionFlag) bool { + return d&o == o +} + +// Set sets the given flag. +func (d DimensionFlag) Set(o DimensionFlag) DimensionFlag { + return d | o +} + +// Index returns this flag's index in the Dimensions array. 
+func (d DimensionFlag) Index() int { + if d == 0 { + panic("dimension flag not set") + } + return int(d - 1) +} diff --git a/hugolib/doctree/dimensions_test.go b/hugolib/doctree/dimensions_test.go new file mode 100644 index 000000000..598f22a2d --- /dev/null +++ b/hugolib/doctree/dimensions_test.go @@ -0,0 +1,37 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package doctree + +import ( + "testing" + + qt "github.com/frankban/quicktest" +) + +func TestDimensionFlag(t *testing.T) { + c := qt.New(t) + + var zero DimensionFlag + var d DimensionFlag + var o DimensionFlag = 1 + var p DimensionFlag = 12 + + c.Assert(d.Has(o), qt.Equals, false) + d = d.Set(o) + c.Assert(d.Has(o), qt.Equals, true) + c.Assert(d.Has(d), qt.Equals, true) + c.Assert(func() { zero.Index() }, qt.PanicMatches, "dimension flag not set") + c.Assert(DimensionLanguage.Index(), qt.Equals, 0) + c.Assert(p.Index(), qt.Equals, 11) +} diff --git a/hugolib/doctree/nodeshiftree_test.go b/hugolib/doctree/nodeshiftree_test.go new file mode 100644 index 000000000..313be0bc4 --- /dev/null +++ b/hugolib/doctree/nodeshiftree_test.go @@ -0,0 +1,374 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package doctree_test + +import ( + "context" + "fmt" + "math/rand" + "path" + "strings" + "testing" + + qt "github.com/frankban/quicktest" + "github.com/gohugoio/hugo/common/para" + "github.com/gohugoio/hugo/hugolib/doctree" + "github.com/google/go-cmp/cmp" +) + +var eq = qt.CmpEquals( + cmp.Comparer(func(n1, n2 *testValue) bool { + if n1 == n2 { + return true + } + + return n1.ID == n2.ID && n1.Lang == n2.Lang + }), +) + +func TestTree(t *testing.T) { + c := qt.New(t) + + zeroZero := doctree.New( + doctree.Config[*testValue]{ + Shifter: &testShifter{}, + }, + ) + + a := &testValue{ID: "/a"} + zeroZero.InsertIntoValuesDimension("/a", a) + ab := &testValue{ID: "/a/b"} + zeroZero.InsertIntoValuesDimension("/a/b", ab) + + c.Assert(zeroZero.Get("/a"), eq, &testValue{ID: "/a", Lang: 0}) + s, v := zeroZero.LongestPrefix("/a/b/c", true, nil) + c.Assert(v, eq, ab) + c.Assert(s, eq, "/a/b") + + // Change language. 
+ oneZero := zeroZero.Increment(0) + c.Assert(zeroZero.Get("/a"), eq, &testValue{ID: "/a", Lang: 0}) + c.Assert(oneZero.Get("/a"), eq, &testValue{ID: "/a", Lang: 1}) +} + +func TestTreeData(t *testing.T) { + c := qt.New(t) + + tree := doctree.New( + doctree.Config[*testValue]{ + Shifter: &testShifter{}, + }, + ) + + tree.InsertIntoValuesDimension("", &testValue{ID: "HOME"}) + tree.InsertIntoValuesDimension("/a", &testValue{ID: "/a"}) + tree.InsertIntoValuesDimension("/a/b", &testValue{ID: "/a/b"}) + tree.InsertIntoValuesDimension("/b", &testValue{ID: "/b"}) + tree.InsertIntoValuesDimension("/b/c", &testValue{ID: "/b/c"}) + tree.InsertIntoValuesDimension("/b/c/d", &testValue{ID: "/b/c/d"}) + + var values []string + + ctx := &doctree.WalkContext[*testValue]{} + + w := &doctree.NodeShiftTreeWalker[*testValue]{ + Tree: tree, + WalkContext: ctx, + Handle: func(s string, t *testValue, match doctree.DimensionFlag) (bool, error) { + ctx.Data().Insert(s, map[string]any{ + "id": t.ID, + }) + + if s != "" { + p, v := ctx.Data().LongestPrefix(path.Dir(s)) + values = append(values, fmt.Sprintf("%s:%s:%v", s, p, v)) + } + return false, nil + }, + } + + w.Walk(context.Background()) + + c.Assert(strings.Join(values, "|"), qt.Equals, "/a::map[id:HOME]|/a/b:/a:map[id:/a]|/b::map[id:HOME]|/b/c:/b:map[id:/b]|/b/c/d:/b/c:map[id:/b/c]") +} + +func TestTreeEvents(t *testing.T) { + c := qt.New(t) + + tree := doctree.New( + doctree.Config[*testValue]{ + Shifter: &testShifter{echo: true}, + }, + ) + + tree.InsertIntoValuesDimension("/a", &testValue{ID: "/a", Weight: 2, IsBranch: true}) + tree.InsertIntoValuesDimension("/a/p1", &testValue{ID: "/a/p1", Weight: 5}) + tree.InsertIntoValuesDimension("/a/p", &testValue{ID: "/a/p2", Weight: 6}) + tree.InsertIntoValuesDimension("/a/s1", &testValue{ID: "/a/s1", Weight: 5, IsBranch: true}) + tree.InsertIntoValuesDimension("/a/s1/p1", &testValue{ID: "/a/s1/p1", Weight: 8}) + tree.InsertIntoValuesDimension("/a/s1/p1", &testValue{ID: "/a/s1/p2", Weight: 
9}) + tree.InsertIntoValuesDimension("/a/s1/s2", &testValue{ID: "/a/s1/s2", Weight: 6, IsBranch: true}) + tree.InsertIntoValuesDimension("/a/s1/s2/p1", &testValue{ID: "/a/s1/s2/p1", Weight: 8}) + tree.InsertIntoValuesDimension("/a/s1/s2/p2", &testValue{ID: "/a/s1/s2/p2", Weight: 7}) + + w := &doctree.NodeShiftTreeWalker[*testValue]{ + Tree: tree, + WalkContext: &doctree.WalkContext[*testValue]{}, + } + + w.Handle = func(s string, t *testValue, match doctree.DimensionFlag) (bool, error) { + if t.IsBranch { + w.WalkContext.AddEventListener("weight", s, func(e *doctree.Event[*testValue]) { + if e.Source.Weight > t.Weight { + t.Weight = e.Source.Weight + w.WalkContext.SendEvent(&doctree.Event[*testValue]{Source: t, Path: s, Name: "weight"}) + } + // Reduces the amount of events bubbling up the tree. If the weight for this branch has + // increased, that will be announced in its own event. + e.StopPropagation() + }) + } else { + w.WalkContext.SendEvent(&doctree.Event[*testValue]{Source: t, Path: s, Name: "weight"}) + } + + return false, nil + } + + c.Assert(w.Walk(context.Background()), qt.IsNil) + c.Assert(w.WalkContext.HandleEventsAndHooks(), qt.IsNil) + + c.Assert(tree.Get("/a").Weight, eq, 9) + c.Assert(tree.Get("/a/s1").Weight, eq, 9) + c.Assert(tree.Get("/a/p").Weight, eq, 6) + c.Assert(tree.Get("/a/s1/s2").Weight, eq, 8) + c.Assert(tree.Get("/a/s1/s2/p2").Weight, eq, 7) +} + +func TestTreeInsert(t *testing.T) { + c := qt.New(t) + + tree := doctree.New( + doctree.Config[*testValue]{ + Shifter: &testShifter{}, + }, + ) + + a := &testValue{ID: "/a"} + tree.InsertIntoValuesDimension("/a", a) + ab := &testValue{ID: "/a/b"} + tree.InsertIntoValuesDimension("/a/b", ab) + + c.Assert(tree.Get("/a"), eq, &testValue{ID: "/a", Lang: 0}) + c.Assert(tree.Get("/notfound"), qt.IsNil) + + ab2 := &testValue{ID: "/a/b", Lang: 0} + v, ok := tree.InsertIntoValuesDimension("/a/b", ab2) + c.Assert(ok, qt.IsTrue) + c.Assert(v, qt.DeepEquals, ab2) + + tree1 := tree.Increment(0) + 
c.Assert(tree1.Get("/a/b"), qt.DeepEquals, &testValue{ID: "/a/b", Lang: 1}) +} + +func TestTreePara(t *testing.T) { + c := qt.New(t) + + p := para.New(4) + r, _ := p.Start(context.Background()) + + tree := doctree.New( + doctree.Config[*testValue]{ + Shifter: &testShifter{}, + }, + ) + + for i := 0; i < 8; i++ { + i := i + r.Run(func() error { + a := &testValue{ID: "/a"} + lock := tree.Lock(true) + defer lock() + tree.InsertIntoValuesDimension("/a", a) + ab := &testValue{ID: "/a/b"} + tree.InsertIntoValuesDimension("/a/b", ab) + + key := fmt.Sprintf("/a/b/c/%d", i) + val := &testValue{ID: key} + tree.InsertIntoValuesDimension(key, val) + c.Assert(tree.Get(key), eq, val) + // s, _ := tree.LongestPrefix(key, nil) + // c.Assert(s, eq, "/a/b") + + return nil + }) + } + + c.Assert(r.Wait(), qt.IsNil) +} + +func TestValidateKey(t *testing.T) { + c := qt.New(t) + + c.Assert(doctree.ValidateKey(""), qt.IsNil) + c.Assert(doctree.ValidateKey("/a/b/c"), qt.IsNil) + c.Assert(doctree.ValidateKey("/"), qt.IsNotNil) + c.Assert(doctree.ValidateKey("a"), qt.IsNotNil) + c.Assert(doctree.ValidateKey("abc"), qt.IsNotNil) + c.Assert(doctree.ValidateKey("/abc/"), qt.IsNotNil) +} + +type testShifter struct { + echo bool +} + +func (s *testShifter) ForEeachInDimension(n *testValue, d int, f func(n *testValue) bool) { + if d != doctree.DimensionLanguage.Index() { + panic("not implemented") + } + f(n) +} + +func (s *testShifter) Insert(old, new *testValue) *testValue { + return new +} + +func (s *testShifter) InsertInto(old, new *testValue, dimension doctree.Dimension) *testValue { + return new +} + +func (s *testShifter) Delete(n *testValue, dimension doctree.Dimension) (bool, bool) { + return true, true +} + +func (s *testShifter) Shift(n *testValue, dimension doctree.Dimension, exact bool) (*testValue, bool, doctree.DimensionFlag) { + if s.echo { + return n, true, doctree.DimensionLanguage + } + if n.NoCopy { + if n.Lang == dimension[0] { + return n, true, doctree.DimensionLanguage + } + 
return nil, false, doctree.DimensionLanguage + } + c := *n + c.Lang = dimension[0] + return &c, true, doctree.DimensionLanguage +} + +func (s *testShifter) All(n *testValue) []*testValue { + return []*testValue{n} +} + +type testValue struct { + ID string + Lang int + + Weight int + IsBranch bool + + NoCopy bool +} + +func BenchmarkTreeInsert(b *testing.B) { + runBench := func(b *testing.B, numElements int) { + for i := 0; i < b.N; i++ { + tree := doctree.New( + doctree.Config[*testValue]{ + Shifter: &testShifter{}, + }, + ) + + for i := 0; i < numElements; i++ { + lang := rand.Intn(2) + tree.InsertIntoValuesDimension(fmt.Sprintf("/%d", i), &testValue{ID: fmt.Sprintf("/%d", i), Lang: lang, Weight: i, NoCopy: true}) + } + } + } + + b.Run("1000", func(b *testing.B) { + runBench(b, 1000) + }) + + b.Run("10000", func(b *testing.B) { + runBench(b, 10000) + }) + + b.Run("100000", func(b *testing.B) { + runBench(b, 100000) + }) + + b.Run("300000", func(b *testing.B) { + runBench(b, 300000) + }) +} + +func BenchmarkWalk(b *testing.B) { + const numElements = 1000 + + createTree := func() *doctree.NodeShiftTree[*testValue] { + tree := doctree.New( + doctree.Config[*testValue]{ + Shifter: &testShifter{}, + }, + ) + + for i := 0; i < numElements; i++ { + lang := rand.Intn(2) + tree.InsertIntoValuesDimension(fmt.Sprintf("/%d", i), &testValue{ID: fmt.Sprintf("/%d", i), Lang: lang, Weight: i, NoCopy: true}) + } + + return tree + } + + handle := func(s string, t *testValue, match doctree.DimensionFlag) (bool, error) { + return false, nil + } + + for _, numElements := range []int{1000, 10000, 100000} { + + b.Run(fmt.Sprintf("Walk one dimension %d", numElements), func(b *testing.B) { + tree := createTree() + b.ResetTimer() + for i := 0; i < b.N; i++ { + w := &doctree.NodeShiftTreeWalker[*testValue]{ + Tree: tree, + Handle: handle, + } + if err := w.Walk(context.Background()); err != nil { + b.Fatal(err) + } + } + }) + + b.Run(fmt.Sprintf("Walk all dimensions %d", numElements), 
func(b *testing.B) { + base := createTree() + b.ResetTimer() + for i := 0; i < b.N; i++ { + for d1 := 0; d1 < 1; d1++ { + for d2 := 0; d2 < 2; d2++ { + tree := base.Shape(d1, d2) + w := &doctree.NodeShiftTreeWalker[*testValue]{ + Tree: tree, + Handle: handle, + } + if err := w.Walk(context.Background()); err != nil { + b.Fatal(err) + } + } + } + } + }) + + } +} diff --git a/hugolib/doctree/nodeshifttree.go b/hugolib/doctree/nodeshifttree.go new file mode 100644 index 000000000..1c1175305 --- /dev/null +++ b/hugolib/doctree/nodeshifttree.go @@ -0,0 +1,433 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package doctree + +import ( + "context" + "fmt" + "path" + "strings" + "sync" + + radix "github.com/armon/go-radix" + "github.com/gohugoio/hugo/resources/resource" +) + +type ( + Config[T any] struct { + // Shifter handles tree transformations. + Shifter Shifter[T] + } + + // Shifter handles tree transformations. + Shifter[T any] interface { + // ForEeachInDimension will call the given function for each value in the given dimension. + // If the function returns true, the walk will stop. + ForEeachInDimension(n T, d int, f func(T) bool) + + // Insert inserts new into the tree into the dimension it provides. + // It may replace old. + // It returns a T (can be the same as old). + Insert(old, new T) T + + // Insert inserts new into the given dimension. + // It may replace old. 
+ // It returns a T (can be the same as old). + InsertInto(old, new T, dimension Dimension) T + + // Delete deletes T from the given dimension and returns whether the dimension was deleted and if it's empty after the delete. + Delete(v T, dimension Dimension) (bool, bool) + + // Shift shifts T into the given dimension + // and returns the shifted T and a bool indicating if the shift was successful and + // how accurate a match T is according to its dimensions. + Shift(v T, dimension Dimension, exact bool) (T, bool, DimensionFlag) + } +) + +// NodeShiftTree is the root of a tree that can be shaped using the Shape method. +// Note that multipled shapes of the same tree is meant to be used concurrently, +// so use the applicable locking when needed. +type NodeShiftTree[T any] struct { + tree *radix.Tree + + // E.g. [language, role]. + dims Dimension + shifter Shifter[T] + + mu *sync.RWMutex +} + +func New[T any](cfg Config[T]) *NodeShiftTree[T] { + if cfg.Shifter == nil { + panic("Shifter is required") + } + + return &NodeShiftTree[T]{ + mu: &sync.RWMutex{}, + shifter: cfg.Shifter, + tree: radix.New(), + } +} + +func (r *NodeShiftTree[T]) Delete(key string) { + r.delete(key) +} + +func (r *NodeShiftTree[T]) DeleteAll(key string) { + r.tree.WalkPrefix(key, func(key string, value any) bool { + v, ok := r.tree.Delete(key) + if ok { + resource.MarkStale(v) + } + return false + }) +} + +func (r *NodeShiftTree[T]) DeletePrefix(prefix string) int { + count := 0 + var keys []string + r.tree.WalkPrefix(prefix, func(key string, value any) bool { + keys = append(keys, key) + return false + }) + for _, key := range keys { + if ok := r.delete(key); ok { + count++ + } + } + return count +} + +func (r *NodeShiftTree[T]) delete(key string) bool { + var wasDeleted bool + if v, ok := r.tree.Get(key); ok { + var isEmpty bool + wasDeleted, isEmpty = r.shifter.Delete(v.(T), r.dims) + if isEmpty { + r.tree.Delete(key) + } + } + return wasDeleted +} + +func (t *NodeShiftTree[T]) 
DeletePrefixAll(prefix string) int { + count := 0 + + t.tree.WalkPrefix(prefix, func(key string, value any) bool { + if v, ok := t.tree.Delete(key); ok { + resource.MarkStale(v) + count++ + } + return false + }) + + return count +} + +// Increment the value of dimension d by 1. +func (t *NodeShiftTree[T]) Increment(d int) *NodeShiftTree[T] { + return t.Shape(d, t.dims[d]+1) +} + +func (r *NodeShiftTree[T]) InsertIntoCurrentDimension(s string, v T) (T, bool) { + s = mustValidateKey(cleanKey(s)) + if vv, ok := r.tree.Get(s); ok { + v = r.shifter.InsertInto(vv.(T), v, r.dims) + } + r.tree.Insert(s, v) + return v, true +} + +func (r *NodeShiftTree[T]) InsertIntoValuesDimension(s string, v T) (T, bool) { + s = mustValidateKey(cleanKey(s)) + if vv, ok := r.tree.Get(s); ok { + v = r.shifter.Insert(vv.(T), v) + } + r.tree.Insert(s, v) + return v, true +} + +func (r *NodeShiftTree[T]) InsertRawWithLock(s string, v any) (any, bool) { + r.mu.Lock() + defer r.mu.Unlock() + return r.tree.Insert(s, v) +} + +func (r *NodeShiftTree[T]) InsertWithLock(s string, v T) (T, bool) { + r.mu.Lock() + defer r.mu.Unlock() + return r.InsertIntoValuesDimension(s, v) +} + +func (t *NodeShiftTree[T]) Len() int { + return t.tree.Len() +} + +func (t *NodeShiftTree[T]) CanLock() bool { + ok := t.mu.TryLock() + if ok { + t.mu.Unlock() + } + return ok +} + +// Lock locks the data store for read or read/write access until commit is invoked. +// Note that Root is not thread-safe outside of this transaction construct. +func (t *NodeShiftTree[T]) Lock(writable bool) (commit func()) { + if writable { + t.mu.Lock() + } else { + t.mu.RLock() + } + return func() { + if writable { + t.mu.Unlock() + } else { + t.mu.RUnlock() + } + } +} + +// LongestPrefix finds the longest prefix of s that exists in the tree that also matches the predicate (if set). +// Set exact to true to only match exact in the current dimension (e.g. language). 
+func (r *NodeShiftTree[T]) LongestPrefix(s string, exact bool, predicate func(v T) bool) (string, T) { + for { + longestPrefix, v, found := r.tree.LongestPrefix(s) + + if found { + if t, ok, _ := r.shift(v.(T), exact); ok && (predicate == nil || predicate(t)) { + return longestPrefix, t + } + } + + if s == "" || s == "/" { + var t T + return "", t + } + + // Walk up to find a node in the correct dimension. + s = path.Dir(s) + + } +} + +// LongestPrefixAll returns the longest prefix considering all tree dimensions. +func (r *NodeShiftTree[T]) LongestPrefixAll(s string) (string, bool) { + s, _, found := r.tree.LongestPrefix(s) + return s, found +} + +func (r *NodeShiftTree[T]) GetRaw(s string) (T, bool) { + v, ok := r.tree.Get(s) + if !ok { + var t T + return t, false + } + return v.(T), true +} + +func (r *NodeShiftTree[T]) WalkPrefixRaw(prefix string, walker func(key string, value T) bool) { + walker2 := func(key string, value any) bool { + return walker(key, value.(T)) + } + r.tree.WalkPrefix(prefix, walker2) +} + +// Shape the tree for dimension d to value v. +func (t *NodeShiftTree[T]) Shape(d, v int) *NodeShiftTree[T] { + x := t.clone() + x.dims[d] = v + return x +} + +func (t *NodeShiftTree[T]) String() string { + return fmt.Sprintf("Root{%v}", t.dims) +} + +func (r *NodeShiftTree[T]) Get(s string) T { + t, _ := r.get(s) + return t +} + +func (r *NodeShiftTree[T]) ForEeachInDimension(s string, d int, f func(T) bool) { + s = cleanKey(s) + v, ok := r.tree.Get(s) + if !ok { + return + } + r.shifter.ForEeachInDimension(v.(T), d, f) +} + +type WalkFunc[T any] func(string, T) (bool, error) + +type NodeShiftTreeWalker[T any] struct { + // The tree to walk. + Tree *NodeShiftTree[T] + + // Handle will be called for each node in the main tree. + // If the callback returns true, the walk will stop. + // The callback can optionally return a callback for the nested tree. 
+ Handle func(s string, v T, exact DimensionFlag) (terminate bool, err error) + + // Optional prefix filter. + Prefix string + + // Enable read or write locking if needed. + LockType LockType + + // When set, no dimension shifting will be performed. + NoShift bool + + // Don't fall back to alternative dimensions (e.g. language). + Exact bool + + // Used in development only. + Debug bool + + // Optional context. + // Note that this is copied to the nested walkers using Extend. + // This means that walkers can pass data (down) and events (up) to + // the related walkers. + WalkContext *WalkContext[T] + + // Local state. + // This is scoped to the current walker and not copied to the nested walkers. + skipPrefixes []string +} + +// Extend returns a new NodeShiftTreeWalker with the same configuration as the +// and the same WalkContext as the original. +// Any local state is reset. +func (r NodeShiftTreeWalker[T]) Extend() *NodeShiftTreeWalker[T] { + r.resetLocalState() + return &r +} + +// SkipPrefix adds a prefix to be skipped in the walk. +func (r *NodeShiftTreeWalker[T]) SkipPrefix(prefix ...string) { + r.skipPrefixes = append(r.skipPrefixes, prefix...) +} + +// ShouldSkip returns whether the given key should be skipped in the walk. 
+func (r *NodeShiftTreeWalker[T]) ShouldSkip(s string) bool { + for _, prefix := range r.skipPrefixes { + if strings.HasPrefix(s, prefix) { + return true + } + } + return false +} + +func (r *NodeShiftTreeWalker[T]) Walk(ctx context.Context) error { + if r.Tree == nil { + panic("Tree is required") + } + r.resetLocalState() + + if r.LockType > LockTypeNone { + commit1 := r.Tree.Lock(r.LockType == LockTypeWrite) + defer commit1() + } + + main := r.Tree + + var err error + fnMain := func(s string, v interface{}) bool { + if r.ShouldSkip(s) { + return false + } + + t, ok, exact := r.toT(r.Tree, v) + if !ok { + return false + } + + var terminate bool + terminate, err = r.Handle(s, t, exact) + if terminate || err != nil { + return true + } + return false + } + + if r.Prefix != "" { + main.tree.WalkPrefix(r.Prefix, fnMain) + } else { + main.tree.Walk(fnMain) + } + + if err != nil { + return err + } + + return nil +} + +func (r *NodeShiftTreeWalker[T]) resetLocalState() { + r.skipPrefixes = nil +} + +func (r *NodeShiftTreeWalker[T]) toT(tree *NodeShiftTree[T], v any) (t T, ok bool, exact DimensionFlag) { + if r.NoShift { + t = v.(T) + ok = true + } else { + t, ok, exact = tree.shift(v.(T), r.Exact) + } + return +} + +func (r *NodeShiftTree[T]) Has(s string) bool { + _, ok := r.get(s) + return ok +} + +func (t NodeShiftTree[T]) clone() *NodeShiftTree[T] { + return &t +} + +func (r *NodeShiftTree[T]) shift(t T, exact bool) (T, bool, DimensionFlag) { + return r.shifter.Shift(t, r.dims, exact) +} + +func (r *NodeShiftTree[T]) get(s string) (T, bool) { + s = cleanKey(s) + v, ok := r.tree.Get(s) + if !ok { + var t T + return t, false + } + t, ok, _ := r.shift(v.(T), true) + return t, ok +} + +type WalkConfig[T any] struct { + // Optional prefix filter. + Prefix string + + // Callback will be called for each node in the tree. + // If the callback returns true, the walk will stop. 
+ Callback func(ctx *WalkContext[T], s string, t T) (bool, error) + + // Enable read or write locking if needed. + LockType LockType + + // When set, no dimension shifting will be performed. + NoShift bool + + // Exact will only match exact in the current dimension (e.g. language), + // and will not look for alternatives. + Exact bool +} diff --git a/hugolib/doctree/simpletree.go b/hugolib/doctree/simpletree.go new file mode 100644 index 000000000..811a7ff80 --- /dev/null +++ b/hugolib/doctree/simpletree.go @@ -0,0 +1,91 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package doctree + +import ( + "sync" + + radix "github.com/armon/go-radix" +) + +// Tree is a radix tree that holds T. +type Tree[T any] interface { + Get(s string) T + LongestPrefix(s string) (string, T) + Insert(s string, v T) T + WalkPrefix(lockType LockType, s string, f func(s string, v T) (bool, error)) error +} + +// NewSimpleTree creates a new SimpleTree. +func NewSimpleTree[T any]() *SimpleTree[T] { + return &SimpleTree[T]{tree: radix.New()} +} + +// SimpleTree is a thread safe radix tree that holds T. 
+type SimpleTree[T any] struct { + mu sync.RWMutex + tree *radix.Tree + zero T +} + +func (tree *SimpleTree[T]) Get(s string) T { + tree.mu.RLock() + defer tree.mu.RUnlock() + + if v, ok := tree.tree.Get(s); ok { + return v.(T) + } + return tree.zero +} + +func (tree *SimpleTree[T]) LongestPrefix(s string) (string, T) { + tree.mu.RLock() + defer tree.mu.RUnlock() + + if s, v, ok := tree.tree.LongestPrefix(s); ok { + return s, v.(T) + } + return "", tree.zero +} + +func (tree *SimpleTree[T]) Insert(s string, v T) T { + tree.mu.Lock() + defer tree.mu.Unlock() + + tree.tree.Insert(s, v) + return v +} + +func (tree *SimpleTree[T]) WalkPrefix(lockType LockType, s string, f func(s string, v T) (bool, error)) error { + switch lockType { + case LockTypeNone: + case LockTypeRead: + tree.mu.RLock() + defer tree.mu.RUnlock() + case LockTypeWrite: + tree.mu.Lock() + defer tree.mu.Unlock() + } + var err error + tree.tree.WalkPrefix(s, func(s string, v any) bool { + var b bool + b, err = f(s, v.(T)) + if err != nil { + return true + } + return b + }) + + return err +} diff --git a/hugolib/doctree/support.go b/hugolib/doctree/support.go new file mode 100644 index 000000000..8083df127 --- /dev/null +++ b/hugolib/doctree/support.go @@ -0,0 +1,251 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package doctree + +import ( + "fmt" + "strings" + "sync" + + radix "github.com/armon/go-radix" +) + +var _ MutableTrees = MutableTrees{} + +const ( + LockTypeNone LockType = iota + LockTypeRead + LockTypeWrite +) + +// AddEventListener adds an event listener to the tree. +// Note that the handler func may not add listeners. +func (ctx *WalkContext[T]) AddEventListener(event, path string, handler func(*Event[T])) { + if ctx.eventHandlers == nil { + ctx.eventHandlers = make(eventHandlers[T]) + } + if ctx.eventHandlers[event] == nil { + ctx.eventHandlers[event] = make([]func(*Event[T]), 0) + } + + // We want to match all above the path, so we need to exclude any similar named siblings. + if !strings.HasSuffix(path, "/") { + path += "/" + } + + ctx.eventHandlers[event] = append( + ctx.eventHandlers[event], func(e *Event[T]) { + // Propagate events up the tree only. + if strings.HasPrefix(e.Path, path) { + handler(e) + } + }, + ) +} + +// AddPostHook adds a post hook to the tree. +// This will be run after the tree has been walked. +func (ctx *WalkContext[T]) AddPostHook(handler func() error) { + ctx.HooksPost = append(ctx.HooksPost, handler) +} + +func (ctx *WalkContext[T]) Data() *SimpleTree[any] { + ctx.dataInit.Do(func() { + ctx.data = &SimpleTree[any]{ + tree: radix.New(), + } + }) + return ctx.data +} + +// SendEvent sends an event up the tree. +func (ctx *WalkContext[T]) SendEvent(event *Event[T]) { + ctx.events = append(ctx.events, event) +} + +// StopPropagation stops the propagation of the event. +func (e *Event[T]) StopPropagation() { + e.stopPropagation = true +} + +// ValidateKey returns an error if the key is not valid. +func ValidateKey(key string) error { + if key == "" { + // Root node. 
+ return nil + } + + if len(key) < 2 { + return fmt.Errorf("too short key: %q", key) + } + + if key[0] != '/' { + return fmt.Errorf("key must start with '/': %q", key) + } + + if key[len(key)-1] == '/' { + return fmt.Errorf("key must not end with '/': %q", key) + } + + return nil +} + +// Event is used to communicate events in the tree. +type Event[T any] struct { + Name string + Path string + Source T + stopPropagation bool +} + +type LockType int + +// MutableTree is a tree that can be modified. +type MutableTree interface { + Delete(key string) + DeleteAll(key string) + DeletePrefix(prefix string) int + DeletePrefixAll(prefix string) int + Lock(writable bool) (commit func()) + CanLock() bool // Used for troubleshooting only. +} + +// WalkableTree is a tree that can be walked. +type WalkableTree[T any] interface { + WalkPrefixRaw(prefix string, walker func(key string, value T) bool) +} + +var _ WalkableTree[any] = (*WalkableTrees[any])(nil) + +type WalkableTrees[T any] []WalkableTree[T] + +func (t WalkableTrees[T]) WalkPrefixRaw(prefix string, walker func(key string, value T) bool) { + for _, tree := range t { + tree.WalkPrefixRaw(prefix, walker) + } +} + +var _ MutableTree = MutableTrees(nil) + +type MutableTrees []MutableTree + +func (t MutableTrees) Delete(key string) { + for _, tree := range t { + tree.Delete(key) + } +} + +func (t MutableTrees) DeleteAll(key string) { + for _, tree := range t { + tree.DeleteAll(key) + } +} + +func (t MutableTrees) DeletePrefix(prefix string) int { + var count int + for _, tree := range t { + count += tree.DeletePrefix(prefix) + } + return count +} + +func (t MutableTrees) DeletePrefixAll(prefix string) int { + var count int + for _, tree := range t { + count += tree.DeletePrefixAll(prefix) + } + return count +} + +func (t MutableTrees) Lock(writable bool) (commit func()) { + commits := make([]func(), len(t)) + for i, tree := range t { + commits[i] = tree.Lock(writable) + } + return func() { + for _, commit := range commits { 
+ commit() + } + } +} + +func (t MutableTrees) CanLock() bool { + for _, tree := range t { + if !tree.CanLock() { + return false + } + } + return true +} + +// WalkContext is passed to the Walk callback. +type WalkContext[T any] struct { + data *SimpleTree[any] + dataInit sync.Once + eventHandlers eventHandlers[T] + events []*Event[T] + + HooksPost []func() error +} + +type eventHandlers[T any] map[string][]func(*Event[T]) + +func cleanKey(key string) string { + if key == "/" { + // The path to the home page is logically "/", + // but for technical reasons, it's stored as "". + // This allows us to treat the home page as a section, + // and a prefix search for "/" will return the home page's descendants. + return "" + } + return key +} + +func (ctx *WalkContext[T]) HandleEvents() error { + for len(ctx.events) > 0 { + event := ctx.events[0] + ctx.events = ctx.events[1:] + + // Loop the event handlers in reverse order so + // that events created by the handlers themselves will + // be picked up further up the tree. + for i := len(ctx.eventHandlers[event.Name]) - 1; i >= 0; i-- { + ctx.eventHandlers[event.Name][i](event) + if event.stopPropagation { + break + } + } + } + return nil +} + +func (ctx *WalkContext[T]) HandleEventsAndHooks() error { + if err := ctx.HandleEvents(); err != nil { + return err + } + + for _, hook := range ctx.HooksPost { + if err := hook(); err != nil { + return err + } + } + return nil +} + +func mustValidateKey(key string) string { + if err := ValidateKey(key); err != nil { + panic(err) + } + return key +} diff --git a/hugolib/doctree/treeshifttree.go b/hugolib/doctree/treeshifttree.go new file mode 100644 index 000000000..f8a6d360b --- /dev/null +++ b/hugolib/doctree/treeshifttree.go @@ -0,0 +1,101 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package doctree + +var _ Tree[string] = (*TreeShiftTree[string])(nil) + +type TreeShiftTree[T any] struct { + // This tree is shiftable in one dimension. + d int + + // The value of the current dimension. + v int + + // Will be of length equal to the length of the dimension. + trees []*SimpleTree[T] +} + +func NewTreeShiftTree[T any](d, length int) *TreeShiftTree[T] { + if length <= 0 { + panic("length must be > 0") + } + trees := make([]*SimpleTree[T], length) + for i := 0; i < length; i++ { + trees[i] = NewSimpleTree[T]() + } + return &TreeShiftTree[T]{d: d, trees: trees} +} + +func (t TreeShiftTree[T]) Shape(d, v int) *TreeShiftTree[T] { + if d != t.d { + panic("dimension mismatch") + } + if v >= len(t.trees) { + panic("value out of range") + } + t.v = v + return &t +} + +func (t *TreeShiftTree[T]) Get(s string) T { + return t.trees[t.v].Get(s) +} + +func (t *TreeShiftTree[T]) LongestPrefix(s string) (string, T) { + return t.trees[t.v].LongestPrefix(s) +} + +func (t *TreeShiftTree[T]) Insert(s string, v T) T { + return t.trees[t.v].Insert(s, v) +} + +func (t *TreeShiftTree[T]) WalkPrefix(lockType LockType, s string, f func(s string, v T) (bool, error)) error { + return t.trees[t.v].WalkPrefix(lockType, s, f) +} + +func (t *TreeShiftTree[T]) Delete(key string) { + for _, tt := range t.trees { + tt.tree.Delete(key) + } +} + +func (t *TreeShiftTree[T]) DeletePrefix(prefix string) int { + var count int + for _, tt := range t.trees { + count += tt.tree.DeletePrefix(prefix) + } + return count +} + +func (t *TreeShiftTree[T]) Lock(writable bool) (commit 
func()) { + if writable { + for _, tt := range t.trees { + tt.mu.Lock() + } + return func() { + for _, tt := range t.trees { + tt.mu.Unlock() + } + } + } + + for _, tt := range t.trees { + tt.mu.RLock() + } + return func() { + for _, tt := range t.trees { + tt.mu.RUnlock() + } + } +} diff --git a/hugolib/doctree/treeshifttree_test.go b/hugolib/doctree/treeshifttree_test.go new file mode 100644 index 000000000..c39ff38aa --- /dev/null +++ b/hugolib/doctree/treeshifttree_test.go @@ -0,0 +1,28 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package doctree_test + +import ( + "testing" + + qt "github.com/frankban/quicktest" + "github.com/gohugoio/hugo/hugolib/doctree" +) + +func TestTreeShiftTree(t *testing.T) { + c := qt.New(t) + + tree := doctree.NewTreeShiftTree[string](0, 10) + c.Assert(tree, qt.IsNotNil) +} diff --git a/hugolib/fileInfo.go b/hugolib/fileInfo.go index 1cdd7041d..a01b37008 100644 --- a/hugolib/fileInfo.go +++ b/hugolib/fileInfo.go @@ -15,24 +15,14 @@ package hugolib import ( "fmt" - "strings" - - "github.com/gohugoio/hugo/hugofs/files" - - "github.com/gohugoio/hugo/hugofs" "github.com/spf13/afero" "github.com/gohugoio/hugo/source" ) -// fileInfo implements the File and ReadableFile interface. 
-var ( - _ source.File = (*fileInfo)(nil) -) - type fileInfo struct { - source.File + *source.File overriddenLang string } @@ -59,57 +49,3 @@ func (fi *fileInfo) String() string { } return fi.Path() } - -// TODO(bep) rename -func newFileInfo(sp *source.SourceSpec, fi hugofs.FileMetaInfo) (*fileInfo, error) { - baseFi, err := sp.NewFileInfo(fi) - if err != nil { - return nil, err - } - - f := &fileInfo{ - File: baseFi, - } - - return f, nil -} - -type bundleDirType int - -const ( - bundleNot bundleDirType = iota - - // All from here are bundles in one form or another. - bundleLeaf - bundleBranch -) - -// Returns the given file's name's bundle type and whether it is a content -// file or not. -func classifyBundledFile(name string) (bundleDirType, bool) { - if !files.IsContentFile(name) { - return bundleNot, false - } - if strings.HasPrefix(name, "_index.") { - return bundleBranch, true - } - - if strings.HasPrefix(name, "index.") { - return bundleLeaf, true - } - - return bundleNot, true -} - -func (b bundleDirType) String() string { - switch b { - case bundleNot: - return "Not a bundle" - case bundleLeaf: - return "Regular bundle" - case bundleBranch: - return "Branch bundle" - } - - return "" -} diff --git a/hugolib/filesystems/basefs.go b/hugolib/filesystems/basefs.go index a7c2a6271..b37fb8cb4 100644 --- a/hugolib/filesystems/basefs.go +++ b/hugolib/filesystems/basefs.go @@ -19,12 +19,12 @@ import ( "fmt" "io" "os" - "path" "path/filepath" "strings" "sync" "github.com/bep/overlayfs" + "github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/htesting" "github.com/gohugoio/hugo/hugofs/glob" @@ -60,9 +60,12 @@ type BaseFs struct { // SourceFilesystems contains the different source file systems. *SourceFilesystems - // The project source. + // The source filesystem (needs absolute filenames). SourceFs afero.Fs + // The project source. + ProjectSourceFs afero.Fs + // The filesystem used to publish the rendered site. // This usually maps to /my-project/public. 
PublishFs afero.Fs @@ -95,67 +98,84 @@ func (f *fakeLockfileMutex) Lock() (func(), error) { } // Tries to acquire a build lock. -func (fs *BaseFs) LockBuild() (unlock func(), err error) { - return fs.buildMu.Lock() +func (b *BaseFs) LockBuild() (unlock func(), err error) { + return b.buildMu.Lock() } -// TODO(bep) we can get regular files in here and that is fine, but -// we need to clean up the naming. -func (fs *BaseFs) WatchDirs() []hugofs.FileMetaInfo { - var dirs []hugofs.FileMetaInfo - for _, dir := range fs.AllDirs() { - if dir.Meta().Watch { - dirs = append(dirs, dir) - } - } - return dirs -} +func (b *BaseFs) WatchFilenames() []string { + var filenames []string + sourceFs := b.SourceFs + + for _, rfs := range b.RootFss { + for _, component := range files.ComponentFolders { + fis, err := rfs.Mounts(component) + if err != nil { + continue + } + + for _, fim := range fis { + meta := fim.Meta() + if !meta.Watch { + continue + } + + if !fim.IsDir() { + filenames = append(filenames, meta.Filename) + continue + } + + w := hugofs.NewWalkway(hugofs.WalkwayConfig{ + Fs: sourceFs, + Root: meta.Filename, + WalkFn: func(path string, fi hugofs.FileMetaInfo) error { + if !fi.IsDir() { + return nil + } + if fi.Name() == ".git" || + fi.Name() == "node_modules" || fi.Name() == "bower_components" { + return filepath.SkipDir + } + filenames = append(filenames, fi.Meta().Filename) + return nil + }, + }) + + w.Walk() + } -func (fs *BaseFs) AllDirs() []hugofs.FileMetaInfo { - var dirs []hugofs.FileMetaInfo - for _, dirSet := range [][]hugofs.FileMetaInfo{ - fs.Archetypes.Dirs, - fs.I18n.Dirs, - fs.Data.Dirs, - fs.Content.Dirs, - fs.Assets.Dirs, - fs.Layouts.Dirs, - // fs.Resources.Dirs, - fs.StaticDirs, - } { - dirs = append(dirs, dirSet...) + } } - return dirs + return filenames } -// RelContentDir tries to create a path relative to the content root from -// the given filename. The return value is the path and language code. 
-func (b *BaseFs) RelContentDir(filename string) string { - for _, dir := range b.SourceFilesystems.Content.Dirs { - dirname := dir.Meta().Filename - if strings.HasPrefix(filename, dirname) { - rel := path.Join(dir.Meta().Path, strings.TrimPrefix(filename, dirname)) - return strings.TrimPrefix(rel, filePathSeparator) +func (b *BaseFs) mountsForComponent(component string) []hugofs.FileMetaInfo { + var result []hugofs.FileMetaInfo + for _, rfs := range b.RootFss { + dirs, err := rfs.Mounts(component) + if err == nil { + result = append(result, dirs...) } } - // Either not a content dir or already relative. - return filename + return result } // AbsProjectContentDir tries to construct a filename below the most // relevant content directory. func (b *BaseFs) AbsProjectContentDir(filename string) (string, string, error) { isAbs := filepath.IsAbs(filename) - for _, dir := range b.SourceFilesystems.Content.Dirs { - meta := dir.Meta() + for _, fi := range b.mountsForComponent(files.ComponentFolderContent) { + if !fi.IsDir() { + continue + } + meta := fi.Meta() if !meta.IsProject { continue } if isAbs { if strings.HasPrefix(filename, meta.Filename) { - return strings.TrimPrefix(filename, meta.Filename), filename, nil + return strings.TrimPrefix(filename, meta.Filename+filePathSeparator), filename, nil } } else { contentDir := strings.TrimPrefix(strings.TrimPrefix(meta.Filename, meta.BaseDir), filePathSeparator) + filePathSeparator @@ -173,7 +193,10 @@ func (b *BaseFs) AbsProjectContentDir(filename string) (string, string, error) { // A filename on the form "posts/mypage.md", put it inside // the first content folder, usually <workDir>/content. // Pick the first project dir (which is probably the most important one). 
- for _, dir := range b.SourceFilesystems.Content.Dirs { + for _, dir := range b.SourceFilesystems.Content.mounts() { + if !dir.IsDir() { + continue + } meta := dir.Meta() if meta.IsProject { return filename, filepath.Join(meta.Filename, filename), nil @@ -186,14 +209,14 @@ func (b *BaseFs) AbsProjectContentDir(filename string) (string, string, error) { // ResolveJSConfigFile resolves the JS-related config file to a absolute // filename. One example of such would be postcss.config.js. -func (fs *BaseFs) ResolveJSConfigFile(name string) string { +func (b *BaseFs) ResolveJSConfigFile(name string) string { // First look in assets/_jsconfig - fi, err := fs.Assets.Fs.Stat(filepath.Join(files.FolderJSConfig, name)) + fi, err := b.Assets.Fs.Stat(filepath.Join(files.FolderJSConfig, name)) if err == nil { return fi.(hugofs.FileMetaInfo).Meta().Filename } // Fall back to the work dir. - fi, err = fs.Work.Stat(name) + fi, err = b.Work.Stat(name) if err == nil { return fi.(hugofs.FileMetaInfo).Meta().Filename } @@ -201,27 +224,6 @@ func (fs *BaseFs) ResolveJSConfigFile(name string) string { return "" } -// MakePathRelative creates a relative path from the given filename. -// It returns both the component name (e.g. layouts) and the path relative to that. -func (fs *BaseFs) MakePathRelative(filename string) (string, string) { - for _, sfs := range fs.FileSystems() { - if sfs.Contains(filename) { - if s, found := sfs.MakePathRelative(filename); found { - return sfs.Name, s - } - } - } - // May be a static file. - if s := fs.MakeStaticPathRelative(filename); s != "" { - return files.ComponentFolderStatic, s - } - // Fall back to relative to the working dir. - if strings.HasPrefix(filename, fs.workingDir) { - return "", strings.TrimPrefix(filename, fs.workingDir) - } - return "", "" -} - // SourceFilesystems contains the different source file systems. 
These can be // composite file systems (theme and project etc.), and they have all root // set to the source type the provides: data, i18n, static, layouts. @@ -233,6 +235,10 @@ type SourceFilesystems struct { Archetypes *SourceFilesystem Assets *SourceFilesystem + AssetsWithDuplicatesPreserved *SourceFilesystem + + RootFss []*hugofs.RootMappingFs + // Writable filesystem on top the project's resources directory, // with any sub module's resource fs layered below. ResourcesCache afero.Fs @@ -246,23 +252,7 @@ type SourceFilesystems struct { // When in non-multihost mode there will be one entry in this map with a blank key. Static map[string]*SourceFilesystem - // All the /static dirs (including themes/modules). - StaticDirs []hugofs.FileMetaInfo -} - -// FileSystems returns the FileSystems relevant for the change detection -// in server mode. -// Note: This does currently not return any static fs. -func (s *SourceFilesystems) FileSystems() []*SourceFilesystem { - return []*SourceFilesystem{ - s.Content, - s.Assets, - s.Data, - s.I18n, - s.Layouts, - s.Archetypes, - // TODO(bep) static - } + conf config.AllProvider } // A SourceFilesystem holds the filesystem for a given source type in Hugo (data, @@ -275,32 +265,12 @@ type SourceFilesystem struct { // This is a virtual composite filesystem. It expects path relative to a context. Fs afero.Fs - // This filesystem as separate root directories, starting from project and down - // to the themes/modules. - Dirs []hugofs.FileMetaInfo - // When syncing a source folder to the target (e.g. /public), this may // be set to publish into a subfolder. This is used for static syncing // in multihost mode. PublishFolder string } -// ContentStaticAssetFs will create a new composite filesystem from the content, -// static, and asset filesystems. The site language is needed to pick the correct static filesystem. -// The order is content, static and then assets. 
-// TODO(bep) check usage -func (s SourceFilesystems) ContentStaticAssetFs(lang string) afero.Fs { - return overlayfs.New( - overlayfs.Options{ - Fss: []afero.Fs{ - s.Content.Fs, - s.StaticFs(lang), - s.Assets.Fs, - }, - }, - ) -} - // StaticFs returns the static filesystem for the given language. // This can be a composite filesystem. func (s SourceFilesystems) StaticFs(lang string) afero.Fs { @@ -349,24 +319,17 @@ func (s SourceFilesystems) IsContent(filename string) bool { return s.Content.Contains(filename) } -// IsLayout returns true if the given filename is a member of the layouts filesystem. -func (s SourceFilesystems) IsLayout(filename string) bool { - return s.Layouts.Contains(filename) -} - -// IsData returns true if the given filename is a member of the data filesystem. -func (s SourceFilesystems) IsData(filename string) bool { - return s.Data.Contains(filename) -} - -// IsAsset returns true if the given filename is a member of the asset filesystem. -func (s SourceFilesystems) IsAsset(filename string) bool { - return s.Assets.Contains(filename) -} - -// IsI18n returns true if the given filename is a member of the i18n filesystem. -func (s SourceFilesystems) IsI18n(filename string) bool { - return s.I18n.Contains(filename) +// ResolvePaths resolves the given filename to a list of paths in the filesystems. +func (s *SourceFilesystems) ResolvePaths(filename string, checkExists bool) []hugofs.ComponentPath { + var cpss []hugofs.ComponentPath + for _, rfs := range s.RootFss { + cps, err := rfs.ReverseLookup(filename, checkExists) + if err != nil { + panic(err) + } + cpss = append(cpss, cps...) + } + return cpss } // MakeStaticPathRelative makes an absolute static filename into a relative one. @@ -383,19 +346,53 @@ func (s SourceFilesystems) MakeStaticPathRelative(filename string) string { // MakePathRelative creates a relative path from the given filename. 
func (d *SourceFilesystem) MakePathRelative(filename string) (string, bool) { - for _, dir := range d.Dirs { - meta := dir.(hugofs.FileMetaInfo).Meta() - currentPath := meta.Filename - - if strings.HasPrefix(filename, currentPath) { - rel := strings.TrimPrefix(filename, currentPath) - if mp := meta.Path; mp != "" { - rel = filepath.Join(mp, rel) + cps, err := d.ReverseLookup(filename) + if err != nil { + panic(err) + } + if len(cps) == 0 { + return "", false + } + + return filepath.FromSlash(cps[0].Path), true +} + +// ReverseLookup returns the component paths for the given filename. +func (d *SourceFilesystem) ReverseLookup(filename string) ([]hugofs.ComponentPath, error) { + var cps []hugofs.ComponentPath + hugofs.WalkFilesystems(d.Fs, func(fs afero.Fs) bool { + if rfs, ok := fs.(hugofs.ReverseLookupProvder); ok { + if c, err := rfs.ReverseLookup(filename, true); err == nil { + cps = append(cps, c...) } - return strings.TrimPrefix(rel, filePathSeparator), true + } + return false + }) + return cps, nil +} + +func (d *SourceFilesystem) mounts() []hugofs.FileMetaInfo { + var m []hugofs.FileMetaInfo + hugofs.WalkFilesystems(d.Fs, func(fs afero.Fs) bool { + if rfs, ok := fs.(*hugofs.RootMappingFs); ok { + mounts, err := rfs.Mounts(d.Name) + if err == nil { + m = append(m, mounts...) + } + + } + return false + }) + // Filter out any mounts not belonging to this filesystem. + n := 0 + for _, mm := range m { + if mm.Meta().Component == d.Name { + m[n] = mm + n++ } } - return "", false + m = m[:n] + return m } func (d *SourceFilesystem) RealFilename(rel string) string { @@ -412,7 +409,10 @@ func (d *SourceFilesystem) RealFilename(rel string) string { // Contains returns whether the given filename is a member of the current filesystem. 
func (d *SourceFilesystem) Contains(filename string) bool { - for _, dir := range d.Dirs { + for _, dir := range d.mounts() { + if !dir.IsDir() { + continue + } if strings.HasPrefix(filename, dir.Meta().Filename) { return true } @@ -420,32 +420,18 @@ func (d *SourceFilesystem) Contains(filename string) bool { return false } -// Path returns the mount relative path to the given filename if it is a member of -// of the current filesystem, an empty string if not. -func (d *SourceFilesystem) Path(filename string) string { - for _, dir := range d.Dirs { - meta := dir.Meta() - if strings.HasPrefix(filename, meta.Filename) { - p := strings.TrimPrefix(strings.TrimPrefix(filename, meta.Filename), filePathSeparator) - if mountRoot := meta.MountRoot; mountRoot != "" { - return filepath.Join(mountRoot, p) - } - return p - } - } - return "" -} - // RealDirs gets a list of absolute paths to directories starting from the given // path. func (d *SourceFilesystem) RealDirs(from string) []string { var dirnames []string - for _, dir := range d.Dirs { - meta := dir.Meta() - dirname := filepath.Join(meta.Filename, from) - _, err := meta.Fs.Stat(from) - + for _, m := range d.mounts() { + if !m.IsDir() { + continue + } + meta := m.Meta() + _, err := d.Fs.Stat(from) if err == nil { + dirname := filepath.Join(meta.Filename, from) dirnames = append(dirnames, dirname) } } @@ -462,8 +448,6 @@ func WithBaseFs(b *BaseFs) func(*BaseFs) error { } } -var counter int - // NewBase builds the filesystems used by Hugo given the paths and options provided.NewBase func NewBase(p *paths.Paths, logger loggers.Logger, options ...func(*BaseFs) error) (*BaseFs, error) { fs := p.Fs @@ -472,7 +456,8 @@ func NewBase(p *paths.Paths, logger loggers.Logger, options ...func(*BaseFs) err } publishFs := hugofs.NewBaseFileDecorator(fs.PublishDir) - sourceFs := hugofs.NewBaseFileDecorator(afero.NewBasePathFs(fs.Source, p.Cfg.BaseConfig().WorkingDir)) + projectSourceFs := 
hugofs.NewBaseFileDecorator(hugofs.NewBasePathFs(fs.Source, p.Cfg.BaseConfig().WorkingDir)) + sourceFs := hugofs.NewBaseFileDecorator(fs.Source) publishFsStatic := fs.PublishDirStatic var buildMu Lockable @@ -484,6 +469,7 @@ func NewBase(p *paths.Paths, logger loggers.Logger, options ...func(*BaseFs) err b := &BaseFs{ SourceFs: sourceFs, + ProjectSourceFs: projectSourceFs, WorkDir: fs.WorkingDirReadOnly, PublishFs: publishFs, PublishFsStatic: publishFsStatic, @@ -523,14 +509,18 @@ type sourceFilesystemsBuilder struct { func newSourceFilesystemsBuilder(p *paths.Paths, logger loggers.Logger, b *BaseFs) *sourceFilesystemsBuilder { sourceFs := hugofs.NewBaseFileDecorator(p.Fs.Source) - return &sourceFilesystemsBuilder{p: p, logger: logger, sourceFs: sourceFs, theBigFs: b.theBigFs, result: &SourceFilesystems{}} + return &sourceFilesystemsBuilder{ + p: p, logger: logger, sourceFs: sourceFs, theBigFs: b.theBigFs, + result: &SourceFilesystems{ + conf: p.Cfg, + }, + } } -func (b *sourceFilesystemsBuilder) newSourceFilesystem(name string, fs afero.Fs, dirs []hugofs.FileMetaInfo) *SourceFilesystem { +func (b *sourceFilesystemsBuilder) newSourceFilesystem(name string, fs afero.Fs) *SourceFilesystem { return &SourceFilesystem{ Name: name, Fs: fs, - Dirs: dirs, } } @@ -544,64 +534,61 @@ func (b *sourceFilesystemsBuilder) Build() (*SourceFilesystems, error) { b.theBigFs = theBigFs } - createView := func(componentID string) *SourceFilesystem { + createView := func(componentID string, overlayFs *overlayfs.OverlayFs) *SourceFilesystem { if b.theBigFs == nil || b.theBigFs.overlayMounts == nil { - return b.newSourceFilesystem(componentID, hugofs.NoOpFs, nil) + return b.newSourceFilesystem(componentID, hugofs.NoOpFs) } - dirs := b.theBigFs.overlayDirs[componentID] + fs := hugofs.NewComponentFs( + hugofs.ComponentFsOptions{ + Fs: overlayFs, + Component: componentID, + DefaultContentLanguage: b.p.Cfg.DefaultContentLanguage(), + PathParser: b.p.Cfg.PathParser(), + }, + ) - return 
b.newSourceFilesystem(componentID, afero.NewBasePathFs(b.theBigFs.overlayMounts, componentID), dirs) + return b.newSourceFilesystem(componentID, fs) } - b.result.Archetypes = createView(files.ComponentFolderArchetypes) - b.result.Layouts = createView(files.ComponentFolderLayouts) - b.result.Assets = createView(files.ComponentFolderAssets) + b.result.Archetypes = createView(files.ComponentFolderArchetypes, b.theBigFs.overlayMounts) + b.result.Layouts = createView(files.ComponentFolderLayouts, b.theBigFs.overlayMounts) + b.result.Assets = createView(files.ComponentFolderAssets, b.theBigFs.overlayMounts) b.result.ResourcesCache = b.theBigFs.overlayResources + b.result.RootFss = b.theBigFs.rootFss + + // data and i18n needs a different merge strategy. + overlayMountsPreserveDupes := b.theBigFs.overlayMounts.WithDirsMerger(hugofs.AppendDirsMerger) + b.result.Data = createView(files.ComponentFolderData, overlayMountsPreserveDupes) + b.result.I18n = createView(files.ComponentFolderI18n, overlayMountsPreserveDupes) + b.result.AssetsWithDuplicatesPreserved = createView(files.ComponentFolderAssets, overlayMountsPreserveDupes) + + contentFs := hugofs.NewComponentFs( + hugofs.ComponentFsOptions{ + Fs: b.theBigFs.overlayMountsContent, + Component: files.ComponentFolderContent, + DefaultContentLanguage: b.p.Cfg.DefaultContentLanguage(), + PathParser: b.p.Cfg.PathParser(), + }, + ) - // Data, i18n and content cannot use the overlay fs - dataDirs := b.theBigFs.overlayDirs[files.ComponentFolderData] - dataFs, err := hugofs.NewSliceFs(dataDirs...) - if err != nil { - return nil, err - } - - b.result.Data = b.newSourceFilesystem(files.ComponentFolderData, dataFs, dataDirs) - - i18nDirs := b.theBigFs.overlayDirs[files.ComponentFolderI18n] - i18nFs, err := hugofs.NewSliceFs(i18nDirs...) 
- if err != nil { - return nil, err - } - b.result.I18n = b.newSourceFilesystem(files.ComponentFolderI18n, i18nFs, i18nDirs) - - contentDirs := b.theBigFs.overlayDirs[files.ComponentFolderContent] - contentBfs := afero.NewBasePathFs(b.theBigFs.overlayMountsContent, files.ComponentFolderContent) - - contentFs, err := hugofs.NewLanguageFs(b.p.Cfg.LanguagesDefaultFirst().AsOrdinalSet(), contentBfs) - if err != nil { - return nil, fmt.Errorf("create content filesystem: %w", err) - } - - b.result.Content = b.newSourceFilesystem(files.ComponentFolderContent, contentFs, contentDirs) - - b.result.Work = afero.NewReadOnlyFs(b.theBigFs.overlayFull) + b.result.Content = b.newSourceFilesystem(files.ComponentFolderContent, contentFs) + b.result.Work = hugofs.NewReadOnlyFs(b.theBigFs.overlayFull) // Create static filesystem(s) ms := make(map[string]*SourceFilesystem) b.result.Static = ms - b.result.StaticDirs = b.theBigFs.overlayDirs[files.ComponentFolderStatic] if b.theBigFs.staticPerLanguage != nil { // Multihost mode for k, v := range b.theBigFs.staticPerLanguage { - sfs := b.newSourceFilesystem(files.ComponentFolderStatic, v, b.result.StaticDirs) + sfs := b.newSourceFilesystem(files.ComponentFolderStatic, v) sfs.PublishFolder = k ms[k] = sfs } } else { - bfs := afero.NewBasePathFs(b.theBigFs.overlayMountsStatic, files.ComponentFolderStatic) - ms[""] = b.newSourceFilesystem(files.ComponentFolderStatic, bfs, b.result.StaticDirs) + bfs := hugofs.NewBasePathFs(b.theBigFs.overlayMountsStatic, files.ComponentFolderStatic) + ms[""] = b.newSourceFilesystem(files.ComponentFolderStatic, bfs) } return b.result, nil @@ -619,8 +606,7 @@ func (b *sourceFilesystemsBuilder) createMainOverlayFs(p *paths.Paths) (*filesys collector := &filesystemsCollector{ sourceProject: b.sourceFs, - sourceModules: hugofs.NewNoSymlinkFs(b.sourceFs, b.logger, false), - overlayDirs: make(map[string][]hugofs.FileMetaInfo), + sourceModules: b.sourceFs, staticPerLanguage: staticFsMap, overlayMounts: 
overlayfs.New(overlayfs.Options{}), @@ -675,6 +661,7 @@ func (b *sourceFilesystemsBuilder) createOverlayFs( collector.overlayMounts = appendNopIfEmpty(collector.overlayMounts) collector.overlayMountsContent = appendNopIfEmpty(collector.overlayMountsContent) collector.overlayMountsStatic = appendNopIfEmpty(collector.overlayMountsStatic) + collector.overlayMountsFull = appendNopIfEmpty(collector.overlayMountsFull) collector.overlayFull = appendNopIfEmpty(collector.overlayFull) collector.overlayResources = appendNopIfEmpty(collector.overlayResources) @@ -712,15 +699,15 @@ func (b *sourceFilesystemsBuilder) createOverlayFs( base, filename := absPathify(mount.Source) rm := hugofs.RootMapping{ - From: mount.Target, - To: filename, - ToBasedir: base, - Module: md.Module.Path(), - IsProject: md.isMainProject, + From: mount.Target, + To: filename, + ToBase: base, + Module: md.Module.Path(), + ModuleOrdinal: md.ordinal, + IsProject: md.isMainProject, Meta: &hugofs.FileMeta{ Watch: md.Watch(), Weight: mountWeight, - Classifier: files.ContentClassContent, InclusionFilter: inclusionFilter, }, } @@ -747,7 +734,8 @@ func (b *sourceFilesystemsBuilder) createOverlayFs( if !md.isMainProject { modBase = collector.sourceModules } - sourceStatic := hugofs.NewNoSymlinkFs(modBase, b.logger, true) + + sourceStatic := modBase rmfs, err := hugofs.NewRootMappingFs(modBase, fromTo...) if err != nil { @@ -762,11 +750,10 @@ func (b *sourceFilesystemsBuilder) createOverlayFs( return err } - // We need to keep the ordered list of directories for watching and - // some special merge operations (data, i18n). - collector.addDirs(rmfs) - collector.addDirs(rmfsContent) - collector.addDirs(rmfsStatic) + // We need to keep the list of directories for watching. 
+ collector.addRootFs(rmfs) + collector.addRootFs(rmfsContent) + collector.addRootFs(rmfsStatic) if collector.staticPerLanguage != nil { for _, l := range b.p.Cfg.Languages() { @@ -776,7 +763,7 @@ func (b *sourceFilesystemsBuilder) createOverlayFs( rlang := rm.Meta.Lang return rlang == "" || rlang == lang }) - bfs := afero.NewBasePathFs(lfs, files.ComponentFolderStatic) + bfs := hugofs.NewBasePathFs(lfs, files.ComponentFolderStatic) collector.staticPerLanguage[lang] = collector.staticPerLanguage[lang].Append(bfs) } } @@ -792,14 +779,15 @@ func (b *sourceFilesystemsBuilder) createOverlayFs( collector.overlayMounts = collector.overlayMounts.Append(rmfs) collector.overlayMountsContent = collector.overlayMountsContent.Append(rmfsContent) collector.overlayMountsStatic = collector.overlayMountsStatic.Append(rmfsStatic) - collector.overlayFull = collector.overlayFull.Append(afero.NewBasePathFs(modBase, md.dir)) - collector.overlayResources = collector.overlayResources.Append(afero.NewBasePathFs(modBase, getResourcesDir())) + collector.overlayFull = collector.overlayFull.Append(hugofs.NewBasePathFs(modBase, md.dir)) + collector.overlayResources = collector.overlayResources.Append(hugofs.NewBasePathFs(modBase, getResourcesDir())) } return nil } +//lint:ignore U1000 useful for debugging func printFs(fs afero.Fs, path string, w io.Writer) { if fs == nil { return @@ -827,36 +815,23 @@ type filesystemsCollector struct { overlayMounts *overlayfs.OverlayFs overlayMountsContent *overlayfs.OverlayFs overlayMountsStatic *overlayfs.OverlayFs + overlayMountsFull *overlayfs.OverlayFs overlayFull *overlayfs.OverlayFs overlayResources *overlayfs.OverlayFs - // Maps component type (layouts, static, content etc.) an ordered list of - // directories representing the overlay filesystems above. 
- overlayDirs map[string][]hugofs.FileMetaInfo + rootFss []*hugofs.RootMappingFs // Set if in multihost mode staticPerLanguage map[string]*overlayfs.OverlayFs - - finalizerInit sync.Once } -func (c *filesystemsCollector) addDirs(rfs *hugofs.RootMappingFs) { - for _, componentFolder := range files.ComponentFolders { - c.addDir(rfs, componentFolder) - } -} - -func (c *filesystemsCollector) addDir(rfs *hugofs.RootMappingFs, componentFolder string) { - dirs, err := rfs.Dirs(componentFolder) - - if err == nil { - c.overlayDirs[componentFolder] = append(c.overlayDirs[componentFolder], dirs...) - } +func (c *filesystemsCollector) addRootFs(rfs *hugofs.RootMappingFs) { + c.rootFss = append(c.rootFss, rfs) } type mountsDescriptor struct { modules.Module dir string isMainProject bool - ordinal int + ordinal int // zero based starting from the project. } diff --git a/hugolib/filesystems/basefs_test.go b/hugolib/filesystems/basefs_test.go index 1724f3838..5398055ed 100644 --- a/hugolib/filesystems/basefs_test.go +++ b/hugolib/filesystems/basefs_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -18,11 +18,13 @@ import ( "fmt" "os" "path/filepath" + "runtime" "strings" "testing" "github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/config/testconfig" + "github.com/gohugoio/hugo/hugolib" "github.com/spf13/afero" @@ -54,22 +56,22 @@ func TestNewBaseFs(t *testing.T) { base := filepath.Join(workingDir, "themes", theme, dir) filenameTheme := filepath.Join(base, fmt.Sprintf("theme-file-%s.txt", theme)) filenameOverlap := filepath.Join(base, "f3.txt") - afs.Mkdir(base, 0755) + afs.Mkdir(base, 0o755) content := []byte(fmt.Sprintf("content:%s:%s", theme, dir)) - afero.WriteFile(afs, filenameTheme, content, 0755) - afero.WriteFile(afs, filenameOverlap, content, 0755) + afero.WriteFile(afs, filenameTheme, content, 0o755) + afero.WriteFile(afs, filenameOverlap, content, 0o755) } // Write some files to the root of the theme base := filepath.Join(workingDir, "themes", theme) - afero.WriteFile(afs, filepath.Join(base, fmt.Sprintf("theme-root-%s.txt", theme)), []byte(fmt.Sprintf("content:%s", theme)), 0755) - afero.WriteFile(afs, filepath.Join(base, "file-theme-root.txt"), []byte(fmt.Sprintf("content:%s", theme)), 0755) + afero.WriteFile(afs, filepath.Join(base, fmt.Sprintf("theme-root-%s.txt", theme)), []byte(fmt.Sprintf("content:%s", theme)), 0o755) + afero.WriteFile(afs, filepath.Join(base, "file-theme-root.txt"), []byte(fmt.Sprintf("content:%s", theme)), 0o755) } - afero.WriteFile(afs, filepath.Join(workingDir, "file-root.txt"), []byte("content-project"), 0755) + afero.WriteFile(afs, filepath.Join(workingDir, "file-root.txt"), []byte("content-project"), 0o755) afero.WriteFile(afs, filepath.Join(workingDir, "themes", "btheme", "config.toml"), []byte(` theme = ["atheme"] -`), 0755) +`), 0o755) setConfigAndWriteSomeFilesTo(afs, v, "contentDir", "mycontent", 3) setConfigAndWriteSomeFilesTo(afs, v, "i18nDir", "myi18n", 4) @@ -113,17 +115,10 @@ theme = ["atheme"] checkFileCount(bfs.Assets.Fs, "", c, 9) checkFileCount(bfs.Work, "", c, 90) - 
c.Assert(bfs.IsData(filepath.Join(workingDir, "mydata", "file1.txt")), qt.Equals, true) - c.Assert(bfs.IsI18n(filepath.Join(workingDir, "myi18n", "file1.txt")), qt.Equals, true) - c.Assert(bfs.IsLayout(filepath.Join(workingDir, "mylayouts", "file1.txt")), qt.Equals, true) c.Assert(bfs.IsStatic(filepath.Join(workingDir, "mystatic", "file1.txt")), qt.Equals, true) - c.Assert(bfs.IsAsset(filepath.Join(workingDir, "myassets", "file1.txt")), qt.Equals, true) contentFilename := filepath.Join(workingDir, "mycontent", "file1.txt") c.Assert(bfs.IsContent(contentFilename), qt.Equals, true) - rel := bfs.RelContentDir(contentFilename) - c.Assert(rel, qt.Equals, "file1.txt") - // Check Work fs vs theme checkFileContent(bfs.Work, "file-root.txt", c, "content-project") checkFileContent(bfs.Work, "theme-root-atheme.txt", c, "content:atheme") @@ -168,34 +163,29 @@ func TestRealDirs(t *testing.T) { v.Set("assetDir", "myassets") v.Set("theme", "mytheme") - afs := hugofs.Os - - defer func() { - os.RemoveAll(root) - os.RemoveAll(themesDir) - }() + afs := &hugofs.OpenFilesFs{Fs: hugofs.Os} - c.Assert(afs.MkdirAll(filepath.Join(root, "myassets", "scss", "sf1"), 0755), qt.IsNil) - c.Assert(afs.MkdirAll(filepath.Join(root, "myassets", "scss", "sf2"), 0755), qt.IsNil) - c.Assert(afs.MkdirAll(filepath.Join(themesDir, "mytheme", "assets", "scss", "sf2"), 0755), qt.IsNil) - c.Assert(afs.MkdirAll(filepath.Join(themesDir, "mytheme", "assets", "scss", "sf3"), 0755), qt.IsNil) - c.Assert(afs.MkdirAll(filepath.Join(root, "resources"), 0755), qt.IsNil) - c.Assert(afs.MkdirAll(filepath.Join(themesDir, "mytheme", "resources"), 0755), qt.IsNil) + c.Assert(afs.MkdirAll(filepath.Join(root, "myassets", "scss", "sf1"), 0o755), qt.IsNil) + c.Assert(afs.MkdirAll(filepath.Join(root, "myassets", "scss", "sf2"), 0o755), qt.IsNil) + c.Assert(afs.MkdirAll(filepath.Join(themesDir, "mytheme", "assets", "scss", "sf2"), 0o755), qt.IsNil) + c.Assert(afs.MkdirAll(filepath.Join(themesDir, "mytheme", "assets", "scss", 
"sf3"), 0o755), qt.IsNil) + c.Assert(afs.MkdirAll(filepath.Join(root, "resources"), 0o755), qt.IsNil) + c.Assert(afs.MkdirAll(filepath.Join(themesDir, "mytheme", "resources"), 0o755), qt.IsNil) - c.Assert(afs.MkdirAll(filepath.Join(root, "myassets", "js", "f2"), 0755), qt.IsNil) + c.Assert(afs.MkdirAll(filepath.Join(root, "myassets", "js", "f2"), 0o755), qt.IsNil) - afero.WriteFile(afs, filepath.Join(filepath.Join(root, "myassets", "scss", "sf1", "a1.scss")), []byte("content"), 0755) - afero.WriteFile(afs, filepath.Join(filepath.Join(root, "myassets", "scss", "sf2", "a3.scss")), []byte("content"), 0755) - afero.WriteFile(afs, filepath.Join(filepath.Join(root, "myassets", "scss", "a2.scss")), []byte("content"), 0755) - afero.WriteFile(afs, filepath.Join(filepath.Join(themesDir, "mytheme", "assets", "scss", "sf2", "a3.scss")), []byte("content"), 0755) - afero.WriteFile(afs, filepath.Join(filepath.Join(themesDir, "mytheme", "assets", "scss", "sf3", "a4.scss")), []byte("content"), 0755) + afero.WriteFile(afs, filepath.Join(filepath.Join(root, "myassets", "scss", "sf1", "a1.scss")), []byte("content"), 0o755) + afero.WriteFile(afs, filepath.Join(filepath.Join(root, "myassets", "scss", "sf2", "a3.scss")), []byte("content"), 0o755) + afero.WriteFile(afs, filepath.Join(filepath.Join(root, "myassets", "scss", "a2.scss")), []byte("content"), 0o755) + afero.WriteFile(afs, filepath.Join(filepath.Join(themesDir, "mytheme", "assets", "scss", "sf2", "a3.scss")), []byte("content"), 0o755) + afero.WriteFile(afs, filepath.Join(filepath.Join(themesDir, "mytheme", "assets", "scss", "sf3", "a4.scss")), []byte("content"), 0o755) - afero.WriteFile(afs, filepath.Join(filepath.Join(themesDir, "mytheme", "resources", "t1.txt")), []byte("content"), 0755) - afero.WriteFile(afs, filepath.Join(filepath.Join(root, "resources", "p1.txt")), []byte("content"), 0755) - afero.WriteFile(afs, filepath.Join(filepath.Join(root, "resources", "p2.txt")), []byte("content"), 0755) + afero.WriteFile(afs, 
filepath.Join(filepath.Join(themesDir, "mytheme", "resources", "t1.txt")), []byte("content"), 0o755) + afero.WriteFile(afs, filepath.Join(filepath.Join(root, "resources", "p1.txt")), []byte("content"), 0o755) + afero.WriteFile(afs, filepath.Join(filepath.Join(root, "resources", "p2.txt")), []byte("content"), 0o755) - afero.WriteFile(afs, filepath.Join(filepath.Join(root, "myassets", "js", "f2", "a1.js")), []byte("content"), 0755) - afero.WriteFile(afs, filepath.Join(filepath.Join(root, "myassets", "js", "a2.js")), []byte("content"), 0755) + afero.WriteFile(afs, filepath.Join(filepath.Join(root, "myassets", "js", "f2", "a1.js")), []byte("content"), 0o755) + afero.WriteFile(afs, filepath.Join(filepath.Join(root, "myassets", "js", "a2.js")), []byte("content"), 0o755) conf := testconfig.GetTestConfig(afs, v) fs := hugofs.NewFrom(afs, conf.BaseConfig()) @@ -212,6 +202,83 @@ func TestRealDirs(t *testing.T) { c.Assert(realDirs[0], qt.Equals, filepath.Join(root, "myassets/scss")) c.Assert(realDirs[len(realDirs)-1], qt.Equals, filepath.Join(themesDir, "mytheme/assets/scss")) + realDirs = bfs.Assets.RealDirs("foo") + c.Assert(len(realDirs), qt.Equals, 0) + + c.Assert(afs.OpenFiles(), qt.HasLen, 0) +} + +func TestWatchFilenames(t *testing.T) { + t.Parallel() + files := ` +-- hugo.toml -- +theme = "t1" +[[module.mounts]] +source = 'content' +target = 'content' +[[module.mounts]] +source = 'content2' +target = 'content/c2' +[[module.mounts]] +source = "hugo_stats.json" +target = "assets/watching/hugo_stats.json" +-- hugo_stats.json -- +Some stats. 
+-- content/foo.md -- +foo +-- content2/bar.md -- +-- themes/t1/layouts/_default/single.html -- +{{ .Content }} +-- themes/t1/static/f1.txt -- +` + b := hugolib.Test(t, files) + bfs := b.H.BaseFs + watchFilenames := bfs.WatchFilenames() + b.Assert(watchFilenames, qt.HasLen, 6) +} + +func TestNoSymlinks(t *testing.T) { + if runtime.GOOS == "windows" { + t.Skip("skip on Windows") + } + files := ` +-- hugo.toml -- +theme = "t1" +-- content/a/foo.md -- +foo +-- static/a/f1.txt -- +F1 text +-- themes/t1/layouts/_default/single.html -- +{{ .Content }} +-- themes/t1/static/a/f1.txt -- +` + tmpDir := t.TempDir() + + wd, _ := os.Getwd() + + for _, component := range []string{"content", "static"} { + aDir := filepath.Join(tmpDir, component, "a") + bDir := filepath.Join(tmpDir, component, "b") + os.MkdirAll(aDir, 0o755) + os.MkdirAll(bDir, 0o755) + os.Chdir(bDir) + os.Symlink("../a", "c") + } + + os.Chdir(wd) + + b := hugolib.NewIntegrationTestBuilder( + hugolib.IntegrationTestConfig{ + T: t, + TxtarString: files, + NeedsOsFS: true, + WorkingDir: tmpDir, + }, + ).Build() + + bfs := b.H.BaseFs + watchFilenames := bfs.WatchFilenames() + b.Assert(watchFilenames, qt.HasLen, 10) } func TestStaticFs(t *testing.T) { @@ -228,10 +295,10 @@ func TestStaticFs(t *testing.T) { themeStaticDir := filepath.Join(workDir, "themes", "t1", "static") themeStaticDir2 := filepath.Join(workDir, "themes", "t2", "static") - afero.WriteFile(afs, filepath.Join(workDir, "mystatic", "f1.txt"), []byte("Hugo Rocks!"), 0755) - afero.WriteFile(afs, filepath.Join(themeStaticDir, "f1.txt"), []byte("Hugo Themes Rocks!"), 0755) - afero.WriteFile(afs, filepath.Join(themeStaticDir, "f2.txt"), []byte("Hugo Themes Still Rocks!"), 0755) - afero.WriteFile(afs, filepath.Join(themeStaticDir2, "f2.txt"), []byte("Hugo Themes Rocks in t2!"), 0755) + afero.WriteFile(afs, filepath.Join(workDir, "mystatic", "f1.txt"), []byte("Hugo Rocks!"), 0o755) + afero.WriteFile(afs, filepath.Join(themeStaticDir, "f1.txt"), []byte("Hugo 
Themes Rocks!"), 0o755) + afero.WriteFile(afs, filepath.Join(themeStaticDir, "f2.txt"), []byte("Hugo Themes Still Rocks!"), 0o755) + afero.WriteFile(afs, filepath.Join(themeStaticDir2, "f2.txt"), []byte("Hugo Themes Rocks in t2!"), 0o755) conf := testconfig.GetTestConfig(afs, v) fs := hugofs.NewFrom(afs, conf.BaseConfig()) @@ -273,17 +340,15 @@ func TestStaticFsMultiHost(t *testing.T) { themeStaticDir := filepath.Join(workDir, "themes", "t1", "static") - afero.WriteFile(afs, filepath.Join(workDir, "mystatic", "f1.txt"), []byte("Hugo Rocks!"), 0755) - afero.WriteFile(afs, filepath.Join(workDir, "static_no", "f1.txt"), []byte("Hugo Rocks in Norway!"), 0755) + afero.WriteFile(afs, filepath.Join(workDir, "mystatic", "f1.txt"), []byte("Hugo Rocks!"), 0o755) + afero.WriteFile(afs, filepath.Join(workDir, "static_no", "f1.txt"), []byte("Hugo Rocks in Norway!"), 0o755) - afero.WriteFile(afs, filepath.Join(themeStaticDir, "f1.txt"), []byte("Hugo Themes Rocks!"), 0755) - afero.WriteFile(afs, filepath.Join(themeStaticDir, "f2.txt"), []byte("Hugo Themes Still Rocks!"), 0755) + afero.WriteFile(afs, filepath.Join(themeStaticDir, "f1.txt"), []byte("Hugo Themes Rocks!"), 0o755) + afero.WriteFile(afs, filepath.Join(themeStaticDir, "f2.txt"), []byte("Hugo Themes Still Rocks!"), 0o755) conf := testconfig.GetTestConfig(afs, v) fs := hugofs.NewFrom(afs, conf.BaseConfig()) - fmt.Println("IS", conf.IsMultihost()) - p, err := paths.New(fs, conf) c.Assert(err, qt.IsNil) bfs, err := filesystems.NewBase(p, nil) @@ -298,54 +363,119 @@ func TestStaticFsMultiHost(t *testing.T) { } func TestMakePathRelative(t *testing.T) { - c := qt.New(t) - v := config.New() - afs := afero.NewMemMapFs() - workDir := "mywork" - v.Set("workingDir", workDir) + files := ` +-- hugo.toml -- +[[module.mounts]] +source = "bar.txt" +target = "assets/foo/baz.txt" +[[module.imports]] +path = "t1" +[[module.imports.mounts]] +source = "src" +target = "assets/foo/bar" +-- bar.txt -- +Bar. +-- themes/t1/src/main.js -- +Main. 
+` + b := hugolib.Test(t, files) + + rel, found := b.H.BaseFs.Assets.MakePathRelative(filepath.FromSlash("/themes/t1/src/main.js")) + b.Assert(found, qt.Equals, true) + b.Assert(rel, qt.Equals, filepath.FromSlash("foo/bar/main.js")) + + rel, found = b.H.BaseFs.Assets.MakePathRelative(filepath.FromSlash("/bar.txt")) + b.Assert(found, qt.Equals, true) + b.Assert(rel, qt.Equals, filepath.FromSlash("foo/baz.txt")) +} - c.Assert(afs.MkdirAll(filepath.Join(workDir, "dist", "d1"), 0777), qt.IsNil) - c.Assert(afs.MkdirAll(filepath.Join(workDir, "static", "d2"), 0777), qt.IsNil) - c.Assert(afs.MkdirAll(filepath.Join(workDir, "dust", "d2"), 0777), qt.IsNil) - - moduleCfg := map[string]any{ - "mounts": []any{ - map[string]any{ - "source": "dist", - "target": "static/mydist", - }, - map[string]any{ - "source": "dust", - "target": "static/foo/bar", - }, - map[string]any{ - "source": "static", - "target": "static", - }, +func TestAbsProjectContentDir(t *testing.T) { + tempDir := t.TempDir() + + files := ` +-- hugo.toml -- +[[module.mounts]] +source = "content" +target = "content" +-- content/foo.md -- +--- +title: "Foo" +--- +` + + b := hugolib.NewIntegrationTestBuilder( + hugolib.IntegrationTestConfig{ + T: t, + WorkingDir: tempDir, + TxtarString: files, }, - } - - v.Set("module", moduleCfg) - - conf := testconfig.GetTestConfig(afs, v) - fs := hugofs.NewFrom(afs, conf.BaseConfig()) + ).Build() + + abs1 := filepath.Join(tempDir, "content", "foo.md") + rel, abs2, err := b.H.BaseFs.AbsProjectContentDir("foo.md") + b.Assert(err, qt.IsNil) + b.Assert(abs2, qt.Equals, abs1) + b.Assert(rel, qt.Equals, filepath.FromSlash("foo.md")) + rel2, abs3, err := b.H.BaseFs.AbsProjectContentDir(abs1) + b.Assert(err, qt.IsNil) + b.Assert(abs3, qt.Equals, abs1) + b.Assert(rel2, qt.Equals, rel) +} - p, err := paths.New(fs, conf) - c.Assert(err, qt.IsNil) - bfs, err := filesystems.NewBase(p, nil) - c.Assert(err, qt.IsNil) +func TestContentReverseLookup(t *testing.T) { + files := ` +-- README.md -- 
+--- +title: README +--- +-- blog/b1.md -- +--- +title: b1 +--- +-- docs/d1.md -- +--- +title: d1 +--- +-- hugo.toml -- +baseURL = "https://example.com/" +[module] +[[module.mounts]] +source = "layouts" +target = "layouts" +[[module.mounts]] +source = "README.md" +target = "content/_index.md" +[[module.mounts]] +source = "blog" +target = "content/posts" +[[module.mounts]] +source = "docs" +target = "content/mydocs" +-- layouts/index.html -- +Home. + +` + b := hugolib.Test(t, files) + + b.AssertFileContent("public/index.html", "Home.") + + stat := func(path string) hugofs.FileMetaInfo { + ps, err := b.H.BaseFs.Content.ReverseLookup(filepath.FromSlash(path)) + b.Assert(err, qt.IsNil) + b.Assert(ps, qt.HasLen, 1) + first := ps[0] + fi, err := b.H.BaseFs.Content.Fs.Stat(filepath.FromSlash(first.Path)) + b.Assert(err, qt.IsNil) + b.Assert(fi, qt.Not(qt.IsNil)) + return fi.(hugofs.FileMetaInfo) + } - sfs := bfs.Static[""] - c.Assert(sfs, qt.Not(qt.IsNil)) + sfs := b.H.Fs.Source - makeRel := func(s string) string { - r, _ := sfs.MakePathRelative(s) - return r - } + _, err := sfs.Stat("blog/b1.md") + b.Assert(err, qt.Not(qt.IsNil)) - c.Assert(makeRel(filepath.Join(workDir, "dist", "d1", "foo.txt")), qt.Equals, filepath.FromSlash("mydist/d1/foo.txt")) - c.Assert(makeRel(filepath.Join(workDir, "static", "d2", "foo.txt")), qt.Equals, filepath.FromSlash("d2/foo.txt")) - c.Assert(makeRel(filepath.Join(workDir, "dust", "d3", "foo.txt")), qt.Equals, filepath.FromSlash("foo/bar/d3/foo.txt")) + _ = stat("blog/b1.md") } func checkFileCount(fs afero.Fs, dirname string, c *qt.C, expected int) { @@ -374,10 +504,7 @@ func countFilesAndGetFilenames(fs afero.Fs, dirname string) (int, []string, erro counter := 0 var filenames []string - wf := func(path string, info hugofs.FileMetaInfo, err error) error { - if err != nil { - return err - } + wf := func(path string, info hugofs.FileMetaInfo) error { if !info.IsDir() { counter++ } @@ -403,9 +530,9 @@ func countFilesAndGetFilenames(fs 
afero.Fs, dirname string) (int, []string, erro func setConfigAndWriteSomeFilesTo(fs afero.Fs, v config.Provider, key, val string, num int) { workingDir := v.GetString("workingDir") v.Set(key, val) - fs.Mkdir(val, 0755) + fs.Mkdir(val, 0o755) for i := 0; i < num; i++ { filename := filepath.Join(workingDir, val, fmt.Sprintf("f%d.txt", i+1)) - afero.WriteFile(fs, filename, []byte(fmt.Sprintf("content:%s:%d", key, i+1)), 0755) + afero.WriteFile(fs, filename, []byte(fmt.Sprintf("content:%s:%d", key, i+1)), 0o755) } } diff --git a/hugolib/frontmatter_test.go b/hugolib/frontmatter_test.go index ade779c38..3a2080b0e 100644 --- a/hugolib/frontmatter_test.go +++ b/hugolib/frontmatter_test.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -43,5 +43,4 @@ Strings: {{ printf "%T" .Params.strings }} {{ range .Params.strings }}Strings: { b.AssertFileContent("public/post/one/index.html", "Ints: []interface {} Int: 1 (int)|Int: 2 (int)|Int: 3 (int)|") b.AssertFileContent("public/post/one/index.html", "Mixed: []interface {} Mixed: 1 (string)|Mixed: 2 (int)|Mixed: 3 (int)|") b.AssertFileContent("public/post/one/index.html", "Strings: []string Strings: 1 (string)|Strings: 2 (string)|Strings: 3 (string)|") - } diff --git a/hugolib/hugo_modules_test.go b/hugolib/hugo_modules_test.go index 44500cae5..b37cf0e78 100644 --- a/hugolib/hugo_modules_test.go +++ b/hugolib/hugo_modules_test.go @@ -28,8 +28,6 @@ import ( "github.com/spf13/afero" - "github.com/gohugoio/hugo/hugofs/files" - "github.com/gohugoio/hugo/common/hugo" "github.com/gohugoio/hugo/common/loggers" @@ -159,7 +157,7 @@ JS imported in module: | }`) b.Build(BuildCfg{}) - b.Assert(npm.Pack(b.H.BaseFs.SourceFs, b.H.BaseFs.Assets.Dirs), qt.IsNil) + b.Assert(npm.Pack(b.H.BaseFs.ProjectSourceFs, 
b.H.BaseFs.AssetsWithDuplicatesPreserved.Fs), qt.IsNil) b.AssertFileContentFn("package.json", func(s string) bool { return s == `{ @@ -218,7 +216,7 @@ JS imported in module: | b.WithSourceFile("package.json", origPackageJSON) b.Build(BuildCfg{}) - b.Assert(npm.Pack(b.H.BaseFs.SourceFs, b.H.BaseFs.Assets.Dirs), qt.IsNil) + b.Assert(npm.Pack(b.H.BaseFs.ProjectSourceFs, b.H.BaseFs.AssetsWithDuplicatesPreserved.Fs), qt.IsNil) b.AssertFileContentFn("package.json", func(s string) bool { return s == `{ @@ -265,7 +263,7 @@ JS imported in module: | b := newTestBuilder(t, "") b.Build(BuildCfg{}) - b.Assert(npm.Pack(b.H.BaseFs.SourceFs, b.H.BaseFs.Assets.Dirs), qt.IsNil) + b.Assert(npm.Pack(b.H.BaseFs.ProjectSourceFs, b.H.BaseFs.AssetsWithDuplicatesPreserved.Fs), qt.IsNil) b.AssertFileContentFn("package.json", func(s string) bool { return s == `{ @@ -656,139 +654,6 @@ min_version = 0.55.0 c.Assert(logger.LoggCount(logg.LevelWarn), qt.Equals, 3) } -func TestModulesSymlinks(t *testing.T) { - skipSymlink(t) - - wd, _ := os.Getwd() - defer func() { - os.Chdir(wd) - }() - - c := qt.New(t) - workingDir, clean, err := htesting.CreateTempDir(hugofs.Os, "hugo-mod-sym") - c.Assert(err, qt.IsNil) - - // We need to use the OS fs for this. - cfg := config.New() - cfg.Set("workingDir", workingDir) - cfg.Set("publishDir", "public") - fs := hugofs.NewFromOld(hugofs.Os, cfg) - - defer clean() - - const homeTemplate = ` -Data: {{ .Site.Data }} -` - - createDirsAndFiles := func(baseDir string) { - for _, dir := range files.ComponentFolders { - realDir := filepath.Join(baseDir, dir, "real") - c.Assert(os.MkdirAll(realDir, 0o777), qt.IsNil) - c.Assert(afero.WriteFile(fs.Source, filepath.Join(realDir, "data.toml"), []byte("[hello]\nother = \"hello\""), 0o777), qt.IsNil) - } - - c.Assert(afero.WriteFile(fs.Source, filepath.Join(baseDir, "layouts", "index.html"), []byte(homeTemplate), 0o777), qt.IsNil) - } - - // Create project dirs and files. 
- createDirsAndFiles(workingDir) - // Create one module inside the default themes folder. - themeDir := filepath.Join(workingDir, "themes", "mymod") - createDirsAndFiles(themeDir) - - createSymlinks := func(baseDir, id string) { - for _, dir := range files.ComponentFolders { - // Issue #9119: private use language tags cannot exceed 8 characters. - if dir != "i18n" { - c.Assert(os.Chdir(filepath.Join(baseDir, dir)), qt.IsNil) - c.Assert(os.Symlink("real", fmt.Sprintf("realsym%s", id)), qt.IsNil) - c.Assert(os.Chdir(filepath.Join(baseDir, dir, "real")), qt.IsNil) - c.Assert(os.Symlink("data.toml", fmt.Sprintf(filepath.FromSlash("datasym%s.toml"), id)), qt.IsNil) - } - } - } - - createSymlinks(workingDir, "project") - createSymlinks(themeDir, "mod") - - config := ` -baseURL = "https://example.com" -theme="mymod" -defaultContentLanguage="nn" -defaultContentLanguageInSubDir=true - -[languages] -[languages.nn] -weight = 1 -[languages.en] -weight = 2 - - -` - - b := newTestSitesBuilder(t).WithNothingAdded().WithWorkingDir(workingDir) - b.WithLogger(loggers.NewDefault()) - b.Fs = fs - - b.WithConfigFile("toml", config) - c.Assert(os.Chdir(workingDir), qt.IsNil) - - b.Build(BuildCfg{}) - - b.AssertFileContentFn(filepath.Join("public", "en", "index.html"), func(s string) bool { - // Symbolic links only followed in project. There should be WARNING logs. 
- return !strings.Contains(s, "symmod") && strings.Contains(s, "symproject") - }) - - bfs := b.H.BaseFs - - for i, componentFs := range []afero.Fs{ - bfs.Static[""].Fs, - bfs.Archetypes.Fs, - bfs.Content.Fs, - bfs.Data.Fs, - bfs.Assets.Fs, - bfs.I18n.Fs, - } { - - if i != 0 { - continue - } - - for j, id := range []string{"mod", "project"} { - - statCheck := func(fs afero.Fs, filename string, isDir bool) { - shouldFail := j == 0 - if !shouldFail && i == 0 { - // Static dirs only supports symlinks for files - shouldFail = isDir - } - - _, err := fs.Stat(filepath.FromSlash(filename)) - if err != nil { - if i > 0 && strings.HasSuffix(filename, "toml") && strings.Contains(err.Error(), "files not supported") { - // OK - return - } - } - - if shouldFail { - c.Assert(err, qt.Not(qt.IsNil)) - c.Assert(err, qt.Equals, hugofs.ErrPermissionSymlink) - } else { - c.Assert(err, qt.IsNil) - } - } - - c.Logf("Checking %d:%d %q", i, j, id) - - statCheck(componentFs, fmt.Sprintf("realsym%s", id), true) - statCheck(componentFs, fmt.Sprintf("real/datasym%s.toml", id), false) - - } - } -} - func TestMountsProject(t *testing.T) { t.Parallel() @@ -820,248 +685,21 @@ title: "My Page" // https://github.com/gohugoio/hugo/issues/6684 func TestMountsContentFile(t *testing.T) { - t.Parallel() - c := qt.New(t) - workingDir, clean, err := htesting.CreateTempDir(hugofs.Os, "hugo-modules-content-file") - c.Assert(err, qt.IsNil) - defer clean() - - configTemplate := ` -baseURL = "https://example.com" -title = "My Modular Site" -workingDir = %q - + files := ` +-- hugo.toml -- +disableKinds = ["taxonomy", "term", "RSS", "sitemap", "robotsTXT", "page", "section"] +disableLiveReload = true [module] - [[module.mounts]] - source = "README.md" - target = "content/_index.md" - [[module.mounts]] - source = "mycontent" - target = "content/blog" - -` - - tomlConfig := fmt.Sprintf(configTemplate, workingDir) - - b := newTestSitesBuilder(t).Running() - - cfg := config.New() - cfg.Set("workingDir", workingDir) - 
cfg.Set("publishDir", "public") - - b.Fs = hugofs.NewDefault(cfg) - - b.WithWorkingDir(workingDir).WithConfigFile("toml", tomlConfig) - b.WithTemplatesAdded("index.html", ` -{{ .Title }} -{{ .Content }} - -{{ $readme := .Site.GetPage "/README.md" }} -{{ with $readme }}README: {{ .Title }}|Filename: {{ path.Join .File.Filename }}|Path: {{ path.Join .File.Path }}|FilePath: {{ path.Join .File.FileInfo.Meta.PathFile }}|{{ end }} - - -{{ $mypage := .Site.GetPage "/blog/mypage.md" }} -{{ with $mypage }}MYPAGE: {{ .Title }}|Path: {{ path.Join .File.Path }}|FilePath: {{ path.Join .File.FileInfo.Meta.PathFile }}|{{ end }} -{{ $mybundle := .Site.GetPage "/blog/mybundle" }} -{{ with $mybundle }}MYBUNDLE: {{ .Title }}|Path: {{ path.Join .File.Path }}|FilePath: {{ path.Join .File.FileInfo.Meta.PathFile }}|{{ end }} - - -`, "_default/_markup/render-link.html", ` -{{ $link := .Destination }} -{{ $isRemote := strings.HasPrefix $link "http" }} -{{- if not $isRemote -}} -{{ $url := urls.Parse .Destination }} -{{ $fragment := "" }} -{{- with $url.Fragment }}{{ $fragment = printf "#%s" . }}{{ end -}} -{{- with .Page.GetPage $url.Path }}{{ $link = printf "%s%s" .Permalink $fragment }}{{ end }}{{ end -}} -<a href="{{ $link | safeURL }}"{{ with .Title}} title="{{ . }}"{{ end }}{{ if $isRemote }} target="_blank"{{ end }}>{{ .Text | safeHTML }}</a> -`) - - os.Mkdir(filepath.Join(workingDir, "mycontent"), 0o777) - os.Mkdir(filepath.Join(workingDir, "mycontent", "mybundle"), 0o777) - - b.WithSourceFile("README.md", `--- -title: "Readme Title" ---- - -Readme Content. 
-`, - filepath.Join("mycontent", "mypage.md"), ` ---- -title: "My Page" ---- - - -* [Relative Link From Page](mybundle) -* [Relative Link From Page, filename](mybundle/index.md) -* [Link using original path](/mycontent/mybundle/index.md) - - -`, filepath.Join("mycontent", "mybundle", "index.md"), ` ---- -title: "My Bundle" ---- - -* [Dot Relative Link From Bundle](../mypage.md) -* [Link using original path](/mycontent/mypage.md) -* [Link to Home](/) -* [Link to Home, README.md](/README.md) -* [Link to Home, _index.md](/_index.md) - -`) - - b.Build(BuildCfg{}) - - b.AssertFileContent("public/index.html", ` -README: Readme Title -/README.md|Path: _index.md|FilePath: README.md -Readme Content. -MYPAGE: My Page|Path: blog/mypage.md|FilePath: mycontent/mypage.md| -MYBUNDLE: My Bundle|Path: blog/mybundle/index.md|FilePath: mycontent/mybundle/index.md| -`) - b.AssertFileContent("public/blog/mypage/index.html", ` -<a href="https://example.com/blog/mybundle/">Relative Link From Page</a> -<a href="https://example.com/blog/mybundle/">Relative Link From Page, filename</a> -<a href="https://example.com/blog/mybundle/">Link using original path</a> - -`) - b.AssertFileContent("public/blog/mybundle/index.html", ` -<a href="https://example.com/blog/mypage/">Dot Relative Link From Bundle</a> -<a href="https://example.com/blog/mypage/">Link using original path</a> -<a href="https://example.com/">Link to Home</a> -<a href="https://example.com/">Link to Home, README.md</a> -<a href="https://example.com/">Link to Home, _index.md</a> -`) - - b.EditFiles("README.md", `--- -title: "Readme Edit" ---- -`) - - b.Build(BuildCfg{}) - - b.AssertFileContent("public/index.html", ` -Readme Edit -`) -} - -func TestMountsPaths(t *testing.T) { - c := qt.New(t) - - type test struct { - b *sitesBuilder - clean func() - workingDir string - } - - prepare := func(c *qt.C, mounts string) test { - workingDir, clean, err := htesting.CreateTempDir(hugofs.Os, "hugo-mounts-paths") - c.Assert(err, qt.IsNil) - - 
configTemplate := ` -baseURL = "https://example.com" -title = "My Modular Site" -workingDir = %q - -%s - -` - tomlConfig := fmt.Sprintf(configTemplate, workingDir, mounts) - tomlConfig = strings.Replace(tomlConfig, "WORKING_DIR", workingDir, -1) - - b := newTestSitesBuilder(c).Running() - - cfg := config.New() - cfg.Set("workingDir", workingDir) - cfg.Set("publishDir", "public") - b.Fs = hugofs.NewDefault(cfg) - - os.MkdirAll(filepath.Join(workingDir, "content", "blog"), 0o777) - - b.WithWorkingDir(workingDir).WithConfigFile("toml", tomlConfig) - - return test{ - b: b, - clean: clean, - workingDir: workingDir, - } - } - - c.Run("Default", func(c *qt.C) { - mounts := `` - - test := prepare(c, mounts) - b := test.b - defer test.clean() - - b.WithContent("blog/p1.md", `--- -title: P1 ----`) - - b.Build(BuildCfg{}) - - p := b.GetPage("blog/p1.md") - f := p.File().FileInfo().Meta() - b.Assert(filepath.ToSlash(f.Path), qt.Equals, "blog/p1.md") - b.Assert(filepath.ToSlash(f.PathFile()), qt.Equals, "content/blog/p1.md") - - b.Assert(b.H.BaseFs.Layouts.Path(filepath.Join(test.workingDir, "layouts", "_default", "single.html")), qt.Equals, filepath.FromSlash("_default/single.html")) - }) - - c.Run("Mounts", func(c *qt.C) { - absDir, clean, err := htesting.CreateTempDir(hugofs.Os, "hugo-mounts-paths-abs") - c.Assert(err, qt.IsNil) - defer clean() - - mounts := `[module] - [[module.mounts]] - source = "README.md" - target = "content/_index.md" - [[module.mounts]] - source = "mycontent" - target = "content/blog" - [[module.mounts]] - source = "subdir/mypartials" - target = "layouts/partials" - [[module.mounts]] - source = %q - target = "layouts/shortcodes" +[[module.mounts]] +source = "README.md" +target = "content/_index.md" +-- README.md -- +# Hello World +-- layouts/index.html -- +Home: {{ .Title }}|{{ .Content }}| ` - mounts = fmt.Sprintf(mounts, filepath.Join(absDir, "/abs/myshortcodes")) - - test := prepare(c, mounts) - b := test.b - defer test.clean() - - subContentDir := 
filepath.Join(test.workingDir, "mycontent", "sub") - os.MkdirAll(subContentDir, 0o777) - myPartialsDir := filepath.Join(test.workingDir, "subdir", "mypartials") - os.MkdirAll(myPartialsDir, 0o777) - - absShortcodesDir := filepath.Join(absDir, "abs", "myshortcodes") - os.MkdirAll(absShortcodesDir, 0o777) - - b.WithSourceFile("README.md", "---\ntitle: Readme\n---") - b.WithSourceFile("mycontent/sub/p1.md", "---\ntitle: P1\n---") - - b.WithSourceFile(filepath.Join(absShortcodesDir, "myshort.html"), "MYSHORT") - b.WithSourceFile(filepath.Join(myPartialsDir, "mypartial.html"), "MYPARTIAL") - - b.Build(BuildCfg{}) - - p1_1 := b.GetPage("/blog/sub/p1.md") - p1_2 := b.GetPage("/mycontent/sub/p1.md") - b.Assert(p1_1, qt.Not(qt.IsNil)) - b.Assert(p1_2, qt.Equals, p1_1) - - f := p1_1.File().FileInfo().Meta() - b.Assert(filepath.ToSlash(f.Path), qt.Equals, "blog/sub/p1.md") - b.Assert(filepath.ToSlash(f.PathFile()), qt.Equals, "mycontent/sub/p1.md") - b.Assert(b.H.BaseFs.Layouts.Path(filepath.Join(myPartialsDir, "mypartial.html")), qt.Equals, filepath.FromSlash("partials/mypartial.html")) - b.Assert(b.H.BaseFs.Layouts.Path(filepath.Join(absShortcodesDir, "myshort.html")), qt.Equals, filepath.FromSlash("shortcodes/myshort.html")) - b.Assert(b.H.BaseFs.Content.Path(filepath.Join(subContentDir, "p1.md")), qt.Equals, filepath.FromSlash("blog/sub/p1.md")) - b.Assert(b.H.BaseFs.Content.Path(filepath.Join(test.workingDir, "README.md")), qt.Equals, filepath.FromSlash("_index.md")) - }) + b := Test(t, files) + b.AssertFileContent("public/index.html", "Home: |<h1 id=\"hello-world\">Hello World</h1>\n|") } // https://github.com/gohugoio/hugo/issues/6299 diff --git a/hugolib/hugo_sites.go b/hugolib/hugo_sites.go index f3f5c3eb2..80e754453 100644 --- a/hugolib/hugo_sites.go +++ b/hugolib/hugo_sites.go @@ -1,4 +1,4 @@ -// Copyright 2019 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. 
// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -17,27 +17,25 @@ import ( "context" "fmt" "io" - "path/filepath" - "sort" "strings" "sync" "sync/atomic" "github.com/bep/logg" + "github.com/gohugoio/hugo/cache/dynacache" "github.com/gohugoio/hugo/config/allconfig" "github.com/gohugoio/hugo/hugofs/glob" + "github.com/gohugoio/hugo/hugolib/doctree" "github.com/fsnotify/fsnotify" - "github.com/gohugoio/hugo/identity" - - radix "github.com/armon/go-radix" - "github.com/gohugoio/hugo/output" "github.com/gohugoio/hugo/parser/metadecoders" "github.com/gohugoio/hugo/common/hugo" + "github.com/gohugoio/hugo/common/maps" "github.com/gohugoio/hugo/common/para" + "github.com/gohugoio/hugo/common/types" "github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/source" @@ -47,9 +45,7 @@ import ( "github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/lazy" - "github.com/gohugoio/hugo/resources/kinds" "github.com/gohugoio/hugo/resources/page" - "github.com/gohugoio/hugo/resources/page/pagemeta" ) // HugoSites represents the sites to build. Each site represents a language. @@ -74,13 +70,19 @@ type HugoSites struct { // As loaded from the /data dirs data map[string]any - contentInit sync.Once - content *pageMaps + // Cache for page listings. + cachePages *dynacache.Partition[string, page.Pages] - postRenderInit sync.Once + // Before Hugo 0.122.0 we managed all translations in a map using a translationKey + // that could be overridden in front matter. + // Now the different page dimensions (e.g. language) are built-in to the page trees above. + // But we sill need to support the overridden translationKey, but that should + // be relatively rare and low volume. + translationKeyPages *maps.SliceCache[page.Page] - // Keeps track of bundle directories and symlinks to enable partial rebuilding. 
- ContentChanges *contentChangeMap + pageTrees *pageTrees + + postRenderInit sync.Once // File change events with filename stored in this map will be skipped. skipRebuildForFilenamesMu sync.Mutex @@ -88,11 +90,12 @@ type HugoSites struct { init *hugoSitesInit - workers *para.Workers - numWorkers int + workersSite *para.Workers + numWorkersSites int + numWorkers int *fatalErrorHandler - *testCounters + *buildCounters } // ShouldSkipFileChangeEvent allows skipping filesystem event early before @@ -103,31 +106,17 @@ func (h *HugoSites) ShouldSkipFileChangeEvent(ev fsnotify.Event) bool { return h.skipRebuildForFilenames[ev.Name] } -func (h *HugoSites) getContentMaps() *pageMaps { - h.contentInit.Do(func() { - h.content = newPageMaps(h) - }) - return h.content -} - // Only used in tests. -type testCounters struct { - contentRenderCounter uint64 - pageRenderCounter uint64 +type buildCounters struct { + contentRenderCounter atomic.Uint64 + pageRenderCounter atomic.Uint64 } -func (h *testCounters) IncrContentRender() { - if h == nil { - return +func (c *buildCounters) loggFields() logg.Fields { + return logg.Fields{ + {Name: "pages", Value: c.pageRenderCounter.Load()}, + {Name: "content", Value: c.contentRenderCounter.Load()}, } - atomic.AddUint64(&h.contentRenderCounter, 1) -} - -func (h *testCounters) IncrPageRender() { - if h == nil { - return - } - atomic.AddUint64(&h.pageRenderCounter, 1) } type fatalErrorHandler struct { @@ -172,16 +161,6 @@ type hugoSitesInit struct { // Loads the Git info and CODEOWNERS for all the pages if enabled. gitInfo *lazy.Init - - // Maps page translations. - translations *lazy.Init -} - -func (h *hugoSitesInit) Reset() { - h.data.Reset() - h.layouts.Reset() - h.gitInfo.Reset() - h.translations.Reset() } func (h *HugoSites) Data() map[string]any { @@ -192,6 +171,41 @@ func (h *HugoSites) Data() map[string]any { return h.data } +// Pages returns all pages for all sites. 
+func (h *HugoSites) Pages() page.Pages { + key := "pages" + v, err := h.cachePages.GetOrCreate(key, func(string) (page.Pages, error) { + var pages page.Pages + for _, s := range h.Sites { + pages = append(pages, s.Pages()...) + } + page.SortByDefault(pages) + return pages, nil + }) + if err != nil { + panic(err) + } + return v +} + +// Pages returns all regularpages for all sites. +func (h *HugoSites) RegularPages() page.Pages { + key := "regular-pages" + v, err := h.cachePages.GetOrCreate(key, func(string) (page.Pages, error) { + var pages page.Pages + for _, s := range h.Sites { + pages = append(pages, s.RegularPages()...) + } + page.SortByDefault(pages) + + return pages, nil + }) + if err != nil { + panic(err) + } + return v +} + func (h *HugoSites) gitInfoForPage(p page.Page) (source.GitInfo, error) { if _, err := h.init.gitInfo.Do(context.Background()); err != nil { return source.GitInfo{}, err @@ -283,16 +297,24 @@ func (h *HugoSites) PrintProcessingStats(w io.Writer) { func (h *HugoSites) GetContentPage(filename string) page.Page { var p page.Page - h.getContentMaps().walkBundles(func(b *contentNode) bool { - if b.p == nil || b.fi == nil { + h.withPage(func(s string, p2 *pageState) bool { + if p2.File() == nil { return false } - if b.fi.Meta().Filename == filename { - p = b.p + if p2.File().FileInfo().Meta().Filename == filename { + p = p2 return true } + for _, r := range p2.Resources().ByType(pageResourceType) { + p3 := r.(page.Page) + if p3.File() != nil && p3.File().FileInfo().Meta().Filename == filename { + p = p3 + return true + } + } + return false }) @@ -320,20 +342,10 @@ func (h *HugoSites) loadGitInfo() error { // Reset resets the sites and template caches etc., making it ready for a full rebuild. 
func (h *HugoSites) reset(config *BuildCfg) { - if config.ResetState { - for _, s := range h.Sites { - if r, ok := s.Fs.PublishDir.(hugofs.Reseter); ok { - r.Reset() - } - } - } - h.fatalErrorHandler = &fatalErrorHandler{ h: h, donec: make(chan bool), } - - h.init.Reset() } // resetLogs resets the log counters etc. Used to do a new build on the same sites. @@ -345,43 +357,42 @@ func (h *HugoSites) resetLogs() { } func (h *HugoSites) withSite(fn func(s *Site) error) error { - if h.workers == nil { - for _, s := range h.Sites { - if err := fn(s); err != nil { - return err - } + for _, s := range h.Sites { + if err := fn(s); err != nil { + return err } - return nil } + return nil +} - g, _ := h.workers.Start(context.Background()) - for _, s := range h.Sites { - s := s - g.Run(func() error { - return fn(s) - }) - } - return g.Wait() +func (h *HugoSites) withPage(fn func(s string, p *pageState) bool) { + h.withSite(func(s *Site) error { + w := &doctree.NodeShiftTreeWalker[contentNodeI]{ + Tree: s.pageMap.treePages, + LockType: doctree.LockTypeRead, + Handle: func(s string, n contentNodeI, match doctree.DimensionFlag) (bool, error) { + return fn(s, n.(*pageState)), nil + }, + } + return w.Walk(context.Background()) + }) } // BuildCfg holds build options used to, as an example, skip the render step. type BuildCfg struct { - // Reset site state before build. Use to force full rebuilds. - ResetState bool // Skip rendering. Useful for testing. SkipRender bool // Use this to indicate what changed (for rebuilds). whatChanged *whatChanged - // This is a partial re-render of some selected pages. This means - // we should skip most of the processing. + // This is a partial re-render of some selected pages. PartialReRender bool // Set in server mode when the last build failed for some reason. ErrRecovery bool // Recently visited URLs. This is used for partial re-rendering. 
- RecentlyVisited map[string]bool + RecentlyVisited *types.EvictingStringQueue // Can be set to build only with a sub set of the content source. ContentInclusionFilter *glob.FilenameFilter @@ -389,174 +400,95 @@ type BuildCfg struct { // Set when the buildlock is already acquired (e.g. the archetype content builder). NoBuildLock bool - testCounters *testCounters + testCounters *buildCounters } -// shouldRender is used in the Fast Render Mode to determine if we need to re-render -// a Page: If it is recently visited (the home pages will always be in this set) or changed. -// Note that a page does not have to have a content page / file. -// For regular builds, this will always return true. -// TODO(bep) rename/work this. +// shouldRender returns whether this output format should be rendered or not. func (cfg *BuildCfg) shouldRender(p *pageState) bool { - if p == nil { - return false - } - - if p.forceRender { - return true - } - - if len(cfg.RecentlyVisited) == 0 { - return true - } - - if cfg.RecentlyVisited[p.RelPermalink()] { + if !p.renderOnce { return true } - if cfg.whatChanged != nil && !p.File().IsZero() { - return cfg.whatChanged.files[p.File().Filename()] - } - - return false -} - -func (h *HugoSites) renderCrossSitesSitemap() error { - if h.Conf.IsMultihost() || !(h.Conf.DefaultContentLanguageInSubdir() || h.Conf.IsMultiLingual()) { - return nil - } - - sitemapEnabled := false - for _, s := range h.Sites { - if s.conf.IsKindEnabled(kinds.KindSitemap) { - sitemapEnabled = true - break - } - } - - if !sitemapEnabled { - return nil - } + // The render state is incremented on render and reset when a related change is detected. + // Note that this is set per output format. + shouldRender := p.renderState == 0 - s := h.Sites[0] - // We don't have any page context to pass in here. 
- ctx := context.Background() - - templ := s.lookupLayouts("sitemapindex.xml", "_default/sitemapindex.xml", "_internal/_default/sitemapindex.xml") - return s.renderAndWriteXML(ctx, &s.PathSpec.ProcessingStats.Sitemaps, "sitemapindex", - s.conf.Sitemap.Filename, h.Sites, templ) -} - -func (h *HugoSites) renderCrossSitesRobotsTXT() error { - if h.Configs.IsMultihost { - return nil - } - if !h.Configs.Base.EnableRobotsTXT { - return nil + if !shouldRender { + return false } - s := h.Sites[0] + fastRenderMode := cfg.RecentlyVisited.Len() > 0 - p, err := newPageStandalone(&pageMeta{ - s: s, - kind: kinds.KindRobotsTXT, - urlPaths: pagemeta.URLPath{ - URL: "robots.txt", - }, - }, - output.RobotsTxtFormat) - if err != nil { - return err + if !fastRenderMode { + // Not in fast render mode or first time render. + return shouldRender } if !p.render { - return nil + // Not be to rendered for this output format. + return false } - templ := s.lookupLayouts("robots.txt", "_default/robots.txt", "_internal/_default/robots.txt") - - return s.renderAndWritePage(&s.PathSpec.ProcessingStats.Pages, "Robots Txt", "robots.txt", p, templ) -} - -func (h *HugoSites) removePageByFilename(filename string) { - h.getContentMaps().withMaps(func(m *pageMap) error { - m.deleteBundleMatching(func(b *contentNode) bool { - if b.p == nil { - return false - } - - if b.fi == nil { - return false - } - - return b.fi.Meta().Filename == filename - }) - return nil - }) -} + if p.outputFormat().IsHTML { + // This is fast render mode and the output format is HTML, + // rerender if this page is one of the recently visited. + return cfg.RecentlyVisited.Contains(p.RelPermalink()) + } -func (h *HugoSites) createPageCollections() error { - allPages := newLazyPagesFactory(func() page.Pages { - var pages page.Pages - for _, s := range h.Sites { - pages = append(pages, s.Pages()...) + // In fast render mode, we want to avoid re-rendering the sitemaps etc. and + // other big listings whenever we e.g. 
change a content file, + // but we want partial renders of the recently visited pages to also include + // alternative formats of the same HTML page (e.g. RSS, JSON). + for _, po := range p.pageOutputs { + if po.render && po.f.IsHTML && cfg.RecentlyVisited.Contains(po.RelPermalink()) { + return true } - - page.SortByDefault(pages) - - return pages - }) - - allRegularPages := newLazyPagesFactory(func() page.Pages { - return h.findPagesByKindIn(kinds.KindPage, allPages.get()) - }) - - for _, s := range h.Sites { - s.PageCollections.allPages = allPages - s.PageCollections.allRegularPages = allRegularPages } - return nil + return false } func (s *Site) preparePagesForRender(isRenderingSite bool, idx int) error { var err error - s.pageMap.withEveryBundlePage(func(p *pageState) bool { - if err = p.initOutputFormat(isRenderingSite, idx); err != nil { - return true + + initPage := func(p *pageState) error { + if err = p.shiftToOutputFormat(isRenderingSite, idx); err != nil { + return err } - return false - }) - return nil -} + return nil + } -// Pages returns all pages for all sites. 
-func (h *HugoSites) Pages() page.Pages { - return h.Sites[0].AllPages() + return s.pageMap.forEeachPageIncludingBundledPages(nil, + func(p *pageState) (bool, error) { + return false, initPage(p) + }, + ) } -func (h *HugoSites) loadData(fis []hugofs.FileMetaInfo) (err error) { - spec := source.NewSourceSpec(h.PathSpec, nil, nil) - +func (h *HugoSites) loadData() error { h.data = make(map[string]any) - for _, fi := range fis { - basePath := fi.Meta().Path - fileSystem := spec.NewFilesystemFromFileMetaInfo(fi) - files, err := fileSystem.Files() - if err != nil { - return err - } - for _, r := range files { - if err := h.handleDataFile(basePath, r); err != nil { - return err - } - } - } + w := hugofs.NewWalkway( + hugofs.WalkwayConfig{ + Fs: h.PathSpec.BaseFs.Data.Fs, + WalkFn: func(path string, fi hugofs.FileMetaInfo) error { + if fi.IsDir() { + return nil + } + pi := fi.Meta().PathInfo + if pi == nil { + panic("no path info") + } + return h.handleDataFile(source.NewFileInfo(fi)) + }, + }) - return + if err := w.Walk(); err != nil { + return err + } + return nil } -func (h *HugoSites) handleDataFile(basePath string, r source.File) error { +func (h *HugoSites) handleDataFile(r *source.File) error { var current map[string]any f, err := r.FileInfo().Meta().Open() @@ -567,8 +499,8 @@ func (h *HugoSites) handleDataFile(basePath string, r source.File) error { // Crawl in data tree to insert data current = h.data - dataPath := filepath.Join(basePath, r.Dir()) - keyParts := strings.Split(dataPath, helpers.FilePathSeparator) + dataPath := r.FileInfo().Meta().PathInfo.Dir()[1:] + keyParts := strings.Split(dataPath, "/") for _, key := range keyParts { if key != "" { @@ -635,17 +567,12 @@ func (h *HugoSites) handleDataFile(basePath string, r source.File) error { return nil } -func (h *HugoSites) errWithFileContext(err error, f source.File) error { - fim, ok := f.FileInfo().(hugofs.FileMetaInfo) - if !ok { - return err - } - realFilename := fim.Meta().Filename - - return 
herrors.NewFileErrorFromFile(err, realFilename, h.SourceSpec.Fs.Source, nil) +func (h *HugoSites) errWithFileContext(err error, f *source.File) error { + realFilename := f.FileInfo().Meta().Filename + return herrors.NewFileErrorFromFile(err, realFilename, h.Fs.Source, nil) } -func (h *HugoSites) readData(f source.File) (any, error) { +func (h *HugoSites) readData(f *source.File) (any, error) { file, err := f.FileInfo().Meta().Open() if err != nil { return nil, fmt.Errorf("readData: failed to open data file: %w", err) @@ -656,178 +583,3 @@ func (h *HugoSites) readData(f source.File) (any, error) { format := metadecoders.FormatFromString(f.Ext()) return metadecoders.Default.Unmarshal(content, format) } - -func (h *HugoSites) findPagesByKindIn(kind string, inPages page.Pages) page.Pages { - return h.Sites[0].findPagesByKindIn(kind, inPages) -} - -func (h *HugoSites) resetPageState() { - h.getContentMaps().walkBundles(func(n *contentNode) bool { - if n.p == nil { - return false - } - p := n.p - for _, po := range p.pageOutputs { - if po.cp == nil { - continue - } - po.cp.Reset() - } - - return false - }) -} - -func (h *HugoSites) resetPageStateFromEvents(idset identity.Identities) { - h.getContentMaps().walkBundles(func(n *contentNode) bool { - if n.p == nil { - return false - } - p := n.p - OUTPUTS: - for _, po := range p.pageOutputs { - if po.cp == nil { - continue - } - for id := range idset { - if po.cp.dependencyTracker.Search(id) != nil { - po.cp.Reset() - continue OUTPUTS - } - } - } - - if p.shortcodeState == nil { - return false - } - - for _, s := range p.shortcodeState.shortcodes { - for _, templ := range s.templs { - sid := templ.(identity.Manager) - for id := range idset { - if sid.Search(id) != nil { - for _, po := range p.pageOutputs { - if po.cp != nil { - po.cp.Reset() - } - } - return false - } - } - } - } - return false - }) -} - -// Used in partial reloading to determine if the change is in a bundle. 
-type contentChangeMap struct { - mu sync.RWMutex - - // Holds directories with leaf bundles. - leafBundles *radix.Tree - - // Holds directories with branch bundles. - branchBundles map[string]bool - - pathSpec *helpers.PathSpec - - // Hugo supports symlinked content (both directories and files). This - // can lead to situations where the same file can be referenced from several - // locations in /content -- which is really cool, but also means we have to - // go an extra mile to handle changes. - // This map is only used in watch mode. - // It maps either file to files or the real dir to a set of content directories - // where it is in use. - symContentMu sync.Mutex - symContent map[string]map[string]bool -} - -func (m *contentChangeMap) add(dirname string, tp bundleDirType) { - m.mu.Lock() - if !strings.HasSuffix(dirname, helpers.FilePathSeparator) { - dirname += helpers.FilePathSeparator - } - switch tp { - case bundleBranch: - m.branchBundles[dirname] = true - case bundleLeaf: - m.leafBundles.Insert(dirname, true) - default: - m.mu.Unlock() - panic("invalid bundle type") - } - m.mu.Unlock() -} - -func (m *contentChangeMap) resolveAndRemove(filename string) (string, bundleDirType) { - m.mu.RLock() - defer m.mu.RUnlock() - - // Bundles share resources, so we need to start from the virtual root. - relFilename := m.pathSpec.RelContentDir(filename) - dir, name := filepath.Split(relFilename) - if !strings.HasSuffix(dir, helpers.FilePathSeparator) { - dir += helpers.FilePathSeparator - } - - if _, found := m.branchBundles[dir]; found { - delete(m.branchBundles, dir) - return dir, bundleBranch - } - - if key, _, found := m.leafBundles.LongestPrefix(dir); found { - m.leafBundles.Delete(key) - dir = string(key) - return dir, bundleLeaf - } - - fileTp, isContent := classifyBundledFile(name) - if isContent && fileTp != bundleNot { - // A new bundle. 
- return dir, fileTp - } - - return dir, bundleNot -} - -func (m *contentChangeMap) addSymbolicLinkMapping(fim hugofs.FileMetaInfo) { - meta := fim.Meta() - if !meta.IsSymlink { - return - } - m.symContentMu.Lock() - - from, to := meta.Filename, meta.OriginalFilename - if fim.IsDir() { - if !strings.HasSuffix(from, helpers.FilePathSeparator) { - from += helpers.FilePathSeparator - } - } - - mm, found := m.symContent[from] - - if !found { - mm = make(map[string]bool) - m.symContent[from] = mm - } - mm[to] = true - m.symContentMu.Unlock() -} - -func (m *contentChangeMap) GetSymbolicLinkMappings(dir string) []string { - mm, found := m.symContent[dir] - if !found { - return nil - } - dirs := make([]string, len(mm)) - i := 0 - for dir := range mm { - dirs[i] = dir - i++ - } - - sort.Strings(dirs) - - return dirs -} diff --git a/hugolib/hugo_sites_build.go b/hugolib/hugo_sites_build.go index a2b87b5e7..257949334 100644 --- a/hugolib/hugo_sites_build.go +++ b/hugolib/hugo_sites_build.go @@ -1,4 +1,4 @@ -// Copyright 2019 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -19,38 +19,47 @@ import ( "encoding/json" "errors" "fmt" + "os" + "path" "path/filepath" "strings" "time" "github.com/bep/logg" + "github.com/gohugoio/hugo/cache/dynacache" + "github.com/gohugoio/hugo/deps" "github.com/gohugoio/hugo/hugofs/files" + "github.com/gohugoio/hugo/hugofs/glob" + "github.com/gohugoio/hugo/identity" + "github.com/gohugoio/hugo/output" "github.com/gohugoio/hugo/publisher" + "github.com/gohugoio/hugo/source" "github.com/gohugoio/hugo/tpl" "github.com/gohugoio/hugo/hugofs" + "github.com/gohugoio/hugo/common/herrors" "github.com/gohugoio/hugo/common/loggers" "github.com/gohugoio/hugo/common/para" + "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/config" + "github.com/gohugoio/hugo/resources/page" + "github.com/gohugoio/hugo/resources/page/siteidentities" "github.com/gohugoio/hugo/resources/postpub" "github.com/spf13/afero" - "github.com/gohugoio/hugo/output" - "github.com/fsnotify/fsnotify" ) // Build builds all sites. If filesystem events are provided, // this is considered to be a potential partial rebuild. func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error { - if h == nil { - return errors.New("cannot build nil *HugoSites") - } + infol := h.Log.InfoCommand("build") + defer loggers.TimeTrackf(infol, time.Now(), nil, "") if h.Deps == nil { - return errors.New("cannot build nil *Deps") + panic("must have deps") } if !config.NoBuildLock { @@ -67,8 +76,6 @@ func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error { } }() - infol := h.Log.InfoCommand("build") - errCollector := h.StartErrorCollector() errs := make(chan error) @@ -91,14 +98,16 @@ func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error { h.Metrics.Reset() } - h.testCounters = config.testCounters + h.buildCounters = config.testCounters + if h.buildCounters == nil { + h.buildCounters = &buildCounters{} + } // Need a pointer as this may be modified. 
conf := &config - if conf.whatChanged == nil { // Assume everything has changed - conf.whatChanged = &whatChanged{source: true} + conf.whatChanged = &whatChanged{contentChanged: true} } var prepareErr error @@ -124,11 +133,13 @@ func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error { return nil } - if err := h.process(infol, conf, init, events...); err != nil { + ctx := context.Background() + + if err := h.process(ctx, infol, conf, init, events...); err != nil { return fmt.Errorf("process: %w", err) } - if err := h.assemble(infol, conf); err != nil { + if err := h.assemble(ctx, infol, conf); err != nil { return fmt.Errorf("assemble: %w", err) } @@ -190,16 +201,17 @@ func (h *HugoSites) initSites(config *BuildCfg) error { } func (h *HugoSites) initRebuild(config *BuildCfg) error { - if config.ResetState { - return errors.New("rebuild does not support 'ResetState'") - } - if !h.Configs.Base.Internal.Watch { return errors.New("rebuild called when not in watch mode") } + h.pageTrees.treePagesResources.WalkPrefixRaw("", func(key string, n contentNodeI) bool { + n.resetBuildState() + return false + }) + for _, s := range h.Sites { - s.resetBuildState(config.whatChanged.source) + s.resetBuildState(config.whatChanged.contentChanged) } h.reset(config) @@ -208,70 +220,102 @@ func (h *HugoSites) initRebuild(config *BuildCfg) error { return nil } -func (h *HugoSites) process(l logg.LevelLogger, config *BuildCfg, init func(config *BuildCfg) error, events ...fsnotify.Event) error { - defer h.timeTrack(l, time.Now(), "process") - - // We should probably refactor the Site and pull up most of the logic from there to here, - // but that seems like a daunting task. - // So for now, if there are more than one site (language), - // we pre-process the first one, then configure all the sites based on that. - - firstSite := h.Sites[0] +// process prepares the Sites' sources for a full or partial rebuild. 
+// This will also parse the source and create all the Page objects. +func (h *HugoSites) process(ctx context.Context, l logg.LevelLogger, config *BuildCfg, init func(config *BuildCfg) error, events ...fsnotify.Event) error { + l = l.WithField("step", "process") + defer loggers.TimeTrackf(l, time.Now(), nil, "") if len(events) > 0 { // This is a rebuild - return firstSite.processPartial(config, init, events) + return h.processPartial(ctx, l, config, init, events) } - - return firstSite.process(*config) + return h.processFull(ctx, l, *config) } -func (h *HugoSites) assemble(l logg.LevelLogger, bcfg *BuildCfg) error { - defer h.timeTrack(l, time.Now(), "assemble") +// assemble creates missing sections, applies aggregate values (e.g. dates, cascading params), +// removes disabled pages etc. +func (h *HugoSites) assemble(ctx context.Context, l logg.LevelLogger, bcfg *BuildCfg) error { + l = l.WithField("step", "assemble") + defer loggers.TimeTrackf(l, time.Now(), nil, "") - if !bcfg.whatChanged.source { + if !bcfg.whatChanged.contentChanged { return nil } - if err := h.getContentMaps().AssemblePages(); err != nil { - return err + h.translationKeyPages.Reset() + assemblers := make([]*sitePagesAssembler, len(h.Sites)) + // Changes detected during assembly (e.g. 
aggregate date changes) + assembleChanges := &whatChanged{ + identitySet: make(map[identity.Identity]bool), + } + for i, s := range h.Sites { + assemblers[i] = &sitePagesAssembler{ + Site: s, + watching: s.watching(), + incomingChanges: bcfg.whatChanged, + assembleChanges: assembleChanges, + ctx: ctx, + } } - if err := h.createPageCollections(); err != nil { + g, _ := h.workersSite.Start(ctx) + for _, s := range assemblers { + s := s + g.Run(func() error { + return s.assemblePagesStep1(ctx) + }) + } + if err := g.Wait(); err != nil { return err } - return nil -} + changes := assembleChanges.Changes() -func (h *HugoSites) timeTrack(l logg.LevelLogger, start time.Time, name string) { - elapsed := time.Since(start) - l.WithField("step", name).WithField("duration", elapsed).Logf("running") + // Changes from the assemble step (e.g. lastMod, cascade) need a re-calculation + // of what needs to be re-built. + if len(changes) > 0 { + if err := h.resolveAndClearStateForIdentities(ctx, l, nil, changes); err != nil { + return err + } + } + h.renderFormats = output.Formats{} + for _, s := range h.Sites { + s.s.initRenderFormats() + h.renderFormats = append(h.renderFormats, s.renderFormats...) + } + + for _, s := range assemblers { + if err := s.assemblePagesStep2(); err != nil { + return err + } + } + + h.renderFormats = output.Formats{} + for _, s := range h.Sites { + h.renderFormats = append(h.renderFormats, s.renderFormats...) + } + + return nil } +// render renders the sites. 
func (h *HugoSites) render(l logg.LevelLogger, config *BuildCfg) error { - defer h.timeTrack(l, time.Now(), "render") + l = l.WithField("step", "render") + start := time.Now() + defer func() { + loggers.TimeTrackf(l, start, h.buildCounters.loggFields(), "") + }() + if _, err := h.init.layouts.Do(context.Background()); err != nil { return err } siteRenderContext := &siteRenderContext{cfg: config, multihost: h.Configs.IsMultihost} - if !config.PartialReRender { - h.renderFormats = output.Formats{} - h.withSite(func(s *Site) error { - s.initRenderFormats() - return nil - }) - - for _, s := range h.Sites { - h.renderFormats = append(h.renderFormats, s.renderFormats...) - } - } - i := 0 - for _, s := range h.Sites { + siteRenderContext.languageIdx = s.languagei h.currentSite = s for siteOutIdx, renderFormat := range s.renderFormats { siteRenderContext.outIdx = siteOutIdx @@ -293,6 +337,12 @@ func (h *HugoSites) render(l logg.LevelLogger, config *BuildCfg) error { } } if !config.SkipRender { + ll := l.WithField("substep", "pages"). + WithField("site", s.language.Lang). + WithField("outputFormat", renderFormat.Name) + + start := time.Now() + if config.PartialReRender { if err := s.renderPages(siteRenderContext); err != nil { return err @@ -302,24 +352,16 @@ func (h *HugoSites) render(l logg.LevelLogger, config *BuildCfg) error { return err } } + loggers.TimeTrackf(ll, start, nil, "") } } - - } - } - - if !config.SkipRender { - if err := h.renderCrossSitesSitemap(); err != nil { - return err - } - if err := h.renderCrossSitesRobotsTXT(); err != nil { - return err } } return nil } +// / postRenderOnce runs some post processing that only needs to be done once, e.g. printing of unused templates. func (h *HugoSites) postRenderOnce() error { h.postRenderInit.Do(func() { conf := h.Configs.Base @@ -347,8 +389,10 @@ func (h *HugoSites) postRenderOnce() error { return nil } +// postProcess runs the post processors, e.g. writing the hugo_stats.json file. 
func (h *HugoSites) postProcess(l logg.LevelLogger) error { - defer h.timeTrack(l, time.Now(), "postProcess") + l = l.WithField("step", "postProcess") + defer loggers.TimeTrackf(l, time.Now(), nil, "") // Make sure to write any build stats to disk first so it's available // to the post processors. @@ -360,35 +404,40 @@ func (h *HugoSites) postProcess(l logg.LevelLogger) error { // imports that resolves to the project or a module. // Write a jsconfig.json file to the project's /asset directory // to help JS IntelliSense in VS Code etc. - if !h.ResourceSpec.BuildConfig().NoJSConfigInAssets && h.BaseFs.Assets.Dirs != nil { - fi, err := h.BaseFs.Assets.Fs.Stat("") - if err != nil { - h.Log.Warnf("Failed to resolve jsconfig.json dir: %s", err) - } else { + if !h.ResourceSpec.BuildConfig().NoJSConfigInAssets { + handleJSConfig := func(fi os.FileInfo) { m := fi.(hugofs.FileMetaInfo).Meta() - assetsDir := m.SourceRoot - if strings.HasPrefix(assetsDir, h.Configs.LoadingInfo.BaseConfig.WorkingDir) { - if jsConfig := h.ResourceSpec.JSConfigBuilder.Build(assetsDir); jsConfig != nil { + if !m.IsProject { + return + } - b, err := json.MarshalIndent(jsConfig, "", " ") - if err != nil { - h.Log.Warnf("Failed to create jsconfig.json: %s", err) - } else { - filename := filepath.Join(assetsDir, "jsconfig.json") - if h.Configs.Base.Internal.Running { - h.skipRebuildForFilenamesMu.Lock() - h.skipRebuildForFilenames[filename] = true - h.skipRebuildForFilenamesMu.Unlock() - } - // Make sure it's written to the OS fs as this is used by - // editors. 
- if err := afero.WriteFile(hugofs.Os, filename, b, 0o666); err != nil { - h.Log.Warnf("Failed to write jsconfig.json: %s", err) - } + if jsConfig := h.ResourceSpec.JSConfigBuilder.Build(m.SourceRoot); jsConfig != nil { + b, err := json.MarshalIndent(jsConfig, "", " ") + if err != nil { + h.Log.Warnf("Failed to create jsconfig.json: %s", err) + } else { + filename := filepath.Join(m.SourceRoot, "jsconfig.json") + if h.Configs.Base.Internal.Running { + h.skipRebuildForFilenamesMu.Lock() + h.skipRebuildForFilenames[filename] = true + h.skipRebuildForFilenamesMu.Unlock() + } + // Make sure it's written to the OS fs as this is used by + // editors. + if err := afero.WriteFile(hugofs.Os, filename, b, 0o666); err != nil { + h.Log.Warnf("Failed to write jsconfig.json: %s", err) } } } + } + fi, err := h.BaseFs.Assets.Fs.Stat("") + if err != nil { + if !herrors.IsNotExist(err) { + h.Log.Warnf("Failed to resolve jsconfig.json dir: %s", err) + } + } else { + handleJSConfig(fi) } } @@ -466,10 +515,6 @@ func (h *HugoSites) postProcess(l logg.LevelLogger) error { return g.Wait() } -type publishStats struct { - CSSClasses string `json:"cssClasses"` -} - func (h *HugoSites) writeBuildStats() error { if h.ResourceSpec == nil { panic("h.ResourceSpec is nil") @@ -523,3 +568,383 @@ func (h *HugoSites) writeBuildStats() error { return nil } + +type pathChange struct { + // The path to the changed file. + p *paths.Path + + // If true, this is a delete operation (a delete or a rename). + delete bool + + // If true, this is a directory. + isDir bool +} + +// processPartial prepares the Sites' sources for a partial rebuild. 
+func (h *HugoSites) processPartial(ctx context.Context, l logg.LevelLogger, config *BuildCfg, init func(config *BuildCfg) error, events []fsnotify.Event) error { + h.Log.Trace(logg.StringFunc(func() string { + var sb strings.Builder + sb.WriteString("File events:\n") + for _, ev := range events { + sb.WriteString(ev.String()) + sb.WriteString("\n") + } + return sb.String() + })) + + events = h.fileEventsFilter(events) + events = h.fileEventsTranslate(events) + + logger := h.Log + + var ( + tmplAdded bool + tmplChanged bool + i18nChanged bool + contentChanged bool + ) + + changedPaths := struct { + changedFiles []*paths.Path + changedDirs []*paths.Path + deleted []*paths.Path + }{} + + removeDuplicatePaths := func(ps []*paths.Path) []*paths.Path { + seen := make(map[string]bool) + var filtered []*paths.Path + for _, p := range ps { + if !seen[p.Path()] { + seen[p.Path()] = true + filtered = append(filtered, p) + } + } + return filtered + } + + var ( + cacheBusters []func(string) bool + deletedDirs []string + addedContentPaths []*paths.Path + ) + + for _, ev := range events { + removed := false + added := false + + if ev.Op&fsnotify.Remove == fsnotify.Remove { + removed = true + } + + fi, statErr := h.Fs.Source.Stat(ev.Name) + + // Some editors (Vim) sometimes issue only a Rename operation when writing an existing file + // Sometimes a rename operation means that file has been renamed other times it means + // it's been updated. + if ev.Op.Has(fsnotify.Rename) { + // If the file is still on disk, it's only been updated, if it's not, it's been moved + if statErr != nil { + removed = true + } + } + if ev.Op.Has(fsnotify.Create) { + added = true + } + + isChangedDir := statErr == nil && fi.IsDir() + + cpss := h.BaseFs.ResolvePaths(ev.Name, !removed) + pss := make([]*paths.Path, len(cpss)) + for i, cps := range cpss { + p := cps.Path + if removed && !paths.HasExt(p) { + // Assume this is a renamed/removed directory. 
+ // For deletes, we walk up the tree to find the container (e.g. branch bundle), + // so we will catch this even if it is a file without extension. + // This avoids us walking up to the home page bundle for the common case + // of renaming root sections. + p = p + "/_index.md" + deletedDirs = append(deletedDirs, cps.Path) + } + + pss[i] = h.Configs.ContentPathParser.Parse(cps.Component, p) + if added && !isChangedDir && cps.Component == files.ComponentFolderContent { + addedContentPaths = append(addedContentPaths, pss[i]) + } + + // Compile cache buster. + np := glob.NormalizePath(path.Join(cps.Component, cps.Path)) + g, err := h.ResourceSpec.BuildConfig().MatchCacheBuster(h.Log, np) + if err == nil && g != nil { + cacheBusters = append(cacheBusters, g) + } + } + + if removed { + changedPaths.deleted = append(changedPaths.deleted, pss...) + } else if isChangedDir { + changedPaths.changedDirs = append(changedPaths.changedDirs, pss...) + } else { + changedPaths.changedFiles = append(changedPaths.changedFiles, pss...) + } + } + + var ( + addedOrChangedContent []pathChange + changes []identity.Identity + ) + + // Find the most specific identity possible. + handleChange := func(pathInfo *paths.Path, delete, isDir bool) { + switch pathInfo.Component() { + case files.ComponentFolderContent: + logger.Println("Source changed", pathInfo.Path()) + base := pathInfo.Base() + + if ids := h.pageTrees.collectIdentities(base); len(ids) > 0 { + changes = append(changes, ids...) + } + + contentChanged = true + + if config.RecentlyVisited != nil { + // Fast render mode. Adding them to the visited queue + // avoids rerendering them on navigation. 
+ for _, id := range changes { + if p, ok := id.(page.Page); ok { + config.RecentlyVisited.Add(p.RelPermalink()) + } + } + } + + h.pageTrees.treeTaxonomyEntries.DeletePrefix("") + + if delete { + _, ok := h.pageTrees.treePages.LongestPrefixAll(pathInfo.Base()) + if ok { + h.pageTrees.treePages.DeleteAll(pathInfo.Base()) + if pathInfo.IsBundle() { + // Assume directory removed. + h.pageTrees.treePages.DeletePrefixAll(pathInfo.Base() + "/") + h.pageTrees.resourceTrees.DeletePrefixAll(pathInfo.Base() + "/") + } + } else { + h.pageTrees.resourceTrees.DeleteAll(pathInfo.Base()) + } + } + + addedOrChangedContent = append(addedOrChangedContent, pathChange{p: pathInfo, delete: delete, isDir: isDir}) + + case files.ComponentFolderLayouts: + tmplChanged = true + h.init.layouts.Reset() + templatePath := pathInfo.TrimLeadingSlash().PathNoLang() + if !h.Tmpl().HasTemplate(templatePath) { + tmplAdded = true + } + + if tmplAdded { + logger.Println("Template added", pathInfo.Path()) + // A new template may require a more coarse grained build. + base := pathInfo.Base() + if strings.Contains(base, "_markup") { + // It's hard to determine the exact change set of this, + // so be very coarse grained. 
+ changes = append(changes, identity.GenghisKhan) + } + if strings.Contains(base, "shortcodes") { + changes = append(changes, identity.NewGlobIdentity(fmt.Sprintf("shortcodes/%s*", pathInfo.BaseNameNoIdentifier()))) + } else { + changes = append(changes, pathInfo) + } + } else { + logger.Println("Template changed", pathInfo.Path()) + if templ, found := h.Tmpl().GetIdentity(templatePath); found { + changes = append(changes, templ) + } else { + changes = append(changes, pathInfo) + } + } + case files.ComponentFolderAssets: + logger.Println("Asset changed", pathInfo.Path()) + r, _ := h.ResourceSpec.ResourceCache.Get(context.Background(), dynacache.CleanKey(pathInfo.Base())) + var hasID bool + identity.WalkIdentitiesShallow(r, func(level int, rid identity.Identity) bool { + hasID = true + changes = append(changes, rid) + return false + }) + if !hasID { + changes = append(changes, pathInfo) + } + case files.ComponentFolderData: + logger.Println("Data changed", pathInfo.Path()) + + // This should cover all usage of site.Data. + // Currently very coarse grained. 
+ changes = append(changes, siteidentities.Data) + h.init.data.Reset() + case files.ComponentFolderI18n: + logger.Println("i18n changed", pathInfo.Path()) + i18nChanged = true + changes = append(changes, pathInfo) + default: + panic(fmt.Sprintf("unknown component: %q", pathInfo.Component())) + } + } + + changedPaths.deleted = removeDuplicatePaths(changedPaths.deleted) + changedPaths.changedFiles = removeDuplicatePaths(changedPaths.changedFiles) + + h.Log.Trace(logg.StringFunc(func() string { + var sb strings.Builder + sb.WriteString("Resolved paths:\n") + sb.WriteString("Deleted:\n") + for _, p := range changedPaths.deleted { + sb.WriteString("path: " + p.Path()) + sb.WriteString("\n") + } + sb.WriteString("Changed:\n") + for _, p := range changedPaths.changedFiles { + sb.WriteString("path: " + p.Path()) + sb.WriteString("\n") + } + return sb.String() + })) + + for _, deletedDir := range deletedDirs { + prefix := deletedDir + "/" + predicate := func(id identity.Identity) bool { + // This will effectively reset all pages below this dir. + return strings.HasPrefix(paths.AddLeadingSlash(id.IdentifierBase()), prefix) + } + // Test in both directions. + changes = append(changes, identity.NewPredicateIdentity( + // Is dependent. + predicate, + // Is dependency. + predicate, + ), + ) + } + + if len(addedContentPaths) > 0 { + // These content files are new and not in use anywhere. + // To make sure that these gets listed in any site.RegularPages ranges or similar + // we could invalidate everything, but first try to collect a sample set + // from the surrounding pages. + var surroundingIDs []identity.Identity + for _, p := range addedContentPaths { + if ids := h.pageTrees.collectIdentitiesSurrounding(p.Base(), 10); len(ids) > 0 { + surroundingIDs = append(surroundingIDs, ids...) + } + } + + if len(surroundingIDs) > 0 { + changes = append(changes, surroundingIDs...) + } else { + // No surrounding pages found, so invalidate everything. 
+ changes = append(changes, identity.GenghisKhan) + } + } + + for _, deleted := range changedPaths.deleted { + handleChange(deleted, true, false) + } + + for _, id := range changedPaths.changedFiles { + handleChange(id, false, false) + } + + for _, id := range changedPaths.changedDirs { + handleChange(id, false, true) + } + + resourceFiles := h.fileEventsContentPaths(addedOrChangedContent) + + changed := &whatChanged{ + contentChanged: contentChanged, + identitySet: make(identity.Identities), + } + changed.Add(changes...) + + config.whatChanged = changed + + if err := init(config); err != nil { + return err + } + + var cacheBusterOr func(string) bool + if len(cacheBusters) > 0 { + cacheBusterOr = func(s string) bool { + for _, cb := range cacheBusters { + if cb(s) { + return true + } + } + return false + } + } + + // Removes duplicates. + changes = changed.identitySet.AsSlice() + + if err := h.resolveAndClearStateForIdentities(ctx, l, cacheBusterOr, changes); err != nil { + return err + } + + if tmplChanged || i18nChanged { + if err := loggers.TimeTrackfn(func() (logg.LevelLogger, error) { + // TODO(bep) this could probably be optimized to somehow + // only load the changed templates and its dependencies, but that is non-trivial. 
+ ll := l.WithField("substep", "rebuild templates") + var prototype *deps.Deps + for i, s := range h.Sites { + if err := s.Deps.Compile(prototype); err != nil { + return ll, err + } + if i == 0 { + prototype = s.Deps + } + } + return ll, nil + }); err != nil { + return err + } + } + + if resourceFiles != nil { + if err := h.processFiles(ctx, l, *config, resourceFiles...); err != nil { + return err + } + } + + return nil +} + +func (h *HugoSites) processFull(ctx context.Context, l logg.LevelLogger, config BuildCfg) (err error) { + if err = h.processFiles(ctx, l, config); err != nil { + err = fmt.Errorf("readAndProcessContent: %w", err) + return + } + return err +} + +func (s *HugoSites) processFiles(ctx context.Context, l logg.LevelLogger, buildConfig BuildCfg, filenames ...pathChange) error { + if s.Deps == nil { + panic("nil deps on site") + } + + sourceSpec := source.NewSourceSpec(s.PathSpec, buildConfig.ContentInclusionFilter, s.BaseFs.Content.Fs) + + // For inserts, we can pick an arbitrary pageMap. 
+ pageMap := s.Sites[0].pageMap + + c := newPagesCollector(ctx, s.h, sourceSpec, s.Log, l, pageMap, filenames) + + if err := c.Collect(); err != nil { + return err + } + + return nil +} diff --git a/hugolib/hugo_sites_build_errors_test.go b/hugolib/hugo_sites_build_errors_test.go index e29fd060e..5a8b9f76f 100644 --- a/hugolib/hugo_sites_build_errors_test.go +++ b/hugolib/hugo_sites_build_errors_test.go @@ -7,8 +7,6 @@ import ( "strings" "testing" - "github.com/gohugoio/hugo/htesting" - qt "github.com/frankban/quicktest" "github.com/gohugoio/hugo/common/herrors" ) @@ -57,7 +55,6 @@ func TestSiteBuildErrors(t *testing.T) { assertCreateError func(a testSiteBuildErrorAsserter, err error) assertBuildError func(a testSiteBuildErrorAsserter, err error) }{ - { name: "Base template parse failed", fileType: base, @@ -139,7 +136,6 @@ func TestSiteBuildErrors(t *testing.T) { // Make sure that it contains both the content file and template a.assertErrorMessage(`"content/myyaml.md:7:10": failed to render shortcode "sc": failed to process shortcode: "layouts/shortcodes/sc.html:4:22": execute of template failed: template: shortcodes/sc.html:4:22: executing "shortcodes/sc.html" at <.Page.Titles>: can't evaluate field Titles in type page.Page`, fe.Error()) a.c.Assert(fe.Position().LineNumber, qt.Equals, 7) - }, }, { @@ -312,7 +308,6 @@ Some content. 
} }) } - } // Issue 9852 @@ -345,7 +340,6 @@ minify = true b.Assert(fe.Error(), qt.Contains, "unexpected = in expression on line 2 and column 9") b.Assert(filepath.ToSlash(fe.Position().Filename), qt.Contains, "hugo-transform-error") b.Assert(os.Remove(fe.Position().Filename), qt.IsNil) - } func TestErrorNestedRender(t *testing.T) { @@ -394,7 +388,6 @@ line 4 b.Assert(errors[3].Position().LineNumber, qt.Equals, 3) b.Assert(errors[3].Position().ColumnNumber, qt.Equals, 6) b.Assert(errors[3].ErrorContext().Lines, qt.DeepEquals, []string{"line 1", "line 2", "123{{ .ThisDoesNotExist }}", "line 4"}) - } func TestErrorNestedShortcode(t *testing.T) { @@ -437,16 +430,15 @@ line 4 b.Assert(err, qt.IsNotNil) errors := herrors.UnwrapFileErrorsWithErrorContext(err) - b.Assert(errors, qt.HasLen, 3) - - b.Assert(errors[0].Position().LineNumber, qt.Equals, 6) - b.Assert(errors[0].Position().ColumnNumber, qt.Equals, 1) - b.Assert(errors[0].ErrorContext().ChromaLexer, qt.Equals, "md") - b.Assert(errors[0].Error(), qt.Contains, filepath.FromSlash(`"/content/_index.md:6:1": failed to render shortcode "hello": failed to process shortcode: "/layouts/shortcodes/hello.html:2:5":`)) - b.Assert(errors[0].ErrorContext().Lines, qt.DeepEquals, []string{"", "## Hello", "{{< hello >}}", ""}) - b.Assert(errors[1].ErrorContext().Lines, qt.DeepEquals, []string{"line 1", "12{{ partial \"foo.html\" . 
}}", "line 4", "line 5"}) - b.Assert(errors[2].ErrorContext().Lines, qt.DeepEquals, []string{"line 1", "line 2", "123{{ .ThisDoesNotExist }}", "line 4"}) + b.Assert(errors, qt.HasLen, 4) + b.Assert(errors[1].Position().LineNumber, qt.Equals, 6) + b.Assert(errors[1].Position().ColumnNumber, qt.Equals, 1) + b.Assert(errors[1].ErrorContext().ChromaLexer, qt.Equals, "md") + b.Assert(errors[1].Error(), qt.Contains, filepath.FromSlash(`"/content/_index.md:6:1": failed to render shortcode "hello": failed to process shortcode: "/layouts/shortcodes/hello.html:2:5":`)) + b.Assert(errors[1].ErrorContext().Lines, qt.DeepEquals, []string{"", "## Hello", "{{< hello >}}", ""}) + b.Assert(errors[2].ErrorContext().Lines, qt.DeepEquals, []string{"line 1", "12{{ partial \"foo.html\" . }}", "line 4", "line 5"}) + b.Assert(errors[3].ErrorContext().Lines, qt.DeepEquals, []string{"line 1", "line 2", "123{{ .ThisDoesNotExist }}", "line 4"}) } func TestErrorRenderHookHeading(t *testing.T) { @@ -483,9 +475,8 @@ line 5 b.Assert(err, qt.IsNotNil) errors := herrors.UnwrapFileErrorsWithErrorContext(err) - b.Assert(errors, qt.HasLen, 2) + b.Assert(errors, qt.HasLen, 3) b.Assert(errors[0].Error(), qt.Contains, filepath.FromSlash(`"/content/_index.md:1:1": "/layouts/_default/_markup/render-heading.html:2:5": execute of template failed`)) - } func TestErrorRenderHookCodeblock(t *testing.T) { @@ -527,10 +518,9 @@ line 5 b.Assert(err, qt.IsNotNil) errors := herrors.UnwrapFileErrorsWithErrorContext(err) - b.Assert(errors, qt.HasLen, 2) + b.Assert(errors, qt.HasLen, 3) first := errors[0] b.Assert(first.Error(), qt.Contains, filepath.FromSlash(`"/content/_index.md:7:1": "/layouts/_default/_markup/render-codeblock-foo.html:2:5": execute of template failed`)) - } func TestErrorInBaseTemplate(t *testing.T) { @@ -580,7 +570,6 @@ toc line 4 b.Assert(err, qt.IsNotNil) b.Assert(err.Error(), qt.Contains, filepath.FromSlash(`render of "home" failed: "/layouts/baseof.html:4:6"`)) - }) t.Run("index template", 
func(t *testing.T) { @@ -595,7 +584,6 @@ toc line 4 b.Assert(err, qt.IsNotNil) b.Assert(err.Error(), qt.Contains, filepath.FromSlash(`render of "home" failed: "/layouts/index.html:3:7"`)) - }) t.Run("partial from define", func(t *testing.T) { @@ -611,17 +599,11 @@ toc line 4 b.Assert(err, qt.IsNotNil) b.Assert(err.Error(), qt.Contains, filepath.FromSlash(`render of "home" failed: "/layouts/index.html:7:8": execute of template failed`)) b.Assert(err.Error(), qt.Contains, `execute of template failed: template: partials/toc.html:2:8: executing "partials/toc.html"`) - }) - } // https://github.com/gohugoio/hugo/issues/5375 func TestSiteBuildTimeout(t *testing.T) { - if !htesting.IsCI() { - //defer leaktest.CheckTimeout(t, 10*time.Second)() - } - b := newTestSitesBuilder(t) b.WithConfigFile("toml", ` timeout = 5 diff --git a/hugolib/hugo_sites_build_test.go b/hugolib/hugo_sites_build_test.go index bdbf4270e..4c2bf452c 100644 --- a/hugolib/hugo_sites_build_test.go +++ b/hugolib/hugo_sites_build_test.go @@ -5,150 +5,44 @@ import ( "path/filepath" "strings" "testing" - "time" qt "github.com/frankban/quicktest" "github.com/gohugoio/hugo/common/loggers" "github.com/gohugoio/hugo/htesting" "github.com/gohugoio/hugo/resources/kinds" - "github.com/fortytw2/leaktest" - "github.com/fsnotify/fsnotify" "github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/hugofs" "github.com/spf13/afero" ) func TestMultiSitesMainLangInRoot(t *testing.T) { - t.Parallel() - for _, b := range []bool{false} { - doTestMultiSitesMainLangInRoot(t, b) - } -} - -func doTestMultiSitesMainLangInRoot(t *testing.T, defaultInSubDir bool) { - c := qt.New(t) - - siteConfig := map[string]any{ - "DefaultContentLanguage": "fr", - "DefaultContentLanguageInSubdir": defaultInSubDir, - } - - b := newMultiSiteTestBuilder(t, "toml", multiSiteTOMLConfigTemplate, siteConfig) - - pathMod := func(s string) string { - return s - } - - if !defaultInSubDir { - pathMod = func(s string) string { - return strings.Replace(s, 
"/fr/", "/", -1) - } - } - - b.CreateSites() - b.Build(BuildCfg{}) - - sites := b.H.Sites - c.Assert(len(sites), qt.Equals, 4) - - enSite := sites[0] - frSite := sites[1] - - c.Assert(enSite.LanguagePrefix(), qt.Equals, "/en") - - if defaultInSubDir { - c.Assert(frSite.LanguagePrefix(), qt.Equals, "/fr") - } else { - c.Assert(frSite.LanguagePrefix(), qt.Equals, "") - } - - c.Assert(enSite.PathSpec.RelURL("foo", true), qt.Equals, "/blog/en/foo") - - doc1en := enSite.RegularPages()[0] - doc1fr := frSite.RegularPages()[0] - - enPerm := doc1en.Permalink() - enRelPerm := doc1en.RelPermalink() - c.Assert(enPerm, qt.Equals, "http://example.com/blog/en/sect/doc1-slug/") - c.Assert(enRelPerm, qt.Equals, "/blog/en/sect/doc1-slug/") - - frPerm := doc1fr.Permalink() - frRelPerm := doc1fr.RelPermalink() - - b.AssertFileContent(pathMod("public/fr/sect/doc1/index.html"), "Single", "Bonjour") - b.AssertFileContent("public/en/sect/doc1-slug/index.html", "Single", "Hello") - - if defaultInSubDir { - c.Assert(frPerm, qt.Equals, "http://example.com/blog/fr/sect/doc1/") - c.Assert(frRelPerm, qt.Equals, "/blog/fr/sect/doc1/") - - // should have a redirect on top level. 
- b.AssertFileContent("public/index.html", `<meta http-equiv="refresh" content="0; url=http://example.com/blog/fr">`) - } else { - // Main language in root - c.Assert(frPerm, qt.Equals, "http://example.com/blog/sect/doc1/") - c.Assert(frRelPerm, qt.Equals, "/blog/sect/doc1/") + files := ` +-- hugo.toml -- +defaultContentLanguage = "fr" +defaultContentLanguageInSubdir = false +disableKinds = ["taxonomy", "term"] +[languages] +[languages.en] +weight = 1 +[languages.fr] +weight = 2 +-- content/sect/doc1.en.md -- +--- +title: doc1 en +--- +-- content/sect/doc1.fr.md -- +--- +title: doc1 fr +slug: doc1-fr +--- +-- layouts/_default/single.html -- +Single: {{ .Title }}|{{ .Lang }}|{{ .RelPermalink }}| - // should have redirect back to root - b.AssertFileContent("public/fr/index.html", `<meta http-equiv="refresh" content="0; url=http://example.com/blog">`) - } - b.AssertFileContent(pathMod("public/fr/index.html"), "Home", "Bonjour") - b.AssertFileContent("public/en/index.html", "Home", "Hello") - - // Check list pages - b.AssertFileContent(pathMod("public/fr/sect/index.html"), "List", "Bonjour") - b.AssertFileContent("public/en/sect/index.html", "List", "Hello") - b.AssertFileContent(pathMod("public/fr/plaques/FRtag1/index.html"), "Taxonomy List", "Bonjour") - b.AssertFileContent("public/en/tags/tag1/index.html", "Taxonomy List", "Hello") - - // Check sitemaps - // Sitemaps behaves different: In a multilanguage setup there will always be a index file and - // one sitemap in each lang folder. 
- b.AssertFileContent("public/sitemap.xml", - "<loc>http://example.com/blog/en/sitemap.xml</loc>", - "<loc>http://example.com/blog/fr/sitemap.xml</loc>") - - if defaultInSubDir { - b.AssertFileContent("public/fr/sitemap.xml", "<loc>http://example.com/blog/fr/</loc>") - } else { - b.AssertFileContent("public/fr/sitemap.xml", "<loc>http://example.com/blog/</loc>") - } - b.AssertFileContent("public/en/sitemap.xml", "<loc>http://example.com/blog/en/</loc>") - - // Check rss - b.AssertFileContent(pathMod("public/fr/index.xml"), pathMod(`<atom:link href="http://example.com/blog/fr/index.xml"`), - `rel="self" type="application/rss+xml"`) - b.AssertFileContent("public/en/index.xml", `<atom:link href="http://example.com/blog/en/index.xml"`) - b.AssertFileContent( - pathMod("public/fr/sect/index.xml"), - pathMod(`<atom:link href="http://example.com/blog/fr/sect/index.xml"`)) - b.AssertFileContent("public/en/sect/index.xml", `<atom:link href="http://example.com/blog/en/sect/index.xml"`) - b.AssertFileContent( - pathMod("public/fr/plaques/FRtag1/index.xml"), - pathMod(`<atom:link href="http://example.com/blog/fr/plaques/FRtag1/index.xml"`)) - b.AssertFileContent("public/en/tags/tag1/index.xml", `<atom:link href="http://example.com/blog/en/tags/tag1/index.xml"`) - - // Check paginators - b.AssertFileContent(pathMod("public/fr/page/1/index.html"), pathMod(`refresh" content="0; url=http://example.com/blog/fr/"`)) - b.AssertFileContent("public/en/page/1/index.html", `refresh" content="0; url=http://example.com/blog/en/"`) - b.AssertFileContent(pathMod("public/fr/page/2/index.html"), "Home Page 2", "Bonjour", pathMod("http://example.com/blog/fr/")) - b.AssertFileContent("public/en/page/2/index.html", "Home Page 2", "Hello", "http://example.com/blog/en/") - b.AssertFileContent(pathMod("public/fr/sect/page/1/index.html"), pathMod(`refresh" content="0; url=http://example.com/blog/fr/sect/"`)) - b.AssertFileContent("public/en/sect/page/1/index.html", `refresh" content="0; 
url=http://example.com/blog/en/sect/"`) - b.AssertFileContent(pathMod("public/fr/sect/page/2/index.html"), "List Page 2", "Bonjour", pathMod("http://example.com/blog/fr/sect/")) - b.AssertFileContent("public/en/sect/page/2/index.html", "List Page 2", "Hello", "http://example.com/blog/en/sect/") - b.AssertFileContent( - pathMod("public/fr/plaques/FRtag1/page/1/index.html"), - pathMod(`refresh" content="0; url=http://example.com/blog/fr/plaques/FRtag1/"`)) - b.AssertFileContent("public/en/tags/tag1/page/1/index.html", `refresh" content="0; url=http://example.com/blog/en/tags/tag1/"`) - b.AssertFileContent( - pathMod("public/fr/plaques/FRtag1/page/2/index.html"), "List Page 2", "Bonjour", - pathMod("http://example.com/blog/fr/plaques/FRtag1/")) - b.AssertFileContent("public/en/tags/tag1/page/2/index.html", "List Page 2", "Hello", "http://example.com/blog/en/tags/tag1/") - // nn (Nynorsk) and nb (Bokmål) have custom pagePath: side ("page" in Norwegian) - b.AssertFileContent("public/nn/side/1/index.html", `refresh" content="0; url=http://example.com/blog/nn/"`) - b.AssertFileContent("public/nb/side/1/index.html", `refresh" content="0; url=http://example.com/blog/nb/"`) +` + b := Test(t, files) + b.AssertFileContent("public/sect/doc1-fr/index.html", "Single: doc1 fr|fr|/sect/doc1-fr/|") + b.AssertFileContent("public/en/sect/doc1/index.html", "Single: doc1 en|en|/en/sect/doc1/|") } func TestMultiSitesWithTwoLanguages(t *testing.T) { @@ -182,12 +76,12 @@ p1 = "p1en" c.Assert(len(sites), qt.Equals, 2) nnSite := sites[0] - nnHome := nnSite.getPage(kinds.KindHome) + nnHome := nnSite.getPageOldVersion(kinds.KindHome) c.Assert(len(nnHome.AllTranslations()), qt.Equals, 2) c.Assert(len(nnHome.Translations()), qt.Equals, 1) c.Assert(nnHome.IsTranslated(), qt.Equals, true) - enHome := sites[1].getPage(kinds.KindHome) + enHome := sites[1].getPageOldVersion(kinds.KindHome) p1, err := enHome.Param("p1") c.Assert(err, qt.IsNil) @@ -198,403 +92,6 @@ p1 = "p1en" c.Assert(p1, qt.Equals, 
"p1nn") } -func TestMultiSitesBuild(t *testing.T) { - for _, config := range []struct { - content string - suffix string - }{ - {multiSiteTOMLConfigTemplate, "toml"}, - {multiSiteYAMLConfigTemplate, "yml"}, - {multiSiteJSONConfigTemplate, "json"}, - } { - config := config - t.Run(config.suffix, func(t *testing.T) { - t.Parallel() - doTestMultiSitesBuild(t, config.content, config.suffix) - }) - } -} - -func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) { - c := qt.New(t) - - b := newMultiSiteTestBuilder(t, configSuffix, configTemplate, nil) - b.CreateSites() - - sites := b.H.Sites - c.Assert(len(sites), qt.Equals, 4) - - b.Build(BuildCfg{}) - - // Check site config - for _, s := range sites { - c.Assert(s.conf.DefaultContentLanguageInSubdir, qt.Equals, true) - c.Assert(s.conf.C.DisabledKinds, qt.Not(qt.IsNil)) - } - - gp1 := b.H.GetContentPage(filepath.FromSlash("content/sect/doc1.en.md")) - c.Assert(gp1, qt.Not(qt.IsNil)) - c.Assert(gp1.Title(), qt.Equals, "doc1") - gp2 := b.H.GetContentPage(filepath.FromSlash("content/dummysect/notfound.md")) - c.Assert(gp2, qt.IsNil) - - enSite := sites[0] - enSiteHome := enSite.getPage(kinds.KindHome) - c.Assert(enSiteHome.IsTranslated(), qt.Equals, true) - - c.Assert(enSite.language.Lang, qt.Equals, "en") - - c.Assert(len(enSite.RegularPages()), qt.Equals, 5) - - //dumpPages(enSite.AllPages()...) 
- - //c.Assert(len(enSite.AllPages()), qt.Equals, 32) - - // Check 404s - b.AssertFileContent("public/en/404.html", "404|en|404 Page not found") - b.AssertFileContent("public/fr/404.html", "404|fr|404 Page not found") - - // Check robots.txt - // the domain root is the public directory, so the robots.txt has to be created there and not in the language directories - b.AssertFileContent("public/robots.txt", "robots") - b.AssertFileDoesNotExist("public/en/robots.txt") - b.AssertFileDoesNotExist("public/nn/robots.txt") - - b.AssertFileContent("public/en/sect/doc1-slug/index.html", "Permalink: http://example.com/blog/en/sect/doc1-slug/") - b.AssertFileContent("public/en/sect/doc2/index.html", "Permalink: http://example.com/blog/en/sect/doc2/") - b.AssertFileContent("public/superbob/index.html", "Permalink: http://example.com/blog/superbob/") - - doc2 := enSite.RegularPages()[1] - doc3 := enSite.RegularPages()[2] - c.Assert(doc3, qt.Equals, doc2.Prev()) - doc1en := enSite.RegularPages()[0] - doc1fr := doc1en.Translations()[0] - b.AssertFileContent("public/fr/sect/doc1/index.html", "Permalink: http://example.com/blog/fr/sect/doc1/") - - c.Assert(doc1fr, qt.Equals, doc1en.Translations()[0]) - c.Assert(doc1en, qt.Equals, doc1fr.Translations()[0]) - c.Assert(doc1fr.Language().Lang, qt.Equals, "fr") - - doc4 := enSite.AllPages()[4] - c.Assert(len(doc4.Translations()), qt.Equals, 0) - - // Taxonomies and their URLs - c.Assert(len(enSite.Taxonomies()), qt.Equals, 1) - tags := enSite.Taxonomies()["tags"] - c.Assert(len(tags), qt.Equals, 2) - c.Assert(doc1en, qt.Equals, tags["tag1"][0].Page) - - frSite := sites[1] - c.Assert(frSite.language.Lang, qt.Equals, "fr") - c.Assert(len(frSite.RegularPages()), qt.Equals, 4) - c.Assert(frSite.home.Title(), qt.Equals, "Le Français") - c.Assert(len(frSite.AllPages()), qt.Equals, 32) - - for _, frenchPage := range frSite.RegularPages() { - p := frenchPage - c.Assert(p.Language().Lang, qt.Equals, "fr") - } - - // See 
https://github.com/gohugoio/hugo/issues/4285 - // Before Hugo 0.33 you had to be explicit with the content path to get the correct Page, which - // isn't ideal in a multilingual setup. You want a way to get the current language version if available. - // Now you can do lookups with translation base name to get that behaviour. - // Let us test all the regular page variants: - getPageDoc1En := enSite.getPage(kinds.KindPage, filepath.ToSlash(doc1en.File().Path())) - getPageDoc1EnBase := enSite.getPage(kinds.KindPage, "sect/doc1") - getPageDoc1Fr := frSite.getPage(kinds.KindPage, filepath.ToSlash(doc1fr.File().Path())) - getPageDoc1FrBase := frSite.getPage(kinds.KindPage, "sect/doc1") - c.Assert(getPageDoc1En, qt.Equals, doc1en) - c.Assert(getPageDoc1Fr, qt.Equals, doc1fr) - c.Assert(getPageDoc1EnBase, qt.Equals, doc1en) - c.Assert(getPageDoc1FrBase, qt.Equals, doc1fr) - - // Check redirect to main language, French - b.AssertFileContent("public/index.html", "0; url=http://example.com/blog/fr") - - // check home page content (including data files rendering) - b.AssertFileContent("public/en/index.html", "Default Home Page 1", "Hello", "Hugo Rocks!") - b.AssertFileContent("public/fr/index.html", "French Home Page 1", "Bonjour", "Hugo Rocks!") - - // check single page content - b.AssertFileContent("public/fr/sect/doc1/index.html", "Single", "Shortcode: Bonjour", "LingoFrench") - b.AssertFileContent("public/en/sect/doc1-slug/index.html", "Single", "Shortcode: Hello", "LingoDefault") - - // Check node translations - homeEn := enSite.getPage(kinds.KindHome) - c.Assert(homeEn, qt.Not(qt.IsNil)) - c.Assert(len(homeEn.Translations()), qt.Equals, 3) - c.Assert(homeEn.Translations()[0].Language().Lang, qt.Equals, "fr") - c.Assert(homeEn.Translations()[1].Language().Lang, qt.Equals, "nn") - c.Assert(homeEn.Translations()[1].Title(), qt.Equals, "På nynorsk") - c.Assert(homeEn.Translations()[2].Language().Lang, qt.Equals, "nb") - c.Assert(homeEn.Translations()[2].Title(), qt.Equals, 
"På bokmål") - c.Assert(homeEn.Translations()[2].Language().LanguageName, qt.Equals, "Bokmål") - - sectFr := frSite.getPage(kinds.KindSection, "sect") - c.Assert(sectFr, qt.Not(qt.IsNil)) - - c.Assert(sectFr.Language().Lang, qt.Equals, "fr") - c.Assert(len(sectFr.Translations()), qt.Equals, 1) - c.Assert(sectFr.Translations()[0].Language().Lang, qt.Equals, "en") - c.Assert(sectFr.Translations()[0].Title(), qt.Equals, "Sects") - - nnSite := sites[2] - c.Assert(nnSite.language.Lang, qt.Equals, "nn") - taxNn := nnSite.getPage(kinds.KindTaxonomy, "lag") - c.Assert(taxNn, qt.Not(qt.IsNil)) - c.Assert(len(taxNn.Translations()), qt.Equals, 1) - c.Assert(taxNn.Translations()[0].Language().Lang, qt.Equals, "nb") - - taxTermNn := nnSite.getPage(kinds.KindTerm, "lag", "sogndal") - c.Assert(taxTermNn, qt.Not(qt.IsNil)) - c.Assert(nnSite.getPage(kinds.KindTerm, "LAG", "SOGNDAL"), qt.Equals, taxTermNn) - c.Assert(len(taxTermNn.Translations()), qt.Equals, 1) - c.Assert(taxTermNn.Translations()[0].Language().Lang, qt.Equals, "nb") - - // Check sitemap(s) - b.AssertFileContent("public/sitemap.xml", - "<loc>http://example.com/blog/en/sitemap.xml</loc>", - "<loc>http://example.com/blog/fr/sitemap.xml</loc>") - b.AssertFileContent("public/en/sitemap.xml", "http://example.com/blog/en/sect/doc2/") - b.AssertFileContent("public/fr/sitemap.xml", "http://example.com/blog/fr/sect/doc1/") - - // Check taxonomies - enTags := enSite.Taxonomies()["tags"] - frTags := frSite.Taxonomies()["plaques"] - c.Assert(len(enTags), qt.Equals, 2, qt.Commentf("Tags in en: %v", enTags)) - c.Assert(len(frTags), qt.Equals, 2, qt.Commentf("Tags in fr: %v", frTags)) - c.Assert(enTags["tag1"], qt.Not(qt.IsNil)) - c.Assert(frTags["FRtag1"], qt.Not(qt.IsNil)) - b.AssertFileContent("public/fr/plaques/FRtag1/index.html", "FRtag1|Bonjour|http://example.com/blog/fr/plaques/FRtag1/") - - // en and nn have custom site menus - c.Assert(len(frSite.Menus()), qt.Equals, 0) - c.Assert(len(enSite.Menus()), qt.Equals, 1) - 
c.Assert(len(nnSite.Menus()), qt.Equals, 1) - - c.Assert(enSite.Menus()["main"].ByName()[0].Name, qt.Equals, "Home") - c.Assert(nnSite.Menus()["main"].ByName()[0].Name, qt.Equals, "Heim") - - // Issue #3108 - prevPage := enSite.RegularPages()[0].Prev() - c.Assert(prevPage, qt.Not(qt.IsNil)) - c.Assert(prevPage.Kind(), qt.Equals, kinds.KindPage) - - for { - if prevPage == nil { - break - } - c.Assert(prevPage.Kind(), qt.Equals, kinds.KindPage) - prevPage = prevPage.Prev() - } - - // Check bundles - b.AssertFileContent("public/fr/bundles/b1/index.html", "RelPermalink: /blog/fr/bundles/b1/|") - bundleFr := frSite.getPage(kinds.KindPage, "bundles/b1/index.md") - c.Assert(bundleFr, qt.Not(qt.IsNil)) - c.Assert(len(bundleFr.Resources()), qt.Equals, 1) - logoFr := bundleFr.Resources().GetMatch("logo*") - logoFrGet := bundleFr.Resources().Get("logo.png") - c.Assert(logoFrGet, qt.Equals, logoFr) - c.Assert(logoFr, qt.Not(qt.IsNil)) - b.AssertFileContent("public/fr/bundles/b1/index.html", "Resources: image/png: /blog/fr/bundles/b1/logo.png") - b.AssertFileContent("public/fr/bundles/b1/logo.png", "PNG Data") - - bundleEn := enSite.getPage(kinds.KindPage, "bundles/b1/index.en.md") - c.Assert(bundleEn, qt.Not(qt.IsNil)) - b.AssertFileContent("public/en/bundles/b1/index.html", "RelPermalink: /blog/en/bundles/b1/|") - c.Assert(len(bundleEn.Resources()), qt.Equals, 1) - logoEn := bundleEn.Resources().GetMatch("logo*") - c.Assert(logoEn, qt.Not(qt.IsNil)) - b.AssertFileContent("public/en/bundles/b1/index.html", "Resources: image/png: /blog/en/bundles/b1/logo.png") - b.AssertFileContent("public/en/bundles/b1/logo.png", "PNG Data") -} - -func TestMultiSitesRebuild(t *testing.T) { - // t.Parallel() not supported, see https://github.com/fortytw2/leaktest/issues/4 - // This leaktest seems to be a little bit shaky on Travis. 
- if !htesting.IsCI() { - defer leaktest.CheckTimeout(t, 10*time.Second)() - } - - c := qt.New(t) - - b := newMultiSiteTestDefaultBuilder(t).Running().CreateSites().Build(BuildCfg{}) - - sites := b.H.Sites - fs := b.Fs - - b.AssertFileContent("public/en/sect/doc2/index.html", "Single: doc2|Hello|en|", "\n\n<h1 id=\"doc2\">doc2</h1>\n\n<p><em>some content</em>") - - enSite := sites[0] - frSite := sites[1] - - c.Assert(len(enSite.RegularPages()), qt.Equals, 5) - c.Assert(len(frSite.RegularPages()), qt.Equals, 4) - - // Verify translations - b.AssertFileContent("public/en/sect/doc1-slug/index.html", "Hello") - b.AssertFileContent("public/fr/sect/doc1/index.html", "Bonjour") - - // check single page content - b.AssertFileContent("public/fr/sect/doc1/index.html", "Single", "Shortcode: Bonjour") - b.AssertFileContent("public/en/sect/doc1-slug/index.html", "Single", "Shortcode: Hello") - - homeEn := enSite.getPage(kinds.KindHome) - c.Assert(homeEn, qt.Not(qt.IsNil)) - c.Assert(len(homeEn.Translations()), qt.Equals, 3) - - contentFs := b.H.Fs.Source - - for i, this := range []struct { - preFunc func(t *testing.T) - events []fsnotify.Event - assertFunc func(t *testing.T) - }{ - // * Remove doc - // * Add docs existing languages - // (Add doc new language: TODO(bep) we should load config.toml as part of these so we can add languages). 
- // * Rename file - // * Change doc - // * Change a template - // * Change language file - { - func(t *testing.T) { - fs.Source.Remove("content/sect/doc2.en.md") - }, - []fsnotify.Event{{Name: filepath.FromSlash("content/sect/doc2.en.md"), Op: fsnotify.Remove}}, - func(t *testing.T) { - c.Assert(len(enSite.RegularPages()), qt.Equals, 4, qt.Commentf("1 en removed")) - }, - }, - { - func(t *testing.T) { - writeNewContentFile(t, contentFs, "new_en_1", "2016-07-31", "content/new1.en.md", -5) - writeNewContentFile(t, contentFs, "new_en_2", "1989-07-30", "content/new2.en.md", -10) - writeNewContentFile(t, contentFs, "new_fr_1", "2016-07-30", "content/new1.fr.md", 10) - }, - []fsnotify.Event{ - {Name: filepath.FromSlash("content/new1.en.md"), Op: fsnotify.Create}, - {Name: filepath.FromSlash("content/new2.en.md"), Op: fsnotify.Create}, - {Name: filepath.FromSlash("content/new1.fr.md"), Op: fsnotify.Create}, - }, - func(t *testing.T) { - c.Assert(len(enSite.RegularPages()), qt.Equals, 6) - c.Assert(len(enSite.AllPages()), qt.Equals, 34) - c.Assert(len(frSite.RegularPages()), qt.Equals, 5) - c.Assert(frSite.RegularPages()[3].Title(), qt.Equals, "new_fr_1") - c.Assert(enSite.RegularPages()[0].Title(), qt.Equals, "new_en_2") - c.Assert(enSite.RegularPages()[1].Title(), qt.Equals, "new_en_1") - - rendered := readWorkingDir(t, fs, "public/en/new1/index.html") - c.Assert(strings.Contains(rendered, "new_en_1"), qt.Equals, true) - }, - }, - { - func(t *testing.T) { - p := "content/sect/doc1.en.md" - doc1 := readFileFromFs(t, contentFs, p) - doc1 += "CHANGED" - writeToFs(t, contentFs, p, doc1) - }, - []fsnotify.Event{{Name: filepath.FromSlash("content/sect/doc1.en.md"), Op: fsnotify.Write}}, - func(t *testing.T) { - c.Assert(len(enSite.RegularPages()), qt.Equals, 6) - doc1 := readWorkingDir(t, fs, "public/en/sect/doc1-slug/index.html") - c.Assert(strings.Contains(doc1, "CHANGED"), qt.Equals, true) - }, - }, - // Rename a file - { - func(t *testing.T) { - if err := 
contentFs.Rename("content/new1.en.md", "content/new1renamed.en.md"); err != nil { - t.Fatalf("Rename failed: %s", err) - } - }, - []fsnotify.Event{ - {Name: filepath.FromSlash("content/new1renamed.en.md"), Op: fsnotify.Rename}, - {Name: filepath.FromSlash("content/new1.en.md"), Op: fsnotify.Rename}, - }, - func(t *testing.T) { - c.Assert(len(enSite.RegularPages()), qt.Equals, 6, qt.Commentf("Rename")) - c.Assert(enSite.RegularPages()[1].Title(), qt.Equals, "new_en_1") - rendered := readWorkingDir(t, fs, "public/en/new1renamed/index.html") - c.Assert(rendered, qt.Contains, "new_en_1") - }, - }, - { - // Change a template - func(t *testing.T) { - template := "layouts/_default/single.html" - templateContent := readSource(t, fs, template) - templateContent += "{{ print \"Template Changed\"}}" - writeSource(t, fs, template, templateContent) - }, - []fsnotify.Event{{Name: filepath.FromSlash("layouts/_default/single.html"), Op: fsnotify.Write}}, - func(t *testing.T) { - c.Assert(len(enSite.RegularPages()), qt.Equals, 6) - c.Assert(len(enSite.AllPages()), qt.Equals, 34) - c.Assert(len(frSite.RegularPages()), qt.Equals, 5) - doc1 := readWorkingDir(t, fs, "public/en/sect/doc1-slug/index.html") - c.Assert(strings.Contains(doc1, "Template Changed"), qt.Equals, true) - }, - }, - { - // Change a language file - func(t *testing.T) { - languageFile := "i18n/fr.yaml" - langContent := readSource(t, fs, languageFile) - langContent = strings.Replace(langContent, "Bonjour", "Salut", 1) - writeSource(t, fs, languageFile, langContent) - }, - []fsnotify.Event{{Name: filepath.FromSlash("i18n/fr.yaml"), Op: fsnotify.Write}}, - func(t *testing.T) { - c.Assert(len(enSite.RegularPages()), qt.Equals, 6) - c.Assert(len(enSite.AllPages()), qt.Equals, 34) - c.Assert(len(frSite.RegularPages()), qt.Equals, 5) - docEn := readWorkingDir(t, fs, "public/en/sect/doc1-slug/index.html") - c.Assert(strings.Contains(docEn, "Hello"), qt.Equals, true) - docFr := readWorkingDir(t, fs, 
"public/fr/sect/doc1/index.html") - c.Assert(strings.Contains(docFr, "Salut"), qt.Equals, true) - - homeEn := enSite.getPage(kinds.KindHome) - c.Assert(homeEn, qt.Not(qt.IsNil)) - c.Assert(len(homeEn.Translations()), qt.Equals, 3) - c.Assert(homeEn.Translations()[0].Language().Lang, qt.Equals, "fr") - }, - }, - // Change a shortcode - { - func(t *testing.T) { - writeSource(t, fs, "layouts/shortcodes/shortcode.html", "Modified Shortcode: {{ i18n \"hello\" }}") - }, - []fsnotify.Event{ - {Name: filepath.FromSlash("layouts/shortcodes/shortcode.html"), Op: fsnotify.Write}, - }, - func(t *testing.T) { - c.Assert(len(enSite.RegularPages()), qt.Equals, 6) - c.Assert(len(enSite.AllPages()), qt.Equals, 34) - c.Assert(len(frSite.RegularPages()), qt.Equals, 5) - b.AssertFileContent("public/fr/sect/doc1/index.html", "Single", "Modified Shortcode: Salut") - b.AssertFileContent("public/en/sect/doc1-slug/index.html", "Single", "Modified Shortcode: Hello") - }, - }, - } { - - if this.preFunc != nil { - this.preFunc(t) - } - - err := b.H.Build(BuildCfg{}, this.events...) 
- if err != nil { - t.Fatalf("[%d] Failed to rebuild sites: %s", i, err) - } - - this.assertFunc(t) - } -} - // https://github.com/gohugoio/hugo/issues/4706 func TestContentStressTest(t *testing.T) { b := newTestSitesBuilder(t) @@ -774,8 +271,8 @@ categories: ["mycat"] t.Run(path, func(t *testing.T) { c := qt.New(t) - s1, _ := b.H.Sites[0].getPageNew(nil, path) - s2, _ := b.H.Sites[1].getPageNew(nil, path) + s1, _ := b.H.Sites[0].getPage(nil, path) + s2, _ := b.H.Sites[1].getPage(nil, path) c.Assert(s1, qt.Not(qt.IsNil)) c.Assert(s2, qt.Not(qt.IsNil)) @@ -794,310 +291,6 @@ categories: ["mycat"] } } -var tocShortcode = ` -TOC1: {{ .Page.TableOfContents }} - -TOC2: {{ .Page.TableOfContents }} -` - -func TestSelfReferencedContentInShortcode(t *testing.T) { - t.Parallel() - - b := newMultiSiteTestDefaultBuilder(t) - - var ( - shortcode = `{{- .Page.Content -}}{{- .Page.Summary -}}{{- .Page.Plain -}}{{- .Page.PlainWords -}}{{- .Page.WordCount -}}{{- .Page.ReadingTime -}}` - - page = `--- -title: sctest ---- -Empty:{{< mycontent >}}: -` - ) - - b.WithTemplatesAdded("layouts/shortcodes/mycontent.html", shortcode) - b.WithContent("post/simple.en.md", page) - - b.CreateSites().Build(BuildCfg{}) - - b.AssertFileContent("public/en/post/simple/index.html", "Empty:[]00:") -} - -var tocPageSimple = `--- -title: tocTest -publishdate: "2000-01-01" ---- -{{< toc >}} -# Heading 1 {#1} -Some text. -## Subheading 1.1 {#1-1} -Some more text. -# Heading 2 {#2} -Even more text. -## Subheading 2.1 {#2-1} -Lorem ipsum... 
-` - -var tocPageVariants1 = `--- -title: tocTest -publishdate: "2000-01-01" ---- -Variant 1: -{{% wrapper %}} -{{< toc >}} -{{% /wrapper %}} -# Heading 1 - -Variant 3: -{{% toc %}} - -` - -var tocPageVariants2 = `--- -title: tocTest -publishdate: "2000-01-01" ---- -Variant 1: -{{% wrapper %}} -{{< toc >}} -{{% /wrapper %}} -# Heading 1 - -Variant 2: -{{< wrapper >}} -{{< toc >}} -{{< /wrapper >}} - -Variant 3: -{{% toc %}} - -` - -var tocPageSimpleExpected = `<nav id="TableOfContents"> -<ul> -<li><a href="#1">Heading 1</a> -<ul> -<li><a href="#1-1">Subheading 1.1</a></li> -</ul></li> -<li><a href="#2">Heading 2</a> -<ul> -<li><a href="#2-1">Subheading 2.1</a></li> -</ul></li> -</ul> -</nav>` - -var tocPageWithShortcodesInHeadings = `--- -title: tocTest -publishdate: "2000-01-01" ---- - -{{< toc >}} - -# Heading 1 {#1} - -Some text. - -## Subheading 1.1 {{< shortcode >}} {#1-1} - -Some more text. - -# Heading 2 {{% shortcode %}} {#2} - -Even more text. - -## Subheading 2.1 {#2-1} - -Lorem ipsum... 
-` - -var tocPageWithShortcodesInHeadingsExpected = `<nav id="TableOfContents"> -<ul> -<li><a href="#1">Heading 1</a> -<ul> -<li><a href="#1-1">Subheading 1.1 Shortcode: Hello</a></li> -</ul></li> -<li><a href="#2">Heading 2 Shortcode: Hello</a> -<ul> -<li><a href="#2-1">Subheading 2.1</a></li> -</ul></li> -</ul> -</nav>` - -var multiSiteTOMLConfigTemplate = ` -baseURL = "http://example.com/blog" - -paginate = 1 -disablePathToLower = true -defaultContentLanguage = "{{ .DefaultContentLanguage }}" -defaultContentLanguageInSubdir = {{ .DefaultContentLanguageInSubdir }} -enableRobotsTXT = true - -[permalinks] -other = "/somewhere/else/:filename" - -[Taxonomies] -tag = "tags" - -[Languages] -[Languages.en] -weight = 10 -title = "In English" -languageName = "English" -[[Languages.en.menu.main]] -url = "/" -name = "Home" -weight = 0 - -[Languages.fr] -weight = 20 -title = "Le Français" -languageName = "Français" -[Languages.fr.Taxonomies] -plaque = "plaques" - -[Languages.nn] -weight = 30 -title = "På nynorsk" -languageName = "Nynorsk" -paginatePath = "side" -[Languages.nn.Taxonomies] -lag = "lag" -[[Languages.nn.menu.main]] -url = "/" -name = "Heim" -weight = 1 - -[Languages.nb] -weight = 40 -title = "På bokmål" -languageName = "Bokmål" -paginatePath = "side" -[Languages.nb.Taxonomies] -lag = "lag" -` - -var multiSiteYAMLConfigTemplate = ` -baseURL: "http://example.com/blog" - -disablePathToLower: true -paginate: 1 -defaultContentLanguage: "{{ .DefaultContentLanguage }}" -defaultContentLanguageInSubdir: {{ .DefaultContentLanguageInSubdir }} -enableRobotsTXT: true - -permalinks: - other: "/somewhere/else/:filename" - -Taxonomies: - tag: "tags" - -Languages: - en: - weight: 10 - title: "In English" - languageName: "English" - menu: - main: - - url: "/" - name: "Home" - weight: 0 - fr: - weight: 20 - title: "Le Français" - languageName: "Français" - Taxonomies: - plaque: "plaques" - nn: - weight: 30 - title: "På nynorsk" - languageName: "Nynorsk" - paginatePath: "side" - 
Taxonomies: - lag: "lag" - menu: - main: - - url: "/" - name: "Heim" - weight: 1 - nb: - weight: 40 - title: "På bokmål" - languageName: "Bokmål" - paginatePath: "side" - Taxonomies: - lag: "lag" - -` - -// TODO(bep) clean move -var multiSiteJSONConfigTemplate = ` -{ - "baseURL": "http://example.com/blog", - "paginate": 1, - "disablePathToLower": true, - "defaultContentLanguage": "{{ .DefaultContentLanguage }}", - "defaultContentLanguageInSubdir": true, - "enableRobotsTXT": true, - "permalinks": { - "other": "/somewhere/else/:filename" - }, - "Taxonomies": { - "tag": "tags" - }, - "Languages": { - "en": { - "weight": 10, - "title": "In English", - "languageName": "English", - "menu": { - "main": [ - { - "url": "/", - "name": "Home", - "weight": 0 - } - ] - } - }, - "fr": { - "weight": 20, - "title": "Le Français", - "languageName": "Français", - "Taxonomies": { - "plaque": "plaques" - } - }, - "nn": { - "weight": 30, - "title": "På nynorsk", - "paginatePath": "side", - "languageName": "Nynorsk", - "Taxonomies": { - "lag": "lag" - }, - "menu": { - "main": [ - { - "url": "/", - "name": "Heim", - "weight": 1 - } - ] - } - }, - "nb": { - "weight": 40, - "title": "På bokmål", - "paginatePath": "side", - "languageName": "Bokmål", - "Taxonomies": { - "lag": "lag" - } - } - } -} -` - func writeSource(t testing.TB, fs *hugofs.Fs, filename, content string) { t.Helper() writeToFs(t, fs.Source, filename, content) @@ -1105,7 +298,7 @@ func writeSource(t testing.TB, fs *hugofs.Fs, filename, content string) { func writeToFs(t testing.TB, fs afero.Fs, filename, content string) { t.Helper() - if err := afero.WriteFile(fs, filepath.FromSlash(filename), []byte(content), 0755); err != nil { + if err := afero.WriteFile(fs, filepath.FromSlash(filename), []byte(content), 0o755); err != nil { t.Fatalf("Failed to write file: %s", err) } } @@ -1123,10 +316,6 @@ func workingDirExists(fs *hugofs.Fs, filename string) bool { return b } -func readSource(t *testing.T, fs *hugofs.Fs, filename 
string) string { - return readFileFromFs(t, fs.Source, filename) -} - func readFileFromFs(t testing.TB, fs afero.Fs, filename string) string { t.Helper() filename = filepath.Clean(filename) @@ -1171,228 +360,6 @@ func newTestPage(title, date string, weight int) string { return fmt.Sprintf(testPageTemplate, title, date, weight, title) } -func writeNewContentFile(t *testing.T, fs afero.Fs, title, date, filename string, weight int) { - content := newTestPage(title, date, weight) - writeToFs(t, fs, filename, content) -} - -type multiSiteTestBuilder struct { - configData any - config string - configFormat string - - *sitesBuilder -} - -func newMultiSiteTestDefaultBuilder(t testing.TB) *multiSiteTestBuilder { - return newMultiSiteTestBuilder(t, "", "", nil) -} - -func (b *multiSiteTestBuilder) WithNewConfig(config string) *multiSiteTestBuilder { - b.WithConfigTemplate(b.configData, b.configFormat, config) - return b -} - -func (b *multiSiteTestBuilder) WithNewConfigData(data any) *multiSiteTestBuilder { - b.WithConfigTemplate(data, b.configFormat, b.config) - return b -} - -func newMultiSiteTestBuilder(t testing.TB, configFormat, config string, configData any) *multiSiteTestBuilder { - if configData == nil { - configData = map[string]any{ - "DefaultContentLanguage": "fr", - "DefaultContentLanguageInSubdir": true, - } - } - - if config == "" { - config = multiSiteTOMLConfigTemplate - } - - if configFormat == "" { - configFormat = "toml" - } - - b := newTestSitesBuilder(t).WithConfigTemplate(configData, configFormat, config) - b.WithContent("root.en.md", `--- -title: root -weight: 10000 -slug: root -publishdate: "2000-01-01" ---- -# root -`, - "sect/doc1.en.md", `--- -title: doc1 -weight: 1 -slug: doc1-slug -tags: - - tag1 -publishdate: "2000-01-01" ---- -# doc1 -*some "content"* - -{{< shortcode >}} - -{{< lingo >}} - -NOTE: slug should be used as URL -`, - "sect/doc1.fr.md", `--- -title: doc1 -weight: 1 -plaques: - - FRtag1 - - FRtag2 -publishdate: "2000-01-04" ---- -# 
doc1 -*quelque "contenu"* - -{{< shortcode >}} - -{{< lingo >}} - -NOTE: should be in the 'en' Page's 'Translations' field. -NOTE: date is after "doc3" -`, - "sect/doc2.en.md", `--- -title: doc2 -weight: 2 -publishdate: "2000-01-02" ---- -# doc2 -*some content* -NOTE: without slug, "doc2" should be used, without ".en" as URL -`, - "sect/doc3.en.md", `--- -title: doc3 -weight: 3 -publishdate: "2000-01-03" -aliases: [/en/al/alias1,/al/alias2/] -tags: - - tag2 - - tag1 -url: /superbob/ ---- -# doc3 -*some content* -NOTE: third 'en' doc, should trigger pagination on home page. -`, - "sect/doc4.md", `--- -title: doc4 -weight: 4 -plaques: - - FRtag1 -publishdate: "2000-01-05" ---- -# doc4 -*du contenu francophone* -NOTE: should use the defaultContentLanguage and mark this doc as 'fr'. -NOTE: doesn't have any corresponding translation in 'en' -`, - "other/doc5.fr.md", `--- -title: doc5 -weight: 5 -publishdate: "2000-01-06" ---- -# doc5 -*autre contenu francophone* -NOTE: should use the "permalinks" configuration with :filename -`, - // Add some for the stats - "stats/expired.fr.md", `--- -title: expired -publishdate: "2000-01-06" -expiryDate: "2001-01-06" ---- -# Expired -`, - "stats/future.fr.md", `--- -title: future -weight: 6 -publishdate: "2100-01-06" ---- -# Future -`, - "stats/expired.en.md", `--- -title: expired -weight: 7 -publishdate: "2000-01-06" -expiryDate: "2001-01-06" ---- -# Expired -`, - "stats/future.en.md", `--- -title: future -weight: 6 -publishdate: "2100-01-06" ---- -# Future -`, - "stats/draft.en.md", `--- -title: expired -publishdate: "2000-01-06" -draft: true ---- -# Draft -`, - "stats/tax.nn.md", `--- -title: Tax NN -weight: 8 -publishdate: "2000-01-06" -weight: 1001 -lag: -- Sogndal ---- -# Tax NN -`, - "stats/tax.nb.md", `--- -title: Tax NB -weight: 8 -publishdate: "2000-01-06" -weight: 1002 -lag: -- Sogndal ---- -# Tax NB -`, - // Bundle - "bundles/b1/index.en.md", `--- -title: Bundle EN -publishdate: "2000-01-06" -weight: 2001 ---- -# Bundle 
Content EN -`, - "bundles/b1/index.md", `--- -title: Bundle Default -publishdate: "2000-01-06" -weight: 2002 ---- -# Bundle Content Default -`, - "bundles/b1/logo.png", ` -PNG Data -`) - - i18nContent := func(id, value string) string { - return fmt.Sprintf(` -[%s] -other = %q -`, id, value) - } - - b.WithSourceFile("i18n/en.toml", i18nContent("hello", "Hello")) - b.WithSourceFile("i18n/fr.toml", i18nContent("hello", "Bonjour")) - b.WithSourceFile("i18n/nb.toml", i18nContent("hello", "Hallo")) - b.WithSourceFile("i18n/nn.toml", i18nContent("hello", "Hallo")) - - return &multiSiteTestBuilder{sitesBuilder: b, configFormat: configFormat, config: config, configData: configData} -} - func TestRebuildOnAssetChange(t *testing.T) { b := newTestSitesBuilder(t).Running().WithLogger(loggers.NewDefault()) b.WithTemplatesAdded("index.html", ` diff --git a/hugolib/hugo_sites_multihost_test.go b/hugolib/hugo_sites_multihost_test.go index a37310987..a9aad67b2 100644 --- a/hugolib/hugo_sites_multihost_test.go +++ b/hugolib/hugo_sites_multihost_test.go @@ -3,19 +3,15 @@ package hugolib import ( "testing" - "github.com/gohugoio/hugo/resources/kinds" - qt "github.com/frankban/quicktest" ) -func TestMultihosts(t *testing.T) { +func TestMultihost(t *testing.T) { t.Parallel() - c := qt.New(t) - - configTemplate := ` + files := ` +-- hugo.toml -- paginate = 1 -disablePathToLower = true defaultContentLanguage = "fr" defaultContentLanguageInSubdir = false staticDir = ["s1", "s2"] @@ -24,98 +20,189 @@ enableRobotsTXT = true [permalinks] other = "/somewhere/else/:filename" -[Taxonomies] +[taxonomies] tag = "tags" -[Languages] -[Languages.en] -staticDir2 = ["ens1", "ens2"] +[languages] +[languages.en] +staticDir2 = ["staticen"] baseURL = "https://example.com/docs" weight = 10 title = "In English" languageName = "English" - -[Languages.fr] -staticDir2 = ["frs1", "frs2"] +[languages.fr] +staticDir2 = ["staticfr"] baseURL = "https://example.fr" weight = 20 title = "Le Français" languageName = 
"Français" +-- assets/css/main.css -- +body { color: red; } +-- content/mysect/mybundle/index.md -- +--- +tags: [a, b] +title: "My Bundle fr" +--- +My Bundle +-- content/mysect/mybundle/index.en.md -- +--- +tags: [c, d] +title: "My Bundle en" +--- +My Bundle +-- content/mysect/mybundle/foo.txt -- +Foo +-- layouts/_default/list.html -- +List|{{ .Title }}|{{ .Lang }}|{{ .Permalink}}|{{ .RelPermalink }}| +-- layouts/_default/single.html -- +Single|{{ .Title }}|{{ .Lang }}|{{ .Permalink}}|{{ .RelPermalink }}| +{{ $foo := .Resources.Get "foo.txt" | fingerprint }} +Foo: {{ $foo.Permalink }}| +{{ $css := resources.Get "css/main.css" | fingerprint }} +CSS: {{ $css.Permalink }}|{{ $css.RelPermalink }}| +-- layouts/robots.txt -- +robots|{{ site.Language.Lang }} +-- layouts/404.html -- +404|{{ site.Language.Lang }} + + + +` + + b := Test(t, files) + + b.Assert(b.H.Conf.IsMultiLingual(), qt.Equals, true) + b.Assert(b.H.Conf.IsMultihost(), qt.Equals, true) + + // helpers.PrintFs(b.H.Fs.PublishDir, "", os.Stdout) -[Languages.nn] -staticDir2 = ["nns1", "nns2"] -baseURL = "https://example.no" -weight = 30 -title = "På nynorsk" -languageName = "Nynorsk" + // Check regular pages. + b.AssertFileContent("public/en/mysect/mybundle/index.html", "Single|My Bundle en|en|https://example.com/docs/mysect/mybundle/|") + b.AssertFileContent("public/fr/mysect/mybundle/index.html", "Single|My Bundle fr|fr|https://example.fr/mysect/mybundle/|") + + // Check robots.txt + b.AssertFileContent("public/en/robots.txt", "robots|en") + b.AssertFileContent("public/fr/robots.txt", "robots|fr") + + // Check sitemap.xml + b.AssertFileContent("public/en/sitemap.xml", "https://example.com/docs/mysect/mybundle/") + b.AssertFileContent("public/fr/sitemap.xml", "https://example.fr/mysect/mybundle/") + + // Check 404 + b.AssertFileContent("public/en/404.html", "404|en") + b.AssertFileContent("public/fr/404.html", "404|fr") + + // Check tags. 
+ b.AssertFileContent("public/en/tags/d/index.html", "List|D|en|https://example.com/docs/tags/d/") + b.AssertFileContent("public/fr/tags/b/index.html", "List|B|fr|https://example.fr/tags/b/") + b.AssertFileExists("public/en/tags/b/index.html", false) + b.AssertFileExists("public/fr/tags/d/index.html", false) + + // en/mysect/mybundle/foo.txt fingerprinted + b.AssertFileContent("public/en/mysect/mybundle/foo.1cbec737f863e4922cee63cc2ebbfaafcd1cff8b790d8cfd2e6a5d550b648afa.txt", "Foo") + b.AssertFileContent("public/en/mysect/mybundle/index.html", "Foo: https://example.com/docs/mysect/mybundle/foo.1cbec737f863e4922cee63cc2ebbfaafcd1cff8b790d8cfd2e6a5d550b648afa.txt|") + b.AssertFileContent("public/fr/mysect/mybundle/foo.1cbec737f863e4922cee63cc2ebbfaafcd1cff8b790d8cfd2e6a5d550b648afa.txt", "Foo") + b.AssertFileContent("public/fr/mysect/mybundle/index.html", "Foo: https://example.fr/mysect/mybundle/foo.1cbec737f863e4922cee63cc2ebbfaafcd1cff8b790d8cfd2e6a5d550b648afa.txt|") + + // Assets CSS fingerprinted + b.AssertFileContent("public/en/mysect/mybundle/index.html", "CSS: https://example.fr/css/main.5de625c36355cce7c1d5408826a0b21abfb49fb6c0e1f16c945a6f2aef38200c.css|") + b.AssertFileContent("public/en/css/main.5de625c36355cce7c1d5408826a0b21abfb49fb6c0e1f16c945a6f2aef38200c.css", "body { color: red; }") + b.AssertFileContent("public/fr/mysect/mybundle/index.html", "CSS: https://example.fr/css/main.5de625c36355cce7c1d5408826a0b21abfb49fb6c0e1f16c945a6f2aef38200c.css|") + b.AssertFileContent("public/fr/css/main.5de625c36355cce7c1d5408826a0b21abfb49fb6c0e1f16c945a6f2aef38200c.css", "body { color: red; }") +} + +func TestMultihostResourcePerLanguageMultihostMinify(t *testing.T) { + t.Parallel() + files := ` +-- hugo.toml -- +disableKinds = ["taxonomy", "term"] +defaultContentLanguage = "en" +defaultContentLanguageInSubDir = true +[languages] +[languages.en] +baseURL = "https://example.en" +weight = 1 +contentDir = "content/en" +[languages.fr] +baseURL = 
"https://example.fr" +weight = 2 +contentDir = "content/fr" +-- content/en/section/mybundle/index.md -- +--- +title: "Mybundle en" +--- +-- content/fr/section/mybundle/index.md -- +--- +title: "Mybundle fr" +--- +-- content/en/section/mybundle/styles.css -- +.body { + color: english; +} +-- content/fr/section/mybundle/styles.css -- +.body { + color: french; +} +-- layouts/_default/single.html -- +{{ $data := .Resources.GetMatch "styles*" | minify }} +{{ .Lang }}: {{ $data.Content}}|{{ $data.RelPermalink }}| ` + b := Test(t, files) + + b.AssertFileContent("public/fr/section/mybundle/index.html", + "fr: .body{color:french}|/section/mybundle/styles.min.css|", + ) + + b.AssertFileContent("public/en/section/mybundle/index.html", + "en: .body{color:english}|/section/mybundle/styles.min.css|", + ) - b := newMultiSiteTestDefaultBuilder(t).WithConfigFile("toml", configTemplate) - b.CreateSites().Build(BuildCfg{}) + b.AssertFileContent("public/en/section/mybundle/styles.min.css", ".body{color:english}") + b.AssertFileContent("public/fr/section/mybundle/styles.min.css", ".body{color:french}") +} - b.AssertFileContent("public/en/sect/doc1-slug/index.html", "Hello") +func TestMultihostResourceOneBaseURLWithSuPath(t *testing.T) { + files := ` +-- hugo.toml -- +defaultContentLanguage = "en" +[languages] +[languages.en] +baseURL = "https://example.com/docs" +weight = 1 +contentDir = "content/en" +[languages.en.permalinks] +section = "/enpages/:slug/" +[languages.fr] +baseURL = "https://example.fr" +contentDir = "content/fr" +-- content/en/section/mybundle/index.md -- +--- +title: "Mybundle en" +--- +-- content/fr/section/mybundle/index.md -- +--- +title: "Mybundle fr" +--- +-- content/fr/section/mybundle/file1.txt -- +File 1 fr. +-- content/en/section/mybundle/file1.txt -- +File 1 en. +-- content/en/section/mybundle/file2.txt -- +File 2 en. 
+-- layouts/_default/single.html -- +{{ $files := .Resources.Match "file*" }} +Files: {{ range $files }}{{ .Permalink }}|{{ end }}$ - s1 := b.H.Sites[0] +` - s1h := s1.getPage(kinds.KindHome) - c.Assert(s1h.IsTranslated(), qt.Equals, true) - c.Assert(len(s1h.Translations()), qt.Equals, 2) - c.Assert(s1h.Permalink(), qt.Equals, "https://example.com/docs/") + b := Test(t, files) - // For “regular multilingual” we kept the aliases pages with url in front matter - // as a literal value that we use as is. - // There is an ambiguity in the guessing. - // For multihost, we never want any content in the root. - // - // check url in front matter: - pageWithURLInFrontMatter := s1.getPage(kinds.KindPage, "sect/doc3.en.md") - c.Assert(pageWithURLInFrontMatter, qt.Not(qt.IsNil)) - c.Assert(pageWithURLInFrontMatter.RelPermalink(), qt.Equals, "/docs/superbob/") - b.AssertFileContent("public/en/superbob/index.html", "doc3|Hello|en") + b.AssertFileContent("public/en/enpages/mybundle-en/index.html", "Files: https://example.com/docs/enpages/mybundle-en/file1.txt|https://example.com/docs/enpages/mybundle-en/file2.txt|$") + b.AssertFileContent("public/fr/section/mybundle/index.html", "Files: https://example.fr/section/mybundle/file1.txt|https://example.fr/section/mybundle/file2.txt|$") - // the domain root is the language directory for each language, so the robots.txt is created in the language directories - b.AssertFileContent("public/en/robots.txt", "robots|en") - b.AssertFileContent("public/fr/robots.txt", "robots|fr") - b.AssertFileContent("public/nn/robots.txt", "robots|nn") - b.AssertFileDoesNotExist("public/robots.txt") - - // check alias: - b.AssertFileContent("public/en/al/alias1/index.html", `content="0; url=https://example.com/docs/superbob/"`) - b.AssertFileContent("public/en/al/alias2/index.html", `content="0; url=https://example.com/docs/superbob/"`) - - s2 := b.H.Sites[1] - - s2h := s2.getPage(kinds.KindHome) - c.Assert(s2h.Permalink(), qt.Equals, "https://example.fr/") 
- - // See https://github.com/gohugoio/hugo/issues/10912 - b.AssertFileContent("public/fr/index.html", "French Home Page", "String Resource: /docs/text/pipes.txt") - b.AssertFileContent("public/fr/text/pipes.txt", "Hugo Pipes") - b.AssertFileContent("public/en/index.html", "Default Home Page", "String Resource: /docs/text/pipes.txt") - b.AssertFileContent("public/en/text/pipes.txt", "Hugo Pipes") - b.AssertFileContent("public/nn/index.html", "Default Home Page", "String Resource: /docs/text/pipes.txt") - - // Check paginators - b.AssertFileContent("public/en/page/1/index.html", `refresh" content="0; url=https://example.com/docs/"`) - b.AssertFileContent("public/nn/page/1/index.html", `refresh" content="0; url=https://example.no/"`) - b.AssertFileContent("public/en/sect/page/2/index.html", "List Page 2", "Hello", "https://example.com/docs/sect/", "\"/docs/sect/page/3/") - b.AssertFileContent("public/fr/sect/page/2/index.html", "List Page 2", "Bonjour", "https://example.fr/sect/") - - // Check bundles - - bundleEn := s1.getPage(kinds.KindPage, "bundles/b1/index.en.md") - c.Assert(bundleEn, qt.Not(qt.IsNil)) - c.Assert(bundleEn.RelPermalink(), qt.Equals, "/docs/bundles/b1/") - c.Assert(len(bundleEn.Resources()), qt.Equals, 1) - - b.AssertFileContent("public/en/bundles/b1/logo.png", "PNG Data") - b.AssertFileContent("public/en/bundles/b1/index.html", " image/png: /docs/bundles/b1/logo.png") - - bundleFr := s2.getPage(kinds.KindPage, "bundles/b1/index.md") - c.Assert(bundleFr, qt.Not(qt.IsNil)) - c.Assert(bundleFr.RelPermalink(), qt.Equals, "/bundles/b1/") - c.Assert(len(bundleFr.Resources()), qt.Equals, 1) - b.AssertFileContent("public/fr/bundles/b1/logo.png", "PNG Data") - b.AssertFileContent("public/fr/bundles/b1/index.html", " image/png: /bundles/b1/logo.png") + b.AssertFileContent("public/en/enpages/mybundle-en/file1.txt", "File 1 en.") + b.AssertFileContent("public/fr/section/mybundle/file1.txt", "File 1 fr.") + 
b.AssertFileContent("public/en/enpages/mybundle-en/file2.txt", "File 2 en.") + b.AssertFileContent("public/fr/section/mybundle/file2.txt", "File 2 en.") } diff --git a/hugolib/hugo_sites_rebuild_test.go b/hugolib/hugo_sites_rebuild_test.go deleted file mode 100644 index d312d2199..000000000 --- a/hugolib/hugo_sites_rebuild_test.go +++ /dev/null @@ -1,316 +0,0 @@ -// Copyright 2019 The Hugo Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package hugolib - -import ( - "testing" - - qt "github.com/frankban/quicktest" -) - -func TestSitesRebuild(t *testing.T) { - configFile := ` -baseURL = "https://example.com" -title = "Rebuild this" -contentDir = "content" -enableInlineShortcodes = true -timeout = "5s" - - -` - - var ( - contentFilename = "content/blog/page1.md" - dataFilename = "data/mydata.toml" - ) - - createSiteBuilder := func(t testing.TB) *sitesBuilder { - b := newTestSitesBuilder(t).WithConfigFile("toml", configFile).Running() - - b.WithSourceFile(dataFilename, `hugo = "Rocks!"`) - - b.WithContent("content/_index.md", `--- -title: Home, Sweet Home! ---- - -`) - - b.WithContent(contentFilename, ` ---- -title: "Page 1" -summary: "Initial summary" -paginate: 3 ---- - -Content. - -{{< badge.inline >}} -Data Inline: {{ site.Data.mydata.hugo }} -{{< /badge.inline >}} -`) - - // For .Page.Render tests - b.WithContent("prender.md", `--- -title: Page 1 ---- - -Content for Page 1. 
- -{{< dorender >}} - -`) - - b.WithTemplatesAdded( - "layouts/shortcodes/dorender.html", ` -{{ $p := .Page }} -Render {{ $p.RelPermalink }}: {{ $p.Render "single" }} - -`) - - b.WithTemplatesAdded("index.html", ` -{{ range (.Paginate .Site.RegularPages).Pages }} -* Page Paginate: {{ .Title }}|Summary: {{ .Summary }}|Content: {{ .Content }} -{{ end }} -{{ range .Site.RegularPages }} -* Page Pages: {{ .Title }}|Summary: {{ .Summary }}|Content: {{ .Content }} -{{ end }} -Content: {{ .Content }} -Data: {{ site.Data.mydata.hugo }} -`) - - b.WithTemplatesAdded("layouts/partials/mypartial1.html", `Mypartial1`) - b.WithTemplatesAdded("layouts/partials/mypartial2.html", `Mypartial2`) - b.WithTemplatesAdded("layouts/partials/mypartial3.html", `Mypartial3`) - b.WithTemplatesAdded("_default/single.html", `{{ define "main" }}Single Main: {{ .Title }}|Mypartial1: {{ partial "mypartial1.html" }}{{ end }}`) - b.WithTemplatesAdded("_default/list.html", `{{ define "main" }}List Main: {{ .Title }}{{ end }}`) - b.WithTemplatesAdded("_default/baseof.html", `Baseof:{{ block "main" . }}Baseof Main{{ end }}|Mypartial3: {{ partial "mypartial3.html" }}:END`) - - return b - } - - t.Run("Refresh paginator on edit", func(t *testing.T) { - b := createSiteBuilder(t) - - b.Build(BuildCfg{}) - - b.AssertFileContent("public/index.html", "* Page Paginate: Page 1|Summary: Initial summary|Content: <p>Content.</p>") - - b.EditFiles(contentFilename, ` ---- -title: "Page 1 edit" -summary: "Edited summary" ---- - -Edited content. 
- -`) - - b.Build(BuildCfg{}) - - b.AssertFileContent("public/index.html", "* Page Paginate: Page 1 edit|Summary: Edited summary|Content: <p>Edited content.</p>") - // https://github.com/gohugoio/hugo/issues/5833 - b.AssertFileContent("public/index.html", "* Page Pages: Page 1 edit|Summary: Edited summary|Content: <p>Edited content.</p>") - }) - - // https://github.com/gohugoio/hugo/issues/6768 - t.Run("Edit data", func(t *testing.T) { - b := createSiteBuilder(t) - - b.Build(BuildCfg{}) - - b.AssertFileContent("public/index.html", ` -Data: Rocks! -Data Inline: Rocks! -`) - - b.EditFiles(dataFilename, `hugo = "Rules!"`) - - b.Build(BuildCfg{}) - - b.AssertFileContent("public/index.html", ` -Data: Rules! -Data Inline: Rules!`) - }) - - // https://github.com/gohugoio/hugo/issues/6968 - t.Run("Edit single.html with base", func(t *testing.T) { - b := newTestSitesBuilder(t).Running() - - b.WithTemplates( - "_default/single.html", `{{ define "main" }}Single{{ end }}`, - "_default/baseof.html", `Base: {{ block "main" .}}Block{{ end }}`, - ) - - b.WithContent("p1.md", "---\ntitle: Page\n---") - - b.Build(BuildCfg{}) - - b.EditFiles("layouts/_default/single.html", `Single Edit: {{ define "main" }}Single{{ end }}`) - - counters := &testCounters{} - - b.Build(BuildCfg{testCounters: counters}) - - b.Assert(int(counters.contentRenderCounter), qt.Equals, 0) - }) - - t.Run("Page.Render, edit baseof", func(t *testing.T) { - b := createSiteBuilder(t) - - b.WithTemplatesAdded("index.html", ` -{{ $p := site.GetPage "prender.md" }} -prender: {{ $p.Title }}|{{ $p.Content }} - -`) - - b.Build(BuildCfg{}) - - b.AssertFileContent("public/index.html", ` - Render /prender/: Baseof:Single Main: Page 1|Mypartial1: Mypartial1|Mypartial3: Mypartial3:END -`) - - b.EditFiles("layouts/_default/baseof.html", `Baseof Edited:{{ block "main" . 
}}Baseof Main{{ end }}:END`) - - b.Build(BuildCfg{}) - - b.AssertFileContent("public/index.html", ` -Render /prender/: Baseof Edited:Single Main: Page 1|Mypartial1: Mypartial1:END -`) - }) - - t.Run("Page.Render, edit partial in baseof", func(t *testing.T) { - b := createSiteBuilder(t) - - b.WithTemplatesAdded("index.html", ` -{{ $p := site.GetPage "prender.md" }} -prender: {{ $p.Title }}|{{ $p.Content }} - -`) - - b.Build(BuildCfg{}) - - b.AssertFileContent("public/index.html", ` - Render /prender/: Baseof:Single Main: Page 1|Mypartial1: Mypartial1|Mypartial3: Mypartial3:END -`) - - b.EditFiles("layouts/partials/mypartial3.html", `Mypartial3 Edited`) - - b.Build(BuildCfg{}) - - b.AssertFileContent("public/index.html", ` -Render /prender/: Baseof:Single Main: Page 1|Mypartial1: Mypartial1|Mypartial3: Mypartial3 Edited:END -`) - }) - - t.Run("Edit RSS shortcode", func(t *testing.T) { - b := createSiteBuilder(t) - - b.WithContent("output.md", `--- -title: Output -outputs: ["HTML", "AMP"] -layout: output ---- - -Content for Output. 
- -{{< output >}} - -`) - - b.WithTemplates( - "layouts/_default/output.html", `Output HTML: {{ .RelPermalink }}|{{ .Content }}`, - "layouts/_default/output.amp.html", `Output AMP: {{ .RelPermalink }}|{{ .Content }}`, - "layouts/shortcodes/output.html", `Output Shortcode HTML`, - "layouts/shortcodes/output.amp.html", `Output Shortcode AMP`) - - b.Build(BuildCfg{}) - - b.AssertFileContent("public/output/index.html", ` -Output Shortcode HTML -`) - b.AssertFileContent("public/amp/output/index.html", ` -Output Shortcode AMP -`) - - b.EditFiles("layouts/shortcodes/output.amp.html", `Output Shortcode AMP Edited`) - - b.Build(BuildCfg{}) - - b.AssertFileContent("public/amp/output/index.html", ` -Output Shortcode AMP Edited -`) - }) -} - -// Issues #7623 #7625 -func TestSitesRebuildOnFilesIncludedWithGetPage(t *testing.T) { - b := newTestSitesBuilder(t).Running() - b.WithContent("pages/p1.md", `--- -title: p1 ---- -P3: {{< GetPage "pages/p3" >}} -`) - - b.WithContent("pages/p2.md", `--- -title: p2 ---- -P4: {{< site_GetPage "pages/p4" >}} -P5: {{< site_GetPage "p5" >}} -P6: {{< dot_site_GetPage "p6" >}} -`) - - b.WithContent("pages/p3/index.md", "---\ntitle: p3\nheadless: true\n---\nP3 content") - b.WithContent("pages/p4/index.md", "---\ntitle: p4\nheadless: true\n---\nP4 content") - b.WithContent("pages/p5.md", "---\ntitle: p5\n---\nP5 content") - b.WithContent("pages/p6.md", "---\ntitle: p6\n---\nP6 content") - - b.WithTemplates( - "_default/single.html", `{{ .Content }}`, - "shortcodes/GetPage.html", ` -{{ $arg := .Get 0 }} -{{ $p := .Page.GetPage $arg }} -{{ $p.Content }} - `, - "shortcodes/site_GetPage.html", ` -{{ $arg := .Get 0 }} -{{ $p := site.GetPage $arg }} -{{ $p.Content }} - `, "shortcodes/dot_site_GetPage.html", ` -{{ $arg := .Get 0 }} -{{ $p := .Site.GetPage $arg }} -{{ $p.Content }} - `, - ) - - b.Build(BuildCfg{}) - - b.AssertFileContent("public/pages/p1/index.html", "P3 content") - b.AssertFileContent("public/pages/p2/index.html", `P4 content -P5 content 
-P6 content -`) - - b.EditFiles("content/pages/p3/index.md", "---\ntitle: p3\n---\nP3 changed content") - b.EditFiles("content/pages/p4/index.md", "---\ntitle: p4\n---\nP4 changed content") - b.EditFiles("content/pages/p5.md", "---\ntitle: p5\n---\nP5 changed content") - b.EditFiles("content/pages/p6.md", "---\ntitle: p6\n---\nP6 changed content") - - b.Build(BuildCfg{}) - - b.AssertFileContent("public/pages/p1/index.html", "P3 changed content") - b.AssertFileContent("public/pages/p2/index.html", `P4 changed content -P5 changed content -P6 changed content -`) -} diff --git a/hugolib/hugo_sites_test.go b/hugolib/hugo_sites_test.go new file mode 100644 index 000000000..5e1a1504c --- /dev/null +++ b/hugolib/hugo_sites_test.go @@ -0,0 +1,58 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package hugolib + +import "testing" + +func TestSitesAndLanguageOrder(t *testing.T) { + files := ` +-- hugo.toml -- +defaultContentLanguage = "fr" +defaultContentLanguageInSubdir = true +[languages] +[languages.en] +weight = 1 +[languages.fr] +weight = 2 +[languages.de] +weight = 3 +-- layouts/index.html -- +{{ $bundle := site.GetPage "bundle" }} +Bundle all translations: {{ range $bundle.AllTranslations }}{{ .Lang }}|{{ end }}$ +Bundle translations: {{ range $bundle.Translations }}{{ .Lang }}|{{ end }}$ +Site languages: {{ range site.Languages }}{{ .Lang }}|{{ end }}$ +Sites: {{ range site.Sites }}{{ .Language.Lang }}|{{ end }}$ +-- content/bundle/index.fr.md -- +--- +title: "Bundle Fr" +--- +-- content/bundle/index.en.md -- +--- +title: "Bundle En" +--- +-- content/bundle/index.de.md -- +--- +title: "Bundle De" +--- + + ` + b := Test(t, files) + + b.AssertFileContent("public/en/index.html", + "Bundle all translations: en|fr|de|$", + "Bundle translations: fr|de|$", + "Site languages: en|fr|de|$", + "Sites: fr|en|de|$", + ) +} diff --git a/hugolib/hugo_smoke_test.go b/hugolib/hugo_smoke_test.go index 295d15582..42b0ab488 100644 --- a/hugolib/hugo_smoke_test.go +++ b/hugolib/hugo_smoke_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -16,9 +16,9 @@ package hugolib import ( "fmt" "math/rand" - "strings" "testing" + "github.com/bep/logg" qt "github.com/frankban/quicktest" ) @@ -34,30 +34,86 @@ disableKinds = ["term", "taxonomy", "section", "page"] title: Page --- -- layouts/index.html -- -{{ .Title }} +Home: {{ .Title }} ` b := NewIntegrationTestBuilder( IntegrationTestConfig{ T: t, TxtarString: files, + LogLevel: logg.LevelTrace, }, ).Build() + b.Assert(b.H.Log.LoggCount(logg.LevelWarn), qt.Equals, 0) b.AssertFileContent("public/index.html", `Hello`) } -func TestSmoke(t *testing.T) { +func TestSmokeOutputFormats(t *testing.T) { t.Parallel() - c := qt.New(t) + files := ` +-- hugo.toml -- +baseURL = "https://example.com/" +defaultContentLanguage = "en" +disableKinds = ["term", "taxonomy", "robotsTXT", "sitemap"] +[outputs] +home = ["html", "rss"] +section = ["html", "rss"] +page = ["html"] +-- content/p1.md -- +--- +title: Page +--- +Page. + +-- layouts/_default/list.html -- +List: {{ .Title }}|{{ .RelPermalink}}|{{ range .OutputFormats }}{{ .Name }}: {{ .RelPermalink }}|{{ end }}$ +-- layouts/_default/list.xml -- +List xml: {{ .Title }}|{{ .RelPermalink}}|{{ range .OutputFormats }}{{ .Name }}: {{ .RelPermalink }}|{{ end }}$ +-- layouts/_default/single.html -- +Single: {{ .Title }}|{{ .RelPermalink}}|{{ range .OutputFormats }}{{ .Name }}: {{ .RelPermalink }}|{{ end }}$ + +` + + b := Test(t, files) + + b.AssertFileContent("public/index.html", `List: |/|html: /|rss: /index.xml|$`) + b.AssertFileContent("public/index.xml", `List xml: |/|html: /|rss: /index.xml|$`) + b.AssertFileContent("public/p1/index.html", `Single: Page|/p1/|html: /p1/|$`) + b.AssertFileExists("public/p1/index.xml", false) +} + +func TestSmoke(t *testing.T) { + t.Parallel() - const configFile = ` + // Basic test cases. + // OK translations + // OK page collections + // OK next, prev in section + // OK GetPage + // OK Pagination + // OK RenderString with shortcode + // OK cascade + // OK site last mod, section last mod. 
+ // OK main sections + // OK taxonomies + // OK GetTerms + // OK Resource page + // OK Resource txt + + const files = ` +-- hugo.toml -- baseURL = "https://example.com" -title = "Simple Site" +title = "Smoke Site" rssLimit = 3 +paginate = 1 defaultContentLanguage = "en" +defaultContentLanguageInSubdir = true enableRobotsTXT = true +[taxonomies] +category = 'categories' +tag = 'tags' [languages] [languages.en] @@ -71,222 +127,305 @@ title = "På norsk" hugo = "Rules!" [outputs] - home = ["HTML", "JSON", "CSV", "RSS"] - -` - - const pageContentAndSummaryDivider = `--- -title: Page with outputs -hugo: "Rocks!" -outputs: ["HTML", "JSON"] -tags: [ "hugo" ] -aliases: [ "/a/b/c" ] + home = ["html", "json", "rss"] +-- layouts/index.html -- +Home: {{ .Lang}}|{{ .Kind }}|{{ .RelPermalink }}|{{ .Title }}|{{ .Content }}|Len Resources: {{ len .Resources }}|HTML +Resources: {{ range .Resources }}{{ .ResourceType }}|{{ .RelPermalink }}|{{ .MediaType }} - {{ end }}| +Site last mod: {{ site.Lastmod.Format "2006-02-01" }}| +Home last mod: {{ .Lastmod.Format "2006-02-01" }}| +Len Translations: {{ len .Translations }}| +Len home.RegularPagesRecursive: {{ len .RegularPagesRecursive }}| +RegularPagesRecursive: {{ range .RegularPagesRecursive }}{{ .RelPermalink }}|{{ end }}@ +Len site.RegularPages: {{ len site.RegularPages }}| +Len site.Pages: {{ len site.Pages }}| +Len site.AllPages: {{ len site.AllPages }}| +GetPage: {{ with .Site.GetPage "posts/p1" }}{{ .RelPermalink }}|{{ .Title }}{{ end }}| +RenderString with shortcode: {{ .RenderString "{{% hello %}}" }}| +Paginate: {{ .Paginator.PageNumber }}/{{ .Paginator.TotalPages }}| +-- layouts/index.json -- +Home:{{ .Lang}}|{{ .Kind }}|{{ .RelPermalink }}|{{ .Title }}|{{ .Content }}|Len Resources: {{ len .Resources }}|JSON +-- layouts/_default/list.html -- +List: {{ .Lang}}|{{ .Kind }}|{{ .RelPermalink }}|{{ .Title }}|{{ .Content }}|Len Resources: {{ len .Resources }}| +Resources: {{ range .Resources }}{{ .ResourceType }}|{{ .RelPermalink 
}}|{{ .MediaType }} - {{ end }} +Pages Length: {{ len .Pages }} +RegularPages Length: {{ len .RegularPages }} +RegularPagesRecursive Length: {{ len .RegularPagesRecursive }} +List last mod: {{ .Lastmod.Format "2006-02-01" }} +Background: {{ .Params.background }}| +Kind: {{ .Kind }} +Type: {{ .Type }} +Paginate: {{ .Paginator.PageNumber }}/{{ .Paginator.TotalPages }}| +-- layouts/_default/single.html -- +Single: {{ .Lang}}|{{ .Kind }}|{{ .RelPermalink }}|{{ .Title }}|{{ .Content }}|Len Resources: {{ len .Resources }}|Background: {{ .Params.background }}| +Resources: {{ range .Resources }}{{ .ResourceType }}|{{ .RelPermalink }}|{{ .MediaType }}|{{ .Params }} - {{ end }} +{{ $textResource := .Resources.GetMatch "**.txt" }} +{{ with $textResource }} +Icon: {{ .Params.icon }}| +{{ $textResourceFingerprinted := . | fingerprint }} +Icon fingerprinted: {{ with $textResourceFingerprinted }}{{ .Params.icon }}|{{ .RelPermalink }}{{ end }}| +{{ end }} +NextInSection: {{ with .NextInSection }}{{ .RelPermalink }}|{{ .Title }}{{ end }}| +PrevInSection: {{ with .PrevInSection }}{{ .RelPermalink }}|{{ .Title }}{{ end }}| +GetTerms: {{ range .GetTerms "tags" }}name: {{ .Name }}, title: {{ .Title }}|{{ end }} +-- layouts/shortcodes/hello.html -- +Hello. +-- content/_index.md -- --- - -This is summary. - -<!--more--> - -This is content with some shortcodes. - -Shortcode 1: {{< sc >}}. -Shortcode 2: {{< sc >}}. - -` - - const pageContentWithMarkdownShortcodes = `--- -title: Page with markdown shortcode -hugo: "Rocks!" -outputs: ["HTML", "JSON"] +title: Home in English --- +Home Content. +-- content/_index.no.md -- +--- +title: Hjem +cascade: + - _target: + kind: page + path: /posts/** + background: post.jpg + - _target: + kind: term + background: term.jpg +--- +Hjem Innhold. +-- content/posts/f1.txt -- +posts f1 text. +-- content/posts/sub/f1.txt -- +posts sub f1 text. 
+-- content/posts/p1/index.md -- ++++ +title = "Post 1" +lastMod = "2001-01-01" +tags = ["tag1"] +[[resources]] +src = '**' +[resources.params] +icon = 'enicon' ++++ +Content 1. +-- content/posts/p1/index.no.md -- ++++ +title = "Post 1 no" +lastMod = "2002-02-02" +tags = ["tag1", "tag2"] +[[resources]] +src = '**' +[resources.params] +icon = 'noicon' ++++ +Content 1 no. +-- content/posts/_index.md -- +--- +title: Posts +--- +-- content/posts/p1/f1.txt -- +posts p1 f1 text. +-- content/posts/p1/sub/ps1.md -- +--- +title: Post Sub 1 +--- +Content Sub 1. +-- content/posts/p2.md -- +--- +title: Post 2 +tags: ["tag1", "tag3"] +--- +Content 2. +-- content/posts/p2.no.md -- +--- +title: Post 2 No +--- +Content 2 No. +-- content/tags/_index.md -- +--- +title: Tags +--- +Content Tags. +-- content/tags/tag1/_index.md -- +--- +title: Tag 1 +--- +Content Tag 1. -This is summary. - -<!--more--> - -This is content[^a]. - -# Header above - -{{% markdown-shortcode %}} -# Header inside - -Some **markdown**.[^b] - -{{% /markdown-shortcode %}} - -# Heder below - -Some more content[^c]. - -Footnotes: - -[^a]: Fn 1 -[^b]: Fn 2 -[^c]: Fn 3 ` - pageContentAutoSummary := strings.Replace(pageContentAndSummaryDivider, "<!--more-->", "", 1) - - b := newTestSitesBuilder(t).WithConfigFile("toml", configFile) - b.WithTemplatesAdded("shortcodes/markdown-shortcode.html", ` -Some **Markdown** in shortcode. - -{{ .Inner }} - - - -`) - - b.WithTemplatesAdded("shortcodes/markdown-shortcode.json", ` -Some **Markdown** in JSON shortcode. 
-{{ .Inner }} - -`) - - for i := 1; i <= 11; i++ { - if i%2 == 0 { - b.WithContent(fmt.Sprintf("blog/page%d.md", i), pageContentAndSummaryDivider) - b.WithContent(fmt.Sprintf("blog/page%d.no.md", i), pageContentAndSummaryDivider) - } else { - b.WithContent(fmt.Sprintf("blog/page%d.md", i), pageContentAutoSummary) - } - } - - for i := 1; i <= 5; i++ { - // Root section pages - b.WithContent(fmt.Sprintf("root%d.md", i), pageContentAutoSummary) - } - - // https://github.com/gohugoio/hugo/issues/4695 - b.WithContent("blog/markyshort.md", pageContentWithMarkdownShortcodes) - - // Add one bundle - b.WithContent("blog/mybundle/index.md", pageContentAndSummaryDivider) - b.WithContent("blog/mybundle/mydata.csv", "Bundled CSV") - - const ( - commonPageTemplate = `|{{ .Kind }}|{{ .Title }}|{{ .File.Path }}|{{ .Summary }}|{{ .Content }}|RelPermalink: {{ .RelPermalink }}|WordCount: {{ .WordCount }}|Pages: {{ .Pages }}|Data Pages: Pages({{ len .Data.Pages }})|Resources: {{ len .Resources }}|Summary: {{ .Summary }}` - commonPaginatorTemplate = `|Paginator: {{ with .Paginator }}{{ .PageNumber }}{{ else }}NIL{{ end }}` - commonListTemplateNoPaginator = `|{{ $pages := .Pages }}{{ if .IsHome }}{{ $pages = .Site.RegularPages }}{{ end }}{{ range $i, $e := ($pages | first 1) }}|Render {{ $i }}: {{ .Kind }}|{{ .Render "li" }}|{{ end }}|Site params: {{ $.Site.Params.hugo }}|RelPermalink: {{ .RelPermalink }}` - commonListTemplate = commonPaginatorTemplate + `|{{ $pages := .Pages }}{{ if .IsHome }}{{ $pages = .Site.RegularPages }}{{ end }}{{ range $i, $e := ($pages | first 1) }}|Render {{ $i }}: {{ .Kind }}|{{ .Render "li" }}|{{ end }}|Site params: {{ $.Site.Params.hugo }}|RelPermalink: {{ .RelPermalink }}` - commonShortcodeTemplate = `|{{ .Name }}|{{ .Ordinal }}|{{ .Page.Summary }}|{{ .Page.Content }}|WordCount: {{ .Page.WordCount }}` - prevNextTemplate = `|Prev: {{ with .Prev }}{{ .RelPermalink }}{{ end }}|Next: {{ with .Next }}{{ .RelPermalink }}{{ end }}` - prevNextInSectionTemplate = 
`|PrevInSection: {{ with .PrevInSection }}{{ .RelPermalink }}{{ end }}|NextInSection: {{ with .NextInSection }}{{ .RelPermalink }}{{ end }}` - paramsTemplate = `|Params: {{ .Params.hugo }}` - treeNavTemplate = `|CurrentSection: {{ .CurrentSection }}` + b := NewIntegrationTestBuilder(IntegrationTestConfig{ + T: t, + TxtarString: files, + NeedsOsFS: true, + // Verbose: true, + // LogLevel: logg.LevelTrace, + }).Build() + + b.AssertFileContent("public/en/index.html", + "Home: en|home|/en/|Home in English|<p>Home Content.</p>\n|HTML", + "Site last mod: 2001-01-01", + "Home last mod: 2001-01-01", + "Translations: 1|", + "Len home.RegularPagesRecursive: 2|", + "Len site.RegularPages: 2|", + "Len site.Pages: 8|", + "Len site.AllPages: 16|", + "GetPage: /en/posts/p1/|Post 1|", + "RenderString with shortcode: Hello.|", + "Paginate: 1/2|", ) - - b.WithTemplates( - "_default/list.html", "HTML: List"+commonPageTemplate+commonListTemplate+"|First Site: {{ .Sites.First.Title }}", - "_default/list.json", "JSON: List"+commonPageTemplate+commonListTemplateNoPaginator, - "_default/list.csv", "CSV: List"+commonPageTemplate+commonListTemplateNoPaginator, - "_default/single.html", "HTML: Single"+commonPageTemplate+prevNextTemplate+prevNextInSectionTemplate+treeNavTemplate, - "_default/single.json", "JSON: Single"+commonPageTemplate, - - // For .Render test - "_default/li.html", `HTML: LI|{{ strings.Contains .Content "HTML: Shortcode: sc" }}`+paramsTemplate, - "_default/li.json", `JSON: LI|{{ strings.Contains .Content "JSON: Shortcode: sc" }}`+paramsTemplate, - "_default/li.csv", `CSV: LI|{{ strings.Contains .Content "CSV: Shortcode: sc" }}`+paramsTemplate, - - "404.html", "{{ .Kind }}|{{ .Title }}|Page not found", - - "shortcodes/sc.html", "HTML: Shortcode: "+commonShortcodeTemplate, - "shortcodes/sc.json", "JSON: Shortcode: "+commonShortcodeTemplate, - "shortcodes/sc.csv", "CSV: Shortcode: "+commonShortcodeTemplate, + b.AssertFileContent("public/en/page/2/index.html", "Paginate: 
2/2|") + + b.AssertFileContent("public/no/index.html", + "Home: no|home|/no/|Hjem|<p>Hjem Innhold.</p>\n|HTML", + "Site last mod: 2002-02-02", + "Home last mod: 2002-02-02", + "Translations: 1", + "GetPage: /no/posts/p1/|Post 1 no|", ) - b.CreateSites().Build(BuildCfg{}) + b.AssertFileContent("public/en/index.json", "Home:en|home|/en/|Home in English|<p>Home Content.</p>\n|JSON") + b.AssertFileContent("public/no/index.json", "Home:no|home|/no/|Hjem|<p>Hjem Innhold.</p>\n|JSON") - b.AssertFileContent("public/blog/page1/index.html", - "This is content with some shortcodes.", - "Page with outputs", - "Pages: Pages(0)", - "RelPermalink: /blog/page1/|", - "Shortcode 1: HTML: Shortcode: |sc|0|||WordCount: 0.", - "Shortcode 2: HTML: Shortcode: |sc|1|||WordCount: 0.", - "Prev: /blog/page10/|Next: /blog/mybundle/", - "PrevInSection: /blog/page10/|NextInSection: /blog/mybundle/", - "Summary: This is summary.", - "CurrentSection: Page(/blog)", + b.AssertFileContent("public/en/posts/p1/index.html", + "Single: en|page|/en/posts/p1/|Post 1|<p>Content 1.</p>\n|Len Resources: 2|", + "Resources: text|/en/posts/p1/f1.txt|text/plain|map[icon:enicon] - page||application/octet-stream|map[draft:false iscjklanguage:false title:Post Sub 1] -", + "Icon: enicon", + "Icon fingerprinted: enicon|/en/posts/p1/f1.e5746577af5cbfc4f34c558051b7955a9a5a795a84f1c6ab0609cb3473a924cb.txt|", + "NextInSection: |\nPrevInSection: /en/posts/p2/|Post 2|", + "GetTerms: name: tag1, title: Tag 1|", ) - b.AssertFileContent("public/blog/page1/index.json", - "JSON: Single|page|Page with outputs|", - "SON: Shortcode: |sc|0||") - - b.AssertFileContent("public/index.html", - "home|In English", - "Site params: Rules", - "Pages: Pages(6)|Data Pages: Pages(6)", - "Paginator: 1", - "First Site: In English", - "RelPermalink: /", + b.AssertFileContent("public/no/posts/p1/index.html", + "Resources: 1", + "Resources: text|/en/posts/p1/f1.txt|text/plain|map[icon:noicon] -", + "Icon: noicon", + "Icon fingerprinted: 
noicon|/en/posts/p1/f1.e5746577af5cbfc4f34c558051b7955a9a5a795a84f1c6ab0609cb3473a924cb.txt|", + "Background: post.jpg", + "NextInSection: |\nPrevInSection: /no/posts/p2/|Post 2 No|", ) - b.AssertFileContent("public/no/index.html", "home|På norsk", "RelPermalink: /no/") - - // Check RSS - rssHome := b.FileContent("public/index.xml") - c.Assert(rssHome, qt.Contains, `<atom:link href="https://example.com/index.xml" rel="self" type="application/rss+xml" />`) - c.Assert(strings.Count(rssHome, "<item>"), qt.Equals, 3) // rssLimit = 3 - - // .Render should use template/content from the current output format - // even if that output format isn't configured for that page. - b.AssertFileContent( - "public/index.json", - "Render 0: page|JSON: LI|false|Params: Rocks!", + b.AssertFileContent("public/en/posts/index.html", + "List: en|section|/en/posts/|Posts||Len Resources: 2|", + "Resources: text|/en/posts/f1.txt|text/plain - text|/en/posts/sub/f1.txt|text/plain -", + "List last mod: 2001-01-01", ) - b.AssertFileContent( - "public/index.html", - "Render 0: page|HTML: LI|false|Params: Rocks!|", + b.AssertFileContent("public/no/posts/index.html", + "List last mod: 2002-02-02", ) - b.AssertFileContent( - "public/index.csv", - "Render 0: page|CSV: LI|false|Params: Rocks!|", + b.AssertFileContent("public/en/posts/p2/index.html", "Single: en|page|/en/posts/p2/|Post 2|<p>Content 2.</p>\n|", + "|Len Resources: 0", + "GetTerms: name: tag1, title: Tag 1|name: tag3, title: Tag3|", ) + b.AssertFileContent("public/no/posts/p2/index.html", "Single: no|page|/no/posts/p2/|Post 2 No|<p>Content 2 No.</p>\n|") - // Check bundled resources - b.AssertFileContent( - "public/blog/mybundle/index.html", - "Resources: 1", + b.AssertFileContent("public/no/categories/index.html", + "Kind: taxonomy", + "Type: categories", ) - - // Check pages in root section - b.AssertFileContent( - "public/root3/index.html", - "Single|page|Page with outputs|root3.md|", - "Prev: /root4/|Next: /root2/|PrevInSection: 
/root4/|NextInSection: /root2/", + b.AssertFileContent("public/no/tags/index.html", + "Kind: taxonomy", + "Type: tags", ) - b.AssertFileContent( - "public/root3/index.json", "Shortcode 1: JSON:") - - // Paginators - b.AssertFileContent("public/page/1/index.html", `rel="canonical" href="https://example.com/"`) - b.AssertFileContent("public/page/2/index.html", "HTML: List|home|In English|", "Paginator: 2") + b.AssertFileContent("public/no/tags/tag1/index.html", + "Background: term.jpg", + "Kind: term", + "Type: tags", + "Paginate: 1/1|", + ) - // 404 - b.AssertFileContent("public/404.html", "404|404 Page not found") + b.AssertFileContent("public/en/tags/tag1/index.html", + "Kind: term", + "Type: tags", + "Paginate: 1/2|", + ) +} - // Sitemaps - b.AssertFileContent("public/en/sitemap.xml", "<loc>https://example.com/blog/</loc>") - b.AssertFileContent("public/no/sitemap.xml", `hreflang="no"`) +// Basic tests that verifies that the different file systems work as expected. +func TestSmokeFilesystems(t *testing.T) { + t.Parallel() - b.AssertFileContent("public/sitemap.xml", "<loc>https://example.com/en/sitemap.xml</loc>", "<loc>https://example.com/no/sitemap.xml</loc>") + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +defaultContentLanguage = "en" +defaultContentLanguageInSubdir = true +[languages] +[languages.en] +title = "In English" +[languages.nn] +title = "På nynorsk" +[module] +[[module.mounts]] +source = "i18n" +target = "i18n" +[[module.mounts]] +source = "data" +target = "data" +[[module.mounts]] +source = "content/en" +target = "content" +lang = "en" +[[module.mounts]] +source = "content/nn" +target = "content" +lang = "nn" +[[module.imports]] +path = "mytheme" +-- layouts/index.html -- +i18n s1: {{ i18n "s1" }}| +i18n s2: {{ i18n "s2" }}| +data s1: {{ site.Data.d1.s1 }}| +data s2: {{ site.Data.d1.s2 }}| +title: {{ .Title }}| +-- themes/mytheme/hugo.toml -- +[[module.mounts]] +source = "i18n" +target = "i18n" +[[module.mounts]] +source = "data" 
+target = "data" +# i18n files both project and theme. +-- i18n/en.toml -- +[s1] +other = 's1project' +-- i18n/nn.toml -- +[s1] +other = 's1prosjekt' +-- themes/mytheme/i18n/en.toml -- +[s1] +other = 's1theme' +[s2] +other = 's2theme' +# data files both project and theme. +-- data/d1.yaml -- +s1: s1project +-- themes/mytheme/data/d1.yaml -- +s1: s1theme +s2: s2theme +# Content +-- content/en/_index.md -- +--- +title: "Home" +--- +-- content/nn/_index.md -- +--- +title: "Heim" +--- - // robots.txt - b.AssertFileContent("public/robots.txt", `User-agent: *`) +` + b := Test(t, files) - // Aliases - b.AssertFileContent("public/a/b/c/index.html", `refresh`) + b.AssertFileContent("public/en/index.html", + "i18n s1: s1project", "i18n s2: s2theme", + "data s1: s1project", "data s2: s2theme", + "title: Home", + ) - // Markdown vs shortcodes - // Check that all footnotes are grouped (even those from inside the shortcode) - b.AssertFileContentRe("public/blog/markyshort/index.html", `Footnotes:.*<ol>.*Fn 1.*Fn 2.*Fn 3.*</ol>`) + b.AssertFileContent("public/nn/index.html", + "i18n s1: s1prosjekt", "i18n s2: s2theme", + "data s1: s1project", "data s2: s2theme", + "title: Heim", + ) } // https://github.com/golang/go/issues/30286 diff --git a/hugolib/image_test.go b/hugolib/image_test.go index db1707c22..b3b933711 100644 --- a/hugolib/image_test.go +++ b/hugolib/image_test.go @@ -73,11 +73,10 @@ SUNSET2: {{ $resized2.RelPermalink }}/{{ $resized2.Width }}/Lat: {{ $resized2.Ex b.Build(BuildCfg{}) b.AssertFileContent("public/index.html", "SUNSET FOR: en: /bundle/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x200_resize_q75_box.jpg/200/Lat: 36.59744166666667") - b.AssertFileContent("public/fr/index.html", "SUNSET FOR: fr: /fr/bundle/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x200_resize_q75_box.jpg/200/Lat: 36.59744166666667") + b.AssertFileContent("public/fr/index.html", "SUNSET FOR: fr: 
/bundle/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x200_resize_q75_box.jpg/200/Lat: 36.59744166666667") b.AssertFileContent("public/index.html", " SUNSET2: /images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_123x234_resize_q75_box.jpg/123/Lat: 36.59744166666667") b.AssertFileContent("public/nn/index.html", " SUNSET2: /images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_123x234_resize_q75_box.jpg/123/Lat: 36.59744166666667") - b.AssertImage(200, 200, "public/fr/bundle/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x200_resize_q75_box.jpg") b.AssertImage(200, 200, "public/bundle/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x200_resize_q75_box.jpg") // Check the file cache @@ -85,10 +84,10 @@ SUNSET2: {{ $resized2.RelPermalink }}/{{ $resized2.Width }}/Lat: {{ $resized2.Ex b.AssertFileContent("resources/_gen/images/bundle/sunset_3166614710256882113.json", "DateTimeDigitized|time.Time", "PENTAX") - b.AssertImage(123, 234, "resources/_gen/images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_123x234_resize_q75_box.jpg") - b.AssertFileContent("resources/_gen/images/sunset_3166614710256882113.json", + + b.AssertImage(123, 234, "resources/_gen/images/images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_123x234_resize_q75_box.jpg") + b.AssertFileContent("resources/_gen/images/images/sunset_3166614710256882113.json", "DateTimeDigitized|time.Time", "PENTAX") - // TODO(bep) add this as a default assertion after Build()? b.AssertNoDuplicateWrites() } diff --git a/hugolib/integration_test.go b/hugolib/integration_test.go index 93468eceb..250c7bcec 100644 --- a/hugolib/integration_test.go +++ b/hugolib/integration_test.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -86,7 +86,6 @@ tags: ['T1'] b.AssertFileContent("public/en/tags/t1/index.html", "<ul><li>T1-en</li></ul>", ) - } // Issue #11538 @@ -112,7 +111,6 @@ func TestRenderStringBadMarkupOpt(t *testing.T) { if !strings.Contains(err.Error(), want) { t.Errorf("error msg must contain %q, error msg actually contains %q", want, err.Error()) } - } // Issue #11547 diff --git a/hugolib/integrationtest_builder.go b/hugolib/integrationtest_builder.go index a0cae1d95..34d3c5530 100644 --- a/hugolib/integrationtest_builder.go +++ b/hugolib/integrationtest_builder.go @@ -6,6 +6,7 @@ import ( "errors" "fmt" "io" + "math/rand" "os" "path/filepath" "regexp" @@ -29,17 +30,64 @@ import ( "github.com/gohugoio/hugo/htesting" "github.com/gohugoio/hugo/hugofs" "github.com/spf13/afero" + "golang.org/x/text/unicode/norm" "golang.org/x/tools/txtar" ) +type TestOpt func(*IntegrationTestConfig) + +func TestOptRunning() TestOpt { + return func(c *IntegrationTestConfig) { + c.Running = true + } +} + +// Enable tracing in integration tests. +// THis should only be used during development and not committed to the repo. +func TestOptTrace() TestOpt { + return func(c *IntegrationTestConfig) { + c.LogLevel = logg.LevelTrace + } +} + +// TestOptDebug will enable debug logging in integration tests. +func TestOptDebug() TestOpt { + return func(c *IntegrationTestConfig) { + c.LogLevel = logg.LevelDebug + } +} + +// TestOptWithNFDOnDarwin will normalize the Unicode filenames to NFD on Darwin. +func TestOptWithNFDOnDarwin() TestOpt { + return func(c *IntegrationTestConfig) { + c.NFDFormOnDarwin = true + } +} + +// TestOptWithWorkingDir allows setting any config optiona as a function al option. +func TestOptWithConfig(fn func(c *IntegrationTestConfig)) TestOpt { + return func(c *IntegrationTestConfig) { + fn(c) + } +} + // Test is a convenience method to create a new IntegrationTestBuilder from some files and run a build. 
-func Test(t testing.TB, files string) *IntegrationTestBuilder { - return NewIntegrationTestBuilder(IntegrationTestConfig{T: t, TxtarString: files}).Build() +func Test(t testing.TB, files string, opts ...TestOpt) *IntegrationTestBuilder { + cfg := IntegrationTestConfig{T: t, TxtarString: files} + for _, o := range opts { + o(&cfg) + } + return NewIntegrationTestBuilder(cfg).Build() } // TestRunning is a convenience method to create a new IntegrationTestBuilder from some files with Running set to true and run a build. -func TestRunning(t testing.TB, files string) *IntegrationTestBuilder { - return NewIntegrationTestBuilder(IntegrationTestConfig{T: t, TxtarString: files, Running: true}).Build() +// Deprecated: Use Test with TestOptRunning instead. +func TestRunning(t testing.TB, files string, opts ...TestOpt) *IntegrationTestBuilder { + cfg := IntegrationTestConfig{T: t, TxtarString: files, Running: true} + for _, o := range opts { + o(&cfg) + } + return NewIntegrationTestBuilder(cfg).Build() } func NewIntegrationTestBuilder(conf IntegrationTestConfig) *IntegrationTestBuilder { @@ -50,6 +98,12 @@ func NewIntegrationTestBuilder(conf IntegrationTestConfig) *IntegrationTestBuild data := txtar.Parse([]byte(conf.TxtarString)) + if conf.NFDFormOnDarwin { + for i, f := range data.Files { + data.Files[i].Name = norm.NFD.String(f.Name) + } + } + c, ok := conf.T.(*qt.C) if !ok { c = qt.New(conf.T) @@ -95,10 +149,11 @@ type IntegrationTestBuilder struct { createdFiles []string removedFiles []string renamedFiles []string + renamedDirs []string buildCount int GCCount int - counters *testCounters + counters *buildCounters logBuff lockingBuffer builderInit sync.Once @@ -142,11 +197,6 @@ func (s *IntegrationTestBuilder) AssertBuildCountLayouts(count int) { s.Assert(s.H.init.layouts.InitCount(), qt.Equals, count) } -func (s *IntegrationTestBuilder) AssertBuildCountTranslations(count int) { - s.Helper() - s.Assert(s.H.init.translations.InitCount(), qt.Equals, count) -} - func (s 
*IntegrationTestBuilder) AssertFileCount(dirname string, expected int) { s.Helper() fs := s.fs.WorkingDirReadOnly @@ -168,6 +218,7 @@ func (s *IntegrationTestBuilder) AssertFileContent(filename string, matches ...s s.Helper() content := strings.TrimSpace(s.FileContent(filename)) for _, m := range matches { + cm := qt.Commentf("File: %s Match %s", filename, m) lines := strings.Split(m, "\n") for _, match := range lines { match = strings.TrimSpace(match) @@ -180,10 +231,10 @@ func (s *IntegrationTestBuilder) AssertFileContent(filename string, matches ...s match = strings.TrimPrefix(match, "! ") } if negate { - s.Assert(content, qt.Not(qt.Contains), match, qt.Commentf(m)) + s.Assert(content, qt.Not(qt.Contains), match, cm) continue } - s.Assert(content, qt.Contains, match, qt.Commentf(m)) + s.Assert(content, qt.Contains, match, cm) } } } @@ -208,24 +259,6 @@ func (s *IntegrationTestBuilder) AssertFileExists(filename string, b bool) { s.Assert(err, checker) } -// Deprecated: Use AssertFileExists instead but remember to prefix with "public/". -// I have had some surprises with this one, hence the deprecation. 
-func (s *IntegrationTestBuilder) AssertDestinationExists(filename string, b bool) { - checker := qt.IsTrue - if !b { - checker = qt.IsFalse - } - s.Assert(s.destinationExists(filepath.Clean(filename)), checker) -} - -func (s *IntegrationTestBuilder) destinationExists(filename string) bool { - b, err := helpers.Exists(filename, s.fs.PublishDir) - if err != nil { - panic(err) - } - return b -} - func (s *IntegrationTestBuilder) AssertIsFileError(err error) herrors.FileError { s.Assert(err, qt.ErrorAs, new(herrors.FileError)) return herrors.UnwrapFileError(err) @@ -233,12 +266,18 @@ func (s *IntegrationTestBuilder) AssertIsFileError(err error) herrors.FileError func (s *IntegrationTestBuilder) AssertRenderCountContent(count int) { s.Helper() - s.Assert(s.counters.contentRenderCounter, qt.Equals, uint64(count)) + s.Assert(s.counters.contentRenderCounter.Load(), qt.Equals, uint64(count)) } func (s *IntegrationTestBuilder) AssertRenderCountPage(count int) { s.Helper() - s.Assert(s.counters.pageRenderCounter, qt.Equals, uint64(count)) + s.Assert(s.counters.pageRenderCounter.Load(), qt.Equals, uint64(count)) +} + +func (s *IntegrationTestBuilder) AssertRenderCountPageBetween(from, to int) { + s.Helper() + i := int(s.counters.pageRenderCounter.Load()) + s.Assert(i >= from && i <= to, qt.IsTrue) } func (s *IntegrationTestBuilder) Build() *IntegrationTestBuilder { @@ -246,10 +285,22 @@ func (s *IntegrationTestBuilder) Build() *IntegrationTestBuilder { _, err := s.BuildE() if s.Cfg.Verbose || err != nil { fmt.Println(s.logBuff.String()) + if s.H != nil && err == nil { + for _, s := range s.H.Sites { + m := s.pageMap + var buff bytes.Buffer + fmt.Fprintf(&buff, "PageMap for site %q\n\n", s.Language().Lang) + m.debugPrint("", 999, &buff) + fmt.Println(buff.String()) + } + } + } else if s.Cfg.LogLevel <= logg.LevelDebug { + fmt.Println(s.logBuff.String()) } s.Assert(err, qt.IsNil) if s.Cfg.RunGC { s.GCCount, err = s.H.GC() + s.Assert(err, qt.IsNil) } return s @@ -286,7 +337,13 
@@ type IntegrationTestDebugConfig struct { PrefixPagemap string } -func (s *IntegrationTestBuilder) EditFileReplace(filename string, replacementFunc func(s string) string) *IntegrationTestBuilder { +func (s *IntegrationTestBuilder) EditFileReplaceAll(filename, old, new string) *IntegrationTestBuilder { + return s.EditFileReplaceFunc(filename, func(s string) string { + return strings.ReplaceAll(s, old, new) + }) +} + +func (s *IntegrationTestBuilder) EditFileReplaceFunc(filename string, replacementFunc func(s string) string) *IntegrationTestBuilder { absFilename := s.absFilename(filename) b, err := afero.ReadFile(s.fs.Source, absFilename) s.Assert(err, qt.IsNil) @@ -337,6 +394,26 @@ func (s *IntegrationTestBuilder) RenameFile(old, new string) *IntegrationTestBui return s } +func (s *IntegrationTestBuilder) RenameDir(old, new string) *IntegrationTestBuilder { + absOldFilename := s.absFilename(old) + absNewFilename := s.absFilename(new) + s.renamedDirs = append(s.renamedDirs, absOldFilename) + s.changedFiles = append(s.changedFiles, absNewFilename) + afero.Walk(s.fs.Source, absOldFilename, func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + } + if info.IsDir() { + return nil + } + s.createdFiles = append(s.createdFiles, strings.Replace(path, absOldFilename, absNewFilename, 1)) + return nil + }) + s.Assert(s.fs.Source.MkdirAll(filepath.Dir(absNewFilename), 0o777), qt.IsNil) + s.Assert(s.fs.Source.Rename(absOldFilename, absNewFilename), qt.IsNil) + return s +} + func (s *IntegrationTestBuilder) FileContent(filename string) string { s.Helper() return s.readWorkingDir(s, s.fs, filepath.FromSlash(filename)) @@ -353,7 +430,7 @@ func (s *IntegrationTestBuilder) initBuilder() error { } if s.Cfg.LogLevel == 0 { - s.Cfg.LogLevel = logg.LevelWarn + s.Cfg.LogLevel = logg.LevelError } isBinaryRe := regexp.MustCompile(`^(.*)(\.png|\.jpg)$`) @@ -365,7 +442,7 @@ func (s *IntegrationTestBuilder) initBuilder() error { data := 
bytes.TrimSuffix(f.Data, []byte("\n")) datastr := strings.TrimSpace(string(data)) if strings.HasPrefix(datastr, dataSourceFilenamePrefix) { - // Read from file relative to tue current dir. + // Read from file relative to the current dir. var err error wd, _ := os.Getwd() filename := filepath.Join(wd, strings.TrimSpace(strings.TrimPrefix(datastr, dataSourceFilenamePrefix))) @@ -404,7 +481,12 @@ func (s *IntegrationTestBuilder) initBuilder() error { flags.Set("workingDir", s.Cfg.WorkingDir) } - w := &s.logBuff + var w io.Writer + if s.Cfg.LogLevel == logg.LevelTrace { + w = os.Stdout + } else { + w = &s.logBuff + } logger := loggers.New( loggers.Options{ @@ -476,18 +558,22 @@ func (s *IntegrationTestBuilder) absFilename(filename string) string { return filename } +func (s *IntegrationTestBuilder) reset() { + s.changedFiles = nil + s.createdFiles = nil + s.removedFiles = nil + s.renamedFiles = nil +} + func (s *IntegrationTestBuilder) build(cfg BuildCfg) error { s.Helper() defer func() { - s.changedFiles = nil - s.createdFiles = nil - s.removedFiles = nil - s.renamedFiles = nil + s.reset() }() changeEvents := s.changeEvents() s.logBuff.Reset() - s.counters = &testCounters{} + s.counters = &buildCounters{} cfg.testCounters = s.counters if s.buildCount > 0 && (len(changeEvents) == 0) { @@ -522,6 +608,15 @@ func (s *IntegrationTestBuilder) changeEvents() []fsnotify.Event { Op: fsnotify.Rename, }) } + + for _, v := range s.renamedDirs { + events = append(events, fsnotify.Event{ + Name: v, + // This is what we get on MacOS. + Op: fsnotify.Remove | fsnotify.Rename, + }) + } + for _, v := range s.changedFiles { events = append(events, fsnotify.Event{ Name: v, @@ -535,6 +630,12 @@ func (s *IntegrationTestBuilder) changeEvents() []fsnotify.Event { }) } + // Shuffle events. 
+ for i := range events { + j := rand.Intn(i + 1) + events[i], events[j] = events[j], events[i] + } + return events } @@ -598,6 +699,7 @@ type IntegrationTestConfig struct { // Will print the log buffer after the build Verbose bool + // The log level to use. LogLevel logg.Level // Whether it needs the real file system (e.g. for js.Build tests). @@ -612,7 +714,12 @@ type IntegrationTestConfig struct { // Whether to run npm install before Build. NeedsNpmInstall bool + // Whether to normalize the Unicode filenames to NFD on Darwin. + NFDFormOnDarwin bool + + // The working dir to use. If not absolute, a temp dir will be created. WorkingDir string + // The config to pass to Build. BuildCfg BuildCfg } diff --git a/hugolib/language_content_dir_test.go b/hugolib/language_content_dir_test.go index a22201475..e02e118f5 100644 --- a/hugolib/language_content_dir_test.go +++ b/hugolib/language_content_dir_test.go @@ -14,513 +14,45 @@ package hugolib import ( - "context" - "fmt" - "os" - "path/filepath" "testing" - - "github.com/gohugoio/hugo/resources/kinds" - "github.com/spf13/cast" - - qt "github.com/frankban/quicktest" ) -/* - -/en/p1.md -/nn/p1.md - -.Readdir - -- Name() => p1.en.md, p1.nn.md - -.Stat(name) - -.Open() --- real file name - - -*/ - func TestLanguageContentRoot(t *testing.T) { - t.Parallel() - c := qt.New(t) - - config := ` + files := ` +-- hugo.toml -- baseURL = "https://example.org/" - defaultContentLanguage = "en" defaultContentLanguageInSubdir = true - -contentDir = "content/main" -workingDir = "/my/project" - -[Languages] -[Languages.en] +[languages] +[languages.en] weight = 10 -title = "In English" -languageName = "English" - -[Languages.nn] +contentDir = "content/en" +[languages.nn] weight = 20 -title = "På Norsk" -languageName = "Norsk" -# This tells Hugo that all content in this directory is in the Norwegian language. -# It does not have to have the "my-page.nn.md" format. It can, but that is optional. 
-contentDir = "content/norsk" - -[Languages.sv] -weight = 30 -title = "På Svenska" -languageName = "Svensk" -contentDir = "content/svensk" -` - - pageTemplate := ` +contentDir = "content/nn" +-- content/en/_index.md -- --- -title: %s -slug: %s -weight: %d +title: "Home" --- - -Content. - -SVP3-REF: {{< ref path="/sect/page3.md" lang="sv" >}} -SVP3-RELREF: {{< relref path="/sect/page3.md" lang="sv" >}} - -` - - pageBundleTemplate := ` +-- content/nn/_index.md -- --- -title: %s -weight: %d +title: "Heim" --- - -Content. - -` - var contentFiles []string - section := "sect" - - contentRoot := func(lang string) string { - switch lang { - case "nn": - return "content/norsk" - case "sv": - return "content/svensk" - default: - return "content/main" - } - } - - contentSectionRoot := func(lang string) string { - return contentRoot(lang) + "/" + section - } - - for _, lang := range []string{"en", "nn", "sv"} { - for j := 1; j <= 10; j++ { - if (lang == "nn" || lang == "en") && j%4 == 0 { - // Skip 4 and 8 for nn - // We also skip it for en, but that is added to the Swedish directory below. - continue - } - - if lang == "sv" && j%5 == 0 { - // Skip 5 and 10 for sv - continue - } - - base := fmt.Sprintf("p-%s-%d", lang, j) - slug := base - langID := "" - - if lang == "sv" && j%4 == 0 { - // Put an English page in the Swedish content dir. - langID = ".en" - } - - if lang == "en" && j == 8 { - // This should win over the sv variant above. 
- langID = ".en" - } - - slug += langID - - contentRoot := contentSectionRoot(lang) - - filename := filepath.Join(contentRoot, fmt.Sprintf("page%d%s.md", j, langID)) - contentFiles = append(contentFiles, filename, fmt.Sprintf(pageTemplate, slug, slug, j)) - } - } - - // Put common translations in all of them - for i, lang := range []string{"en", "nn", "sv"} { - contentRoot := contentSectionRoot(lang) - - slug := fmt.Sprintf("common_%s", lang) - - filename := filepath.Join(contentRoot, "common.md") - contentFiles = append(contentFiles, filename, fmt.Sprintf(pageTemplate, slug, slug, 100+i)) - - for j, lang2 := range []string{"en", "nn", "sv"} { - filename := filepath.Join(contentRoot, fmt.Sprintf("translated_all.%s.md", lang2)) - langSlug := slug + "_translated_all_" + lang2 - contentFiles = append(contentFiles, filename, fmt.Sprintf(pageTemplate, langSlug, langSlug, 200+i+j)) - } - - for j, lang2 := range []string{"sv", "nn"} { - if lang == "en" { - continue - } - filename := filepath.Join(contentRoot, fmt.Sprintf("translated_some.%s.md", lang2)) - langSlug := slug + "_translated_some_" + lang2 - contentFiles = append(contentFiles, filename, fmt.Sprintf(pageTemplate, langSlug, langSlug, 300+i+j)) - } - } - - // Add a bundle with some images - for i, lang := range []string{"en", "nn", "sv"} { - contentRoot := contentSectionRoot(lang) - slug := fmt.Sprintf("bundle_%s", lang) - filename := filepath.Join(contentRoot, "mybundle", "index.md") - contentFiles = append(contentFiles, filename, fmt.Sprintf(pageBundleTemplate, slug, 400+i)) - if lang == "en" { - imageFilename := filepath.Join(contentRoot, "mybundle", "logo.png") - contentFiles = append(contentFiles, imageFilename, "PNG Data") - } - imageFilename := filepath.Join(contentRoot, "mybundle", "featured.png") - contentFiles = append(contentFiles, imageFilename, fmt.Sprintf("PNG Data for %s", lang)) - - // Add some bundled pages - contentFiles = append(contentFiles, filepath.Join(contentRoot, "mybundle", "p1.md"), 
fmt.Sprintf(pageBundleTemplate, slug, 401+i)) - contentFiles = append(contentFiles, filepath.Join(contentRoot, "mybundle", "sub", "p1.md"), fmt.Sprintf(pageBundleTemplate, slug, 402+i)) - - } - - // Add some static files inside the content dir - // https://github.com/gohugoio/hugo/issues/5759 - for _, lang := range []string{"en", "nn", "sv"} { - contentRoot := contentRoot(lang) - for i := 0; i < 2; i++ { - filename := filepath.Join(contentRoot, "mystatic", fmt.Sprintf("file%d.yaml", i)) - contentFiles = append(contentFiles, filename, lang) - } - } - - b := newTestSitesBuilder(t) - b.WithWorkingDir("/my/project").WithConfigFile("toml", config).WithContent(contentFiles...).CreateSites() - - _ = os.Stdout - - err := b.BuildE(BuildCfg{}) - - // dumpPages(b.H.Sites[1].RegularPages()...) - - c.Assert(err, qt.IsNil) - - c.Assert(len(b.H.Sites), qt.Equals, 3) - - enSite := b.H.Sites[0] - nnSite := b.H.Sites[1] - svSite := b.H.Sites[2] - - b.AssertFileContent("public/en/mystatic/file1.yaml", "en") - b.AssertFileContent("public/nn/mystatic/file1.yaml", "nn") - - // dumpPages(nnSite.RegularPages()...) - - c.Assert(len(nnSite.RegularPages()), qt.Equals, 12) - c.Assert(len(enSite.RegularPages()), qt.Equals, 13) - - c.Assert(len(svSite.RegularPages()), qt.Equals, 10) - - svP2, err := svSite.getPageNew(nil, "/sect/page2.md") - c.Assert(err, qt.IsNil) - nnP2, err := nnSite.getPageNew(nil, "/sect/page2.md") - c.Assert(err, qt.IsNil) - - enP2, err := enSite.getPageNew(nil, "/sect/page2.md") - c.Assert(err, qt.IsNil) - c.Assert(enP2.Language().Lang, qt.Equals, "en") - c.Assert(svP2.Language().Lang, qt.Equals, "sv") - c.Assert(nnP2.Language().Lang, qt.Equals, "nn") - - content, _ := nnP2.Content(context.Background()) - contentStr := cast.ToString(content) - c.Assert(contentStr, qt.Contains, "SVP3-REF: https://example.org/sv/sect/p-sv-3/") - c.Assert(contentStr, qt.Contains, "SVP3-RELREF: /sv/sect/p-sv-3/") - - // Test RelRef with and without language indicator. 
- nn3RefArgs := map[string]any{ - "path": "/sect/page3.md", - "lang": "nn", - } - nnP3RelRef, err := svP2.RelRef( - nn3RefArgs, - ) - c.Assert(err, qt.IsNil) - c.Assert(nnP3RelRef, qt.Equals, "/nn/sect/p-nn-3/") - nnP3Ref, err := svP2.Ref( - nn3RefArgs, - ) - c.Assert(err, qt.IsNil) - c.Assert(nnP3Ref, qt.Equals, "https://example.org/nn/sect/p-nn-3/") - - for i, p := range enSite.RegularPages() { - j := i + 1 - c.Assert(p.Language().Lang, qt.Equals, "en") - c.Assert(p.Section(), qt.Equals, "sect") - if j < 9 { - if j%4 == 0 { - } else { - c.Assert(p.Title(), qt.Contains, "p-en") - } - } - } - - for _, p := range nnSite.RegularPages() { - c.Assert(p.Language().Lang, qt.Equals, "nn") - c.Assert(p.Title(), qt.Contains, "nn") - } - - for _, p := range svSite.RegularPages() { - c.Assert(p.Language().Lang, qt.Equals, "sv") - c.Assert(p.Title(), qt.Contains, "sv") - } - - // Check bundles - bundleEn := enSite.RegularPages()[len(enSite.RegularPages())-1] - bundleNn := nnSite.RegularPages()[len(nnSite.RegularPages())-1] - bundleSv := svSite.RegularPages()[len(svSite.RegularPages())-1] - - c.Assert(bundleEn.RelPermalink(), qt.Equals, "/en/sect/mybundle/") - c.Assert(bundleSv.RelPermalink(), qt.Equals, "/sv/sect/mybundle/") - - c.Assert(len(bundleNn.Resources()), qt.Equals, 4) - c.Assert(len(bundleSv.Resources()), qt.Equals, 4) - c.Assert(len(bundleEn.Resources()), qt.Equals, 4) - - b.AssertFileContent("public/en/sect/mybundle/index.html", "image/png: /en/sect/mybundle/logo.png") - b.AssertFileContent("public/nn/sect/mybundle/index.html", "image/png: /nn/sect/mybundle/logo.png") - b.AssertFileContent("public/sv/sect/mybundle/index.html", "image/png: /sv/sect/mybundle/logo.png") - - b.AssertFileContent("public/sv/sect/mybundle/featured.png", "PNG Data for sv") - b.AssertFileContent("public/nn/sect/mybundle/featured.png", "PNG Data for nn") - b.AssertFileContent("public/en/sect/mybundle/featured.png", "PNG Data for en") - b.AssertFileContent("public/en/sect/mybundle/logo.png", 
"PNG Data") - b.AssertFileContent("public/sv/sect/mybundle/logo.png", "PNG Data") - b.AssertFileContent("public/nn/sect/mybundle/logo.png", "PNG Data") - - nnSect := nnSite.getPage(kinds.KindSection, "sect") - c.Assert(nnSect, qt.Not(qt.IsNil)) - c.Assert(len(nnSect.Pages()), qt.Equals, 12) - nnHome := nnSite.Home() - c.Assert(nnHome.RelPermalink(), qt.Equals, "/nn/") -} - -// https://github.com/gohugoio/hugo/issues/6463 -func TestLanguageRootSectionsMismatch(t *testing.T) { - t.Parallel() - - config := ` -baseURL: "https://example.org/" -languageCode: "en-us" -title: "My New Hugo Site" -theme: "mytheme" - -contentDir: "content/en" - -languages: - en: - weight: 1 - languageName: "English" - contentDir: content/en - es: - weight: 2 - languageName: "Español" - contentDir: content/es - fr: - weight: 4 - languageName: "Française" - contentDir: content/fr - - -` - createPage := func(title string) string { - return fmt.Sprintf(`--- -title: %q ---- - -`, title) - } - - b := newTestSitesBuilder(t) - b.WithConfigFile("yaml", config) - - b.WithSourceFile("themes/mytheme/layouts/index.html", `MYTHEME`) - b.WithTemplates("index.html", ` -Lang: {{ .Lang }} -{{ range .Site.RegularPages }} -Page: {{ .RelPermalink }}|{{ .Title -}} +-- content/en/myfiles/file1.txt -- +file 1 en +-- content/en/myfiles/file2.txt -- +file 2 en +-- content/nn/myfiles/file1.txt -- +file 1 nn +-- layouts/index.html -- +Title: {{ .Title }}| +Len Resources: {{ len .Resources }}| +{{ range $i, $e := .Resources }} +{{ $i }}|{{ .RelPermalink }}|{{ .Content }}| {{ end }} -`) - b.WithSourceFile("static/hello.txt", `hello`) - b.WithContent("en/_index.md", createPage("en home")) - b.WithContent("es/_index.md", createPage("es home")) - b.WithContent("fr/_index.md", createPage("fr home")) - - for i := 1; i < 3; i++ { - b.WithContent(fmt.Sprintf("en/event/page%d.md", i), createPage(fmt.Sprintf("ev-en%d", i))) - b.WithContent(fmt.Sprintf("es/event/page%d.md", i), createPage(fmt.Sprintf("ev-es%d", i))) - 
b.WithContent(fmt.Sprintf("fr/event/page%d.md", i), createPage(fmt.Sprintf("ev-fr%d", i))) - b.WithContent(fmt.Sprintf("en/blog/page%d.md", i), createPage(fmt.Sprintf("blog-en%d", i))) - b.WithContent(fmt.Sprintf("es/blog/page%d.md", i), createPage(fmt.Sprintf("blog-es%d", i))) - b.WithContent(fmt.Sprintf("fr/other/page%d.md", i), createPage(fmt.Sprintf("other-fr%d", i))) - } - - b.Build(BuildCfg{}) - - b.AssertFileContent("public/index.html", ` -Lang: en -Page: /blog/page1/|blog-en1 -Page: /blog/page2/|blog-en2 -Page: /event/page1/|ev-en1 -Page: /event/page2/|ev-en2 -`) - - b.AssertFileContent("public/es/index.html", ` -Lang: es -Page: /es/blog/page1/|blog-es1 -Page: /es/blog/page2/|blog-es2 -Page: /es/event/page1/|ev-es1 -Page: /es/event/page2/|ev-es2 -`) - b.AssertFileContent("public/fr/index.html", ` -Lang: fr -Page: /fr/event/page1/|ev-fr1 -Page: /fr/event/page2/|ev-fr2 -Page: /fr/other/page1/|other-fr1 -Page: /fr/other/page2/|other-fr2`) -} - -// Issue 9693 -func TestContentMountMerge(t *testing.T) { - t.Parallel() - - files := ` --- config.toml -- -baseURL = 'https://example.org/' -languageCode = 'en-us' -title = 'Hugo Forum Topic #37225' -theme = 'mytheme' - -disableKinds = ['sitemap','RSS','taxonomy','term'] -defaultContentLanguage = 'en' -defaultContentLanguageInSubdir = true - -[languages.en] -languageName = 'English' -weight = 1 -[languages.de] -languageName = 'Deutsch' -weight = 2 -[languages.nl] -languageName = 'Nederlands' -weight = 3 - -# EN content -[[module.mounts]] -source = 'content/en' -target = 'content' -lang = 'en' - -# DE content -[[module.mounts]] -source = 'content/de' -target = 'content' -lang = 'de' - -# This fills in the gaps in DE content with EN content -[[module.mounts]] -source = 'content/en' -target = 'content' -lang = 'de' - -# NL content -[[module.mounts]] -source = 'content/nl' -target = 'content' -lang = 'nl' - -# This should fill in the gaps in NL content with EN content -[[module.mounts]] -source = 'content/en' -target = 
'content' -lang = 'nl' - --- content/de/_index.md -- ---- -title: "home (de)" ---- --- content/de/p1.md -- ---- -title: "p1 (de)" ---- --- content/en/_index.md -- ---- -title: "home (en)" ---- --- content/en/p1.md -- ---- -title: "p1 (en)" ---- --- content/en/p2.md -- ---- -title: "p2 (en)" ---- --- content/en/p3.md -- ---- -title: "p3 (en)" ---- --- content/nl/_index.md -- ---- -title: "home (nl)" ---- --- content/nl/p1.md -- ---- -title: "p1 (nl)" ---- --- content/nl/p3.md -- ---- -title: "p3 (nl)" ---- --- layouts/home.html -- -{{ .Title }}: {{ site.Language.Lang }}: {{ range site.RegularPages }}{{ .Title }}|{{ end }}:END --- themes/mytheme/config.toml -- -[[module.mounts]] -source = 'content/nlt' -target = 'content' -lang = 'nl' --- themes/mytheme/content/nlt/p3.md -- ---- -title: "p3 theme (nl)" ---- --- themes/mytheme/content/nlt/p4.md -- ---- -title: "p4 theme (nl)" ---- ` - - b := NewIntegrationTestBuilder( - IntegrationTestConfig{ - T: t, - TxtarString: files, - }, - ).Build() - - b.AssertFileContent("public/nl/index.html", `home (nl): nl: p1 (nl)|p2 (en)|p3 (nl)|p4 theme (nl)|:END`) - b.AssertFileContent("public/de/index.html", `home (de): de: p1 (de)|p2 (en)|p3 (en)|:END`) - b.AssertFileContent("public/en/index.html", `home (en): en: p1 (en)|p2 (en)|p3 (en)|:END`) - + b := Test(t, files) + b.AssertFileContent("public/en/index.html", "Home", "0|/en/myfiles/file1.txt|file 1 en|\n\n1|/en/myfiles/file2.txt|file 2 en|") + b.AssertFileContent("public/nn/index.html", "Heim", "0|/nn/myfiles/file1.txt|file 1 nn|\n\n1|/en/myfiles/file2.txt|file 2 en|") } diff --git a/hugolib/menu_test.go b/hugolib/menu_test.go index 810b9fe20..77d92d04f 100644 --- a/hugolib/menu_test.go +++ b/hugolib/menu_test.go @@ -161,35 +161,35 @@ menu: b.Build(BuildCfg{}) b.AssertFileContent("public/index.html", - `Default1|0|10|A|/blog/a/|Page(/blog/A.md) - Default1|1|20|B|/blog/b/|Page(/blog/B.md) - Default1|2|30|C|/blog/c/|Page(/blog/C.md) - Default1|3|100|Home|/|Page(/_index.md) - - 
ByWeight|0|10|A|/blog/a/|Page(/blog/A.md) - ByWeight|1|20|B|/blog/b/|Page(/blog/B.md) - ByWeight|2|30|C|/blog/c/|Page(/blog/C.md) - ByWeight|3|100|Home|/|Page(/_index.md) - - Reverse|0|100|Home|/|Page(/_index.md) - Reverse|1|30|C|/blog/c/|Page(/blog/C.md) - Reverse|2|20|B|/blog/b/|Page(/blog/B.md) - Reverse|3|10|A|/blog/a/|Page(/blog/A.md) - - Default2|0|10|A|/blog/a/|Page(/blog/A.md) - Default2|1|20|B|/blog/b/|Page(/blog/B.md) - Default2|2|30|C|/blog/c/|Page(/blog/C.md) - Default2|3|100|Home|/|Page(/_index.md) - - ByWeight|0|10|A|/blog/a/|Page(/blog/A.md) - ByWeight|1|20|B|/blog/b/|Page(/blog/B.md) - ByWeight|2|30|C|/blog/c/|Page(/blog/C.md) - ByWeight|3|100|Home|/|Page(/_index.md) - - Default3|0|10|A|/blog/a/|Page(/blog/A.md) - Default3|1|20|B|/blog/b/|Page(/blog/B.md) - Default3|2|30|C|/blog/c/|Page(/blog/C.md) - Default3|3|100|Home|/|Page(/_index.md)`, + `Default1|0|10|A|/blog/a/|Page(/blog/a) + Default1|1|20|B|/blog/b/|Page(/blog/b) + Default1|2|30|C|/blog/c/|Page(/blog/c) + Default1|3|100|Home|/|Page(/) + + ByWeight|0|10|A|/blog/a/|Page(/blog/a) + ByWeight|1|20|B|/blog/b/|Page(/blog/b) + ByWeight|2|30|C|/blog/c/|Page(/blog/c) + ByWeight|3|100|Home|/|Page(/) + + Reverse|0|100|Home|/|Page(/) + Reverse|1|30|C|/blog/c/|Page(/blog/c) + Reverse|2|20|B|/blog/b/|Page(/blog/b) + Reverse|3|10|A|/blog/a/|Page(/blog/a) + + Default2|0|10|A|/blog/a/|Page(/blog/a) + Default2|1|20|B|/blog/b/|Page(/blog/b) + Default2|2|30|C|/blog/c/|Page(/blog/c) + Default2|3|100|Home|/|Page(/) + + ByWeight|0|10|A|/blog/a/|Page(/blog/a) + ByWeight|1|20|B|/blog/b/|Page(/blog/b) + ByWeight|2|30|C|/blog/c/|Page(/blog/c) + ByWeight|3|100|Home|/|Page(/) + + Default3|0|10|A|/blog/a/|Page(/blog/a) + Default3|1|20|B|/blog/b/|Page(/blog/b) + Default3|2|30|C|/blog/c/|Page(/blog/c) + Default3|3|100|Home|/|Page(/)`, ) } @@ -494,34 +494,34 @@ title: "Contact: With No Menu Defined" b.AssertFileContent("public/index.html", ` Main: 5 -Home|HasMenuCurrent: false|Page: Page(/_index.md) -Blog|HasMenuCurrent: 
false|Page: Page(/blog/_index.md) -My Post 2: With Menu Defined|HasMenuCurrent: false|Page: Page(/blog/post2.md) -My Post 3|HasMenuCurrent: false|Page: Page(/blog/post3.md) -Contact Us|HasMenuCurrent: false|Page: Page(/contact.md) +Home|HasMenuCurrent: false|Page: Page(/) +Blog|HasMenuCurrent: false|Page: Page(/blog) +My Post 2: With Menu Defined|HasMenuCurrent: false|Page: Page(/blog/post2) +My Post 3|HasMenuCurrent: false|Page: Page(/blog/post3) +Contact Us|HasMenuCurrent: false|Page: Page(/contact) `) b.AssertFileContent("public/blog/post1/index.html", ` -Home|HasMenuCurrent: false|Page: Page(/_index.md) -Blog|HasMenuCurrent: true|Page: Page(/blog/_index.md) +Home|HasMenuCurrent: false|Page: Page(/) +Blog|HasMenuCurrent: true|Page: Page(/blog) `) b.AssertFileContent("public/blog/post2/index.html", ` -Home|HasMenuCurrent: false|Page: Page(/_index.md) -Blog|HasMenuCurrent: true|Page: Page(/blog/_index.md) -Blog|IsMenuCurrent: false|Page: Page(/blog/_index.md) +Home|HasMenuCurrent: false|Page: Page(/) +Blog|HasMenuCurrent: true|Page: Page(/blog) +Blog|IsMenuCurrent: false|Page: Page(/blog) `) b.AssertFileContent("public/blog/post3/index.html", ` -Home|HasMenuCurrent: false|Page: Page(/_index.md) -Blog|HasMenuCurrent: true|Page: Page(/blog/_index.md) +Home|HasMenuCurrent: false|Page: Page(/) +Blog|HasMenuCurrent: true|Page: Page(/blog) `) b.AssertFileContent("public/contact/index.html", ` -Contact Us|HasMenuCurrent: false|Page: Page(/contact.md) -Contact Us|IsMenuCurrent: true|Page: Page(/contact.md) -Blog|HasMenuCurrent: false|Page: Page(/blog/_index.md) -Blog|IsMenuCurrent: false|Page: Page(/blog/_index.md) +Contact Us|HasMenuCurrent: false|Page: Page(/contact) +Contact Us|IsMenuCurrent: true|Page: Page(/contact) +Blog|HasMenuCurrent: false|Page: Page(/blog) +Blog|IsMenuCurrent: false|Page: Page(/blog) `) } @@ -619,7 +619,6 @@ Menu Item: {{ $i }}: {{ .Pre }}{{ .Name }}{{ .Post }}|{{ .URL }}| b.AssertFileContent("public/index.html", ` Menu Item: 0: 
<span>Home</span>|/| `) - } // Issue #11062 @@ -651,5 +650,4 @@ Menu Item: {{ $i }}|{{ .URL }}| b.AssertFileContent("public/index.html", ` Menu Item: 0|/foo/posts| `) - } diff --git a/hugolib/mount_filters_test.go b/hugolib/mount_filters_test.go index 4f6a448d2..16b062ec6 100644 --- a/hugolib/mount_filters_test.go +++ b/hugolib/mount_filters_test.go @@ -36,7 +36,7 @@ func TestMountFilters(t *testing.T) { defer clean() for _, component := range files.ComponentFolders { - b.Assert(os.MkdirAll(filepath.Join(workingDir, component), 0777), qt.IsNil) + b.Assert(os.MkdirAll(filepath.Join(workingDir, component), 0o777), qt.IsNil) } b.WithWorkingDir(workingDir).WithLogger(loggers.NewDefault()) b.WithConfigFile("toml", fmt.Sprintf(` @@ -109,10 +109,9 @@ Resources: {{ resources.Match "**.js" }} b.AssertFileContent(filepath.Join("public", "index.html"), ` Data: map[mydata:map[b:map[b1:bval]]]:END Template: false -Resource1: js/include.js:END +Resource1: /js/include.js:END Resource2: :END Resource3: :END -Resources: [js/include.js] +Resources: [/js/include.js] `) - } diff --git a/hugolib/page.go b/hugolib/page.go index bf5e19ac4..f8ec5e225 100644 --- a/hugolib/page.go +++ b/hugolib/page.go @@ -14,16 +14,14 @@ package hugolib import ( - "bytes" "context" "fmt" - "path" - "path/filepath" - "sort" - "strings" - - "go.uber.org/atomic" + "strconv" + "sync" + "sync/atomic" + "github.com/gohugoio/hugo/hugofs" + "github.com/gohugoio/hugo/hugolib/doctree" "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/media" "github.com/gohugoio/hugo/output" @@ -35,29 +33,25 @@ import ( "github.com/gohugoio/hugo/tpl" - "github.com/gohugoio/hugo/hugofs/files" - - "github.com/gohugoio/hugo/helpers" - "github.com/gohugoio/hugo/common/herrors" - "github.com/gohugoio/hugo/parser/metadecoders" - - "github.com/gohugoio/hugo/parser/pageparser" + "github.com/gohugoio/hugo/common/maps" "github.com/gohugoio/hugo/source" "github.com/gohugoio/hugo/common/collections" 
"github.com/gohugoio/hugo/common/text" - "github.com/gohugoio/hugo/resources" "github.com/gohugoio/hugo/resources/kinds" "github.com/gohugoio/hugo/resources/page" "github.com/gohugoio/hugo/resources/resource" ) var ( - _ page.Page = (*pageState)(nil) - _ collections.Grouper = (*pageState)(nil) - _ collections.Slicer = (*pageState)(nil) + _ page.Page = (*pageState)(nil) + _ collections.Grouper = (*pageState)(nil) + _ collections.Slicer = (*pageState)(nil) + _ identity.DependencyManagerScopedProvider = (*pageState)(nil) + _ contentNodeI = (*pageState)(nil) + _ pageContext = (*pageState)(nil) ) var ( @@ -74,15 +68,6 @@ type pageContext interface { posOffset(offset int) text.Position wrapError(err error) error getContentConverter() converter.Converter - addDependency(dep identity.Provider) -} - -// wrapErr adds some context to the given error if possible. -func wrapErr(err error, ctx any) error { - if pc, ok := ctx.(pageContext); ok { - return pc.wrapError(err) - } - return err } type pageSiteAdapter struct { @@ -90,20 +75,9 @@ type pageSiteAdapter struct { s *Site } -func (pa pageSiteAdapter) GetPageWithTemplateInfo(info tpl.Info, ref string) (page.Page, error) { - p, err := pa.GetPage(ref) - if p != nil { - // Track pages referenced by templates/shortcodes - // when in server mode. - if im, ok := info.(identity.Manager); ok { - im.Add(p) - } - } - return p, err -} - func (pa pageSiteAdapter) GetPage(ref string) (page.Page, error) { - p, err := pa.s.getPageNew(pa.p, ref) + p, err := pa.s.getPage(pa.p, ref) + if p == nil { // The nil struct has meaning in some situations, mostly to avoid breaking // existing sites doing $nilpage.IsDescendant($p), which will always return @@ -116,7 +90,7 @@ func (pa pageSiteAdapter) GetPage(ref string) (page.Page, error) { type pageState struct { // Incremented for each new page created. // Note that this will change between builds for a given Page. 
- id int + pid uint64 // This slice will be of same length as the number of global slice of output // formats (for all sites). @@ -126,16 +100,69 @@ type pageState struct { pageOutputTemplateVariationsState *atomic.Uint32 // This will be shifted out when we start to render a new output format. + pageOutputIdx int *pageOutput // Common for all output formats. *pageCommon + + resource.Staler + dependencyManager identity.Manager + resourcesPublishInit *sync.Once +} + +func (p *pageState) IdentifierBase() string { + return p.Path() +} + +func (p *pageState) GetIdentity() identity.Identity { + return p +} + +func (p *pageState) ForEeachIdentity(f func(identity.Identity) bool) { + f(p) +} + +func (p *pageState) GetDependencyManager() identity.Manager { + return p.dependencyManager +} + +func (p *pageState) GetDependencyManagerForScope(scope int) identity.Manager { + switch scope { + case pageDependencyScopeDefault: + return p.dependencyManagerOutput + case pageDependencyScopeGlobal: + return p.dependencyManager + default: + return identity.NopManager + } +} + +func (p *pageState) Key() string { + return "page-" + strconv.FormatUint(p.pid, 10) +} + +func (p *pageState) resetBuildState() { + p.Scratcher = maps.NewScratcher() } func (p *pageState) reusePageOutputContent() bool { return p.pageOutputTemplateVariationsState.Load() == 1 } +func (po *pageState) isRenderedAny() bool { + for _, o := range po.pageOutputs { + if o.isRendered() { + return true + } + } + return false +} + +func (p *pageState) isContentNodeBranch() bool { + return p.IsNode() +} + func (p *pageState) Err() resource.ResourceError { return nil } @@ -151,11 +178,6 @@ func (p *pageState) Eq(other any) bool { return p == pp } -// GetIdentity is for internal use. 
-func (p *pageState) GetIdentity() identity.Identity { - return identity.NewPathIdentity(files.ComponentFolderContent, filepath.FromSlash(p.Pathc())) -} - func (p *pageState) HeadingsFiltered(context.Context) tableofcontents.Headings { return nil } @@ -175,10 +197,11 @@ func (p *pageHeadingsFiltered) page() page.Page { // For internal use by the related content feature. func (p *pageState) ApplyFilterToHeadings(ctx context.Context, fn func(*tableofcontents.Heading) bool) related.Document { - if p.pageOutput.cp.tableOfContents == nil { - return p + r, err := p.content.contentToC(ctx, p.pageOutput.pco) + if err != nil { + panic(err) } - headings := p.pageOutput.cp.tableOfContents.Headings.FilterBy(fn) + headings := r.tableOfContents.Headings.FilterBy(fn) return &pageHeadingsFiltered{ pageState: p, headings: headings, @@ -196,72 +219,28 @@ func (p *pageState) CodeOwners() []string { // GetTerms gets the terms defined on this page in the given taxonomy. // The pages returned will be ordered according to the front matter. 
func (p *pageState) GetTerms(taxonomy string) page.Pages { - if p.treeRef == nil { - return nil - } - - m := p.s.pageMap - - taxonomy = strings.ToLower(taxonomy) - prefix := cleanSectionTreeKey(taxonomy) - self := strings.TrimPrefix(p.treeRef.key, "/") - - var pas page.Pages - - m.taxonomies.WalkQuery(pageMapQuery{Prefix: prefix}, func(s string, n *contentNode) bool { - key := s + self - if tn, found := m.taxonomyEntries.Get(key); found { - vi := tn.(*contentNode).viewInfo - pas = append(pas, pageWithOrdinal{pageState: n.p, ordinal: vi.ordinal}) - } - return false - }) - - page.SortByDefault(pas) - - return pas + return p.s.pageMap.getTermsForPageInTaxonomy(p.Path(), taxonomy) } func (p *pageState) MarshalJSON() ([]byte, error) { return page.MarshalPageToJSON(p) } -func (p *pageState) getPages() page.Pages { - b := p.bucket - if b == nil { - return nil - } - return b.getPages() -} - -func (p *pageState) getPagesRecursive() page.Pages { - b := p.bucket - if b == nil { - return nil - } - return b.getPagesRecursive() -} - -func (p *pageState) getPagesAndSections() page.Pages { - b := p.bucket - if b == nil { - return nil - } - return b.getPagesAndSections() -} - func (p *pageState) RegularPagesRecursive() page.Pages { - p.regularPagesRecursiveInit.Do(func() { - var pages page.Pages - switch p.Kind() { - case kinds.KindSection, kinds.KindHome: - pages = p.getPagesRecursive() - default: - pages = p.RegularPages() - } - p.regularPagesRecursive = pages - }) - return p.regularPagesRecursive + switch p.Kind() { + case kinds.KindSection, kinds.KindHome: + return p.s.pageMap.getPagesInSection( + pageMapQueryPagesInSection{ + pageMapQueryPagesBelowPath: pageMapQueryPagesBelowPath{ + Path: p.Path(), + Include: pagePredicates.ShouldListLocal.And(pagePredicates.KindPage), + }, + Recursive: true, + }, + ) + default: + return p.RegularPages() + } } func (p *pageState) PagesRecursive() page.Pages { @@ -269,110 +248,95 @@ func (p *pageState) PagesRecursive() page.Pages { } func (p 
*pageState) RegularPages() page.Pages { - p.regularPagesInit.Do(func() { - var pages page.Pages - - switch p.Kind() { - case kinds.KindPage: - case kinds.KindSection, kinds.KindHome, kinds.KindTaxonomy: - pages = p.getPages() - case kinds.KindTerm: - all := p.Pages() - for _, p := range all { - if p.IsPage() { - pages = append(pages, p) - } - } - default: - pages = p.s.RegularPages() - } - - p.regularPages = pages - }) - - return p.regularPages + switch p.Kind() { + case kinds.KindPage: + case kinds.KindSection, kinds.KindHome, kinds.KindTaxonomy: + return p.s.pageMap.getPagesInSection( + pageMapQueryPagesInSection{ + pageMapQueryPagesBelowPath: pageMapQueryPagesBelowPath{ + Path: p.Path(), + Include: pagePredicates.ShouldListLocal.And(pagePredicates.KindPage), + }, + }, + ) + case kinds.KindTerm: + return p.s.pageMap.getPagesWithTerm( + pageMapQueryPagesBelowPath{ + Path: p.Path(), + Include: pagePredicates.ShouldListLocal.And(pagePredicates.KindPage), + }, + ) + default: + return p.s.RegularPages() + } + return nil } func (p *pageState) Pages() page.Pages { - p.pagesInit.Do(func() { - var pages page.Pages - - switch p.Kind() { - case kinds.KindPage: - case kinds.KindSection, kinds.KindHome: - pages = p.getPagesAndSections() - case kinds.KindTerm: - b := p.treeRef.n - viewInfo := b.viewInfo - taxonomy := p.s.Taxonomies()[viewInfo.name.plural].Get(viewInfo.termKey) - pages = taxonomy.Pages() - case kinds.KindTaxonomy: - pages = p.bucket.getTaxonomies() - default: - pages = p.s.Pages() - } - - p.pages = pages - }) - - return p.pages + switch p.Kind() { + case kinds.KindPage: + case kinds.KindSection, kinds.KindHome: + return p.s.pageMap.getPagesInSection( + pageMapQueryPagesInSection{ + pageMapQueryPagesBelowPath: pageMapQueryPagesBelowPath{ + Path: p.Path(), + KeyPart: "page-section", + Include: pagePredicates.ShouldListLocal.And( + pagePredicates.KindPage.Or(pagePredicates.KindSection), + ), + }, + }, + ) + case kinds.KindTerm: + return 
p.s.pageMap.getPagesWithTerm( + pageMapQueryPagesBelowPath{ + Path: p.Path(), + }, + ) + case kinds.KindTaxonomy: + return p.s.pageMap.getPagesInSection( + pageMapQueryPagesInSection{ + pageMapQueryPagesBelowPath: pageMapQueryPagesBelowPath{ + Path: p.Path(), + KeyPart: "term", + Include: pagePredicates.ShouldListLocal.And(pagePredicates.KindTerm), + }, + Recursive: true, + }, + ) + default: + return p.s.Pages() + } + return nil } // RawContent returns the un-rendered source content without // any leading front matter. func (p *pageState) RawContent() string { - if p.source.parsed == nil { + if p.content.parseInfo.itemsStep2 == nil { return "" } - start := p.source.posMainContent + start := p.content.parseInfo.posMainContent if start == -1 { start = 0 } - - return string(p.source.parsed.Input()[start:]) -} - -func (p *pageState) sortResources() { - sort.SliceStable(p.resources, func(i, j int) bool { - ri, rj := p.resources[i], p.resources[j] - if ri.ResourceType() < rj.ResourceType() { - return true - } - - p1, ok1 := ri.(page.Page) - p2, ok2 := rj.(page.Page) - - if ok1 != ok2 { - return ok2 - } - - if ok1 { - return page.DefaultPageSort(p1, p2) - } - - // Make sure not to use RelPermalink or any of the other methods that - // trigger lazy publishing. - return ri.Name() < rj.Name() - }) + source, err := p.content.contentSource() + if err != nil { + panic(err) + } + return string(source[start:]) } func (p *pageState) Resources() resource.Resources { - p.resourcesInit.Do(func() { - p.sortResources() - if len(p.m.resourcesMetadata) > 0 { - resources.AssignMetadata(p.m.resourcesMetadata, p.resources...) 
- p.sortResources() - } - }) - return p.resources + return p.s.pageMap.getOrCreateResourcesForPage(p) } func (p *pageState) HasShortcode(name string) bool { - if p.shortcodeState == nil { + if p.content.shortcodeState == nil { return false } - return p.shortcodeState.hasName(name) + return p.content.shortcodeState.hasName(name) } func (p *pageState) Site() page.Site { @@ -380,47 +344,72 @@ func (p *pageState) Site() page.Site { } func (p *pageState) String() string { - if sourceRef := p.sourceRef(); sourceRef != "" { - return fmt.Sprintf("Page(%s)", sourceRef) - } - return fmt.Sprintf("Page(%q)", p.Title()) + return fmt.Sprintf("Page(%s)", p.Path()) } // IsTranslated returns whether this content file is translated to // other language(s). func (p *pageState) IsTranslated() bool { - p.s.h.init.translations.Do(context.Background()) - return len(p.translations) > 0 + return len(p.Translations()) > 0 } -// TranslationKey returns the key used to map language translations of this page. -// It will use the translationKey set in front matter if set, or the content path and -// filename (excluding any language code and extension), e.g. "about/index". -// The Page Kind is always prepended. +// TranslationKey returns the key used to identify a translation of this content. func (p *pageState) TranslationKey() string { - p.translationKeyInit.Do(func() { - if p.m.translationKey != "" { - p.translationKey = p.Kind() + "/" + p.m.translationKey - } else if p.IsPage() && !p.File().IsZero() { - p.translationKey = path.Join(p.Kind(), filepath.ToSlash(p.File().Dir()), p.File().TranslationBaseName()) - } else if p.IsNode() { - p.translationKey = path.Join(p.Kind(), p.SectionsPath()) - } - }) - - return p.translationKey + if p.m.translationKey != "" { + return p.m.translationKey + } + return p.Path() } // AllTranslations returns all translations, including the current Page. 
func (p *pageState) AllTranslations() page.Pages { - p.s.h.init.translations.Do(context.Background()) - return p.allTranslations + key := p.Path() + "/" + "translations-all" + pages, err := p.s.pageMap.getOrCreatePagesFromCache(key, func(string) (page.Pages, error) { + if p.m.translationKey != "" { + // translationKey set by user. + pas, _ := p.s.h.translationKeyPages.Get(p.m.translationKey) + pasc := make(page.Pages, len(pas)) + copy(pasc, pas) + page.SortByLanguage(pasc) + return pasc, nil + } + var pas page.Pages + p.s.pageMap.treePages.ForEeachInDimension(p.Path(), doctree.DimensionLanguage.Index(), + func(n contentNodeI) bool { + if n != nil { + pas = append(pas, n.(page.Page)) + } + return false + }, + ) + + pas = pagePredicates.ShouldLink.Filter(pas) + page.SortByLanguage(pas) + return pas, nil + }) + if err != nil { + panic(err) + } + + return pages } // Translations returns the translations excluding the current Page. func (p *pageState) Translations() page.Pages { - p.s.h.init.translations.Do(context.Background()) - return p.translations + key := p.Path() + "/" + "translations" + pages, err := p.s.pageMap.getOrCreatePagesFromCache(key, func(string) (page.Pages, error) { + var pas page.Pages + for _, pp := range p.AllTranslations() { + if !pp.Eq(p) { + pas = append(pas, pp) + } + } + return pas, nil + }) + if err != nil { + panic(err) + } + return pages } func (ps *pageState) initCommonProviders(pp pagePaths) error { @@ -450,8 +439,12 @@ func (p *pageState) getLayoutDescriptor() layouts.LayoutDescriptor { section = sections[0] } case kinds.KindTaxonomy, kinds.KindTerm: - b := p.getTreeRef().n - section = b.viewInfo.name.singular + + if p.m.singular != "" { + section = p.m.singular + } else if len(sections) > 0 { + section = sections[0] + } default: } @@ -470,14 +463,6 @@ func (p *pageState) getLayoutDescriptor() layouts.LayoutDescriptor { func (p *pageState) resolveTemplate(layouts ...string) (tpl.Template, bool, error) { f := p.outputFormat() - if 
len(layouts) == 0 { - selfLayout := p.selfLayoutForOutput(f) - if selfLayout != "" { - templ, found := p.s.Tmpl().Lookup(selfLayout) - return templ, found, nil - } - } - d := p.getLayoutDescriptor() if len(layouts) > 0 { @@ -488,15 +473,6 @@ func (p *pageState) resolveTemplate(layouts ...string) (tpl.Template, bool, erro return p.s.Tmpl().LookupLayout(d, f) } -// This is serialized -func (p *pageState) initOutputFormat(isRenderingSite bool, idx int) error { - if err := p.shiftToOutputFormat(isRenderingSite, idx); err != nil { - return err - } - - return nil -} - // Must be run after the site section tree etc. is built and ready. func (p *pageState) initPage() error { if _, err := p.init.Do(context.Background()); err != nil { @@ -505,12 +481,11 @@ func (p *pageState) initPage() error { return nil } -func (p *pageState) renderResources() (err error) { - p.resourcesPublishInit.Do(func() { - var toBeDeleted []int - - for i, r := range p.Resources() { +func (p *pageState) renderResources() error { + var initErr error + p.resourcesPublishInit.Do(func() { + for _, r := range p.Resources() { if _, ok := r.(page.Page); ok { // Pages gets rendered with the owning page but we count them here. p.s.PathSpec.ProcessingStats.Incr(&p.s.PathSpec.ProcessingStats.Pages) @@ -519,50 +494,21 @@ func (p *pageState) renderResources() (err error) { src, ok := r.(resource.Source) if !ok { - err = fmt.Errorf("Resource %T does not support resource.Source", src) + initErr = fmt.Errorf("resource %T does not support resource.Source", src) return } if err := src.Publish(); err != nil { - if herrors.IsNotExist(err) { - // The resource has been deleted from the file system. - // This should be extremely rare, but can happen on live reload in server - // mode when the same resource is member of different page bundles. 
- toBeDeleted = append(toBeDeleted, i) - } else { + if !herrors.IsNotExist(err) { p.s.Log.Errorf("Failed to publish Resource for page %q: %s", p.pathOrTitle(), err) } } else { p.s.PathSpec.ProcessingStats.Incr(&p.s.PathSpec.ProcessingStats.Files) } } - - for _, i := range toBeDeleted { - p.deleteResource(i) - } }) - return -} - -func (p *pageState) deleteResource(i int) { - p.resources = append(p.resources[:i], p.resources[i+1:]...) -} - -func (p *pageState) getTargetPaths() page.TargetPaths { - return p.targetPaths() -} - -func (p *pageState) setTranslations(pages page.Pages) { - p.allTranslations = pages - page.SortByLanguage(p.allTranslations) - translations := make(page.Pages, 0) - for _, t := range p.allTranslations { - if !t.Eq(p) { - translations = append(translations, t) - } - } - p.translations = translations + return initErr } func (p *pageState) AlternativeOutputFormats() page.OutputFormats { @@ -588,229 +534,39 @@ var defaultRenderStringOpts = renderStringOpts{ Markup: "", // Will inherit the page's value when not set. } -func (p *pageState) addDependency(dep identity.Provider) { - if !p.s.watching() || p.pageOutput.cp == nil { - return - } - p.pageOutput.cp.dependencyTracker.Add(dep) -} - -// wrapError adds some more context to the given error if possible/needed -func (p *pageState) wrapError(err error) error { +func (p *pageMeta) wrapError(err error) error { if err == nil { panic("wrapError with nil") } - if p.File().IsZero() { + if p.File() == nil { // No more details to add. - return fmt.Errorf("%q: %w", p.Pathc(), err) - } - - filename := p.File().Filename() - - // Check if it's already added. 
- for _, ferr := range herrors.UnwrapFileErrors(err) { - errfilename := ferr.Position().Filename - if errfilename == filename { - if ferr.ErrorContext() == nil { - f, ioerr := p.s.SourceSpec.Fs.Source.Open(filename) - if ioerr != nil { - return err - } - defer f.Close() - ferr.UpdateContent(f, nil) - } - return err - } + return fmt.Errorf("%q: %w", p.Path(), err) } - lineMatcher := herrors.NopLineMatcher - - if textSegmentErr, ok := err.(*herrors.TextSegmentError); ok { - lineMatcher = herrors.ContainsMatcher(textSegmentErr.Segment) - } + return hugofs.AddFileInfoToError(err, p.File().FileInfo(), p.s.SourceSpec.Fs.Source) +} - return herrors.NewFileErrorFromFile(err, filename, p.s.SourceSpec.Fs.Source, lineMatcher) +// wrapError adds some more context to the given error if possible/needed +func (p *pageState) wrapError(err error) error { + return p.m.wrapError(err) } func (p *pageState) getContentConverter() converter.Converter { var err error - p.m.contentConverterInit.Do(func() { + p.contentConverterInit.Do(func() { markup := p.m.markup if markup == "html" { // Only used for shortcode inner content. 
markup = "markdown" } - p.m.contentConverter, err = p.m.newContentConverter(p, markup) + p.contentConverter, err = p.m.newContentConverter(p, markup) }) if err != nil { p.s.Log.Errorln("Failed to create content converter:", err) } - return p.m.contentConverter -} - -func (p *pageState) mapContent(bucket *pagesMapBucket, meta *pageMeta) error { - p.cmap = &pageContentMap{ - items: make([]any, 0, 20), - } - - return p.mapContentForResult( - p.source.parsed, - p.shortcodeState, - p.cmap, - meta.markup, - func(m map[string]interface{}) error { - return meta.setMetadata(bucket, p, m) - }, - ) -} - -func (p *pageState) mapContentForResult( - result pageparser.Result, - s *shortcodeHandler, - rn *pageContentMap, - markup string, - withFrontMatter func(map[string]any) error, -) error { - iter := result.Iterator() - - fail := func(err error, i pageparser.Item) error { - if fe, ok := err.(herrors.FileError); ok { - return fe - } - return p.parseError(err, result.Input(), i.Pos()) - } - - // the parser is guaranteed to return items in proper order or fail, so … - // … it's safe to keep some "global" state - var currShortcode shortcode - var ordinal int - var frontMatterSet bool - -Loop: - for { - it := iter.Next() - - switch { - case it.Type == pageparser.TypeIgnore: - case it.IsFrontMatter(): - f := pageparser.FormatFromFrontMatterType(it.Type) - m, err := metadecoders.Default.UnmarshalToMap(it.Val(result.Input()), f) - if err != nil { - if fe, ok := err.(herrors.FileError); ok { - pos := fe.Position() - // Apply the error to the content file. - pos.Filename = p.File().Filename() - // Offset the starting position of front matter. 
- offset := iter.LineNumber(result.Input()) - 1 - if f == metadecoders.YAML { - offset -= 1 - } - pos.LineNumber += offset - - fe.UpdatePosition(pos) - - return fe - } else { - return err - } - } - - if withFrontMatter != nil { - if err := withFrontMatter(m); err != nil { - return err - } - } - - frontMatterSet = true - - next := iter.Peek() - p.source.posMainContent = next.Pos() - - if !p.s.shouldBuild(p) { - // Nothing more to do. - return nil - } - - case it.Type == pageparser.TypeLeadSummaryDivider: - posBody := -1 - f := func(item pageparser.Item) bool { - if posBody == -1 && !item.IsDone() { - posBody = item.Pos() - } - - if item.IsNonWhitespace(result.Input()) { - p.truncated = true - - // Done - return false - } - return true - } - iter.PeekWalk(f) - - p.source.posSummaryEnd = it.Pos() - p.source.posBodyStart = posBody - p.source.hasSummaryDivider = true - - if markup != "html" { - // The content will be rendered by Goldmark or similar, - // and we need to track the summary. - rn.AddReplacement(internalSummaryDividerPre, it) - } - - // Handle shortcode - case it.IsLeftShortcodeDelim(): - // let extractShortcode handle left delim (will do so recursively) - iter.Backup() - - currShortcode, err := s.extractShortcode(ordinal, 0, result.Input(), iter) - if err != nil { - return fail(err, it) - } - - currShortcode.pos = it.Pos() - currShortcode.length = iter.Current().Pos() - it.Pos() - if currShortcode.placeholder == "" { - currShortcode.placeholder = createShortcodePlaceholder("s", p.id, currShortcode.ordinal) - } - - if currShortcode.name != "" { - s.addName(currShortcode.name) - } - - if currShortcode.params == nil { - var s []string - currShortcode.params = s - } - - currShortcode.placeholder = createShortcodePlaceholder("s", p.id, ordinal) - ordinal++ - s.shortcodes = append(s.shortcodes, currShortcode) - - rn.AddShortcode(currShortcode) - case it.IsEOF(): - break Loop - case it.IsError(): - err := fail(it.Err, it) - currShortcode.err = err - return err - - 
default: - rn.AddBytes(it) - } - } - - if !frontMatterSet && withFrontMatter != nil { - // Page content without front matter. Assign default front matter from - // cascades etc. - if err := withFrontMatter(nil); err != nil { - return err - } - } - - return nil + return p.contentConverter } func (p *pageState) errorf(err error, format string, a ...any) error { @@ -835,47 +591,33 @@ func (p *pageState) outputFormat() (f output.Format) { } func (p *pageState) parseError(err error, input []byte, offset int) error { - pos := p.posFromInput(input, offset) + pos := posFromInput("", input, offset) return herrors.NewFileErrorFromName(err, p.File().Filename()).UpdatePosition(pos) } func (p *pageState) pathOrTitle() string { - if !p.File().IsZero() { + if p.File() != nil { return p.File().Filename() } - if p.Pathc() != "" { - return p.Pathc() + if p.Path() != "" { + return p.Path() } return p.Title() } func (p *pageState) posFromInput(input []byte, offset int) text.Position { - if offset < 0 { - return text.Position{ - Filename: p.pathOrTitle(), - } - } - lf := []byte("\n") - input = input[:offset] - lineNumber := bytes.Count(input, lf) + 1 - endOfLastLine := bytes.LastIndex(input, lf) - - return text.Position{ - Filename: p.pathOrTitle(), - LineNumber: lineNumber, - ColumnNumber: offset - endOfLastLine, - Offset: offset, - } + return posFromInput(p.pathOrTitle(), input, offset) } func (p *pageState) posOffset(offset int) text.Position { - return p.posFromInput(p.source.parsed.Input(), offset) + return p.posFromInput(p.content.mustSource(), offset) } // shiftToOutputFormat is serialized. The output format idx refers to the // full set of output formats for all sites. +// This is serialized. 
func (p *pageState) shiftToOutputFormat(isRenderingSite bool, idx int) error { if err := p.initPage(); err != nil { return err @@ -885,6 +627,7 @@ func (p *pageState) shiftToOutputFormat(isRenderingSite bool, idx int) error { idx = 0 } + p.pageOutputIdx = idx p.pageOutput = p.pageOutputs[idx] if p.pageOutput == nil { panic(fmt.Sprintf("pageOutput is nil for output idx %d", idx)) @@ -897,7 +640,7 @@ func (p *pageState) shiftToOutputFormat(isRenderingSite bool, idx int) error { } if isRenderingSite { - cp := p.pageOutput.cp + cp := p.pageOutput.pco if cp == nil && p.reusePageOutputContent() { // Look for content to reuse. for i := 0; i < len(p.pageOutputs); i++ { @@ -906,8 +649,8 @@ func (p *pageState) shiftToOutputFormat(isRenderingSite bool, idx int) error { } po := p.pageOutputs[i] - if po.cp != nil { - cp = po.cp + if po.pco != nil { + cp = po.pco break } } @@ -915,12 +658,12 @@ func (p *pageState) shiftToOutputFormat(isRenderingSite bool, idx int) error { if cp == nil { var err error - cp, err = newPageContentOutput(p, p.pageOutput) + cp, err = newPageContentOutput(p.pageOutput) if err != nil { return err } } - p.pageOutput.initContentProvider(cp) + p.pageOutput.setContentProvider(cp) } else { // We attempt to assign pageContentOutputs while preparing each site // for rendering and before rendering each site. This lets us share @@ -932,7 +675,7 @@ func (p *pageState) shiftToOutputFormat(isRenderingSite bool, idx int) error { lcp.Reset() } else { lcp = page.NewLazyContentProvider(func() (page.OutputFormatContentProvider, error) { - cp, err := newPageContentOutput(p, p.pageOutput) + cp, err := newPageContentOutput(p.pageOutput) if err != nil { return nil, err } @@ -948,48 +691,6 @@ func (p *pageState) shiftToOutputFormat(isRenderingSite bool, idx int) error { return nil } -// sourceRef returns the reference used by GetPage and ref/relref shortcodes to refer to -// this page. It is prefixed with a "/". 
-// -// For pages that have a source file, it is returns the path to this file as an -// absolute path rooted in this site's content dir. -// For pages that do not (sections without content page etc.), it returns the -// virtual path, consistent with where you would add a source file. -func (p *pageState) sourceRef() string { - if !p.File().IsZero() { - sourcePath := p.File().Path() - if sourcePath != "" { - return "/" + filepath.ToSlash(sourcePath) - } - } - - if len(p.SectionsEntries()) > 0 { - // no backing file, return the virtual source path - return "/" + p.SectionsPath() - } - - return "" -} - -func (s *Site) sectionsFromFile(fi source.File) []string { - dirname := fi.Dir() - - dirname = strings.Trim(dirname, helpers.FilePathSeparator) - if dirname == "" { - return nil - } - parts := strings.Split(dirname, helpers.FilePathSeparator) - - if fii, ok := fi.(*fileInfo); ok { - if len(parts) > 0 && fii.FileInfo().Meta().Classifier == files.ContentClassLeaf { - // my-section/mybundle/index.md => my-section - return parts[:len(parts)-1] - } - } - - return parts -} - var ( _ page.Page = (*pageWithOrdinal)(nil) _ collections.Order = (*pageWithOrdinal)(nil) @@ -1008,3 +709,16 @@ func (p pageWithOrdinal) Ordinal() int { func (p pageWithOrdinal) page() page.Page { return p.pageState } + +type pageWithWeight0 struct { + weight0 int + *pageState +} + +func (p pageWithWeight0) Weight0() int { + return p.weight0 +} + +func (p pageWithWeight0) page() page.Page { + return p.pageState +} diff --git a/hugolib/page__common.go b/hugolib/page__common.go index 0069bdf89..0881affe7 100644 --- a/hugolib/page__common.go +++ b/hugolib/page__common.go @@ -19,6 +19,7 @@ import ( "github.com/gohugoio/hugo/common/maps" "github.com/gohugoio/hugo/compare" "github.com/gohugoio/hugo/lazy" + "github.com/gohugoio/hugo/markup/converter" "github.com/gohugoio/hugo/navigation" "github.com/gohugoio/hugo/output/layouts" "github.com/gohugoio/hugo/resources/page" @@ -26,14 +27,6 @@ import ( 
"github.com/gohugoio/hugo/source" ) -type treeRefProvider interface { - getTreeRef() *contentTreeRef -} - -func (p *pageCommon) getTreeRef() *contentTreeRef { - return p.treeRef -} - type nextPrevProvider interface { getNextPrev() *nextPrev } @@ -56,9 +49,6 @@ type pageCommon struct { sWrapped page.Site - bucket *pagesMapBucket - treeRef *contentTreeRef - // Lazily initialized dependencies. init *lazy.Init @@ -87,7 +77,7 @@ type pageCommon struct { page.TreeProvider resource.LanguageProvider resource.ResourceDataProvider - resource.ResourceMetaProvider + resource.ResourceNameTitleProvider resource.ResourceParamsProvider resource.ResourceTypeProvider resource.MediaTypeProvider @@ -101,11 +91,8 @@ type pageCommon struct { layoutDescriptor layouts.LayoutDescriptor layoutDescriptorInit sync.Once - // The parsed page content. - pageContent - - // Keeps track of the shortcodes on a page. - shortcodeState *shortcodeHandler + // The source and the parsed page content. + content *cachedContent // Set if feature enabled and this is in a Git repo. gitInfo source.GitInfo @@ -121,38 +108,10 @@ type pageCommon struct { // Internal use page.InternalDependencies - // The children. Regular pages will have none. - *pagePages - - // Any bundled resources - resources resource.Resources - resourcesInit sync.Once - resourcesPublishInit sync.Once - - translations page.Pages - allTranslations page.Pages - - // Calculated an cached translation mapping key - translationKey string - translationKeyInit sync.Once - - // Will only be set for bundled pages. - parent *pageState - - // Set in fast render mode to force render a given page. 
- forceRender bool + contentConverterInit sync.Once + contentConverter converter.Converter } func (p *pageCommon) Store() *maps.Scratch { return p.store } - -type pagePages struct { - pagesInit sync.Once - pages page.Pages - - regularPagesInit sync.Once - regularPages page.Pages - regularPagesRecursiveInit sync.Once - regularPagesRecursive page.Pages -} diff --git a/hugolib/page__content.go b/hugolib/page__content.go index 89c38bd84..64ce83f0e 100644 --- a/hugolib/page__content.go +++ b/hugolib/page__content.go @@ -14,36 +14,147 @@ package hugolib import ( + "bytes" "context" + "errors" "fmt" + "html/template" + "io" + "strings" + "unicode/utf8" - "github.com/gohugoio/hugo/output" + "github.com/bep/logg" + "github.com/gohugoio/hugo/common/hcontext" + "github.com/gohugoio/hugo/common/herrors" + "github.com/gohugoio/hugo/common/hugio" + "github.com/gohugoio/hugo/helpers" + "github.com/gohugoio/hugo/identity" + "github.com/gohugoio/hugo/markup/converter" + "github.com/gohugoio/hugo/markup/tableofcontents" + "github.com/gohugoio/hugo/parser/metadecoders" "github.com/gohugoio/hugo/parser/pageparser" + "github.com/gohugoio/hugo/resources" + "github.com/gohugoio/hugo/resources/resource" + "github.com/gohugoio/hugo/tpl" +) + +const ( + internalSummaryDividerBase = "HUGOMORE42" ) var ( - internalSummaryDividerBase = "HUGOMORE42" internalSummaryDividerBaseBytes = []byte(internalSummaryDividerBase) internalSummaryDividerPre = []byte("\n\n" + internalSummaryDividerBase + "\n\n") ) -// The content related items on a Page. 
-type pageContent struct { - selfLayout string - truncated bool +type pageContentReplacement struct { + val []byte + + source pageparser.Item +} + +func newCachedContent(m *pageMeta, pid uint64) (*cachedContent, error) { + var openSource hugio.OpenReadSeekCloser + var filename string + if m.f != nil { + meta := m.f.FileInfo().Meta() + openSource = func() (hugio.ReadSeekCloser, error) { + r, err := meta.Open() + if err != nil { + return nil, fmt.Errorf("failed to open file %q: %w", meta.Filename, err) + } + return r, nil + } + filename = m.f.Filename() + } + + c := &cachedContent{ + pm: m.s.pageMap, + StaleInfo: m, + shortcodeState: newShortcodeHandler(filename, m.s), + parseInfo: &contentParseInfo{ + pid: pid, + }, + cacheBaseKey: m.pathInfo.PathNoLang(), + openSource: openSource, + enableEmoji: m.s.conf.EnableEmoji, + } + + source, err := c.contentSource() + if err != nil { + return nil, err + } + + if err := c.parseContentFile(source); err != nil { + return nil, err + } + + return c, nil +} + +type cachedContent struct { + pm *pageMap + + cacheBaseKey string + + // The source bytes. + openSource hugio.OpenReadSeekCloser + + resource.StaleInfo + + shortcodeState *shortcodeHandler + + // Parsed content. + parseInfo *contentParseInfo + + enableEmoji bool +} + +type contentParseInfo struct { + pid uint64 + frontMatter map[string]any - cmap *pageContentMap + // Whether the parsed content contains a summary separator. + hasSummaryDivider bool + + // Whether there are more content after the summary divider. + summaryTruncated bool + + // Returns the position in bytes after any front matter. + posMainContent int + + // Indicates whether we must do placeholder replacements. + hasNonMarkdownShortcode bool + + // Items from the page parser. 
+ // These maps directly to the source + itemsStep1 pageparser.Items + + // *shortcode, pageContentReplacement or pageparser.Item + itemsStep2 []any +} + +func (p *contentParseInfo) AddBytes(item pageparser.Item) { + p.itemsStep2 = append(p.itemsStep2, item) +} - source rawPageContent +func (p *contentParseInfo) AddReplacement(val []byte, source pageparser.Item) { + p.itemsStep2 = append(p.itemsStep2, pageContentReplacement{val: val, source: source}) } -// returns the content to be processed by Goldmark or similar. -func (p pageContent) contentToRender(ctx context.Context, parsed pageparser.Result, pm *pageContentMap, renderedShortcodes map[string]shortcodeRenderer) ([]byte, bool, error) { - source := parsed.Input() +func (p *contentParseInfo) AddShortcode(s *shortcode) { + p.itemsStep2 = append(p.itemsStep2, s) + if s.insertPlaceholder() { + p.hasNonMarkdownShortcode = true + } +} + +// contentToRenderForItems returns the content to be processed by Goldmark or similar. +func (pi *contentParseInfo) contentToRender(ctx context.Context, source []byte, renderedShortcodes map[string]shortcodeRenderer) ([]byte, bool, error) { var hasVariants bool c := make([]byte, 0, len(source)+(len(source)/10)) - for _, it := range pm.items { + for _, it := range pi.itemsStep2 { switch v := it.(type) { case pageparser.Item: c = append(c, source[v.Pos():v.Pos()+len(v.Val(source))]...) 
@@ -78,59 +189,556 @@ func (p pageContent) contentToRender(ctx context.Context, parsed pageparser.Resu return c, hasVariants, nil } -func (p pageContent) selfLayoutForOutput(f output.Format) string { - if p.selfLayout == "" { - return "" +func (c *cachedContent) IsZero() bool { + return len(c.parseInfo.itemsStep2) == 0 +} + +func (c *cachedContent) parseContentFile(source []byte) error { + if source == nil || c.openSource == nil { + return nil } - return p.selfLayout + f.Name + + items, err := pageparser.ParseBytes( + source, + pageparser.Config{}, + ) + if err != nil { + return err + } + + c.parseInfo.itemsStep1 = items + + return c.parseInfo.mapItems(source, c.shortcodeState) } -type rawPageContent struct { - hasSummaryDivider bool +func (c *contentParseInfo) parseFrontMatter(it pageparser.Item, iter *pageparser.Iterator, source []byte) error { + if c.frontMatter != nil { + return nil + } - // The AST of the parsed page. Contains information about: - // shortcodes, front matter, summary indicators. - parsed pageparser.Result + f := pageparser.FormatFromFrontMatterType(it.Type) + var err error + c.frontMatter, err = metadecoders.Default.UnmarshalToMap(it.Val(source), f) + if err != nil { + if fe, ok := err.(herrors.FileError); ok { + pos := fe.Position() - // Returns the position in bytes after any front matter. - posMainContent int + // Offset the starting position of front matter. + offset := iter.LineNumber(source) - 1 + if f == metadecoders.YAML { + offset -= 1 + } + pos.LineNumber += offset - // These are set if we're able to determine this from the source. - posSummaryEnd int - posBodyStart int + fe.UpdatePosition(pos) + fe.SetFilename("") // It will be set later. 
+ + return fe + } else { + return err + } + } + + return nil } -type pageContentReplacement struct { - val []byte +func (rn *contentParseInfo) mapItems( + source []byte, + s *shortcodeHandler, +) error { + if len(rn.itemsStep1) == 0 { + return nil + } - source pageparser.Item + fail := func(err error, i pageparser.Item) error { + if fe, ok := err.(herrors.FileError); ok { + return fe + } + + pos := posFromInput("", source, i.Pos()) + + return herrors.NewFileErrorFromPos(err, pos) + } + + iter := pageparser.NewIterator(rn.itemsStep1) + + // the parser is guaranteed to return items in proper order or fail, so … + // … it's safe to keep some "global" state + var ordinal int + +Loop: + for { + it := iter.Next() + + switch { + case it.Type == pageparser.TypeIgnore: + case it.IsFrontMatter(): + if err := rn.parseFrontMatter(it, iter, source); err != nil { + return err + } + next := iter.Peek() + if !next.IsDone() { + rn.posMainContent = next.Pos() + } + case it.Type == pageparser.TypeLeadSummaryDivider: + posBody := -1 + f := func(item pageparser.Item) bool { + if posBody == -1 && !item.IsDone() { + posBody = item.Pos() + } + + if item.IsNonWhitespace(source) { + rn.summaryTruncated = true + + // Done + return false + } + return true + } + iter.PeekWalk(f) + + rn.hasSummaryDivider = true + + // The content may be rendered by Goldmark or similar, + // and we need to track the summary. 
+ rn.AddReplacement(internalSummaryDividerPre, it) + + // Handle shortcode + case it.IsLeftShortcodeDelim(): + // let extractShortcode handle left delim (will do so recursively) + iter.Backup() + + currShortcode, err := s.extractShortcode(ordinal, 0, source, iter) + if err != nil { + return fail(err, it) + } + + currShortcode.pos = it.Pos() + currShortcode.length = iter.Current().Pos() - it.Pos() + if currShortcode.placeholder == "" { + currShortcode.placeholder = createShortcodePlaceholder("s", rn.pid, currShortcode.ordinal) + } + + if currShortcode.name != "" { + s.addName(currShortcode.name) + } + + if currShortcode.params == nil { + var s []string + currShortcode.params = s + } + + currShortcode.placeholder = createShortcodePlaceholder("s", rn.pid, ordinal) + ordinal++ + s.shortcodes = append(s.shortcodes, currShortcode) + + rn.AddShortcode(currShortcode) + + case it.IsEOF(): + break Loop + case it.IsError(): + return fail(it.Err, it) + default: + rn.AddBytes(it) + } + } + + return nil } -type pageContentMap struct { +func (c *cachedContent) mustSource() []byte { + source, err := c.contentSource() + if err != nil { + panic(err) + } + return source +} - // If not, we can skip any pre-rendering of shortcodes. - hasMarkdownShortcode bool +func (c *cachedContent) contentSource() ([]byte, error) { + key := c.cacheBaseKey + v, err := c.pm.cacheContentSource.GetOrCreate(key, func(string) (*resources.StaleValue[[]byte], error) { + b, err := c.readSourceAll() + if err != nil { + return nil, err + } - // Indicates whether we must do placeholder replacements. 
- hasNonMarkdownShortcode bool + return &resources.StaleValue[[]byte]{ + Value: b, + IsStaleFunc: func() bool { + return c.IsStale() + }, + }, nil + }) + if err != nil { + return nil, err + } - // *shortcode, pageContentReplacement or pageparser.Item - items []any + return v.Value, nil } -func (p *pageContentMap) AddBytes(item pageparser.Item) { - p.items = append(p.items, item) +func (c *cachedContent) readSourceAll() ([]byte, error) { + if c.openSource == nil { + return []byte{}, nil + } + r, err := c.openSource() + if err != nil { + return nil, err + } + defer r.Close() + + return io.ReadAll(r) } -func (p *pageContentMap) AddReplacement(val []byte, source pageparser.Item) { - p.items = append(p.items, pageContentReplacement{val: val, source: source}) +type contentTableOfContents struct { + // For Goldmark we split Parse and Render. + astDoc any + + tableOfContents *tableofcontents.Fragments + tableOfContentsHTML template.HTML + + // Temporary storage of placeholders mapped to their content. + // These are shortcodes etc. Some of these will need to be replaced + // after any markup is rendered, so they share a common prefix. 
+ contentPlaceholders map[string]shortcodeRenderer + + contentToRender []byte } -func (p *pageContentMap) AddShortcode(s *shortcode) { - p.items = append(p.items, s) - if s.insertPlaceholder() { - p.hasNonMarkdownShortcode = true - } else { - p.hasMarkdownShortcode = true +type contentSummary struct { + content template.HTML + summary template.HTML + summaryTruncated bool +} + +type contentPlainPlainWords struct { + plain string + plainWords []string + + summary template.HTML + summaryTruncated bool + + wordCount int + fuzzyWordCount int + readingTime int +} + +func (c *cachedContent) contentRendered(ctx context.Context, cp *pageContentOutput) (contentSummary, error) { + ctx = tpl.Context.DependencyScope.Set(ctx, pageDependencyScopeGlobal) + key := c.cacheBaseKey + "/" + cp.po.f.Name + versionv := cp.contentRenderedVersion + + v, err := c.pm.cacheContentRendered.GetOrCreate(key, func(string) (*resources.StaleValue[contentSummary], error) { + cp.po.p.s.Log.Trace(logg.StringFunc(func() string { + return fmt.Sprintln("contentRendered", key) + })) + + cp.po.p.s.h.contentRenderCounter.Add(1) + cp.contentRendered = true + po := cp.po + + ct, err := c.contentToC(ctx, cp) + if err != nil { + return nil, err + } + + rs := &resources.StaleValue[contentSummary]{ + IsStaleFunc: func() bool { + return c.IsStale() || cp.contentRenderedVersion != versionv + }, + } + + if len(c.parseInfo.itemsStep2) == 0 { + // Nothing to do. + return rs, nil + } + + var b []byte + + if ct.astDoc != nil { + // The content is parsed, but not rendered. + r, ok, err := po.contentRenderer.RenderContent(ctx, ct.contentToRender, ct.astDoc) + if err != nil { + return nil, err + } + if !ok { + return nil, errors.New("invalid state: astDoc is set but RenderContent returned false") + } + + b = r.Bytes() + + } else { + // Copy the content to be rendered. + b = make([]byte, len(ct.contentToRender)) + copy(b, ct.contentToRender) + } + + // There are one or more replacement tokens to be replaced. 
+ var hasShortcodeVariants bool + tokenHandler := func(ctx context.Context, token string) ([]byte, error) { + if token == tocShortcodePlaceholder { + return []byte(ct.tableOfContentsHTML), nil + } + renderer, found := ct.contentPlaceholders[token] + if found { + repl, more, err := renderer.renderShortcode(ctx) + if err != nil { + return nil, err + } + hasShortcodeVariants = hasShortcodeVariants || more + return repl, nil + } + // This should never happen. + panic(fmt.Errorf("unknown shortcode token %q (number of tokens: %d)", token, len(ct.contentPlaceholders))) + } + + b, err = expandShortcodeTokens(ctx, b, tokenHandler) + if err != nil { + return nil, err + } + if hasShortcodeVariants { + cp.po.p.pageOutputTemplateVariationsState.Add(1) + } + + var result contentSummary // hasVariants bool + + if c.parseInfo.hasSummaryDivider { + isHTML := cp.po.p.m.markup == "html" + if isHTML { + // Use the summary sections as provided by the user. + i := bytes.Index(b, internalSummaryDividerPre) + result.summary = helpers.BytesToHTML(b[:i]) + b = b[i+len(internalSummaryDividerPre):] + + } else { + summary, content, err := splitUserDefinedSummaryAndContent(cp.po.p.m.markup, b) + if err != nil { + cp.po.p.s.Log.Errorf("Failed to set user defined summary for page %q: %s", cp.po.p.pathOrTitle(), err) + } else { + b = content + result.summary = helpers.BytesToHTML(summary) + } + } + result.summaryTruncated = c.parseInfo.summaryTruncated + } + result.content = helpers.BytesToHTML(b) + rs.Value = result + + return rs, nil + }) + if err != nil { + return contentSummary{}, cp.po.p.wrapError(err) + } + + return v.Value, nil +} + +func (c *cachedContent) mustContentToC(ctx context.Context, cp *pageContentOutput) contentTableOfContents { + ct, err := c.contentToC(ctx, cp) + if err != nil { + panic(err) + } + return ct +} + +var setGetContentCallbackInContext = hcontext.NewContextDispatcher[func(*pageContentOutput, contentTableOfContents)]("contentCallback") + +func (c *cachedContent) 
contentToC(ctx context.Context, cp *pageContentOutput) (contentTableOfContents, error) { + key := c.cacheBaseKey + "/" + cp.po.f.Name + versionv := cp.contentRenderedVersion + + v, err := c.pm.contentTableOfContents.GetOrCreate(key, func(string) (*resources.StaleValue[contentTableOfContents], error) { + source, err := c.contentSource() + if err != nil { + return nil, err + } + + var ct contentTableOfContents + if err := cp.initRenderHooks(); err != nil { + return nil, err + } + f := cp.po.f + po := cp.po + p := po.p + ct.contentPlaceholders, err = c.shortcodeState.prepareShortcodesForPage(ctx, p, f, false) + if err != nil { + return nil, err + } + + // Callback called from above (e.g. in .RenderString) + ctxCallback := func(cp2 *pageContentOutput, ct2 contentTableOfContents) { + // Merge content placeholders + for k, v := range ct2.contentPlaceholders { + ct.contentPlaceholders[k] = v + } + + if p.s.conf.Internal.Watch { + for _, s := range cp2.po.p.content.shortcodeState.shortcodes { + for _, templ := range s.templs { + cp.trackDependency(templ.(identity.IdentityProvider)) + } + } + } + + // Transfer shortcode names so HasShortcode works for shortcodes from included pages. 
+ cp.po.p.content.shortcodeState.transferNames(cp2.po.p.content.shortcodeState) + if cp2.po.p.pageOutputTemplateVariationsState.Load() > 0 { + cp.po.p.pageOutputTemplateVariationsState.Add(1) + } + } + + ctx = setGetContentCallbackInContext.Set(ctx, ctxCallback) + + var hasVariants bool + ct.contentToRender, hasVariants, err = c.parseInfo.contentToRender(ctx, source, ct.contentPlaceholders) + if err != nil { + return nil, err + } + + if hasVariants { + p.pageOutputTemplateVariationsState.Add(1) + } + + isHTML := cp.po.p.m.markup == "html" + + if !isHTML { + createAndSetToC := func(tocProvider converter.TableOfContentsProvider) { + cfg := p.s.ContentSpec.Converters.GetMarkupConfig() + ct.tableOfContents = tocProvider.TableOfContents() + ct.tableOfContentsHTML = template.HTML( + ct.tableOfContents.ToHTML( + cfg.TableOfContents.StartLevel, + cfg.TableOfContents.EndLevel, + cfg.TableOfContents.Ordered, + ), + ) + } + + // If the converter supports doing the parsing separately, we do that. + parseResult, ok, err := po.contentRenderer.ParseContent(ctx, ct.contentToRender) + if err != nil { + return nil, err + } + if ok { + // This is Goldmark. + // Store away the parse result for later use. + createAndSetToC(parseResult) + + ct.astDoc = parseResult.Doc() + + } else { + + // This is Asciidoctor etc. 
+ r, err := po.contentRenderer.ParseAndRenderContent(ctx, ct.contentToRender, true) + if err != nil { + return nil, err + } + + ct.contentToRender = r.Bytes() + + if tocProvider, ok := r.(converter.TableOfContentsProvider); ok { + createAndSetToC(tocProvider) + } else { + tmpContent, tmpTableOfContents := helpers.ExtractTOC(ct.contentToRender) + ct.tableOfContentsHTML = helpers.BytesToHTML(tmpTableOfContents) + ct.tableOfContents = tableofcontents.Empty + ct.contentToRender = tmpContent + } + } + } + + return &resources.StaleValue[contentTableOfContents]{ + Value: ct, + IsStaleFunc: func() bool { + return c.IsStale() || cp.contentRenderedVersion != versionv + }, + }, nil + }) + if err != nil { + return contentTableOfContents{}, err + } + + return v.Value, nil +} + +func (c *cachedContent) contentPlain(ctx context.Context, cp *pageContentOutput) (contentPlainPlainWords, error) { + key := c.cacheBaseKey + "/" + cp.po.f.Name + + versionv := cp.contentRenderedVersion + + v, err := c.pm.cacheContentPlain.GetOrCreateWitTimeout(key, cp.po.p.s.Conf.Timeout(), func(string) (*resources.StaleValue[contentPlainPlainWords], error) { + var result contentPlainPlainWords + rs := &resources.StaleValue[contentPlainPlainWords]{ + IsStaleFunc: func() bool { + return c.IsStale() || cp.contentRenderedVersion != versionv + }, + } + + rendered, err := c.contentRendered(ctx, cp) + if err != nil { + return nil, err + } + + result.plain = tpl.StripHTML(string(rendered.content)) + result.plainWords = strings.Fields(result.plain) + + isCJKLanguage := cp.po.p.m.isCJKLanguage + + if isCJKLanguage { + result.wordCount = 0 + for _, word := range result.plainWords { + runeCount := utf8.RuneCountInString(word) + if len(word) == runeCount { + result.wordCount++ + } else { + result.wordCount += runeCount + } + } + } else { + result.wordCount = helpers.TotalWords(result.plain) + } + + // TODO(bep) is set in a test. Fix that. 
+ if result.fuzzyWordCount == 0 { + result.fuzzyWordCount = (result.wordCount + 100) / 100 * 100 + } + + if isCJKLanguage { + result.readingTime = (result.wordCount + 500) / 501 + } else { + result.readingTime = (result.wordCount + 212) / 213 + } + + if rendered.summary != "" { + result.summary = rendered.summary + result.summaryTruncated = rendered.summaryTruncated + } else if cp.po.p.m.summary != "" { + b, err := cp.po.contentRenderer.ParseAndRenderContent(ctx, []byte(cp.po.p.m.summary), false) + if err != nil { + return nil, err + } + html := cp.po.p.s.ContentSpec.TrimShortHTML(b.Bytes()) + result.summary = helpers.BytesToHTML(html) + } else { + var summary string + var truncated bool + if isCJKLanguage { + summary, truncated = cp.po.p.s.ContentSpec.TruncateWordsByRune(result.plainWords) + } else { + summary, truncated = cp.po.p.s.ContentSpec.TruncateWordsToWholeSentence(result.plain) + } + result.summary = template.HTML(summary) + result.summaryTruncated = truncated + } + + rs.Value = result + + return rs, nil + }) + if err != nil { + if herrors.IsTimeoutError(err) { + err = fmt.Errorf("timed out rendering the page content. 
You may have a circular loop in a shortcode, or your site may have resources that take longer to build than the `timeout` limit in your Hugo config file: %w", err) + } + return contentPlainPlainWords{}, err } + return v.Value, nil } diff --git a/hugolib/page__data.go b/hugolib/page__data.go index ad6ba126e..9712f1b4a 100644 --- a/hugolib/page__data.go +++ b/hugolib/page__data.go @@ -14,6 +14,7 @@ package hugolib import ( + "strings" "sync" "github.com/gohugoio/hugo/resources/kinds" @@ -37,23 +38,18 @@ func (p *pageData) Data() any { switch p.Kind() { case kinds.KindTerm: - b := p.treeRef.n - name := b.viewInfo.name - termKey := b.viewInfo.termKey - - taxonomy := p.s.Taxonomies()[name.plural].Get(termKey) - - p.data[name.singular] = taxonomy + path := p.Path() + name := p.s.pageMap.cfg.getTaxonomyConfig(path) + term := p.s.Taxonomies()[name.plural].Get(strings.TrimPrefix(path, name.pluralTreeKey)) + p.data[name.singular] = term p.data["Singular"] = name.singular p.data["Plural"] = name.plural - p.data["Term"] = b.viewInfo.term() + p.data["Term"] = p.Title() case kinds.KindTaxonomy: - b := p.treeRef.n - name := b.viewInfo.name - - p.data["Singular"] = name.singular - p.data["Plural"] = name.plural - p.data["Terms"] = p.s.Taxonomies()[name.plural] + viewCfg := p.s.pageMap.cfg.getTaxonomyConfig(p.Path()) + p.data["Singular"] = viewCfg.singular + p.data["Plural"] = viewCfg.plural + p.data["Terms"] = p.s.Taxonomies()[viewCfg.plural] // keep the following just for legacy reasons p.data["OrderedIndex"] = p.data["Terms"] p.data["Index"] = p.data["Terms"] diff --git a/hugolib/page__fragments_test.go b/hugolib/page__fragments_test.go index cce006e9f..c30fa829e 100644 --- a/hugolib/page__fragments_test.go +++ b/hugolib/page__fragments_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. 
// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -65,7 +65,6 @@ Fragments : {{ $p1.Fragments.Identifiers }} b.AssertFileContent("public/en/p1/index.html", "HTML") b.AssertFileContent("public/en/p1/index.json", "ToC: <nav id=\"TableOfContents\">\n <ul>\n <li><a href=\"#heading-1-fr\">Heading 1 FR</a></li>\n </ul>\n</nav>\nFragments : [heading-1-fr]") - } // Issue #10866 @@ -108,5 +107,4 @@ Fragments: {{ .Fragments.Identifiers }}| b.AssertFileContent("public/p1/index.html", "Fragments: [heading-p1-1 heading-p2-1 heading-p2-2]|") b.AssertFileContent("public/p2/index.html", "Fragments: [heading-p2-1 heading-p2-2]|") - } diff --git a/hugolib/page__meta.go b/hugolib/page__meta.go index eb1559fb1..0ffdb0b84 100644 --- a/hugolib/page__meta.go +++ b/hugolib/page__meta.go @@ -14,28 +14,26 @@ package hugolib import ( + "context" "fmt" - "path" "path/filepath" "regexp" "strings" - "sync" "time" - "github.com/gohugoio/hugo/langs" - "github.com/gobuffalo/flect" + "github.com/gohugoio/hugo/identity" + "github.com/gohugoio/hugo/langs" "github.com/gohugoio/hugo/markup/converter" - - "github.com/gohugoio/hugo/hugofs/files" - - "github.com/gohugoio/hugo/common/hugo" + xmaps "golang.org/x/exp/maps" "github.com/gohugoio/hugo/related" "github.com/gohugoio/hugo/source" + "github.com/gohugoio/hugo/common/hugo" "github.com/gohugoio/hugo/common/maps" + "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/helpers" @@ -50,79 +48,76 @@ import ( var cjkRe = regexp.MustCompile(`\p{Han}|\p{Hangul}|\p{Hiragana}|\p{Katakana}`) type pageMeta struct { - // kind is the discriminator that identifies the different page types - // in the different page collections. This can, as an example, be used - // to to filter regular pages, find sections etc. - // Kind will, for the pages available to the templates, be one of: - // page, home, section, taxonomy and term. 
- // It is of string type to make it easy to reason about in - // the templates. - kind string - - // This is a standalone page not part of any page collection. These - // include sitemap, robotsTXT and similar. It will have no pageOutputs, but - // a fixed pageOutput. - standalone bool - - draft bool // Only published when running with -D flag - buildConfig pagemeta.BuildConfig - - bundleType files.ContentClass + kind string // Page kind. + term string // Set for kind == KindTerm. + singular string // Set for kind == KindTerm and kind == KindTaxonomy. - // Params contains configuration defined in the params section of page frontmatter. - params map[string]any + resource.Staler + pageMetaParams - title string - linkTitle string + pageMetaFrontMatter - summary string + // Set for standalone pages, e.g. robotsTXT. + standaloneOutputFormat output.Format - resourcePath string + resourcePath string // Set for bundled pages; path relative to its bundle root. + bundled bool // Set if this page is bundled inside another. - weight int + pathInfo *paths.Path // Always set. This the canonical path to the Page. + f *source.File - markup string - contentType string - - // whether the content is in a CJK language. - isCJKLanguage bool - - layout string - - aliases []string - - description string - keywords []string - - urlPaths pagemeta.URLPath - - resource.Dates - - // Set if this page is bundled inside another. - bundled bool - - // A key that maps to translation(s) of this page. This value is fetched - // from the page front matter. - translationKey string + s *Site // The site this page belongs to. +} - // From front matter. - configuredOutputFormats output.Formats +// Prepare for a rebuild of the data passed in from front matter. 
+func (m *pageMeta) setMetaPostPrepareRebuild() { + params := xmaps.Clone[map[string]any](m.paramsOriginal) + m.pageMetaParams.params = params + m.pageMetaFrontMatter = pageMetaFrontMatter{} +} - // This is the raw front matter metadata that is going to be assigned to - // the Resources above. - resourcesMetadata []map[string]any +type pageMetaParams struct { + setMetaPostCount int + setMetaPostCascadeChanged bool - f source.File + params map[string]any // Params contains configuration defined in the params section of page frontmatter. + cascade map[page.PageMatcher]maps.Params // cascade contains default configuration to be cascaded downwards. - sections []string + // These are only set in watch mode. + datesOriginal pageMetaDates + paramsOriginal map[string]any // contains the original params as defined in the front matter. + cascadeOriginal map[page.PageMatcher]maps.Params // contains the original cascade as defined in the front matter. +} - // Sitemap overrides from front matter. - sitemap config.SitemapConfig +// From page front matter. +type pageMetaFrontMatter struct { + draft bool // Only published when running with -D flag + title string + linkTitle string + summary string + weight int + markup string + contentType string // type in front matter. + isCJKLanguage bool // whether the content is in a CJK language. + layout string + aliases []string + description string + keywords []string + translationKey string // maps to translation(s) of this page. - s *Site + buildConfig pagemeta.BuildConfig + configuredOutputFormats output.Formats // outputs defiend in front matter. + pageMetaDates // The 4 front matter dates that Hugo cares about. + resourcesMetadata []map[string]any // Raw front matter metadata that is going to be assigned to the page resources. + sitemap config.SitemapConfig // Sitemap overrides from front matter. 
+ urlPaths pagemeta.URLPath +} - contentConverterInit sync.Once - contentConverter converter.Converter +func (m *pageMetaParams) init(preserveOringal bool) { + if preserveOringal { + m.paramsOriginal = xmaps.Clone[maps.Params](m.params) + m.cascadeOriginal = xmaps.Clone[map[page.PageMatcher]maps.Params](m.cascade) + } } func (p *pageMeta) Aliases() []string { @@ -144,8 +139,15 @@ func (p *pageMeta) Authors() page.AuthorList { return nil } -func (p *pageMeta) BundleType() files.ContentClass { - return p.bundleType +func (p *pageMeta) BundleType() string { + switch p.pathInfo.BundleType() { + case paths.PathTypeLeaf: + return "leaf" + case paths.PathTypeBranch: + return "branch" + default: + return "" + } } func (p *pageMeta) Description() string { @@ -160,7 +162,7 @@ func (p *pageMeta) Draft() bool { return p.draft } -func (p *pageMeta) File() source.File { +func (p *pageMeta) File() *source.File { return p.f } @@ -192,6 +194,9 @@ func (p *pageMeta) Name() string { if p.resourcePath != "" { return p.resourcePath } + if p.kind == kinds.KindTerm { + return p.pathInfo.Unmormalized().BaseNameNoIdentifier() + } return p.Title() } @@ -217,28 +222,11 @@ func (p *pageMeta) Params() maps.Params { } func (p *pageMeta) Path() string { - if !p.File().IsZero() { - const example = ` - {{ $path := "" }} - {{ with .File }} - {{ $path = .Path }} - {{ else }} - {{ $path = .Path }} - {{ end }} -` - p.s.Log.Warnln(".Path when the page is backed by a file is deprecated. We plan to use Path for a canonical source path and you probably want to check the source is a file. To get the current behaviour, you can use a construct similar to the one below:\n" + example) - - } - - return p.Pathc() + return p.pathInfo.Base() } -// This is just a bridge method, use Path in templates. 
-func (p *pageMeta) Pathc() string { - if !p.File().IsZero() { - return p.File().Path() - } - return p.SectionsPath() +func (p *pageMeta) PathInfo() *paths.Path { + return p.pathInfo } // RelatedKeywords implements the related.Document interface needed for fast page searches. @@ -256,31 +244,7 @@ func (p *pageMeta) IsSection() bool { } func (p *pageMeta) Section() string { - if p.IsHome() { - return "" - } - - if p.IsNode() { - if len(p.sections) == 0 { - // May be a sitemap or similar. - return "" - } - return p.sections[0] - } - - if !p.File().IsZero() { - return p.File().Section() - } - - panic("invalid page state") -} - -func (p *pageMeta) SectionsEntries() []string { - return p.sections -} - -func (p *pageMeta) SectionsPath() string { - return path.Join(p.SectionsEntries()...) + return p.pathInfo.Section() } func (p *pageMeta) Sitemap() config.SitemapConfig { @@ -309,79 +273,114 @@ func (p *pageMeta) Weight() int { return p.weight } -func (pm *pageMeta) mergeBucketCascades(b1, b2 *pagesMapBucket) { - if b1.cascade == nil { - b1.cascade = make(map[page.PageMatcher]maps.Params) - } - - if b2 != nil && b2.cascade != nil { - for k, v := range b2.cascade { +func (ps *pageState) setMetaPre() error { + pm := ps.m + p := ps + frontmatter := p.content.parseInfo.frontMatter + watching := p.s.watching() - vv, found := b1.cascade[k] - if !found { - b1.cascade[k] = v - } else { - // Merge - for ck, cv := range v { - if _, found := vv[ck]; !found { - vv[ck] = cv - } + if frontmatter != nil { + // Needed for case insensitive fetching of params values + maps.PrepareParams(frontmatter) + pm.pageMetaParams.params = frontmatter + if p.IsNode() { + // Check for any cascade define on itself. 
+ if cv, found := frontmatter["cascade"]; found { + var err error + cascade, err := page.DecodeCascade(cv) + if err != nil { + return err } + pm.pageMetaParams.cascade = cascade + } } + } else if pm.pageMetaParams.params == nil { + pm.pageMetaParams.params = make(maps.Params) } + + pm.pageMetaParams.init(watching) + + return nil } -func (pm *pageMeta) setMetadata(parentBucket *pagesMapBucket, p *pageState, frontmatter map[string]any) error { - pm.params = make(maps.Params) +func (ps *pageState) setMetaPost(cascade map[page.PageMatcher]maps.Params) error { + ps.m.setMetaPostCount++ + var cascadeHashPre uint64 + if ps.m.setMetaPostCount > 1 { + cascadeHashPre = identity.HashUint64(ps.m.cascade) + ps.m.cascade = xmaps.Clone[map[page.PageMatcher]maps.Params](ps.m.cascadeOriginal) - if frontmatter == nil && (parentBucket == nil || parentBucket.cascade == nil) { - return nil } - if frontmatter != nil { - // Needed for case insensitive fetching of params values - maps.PrepareParams(frontmatter) - if p.bucket != nil { - // Check for any cascade define on itself. - if cv, found := frontmatter["cascade"]; found { - var err error - p.bucket.cascade, err = page.DecodeCascade(cv) - if err != nil { - return err + // Apply cascades first so they can be overriden later. + if cascade != nil { + if ps.m.cascade != nil { + for k, v := range cascade { + vv, found := ps.m.cascade[k] + if !found { + ps.m.cascade[k] = v + } else { + // Merge + for ck, cv := range v { + if _, found := vv[ck]; !found { + vv[ck] = cv + } + } } } + cascade = ps.m.cascade + } else { + ps.m.cascade = cascade } - } else { - frontmatter = make(map[string]any) } - var cascade map[page.PageMatcher]maps.Params + if cascade == nil { + cascade = ps.m.cascade + } - if p.bucket != nil { - if parentBucket != nil { - // Merge missing keys from parent into this. 
- pm.mergeBucketCascades(p.bucket, parentBucket) + if ps.m.setMetaPostCount > 1 { + ps.m.setMetaPostCascadeChanged = cascadeHashPre != identity.HashUint64(ps.m.cascade) + if !ps.m.setMetaPostCascadeChanged { + // No changes, restore any value that may be changed by aggregation. + ps.m.dates = ps.m.datesOriginal.dates + return nil } - cascade = p.bucket.cascade - } else if parentBucket != nil { - cascade = parentBucket.cascade + ps.m.setMetaPostPrepareRebuild() + } + // Cascade is also applied to itself. for m, v := range cascade { - if !m.Matches(p) { + if !m.Matches(ps) { continue } for kk, vv := range v { - if _, found := frontmatter[kk]; !found { - frontmatter[kk] = vv + if _, found := ps.m.params[kk]; !found { + ps.m.params[kk] = vv } } } + if err := ps.setMetaPostParams(); err != nil { + return err + } + + if err := ps.m.applyDefaultValues(); err != nil { + return err + } + + // Store away any original values that may be changed from aggregation. + ps.m.datesOriginal = ps.m.pageMetaDates + + return nil +} + +func (p *pageState) setMetaPostParams() error { + pm := p.m var mtime time.Time var contentBaseName string - if !p.File().IsZero() { + if p.File() != nil { contentBaseName = p.File().ContentBaseName() if p.File().FileInfo() != nil { mtime = p.File().FileInfo().ModTime() @@ -393,10 +392,12 @@ func (pm *pageMeta) setMetadata(parentBucket *pagesMapBucket, p *pageState, fron gitAuthorDate = p.gitInfo.AuthorDate } + pm.pageMetaDates = pageMetaDates{} + pm.urlPaths = pagemeta.URLPath{} + descriptor := &pagemeta.FrontMatterDescriptor{ - Frontmatter: frontmatter, Params: pm.params, - Dates: &pm.Dates, + Dates: &pm.pageMetaDates.dates, PageURLs: &pm.urlPaths, BaseFilename: contentBaseName, ModTime: mtime, @@ -412,7 +413,7 @@ func (pm *pageMeta) setMetadata(parentBucket *pagesMapBucket, p *pageState, fron p.s.Log.Errorf("Failed to handle dates for page %q: %s", p.pathOrTitle(), err) } - pm.buildConfig, err = pagemeta.DecodeBuildConfig(frontmatter["_build"]) + 
pm.buildConfig, err = pagemeta.DecodeBuildConfig(pm.params["_build"]) if err != nil { return err } @@ -420,7 +421,7 @@ func (pm *pageMeta) setMetadata(parentBucket *pagesMapBucket, p *pageState, fron var sitemapSet bool var draft, published, isCJKLanguage *bool - for k, v := range frontmatter { + for k, v := range pm.params { loki := strings.ToLower(k) if loki == "published" { // Intentionally undocumented @@ -458,15 +459,6 @@ func (pm *pageMeta) setMetadata(parentBucket *pagesMapBucket, p *pageState, fron if strings.HasPrefix(url, "http://") || strings.HasPrefix(url, "https://") { return fmt.Errorf("URLs with protocol (http*) not supported: %q. In page %q", url, p.pathOrTitle()) } - lang := p.s.GetLanguagePrefix() - if lang != "" && !strings.HasPrefix(url, "/") && strings.HasPrefix(url, lang+"/") { - if strings.HasPrefix(hugo.CurrentVersion.String(), "0.55") { - // We added support for page relative URLs in Hugo 0.55 and - // this may get its language path added twice. - // TODO(bep) eventually remove this. - p.s.Log.Warnf(`Front matter in %q with the url %q with no leading / has what looks like the language prefix added. In Hugo 0.55 we added support for page relative URLs in front matter, no language prefix needed. 
Check the URL and consider to either add a leading / or remove the language prefix.`, p.pathOrTitle(), url) - } - } pm.urlPaths.URL = url pm.params[loki] = url case "type": @@ -615,8 +607,8 @@ func (pm *pageMeta) setMetadata(parentBucket *pagesMapBucket, p *pageState, fron if isCJKLanguage != nil { pm.isCJKLanguage = *isCJKLanguage - } else if p.s.conf.HasCJKLanguage && p.source.parsed != nil { - if cjkRe.Match(p.source.parsed.Input()) { + } else if p.s.conf.HasCJKLanguage && p.content.openSource != nil { + if cjkRe.Match(p.content.mustSource()) { pm.isCJKLanguage = true } else { pm.isCJKLanguage = false @@ -628,28 +620,39 @@ func (pm *pageMeta) setMetadata(parentBucket *pagesMapBucket, p *pageState, fron return nil } -func (p *pageMeta) noListAlways() bool { - return p.buildConfig.List != pagemeta.Always +// shouldList returns whether this page should be included in the list of pages. +// glogal indicates site.Pages etc. +func (p *pageMeta) shouldList(global bool) bool { + if p.isStandalone() { + // Never list 404, sitemap and similar. 
+ return false + } + + switch p.buildConfig.List { + case pagemeta.Always: + return true + case pagemeta.Never: + return false + case pagemeta.ListLocally: + return !global + } + return false +} + +func (p *pageMeta) shouldListAny() bool { + return p.shouldList(true) || p.shouldList(false) } -func (p *pageMeta) getListFilter(local bool) contentTreeNodeCallback { - return newContentTreeFilter(func(n *contentNode) bool { - if n == nil { - return true - } +func (p *pageMeta) isStandalone() bool { + return !p.standaloneOutputFormat.IsZero() +} - var shouldList bool - switch n.p.m.buildConfig.List { - case pagemeta.Always: - shouldList = true - case pagemeta.Never: - shouldList = false - case pagemeta.ListLocally: - shouldList = local - } +func (p *pageMeta) shouldBeCheckedForMenuDefinitions() bool { + if !p.shouldList(false) { + return false + } - return !shouldList - }) + return p.kind == kinds.KindHome || p.kind == kinds.KindSection || p.kind == kinds.KindPage } func (p *pageMeta) noRender() bool { @@ -660,17 +663,17 @@ func (p *pageMeta) noLink() bool { return p.buildConfig.Render == pagemeta.Never } -func (p *pageMeta) applyDefaultValues(n *contentNode) error { +func (p *pageMeta) applyDefaultValues() error { if p.buildConfig.IsZero() { p.buildConfig, _ = pagemeta.DecodeBuildConfig(nil) } - if !p.s.isEnabled(p.Kind()) { + if !p.s.conf.IsKindEnabled(p.Kind()) { (&p.buildConfig).Disable() } if p.markup == "" { - if !p.File().IsZero() { + if p.File() != nil { // Fall back to file extension p.markup = p.s.ContentSpec.ResolveMarkup(p.File().Ext()) } @@ -679,43 +682,26 @@ func (p *pageMeta) applyDefaultValues(n *contentNode) error { } } - if p.title == "" && p.f.IsZero() { + if p.title == "" && p.f == nil { switch p.Kind() { case kinds.KindHome: p.title = p.s.Title() case kinds.KindSection: - var sectionName string - if n != nil { - sectionName = n.rootSection() - } else { - sectionName = p.sections[0] - } + sectionName := p.pathInfo.Unmormalized().BaseNameNoIdentifier() 
if p.s.conf.PluralizeListTitles { sectionName = flect.Pluralize(sectionName) } p.title = p.s.conf.C.CreateTitle(sectionName) case kinds.KindTerm: - // TODO(bep) improve - key := p.sections[len(p.sections)-1] - p.title = strings.Replace(p.s.conf.C.CreateTitle(key), "-", " ", -1) + if p.term != "" { + p.title = p.s.conf.C.CreateTitle(p.term) + } else { + panic("term not set") + } case kinds.KindTaxonomy: - p.title = p.s.conf.C.CreateTitle(p.sections[0]) - case kinds.Kind404: + p.title = strings.Replace(p.s.conf.C.CreateTitle(p.pathInfo.Unmormalized().BaseNameNoIdentifier()), "-", " ", -1) + case kinds.KindStatus404: p.title = "404 Page not found" - - } - } - - if p.IsNode() { - p.bundleType = files.ContentClassBranch - } else { - source := p.File() - if fi, ok := source.(*fileInfo); ok { - class := fi.FileInfo().Meta().Classifier - switch class { - case files.ContentClassBranch, files.ContentClassLeaf: - p.bundleType = class - } } } @@ -734,12 +720,12 @@ func (p *pageMeta) newContentConverter(ps *pageState, markup string) (converter. 
var id string var filename string var path string - if !p.f.IsZero() { + if p.f != nil { id = p.f.UniqueID() filename = p.f.Filename() path = p.f.Path() } else { - path = p.Pathc() + path = p.Path() } cpp, err := cp.New( @@ -803,3 +789,89 @@ func getParam(m resource.ResourceParamsProvider, key string, stringToLower bool) func getParamToLower(m resource.ResourceParamsProvider, key string) any { return getParam(m, key, true) } + +type pageMetaDates struct { + dates resource.Dates +} + +func (d *pageMetaDates) Date() time.Time { + return d.dates.Date() +} + +func (d *pageMetaDates) Lastmod() time.Time { + return d.dates.Lastmod() +} + +func (d *pageMetaDates) PublishDate() time.Time { + return d.dates.PublishDate() +} + +func (d *pageMetaDates) ExpiryDate() time.Time { + return d.dates.ExpiryDate() +} + +func (ps *pageState) initLazyProviders() error { + ps.init.Add(func(ctx context.Context) (any, error) { + pp, err := newPagePaths(ps) + if err != nil { + return nil, err + } + + var outputFormatsForPage output.Formats + var renderFormats output.Formats + + if ps.m.standaloneOutputFormat.IsZero() { + outputFormatsForPage = ps.m.outputFormats() + renderFormats = ps.s.h.renderFormats + } else { + // One of the fixed output format pages, e.g. 404. + outputFormatsForPage = output.Formats{ps.m.standaloneOutputFormat} + renderFormats = outputFormatsForPage + } + + // Prepare output formats for all sites. + // We do this even if this page does not get rendered on + // its own. It may be referenced via one of the site collections etc. + // it will then need an output format. 
+ ps.pageOutputs = make([]*pageOutput, len(renderFormats)) + created := make(map[string]*pageOutput) + shouldRenderPage := !ps.m.noRender() + + for i, f := range renderFormats { + + if po, found := created[f.Name]; found { + ps.pageOutputs[i] = po + continue + } + + render := shouldRenderPage + if render { + _, render = outputFormatsForPage.GetByName(f.Name) + } + + po := newPageOutput(ps, pp, f, render) + + // Create a content provider for the first, + // we may be able to reuse it. + if i == 0 { + contentProvider, err := newPageContentOutput(po) + if err != nil { + return nil, err + } + po.setContentProvider(contentProvider) + } + + ps.pageOutputs[i] = po + created[f.Name] = po + + } + + if err := ps.initCommonProviders(pp); err != nil { + return nil, err + } + + return nil, nil + }) + + return nil +} diff --git a/hugolib/page__new.go b/hugolib/page__new.go index 108e5717f..89eeb2e0e 100644 --- a/hugolib/page__new.go +++ b/hugolib/page__new.go @@ -14,207 +14,173 @@ package hugolib import ( - "context" - "html/template" - "strings" + "fmt" + "sync" + "sync/atomic" - "go.uber.org/atomic" - - "github.com/gohugoio/hugo/common/hugo" + "github.com/gohugoio/hugo/identity" + "github.com/gohugoio/hugo/resources" "github.com/gohugoio/hugo/common/maps" - "github.com/gohugoio/hugo/output" - "github.com/gohugoio/hugo/lazy" + "github.com/gohugoio/hugo/resources/kinds" "github.com/gohugoio/hugo/resources/page" ) -var pageIdCounter atomic.Int64 - -func newPageBase(metaProvider *pageMeta) (*pageState, error) { - if metaProvider.s == nil { - panic("must provide a Site") - } - - id := int(pageIdCounter.Add(1)) - - s := metaProvider.s - - ps := &pageState{ - id: id, - pageOutput: nopPageOutput, - pageOutputTemplateVariationsState: atomic.NewUint32(0), - pageCommon: &pageCommon{ - FileProvider: metaProvider, - AuthorProvider: metaProvider, - Scratcher: maps.NewScratcher(), - store: maps.NewScratch(), - Positioner: page.NopPage, - InSectionPositioner: page.NopPage, - 
ResourceMetaProvider: metaProvider, - ResourceParamsProvider: metaProvider, - PageMetaProvider: metaProvider, - RelatedKeywordsProvider: metaProvider, - OutputFormatsProvider: page.NopPage, - ResourceTypeProvider: pageTypesProvider, - MediaTypeProvider: pageTypesProvider, - RefProvider: page.NopPage, - ShortcodeInfoProvider: page.NopPage, - LanguageProvider: s, - pagePages: &pagePages{}, - - InternalDependencies: s, - init: lazy.New(), - m: metaProvider, - s: s, - sWrapped: page.WrapSite(s), - }, - } - - ps.shortcodeState = newShortcodeHandler(ps, ps.s) - - siteAdapter := pageSiteAdapter{s: s, p: ps} - - ps.pageMenus = &pageMenus{p: ps} - ps.PageMenusProvider = ps.pageMenus - ps.GetPageProvider = siteAdapter - ps.GitInfoProvider = ps - ps.TranslationsProvider = ps - ps.ResourceDataProvider = &pageData{pageState: ps} - ps.RawContentProvider = ps - ps.ChildCareProvider = ps - ps.TreeProvider = pageTree{p: ps} - ps.Eqer = ps - ps.TranslationKeyProvider = ps - ps.ShortcodeInfoProvider = ps - ps.AlternativeOutputFormatsProvider = ps - - return ps, nil -} - -func newPageBucket(p *pageState) *pagesMapBucket { - return &pagesMapBucket{owner: p, pagesMapBucketPages: &pagesMapBucketPages{}} -} - -func newPageFromMeta( - n *contentNode, - parentBucket *pagesMapBucket, - meta map[string]any, - metaProvider *pageMeta) (*pageState, error) { - if metaProvider.f == nil { - metaProvider.f = page.NewZeroFile(metaProvider.s.Log) - } - - ps, err := newPageBase(metaProvider) - if err != nil { - return nil, err - } +var pageIDCounter atomic.Uint64 - bucket := parentBucket - - if ps.IsNode() { - ps.bucket = newPageBucket(ps) - } - - if meta != nil || parentBucket != nil { - if err := metaProvider.setMetadata(bucket, ps, meta); err != nil { - return nil, ps.wrapError(err) +func (h *HugoSites) newPage(m *pageMeta) (*pageState, error) { + if m.pathInfo == nil { + if m.f != nil { + m.pathInfo = m.f.FileInfo().Meta().PathInfo + } + if m.pathInfo == nil { + panic(fmt.Sprintf("missing pathInfo 
in %v", m)) } } - if err := metaProvider.applyDefaultValues(n); err != nil { - return nil, err - } - - ps.init.Add(func(context.Context) (any, error) { - pp, err := newPagePaths(metaProvider.s, ps, metaProvider) - if err != nil { - return nil, err - } + m.Staler = &resources.AtomicStaler{} + + ps, err := func() (*pageState, error) { + if m.s == nil { + // Identify the Site/language to associate this Page with. + var lang string + if m.f != nil { + meta := m.f.FileInfo().Meta() + lang = meta.Lang + m.s = h.Sites[meta.LangIndex] + } else { + lang = m.pathInfo.Lang() + } + var found bool + for _, ss := range h.Sites { + if ss.Lang() == lang { + m.s = ss + found = true + break + } + } + if !found { + return nil, fmt.Errorf("no site found for language %q", lang) + } - makeOut := func(f output.Format, render bool) *pageOutput { - return newPageOutput(ps, pp, f, render) } - shouldRenderPage := !ps.m.noRender() - - if ps.m.standalone { - ps.pageOutput = makeOut(ps.m.outputFormats()[0], shouldRenderPage) - } else { - outputFormatsForPage := ps.m.outputFormats() - - // Prepare output formats for all sites. - // We do this even if this page does not get rendered on - // its own. It may be referenced via .Site.GetPage and - // it will then need an output format. - ps.pageOutputs = make([]*pageOutput, len(ps.s.h.renderFormats)) - created := make(map[string]*pageOutput) - for i, f := range ps.s.h.renderFormats { - po, found := created[f.Name] - if !found { - render := shouldRenderPage - if render { - _, render = outputFormatsForPage.GetByName(f.Name) + // Identify Page Kind. + if m.kind == "" { + m.kind = kinds.KindSection + if m.pathInfo.Base() == "/" { + m.kind = kinds.KindHome + } else if m.pathInfo.IsBranchBundle() { + // A section, taxonomy or term. + tc := m.s.pageMap.cfg.getTaxonomyConfig(m.Path()) + if !tc.IsZero() { + // Either a taxonomy or a term. 
+ if tc.pluralTreeKey == m.Path() { + m.kind = kinds.KindTaxonomy + } else { + m.kind = kinds.KindTerm } - po = makeOut(f, render) - created[f.Name] = po } - ps.pageOutputs[i] = po + } else if m.f != nil { + m.kind = kinds.KindPage } } - if err := ps.initCommonProviders(pp); err != nil { - return nil, err + if m.kind == kinds.KindPage && !m.s.conf.IsKindEnabled(m.kind) { + return nil, nil } - return nil, nil - }) + pid := pageIDCounter.Add(1) - return ps, err -} + // Parse page content. + cachedContent, err := newCachedContent(m, pid) + if err != nil { + return nil, m.wrapError(err) + } -// Used by the legacy 404, sitemap and robots.txt rendering -func newPageStandalone(m *pageMeta, f output.Format) (*pageState, error) { - m.configuredOutputFormats = output.Formats{f} - m.standalone = true - p, err := newPageFromMeta(nil, nil, nil, m) - if err != nil { - return nil, err - } + var dependencyManager identity.Manager = identity.NopManager - if err := p.initPage(); err != nil { - return nil, err - } + if m.s.conf.Internal.Watch { + dependencyManager = identity.NewManager(m.Path()) + } - return p, nil -} + ps := &pageState{ + pid: pid, + pageOutput: nopPageOutput, + pageOutputTemplateVariationsState: &atomic.Uint32{}, + resourcesPublishInit: &sync.Once{}, + Staler: m, + dependencyManager: dependencyManager, + pageCommon: &pageCommon{ + content: cachedContent, + FileProvider: m, + AuthorProvider: m, + Scratcher: maps.NewScratcher(), + store: maps.NewScratch(), + Positioner: page.NopPage, + InSectionPositioner: page.NopPage, + ResourceNameTitleProvider: m, + ResourceParamsProvider: m, + PageMetaProvider: m, + RelatedKeywordsProvider: m, + OutputFormatsProvider: page.NopPage, + ResourceTypeProvider: pageTypesProvider, + MediaTypeProvider: pageTypesProvider, + RefProvider: page.NopPage, + ShortcodeInfoProvider: page.NopPage, + LanguageProvider: m.s, + + InternalDependencies: m.s, + init: lazy.New(), + m: m, + s: m.s, + sWrapped: page.WrapSite(m.s), + }, + } -type 
pageDeprecatedWarning struct { - p *pageState -} + if m.f != nil { + gi, err := m.s.h.gitInfoForPage(ps) + if err != nil { + return nil, fmt.Errorf("failed to load Git data: %w", err) + } + ps.gitInfo = gi + owners, err := m.s.h.codeownersForPage(ps) + if err != nil { + return nil, fmt.Errorf("failed to load CODEOWNERS: %w", err) + } + ps.codeowners = owners + } -func (p *pageDeprecatedWarning) IsDraft() bool { return p.p.m.draft } -func (p *pageDeprecatedWarning) Hugo() hugo.HugoInfo { return p.p.s.Hugo() } -func (p *pageDeprecatedWarning) LanguagePrefix() string { return p.p.s.GetLanguagePrefix() } -func (p *pageDeprecatedWarning) GetParam(key string) any { - return p.p.m.params[strings.ToLower(key)] -} + ps.pageMenus = &pageMenus{p: ps} + ps.PageMenusProvider = ps.pageMenus + ps.GetPageProvider = pageSiteAdapter{s: m.s, p: ps} + ps.GitInfoProvider = ps + ps.TranslationsProvider = ps + ps.ResourceDataProvider = &pageData{pageState: ps} + ps.RawContentProvider = ps + ps.ChildCareProvider = ps + ps.TreeProvider = pageTree{p: ps} + ps.Eqer = ps + ps.TranslationKeyProvider = ps + ps.ShortcodeInfoProvider = ps + ps.AlternativeOutputFormatsProvider = ps + + if err := ps.setMetaPre(); err != nil { + return nil, ps.wrapError(err) + } -func (p *pageDeprecatedWarning) RSSLink() template.URL { - f := p.p.OutputFormats().Get("RSS") - if f == nil { - return "" + if err := ps.initLazyProviders(); err != nil { + return nil, ps.wrapError(err) + } + return ps, nil + }() + // Make sure to evict any cached and now stale data. + if err != nil { + m.MarkStale() } - return template.URL(f.Permalink()) -} -func (p *pageDeprecatedWarning) URL() string { - if p.p.IsPage() && p.p.m.urlPaths.URL != "" { - // This is the url set in front matter - return p.p.m.urlPaths.URL - } - // Fall back to the relative permalink. 
- return p.p.RelPermalink() + return ps, err } diff --git a/hugolib/page__output.go b/hugolib/page__output.go index 21f58e795..6fae10740 100644 --- a/hugolib/page__output.go +++ b/hugolib/page__output.go @@ -14,6 +14,7 @@ package hugolib import ( + "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/output" "github.com/gohugoio/hugo/resources/page" "github.com/gohugoio/hugo/resources/resource" @@ -23,7 +24,8 @@ func newPageOutput( ps *pageState, pp pagePaths, f output.Format, - render bool) *pageOutput { + render bool, +) *pageOutput { var targetPathsProvider targetPathsHolder var linksProvider resource.ResourceLinksProvider @@ -43,6 +45,11 @@ func newPageOutput( paginatorProvider = pag } + var dependencyManager identity.Manager = identity.NopManager + if ps.s.conf.Internal.Watch { + dependencyManager = identity.NewManager(ps.Path() + "/" + f.Name) + } + providers := struct { page.PaginatorProvider resource.ResourceLinksProvider @@ -54,6 +61,7 @@ func newPageOutput( } po := &pageOutput{ + p: ps, f: f, pagePerOutputProviders: providers, ContentProvider: page.NopPage, @@ -61,6 +69,7 @@ func newPageOutput( TableOfContentsProvider: page.NopPage, render: render, paginator: pag, + dependencyManagerOutput: dependencyManager, } return po @@ -69,6 +78,8 @@ func newPageOutput( // We create a pageOutput for every output format combination, even if this // particular page isn't configured to be rendered to that format. type pageOutput struct { + p *pageState + // Set if this page isn't configured to be rendered to this format. render bool @@ -89,10 +100,39 @@ type pageOutput struct { page.RenderShortcodesProvider // May be nil. - cp *pageContentOutput + pco *pageContentOutput + + dependencyManagerOutput identity.Manager + + renderState int // Reset when it needs to be rendered again. + renderOnce bool // To make sure we at least try to render it once. 
} -func (p *pageOutput) initContentProvider(cp *pageContentOutput) { +func (po *pageOutput) incrRenderState() { + po.renderState++ + po.renderOnce = true +} + +// isRendered reports whether this output format or its content has been rendered. +func (po *pageOutput) isRendered() bool { + if po.renderState > 0 { + return true + } + if po.pco != nil && po.pco.contentRendered { + return true + } + return false +} + +func (po *pageOutput) IdentifierBase() string { + return po.p.f.Name +} + +func (po *pageOutput) GetDependencyManager() identity.Manager { + return po.dependencyManagerOutput +} + +func (p *pageOutput) setContentProvider(cp *pageContentOutput) { if cp == nil { return } @@ -101,12 +141,5 @@ func (p *pageOutput) initContentProvider(cp *pageContentOutput) { p.PageRenderProvider = cp p.TableOfContentsProvider = cp p.RenderShortcodesProvider = cp - p.cp = cp - -} - -func (p *pageOutput) enablePlaceholders() { - if p.cp != nil { - p.cp.enablePlaceholders() - } + p.pco = cp } diff --git a/hugolib/page__paginator.go b/hugolib/page__paginator.go index 6a1b4bfab..b6a778a21 100644 --- a/hugolib/page__paginator.go +++ b/hugolib/page__paginator.go @@ -16,7 +16,6 @@ package hugolib import ( "sync" - "github.com/gohugoio/hugo/common/herrors" "github.com/gohugoio/hugo/resources/kinds" "github.com/gohugoio/hugo/resources/page" ) @@ -71,8 +70,6 @@ func (p *pagePaginator) Paginate(seq any, options ...any) (*page.Pager, error) { } func (p *pagePaginator) Paginator(options ...any) (*page.Pager, error) { - defer herrors.Recover() - var initErr error p.init.Do(func() { pagerSize, err := page.ResolvePagerSize(p.source.s.Conf, options...) 
diff --git a/hugolib/page__paths.go b/hugolib/page__paths.go index 9a6caa05e..6e7980a6d 100644 --- a/hugolib/page__paths.go +++ b/hugolib/page__paths.go @@ -17,29 +17,34 @@ import ( "net/url" "strings" - "github.com/gohugoio/hugo/helpers" - "github.com/gohugoio/hugo/hugofs/files" + "github.com/gohugoio/hugo/output" "github.com/gohugoio/hugo/resources/kinds" "github.com/gohugoio/hugo/resources/page" ) -func newPagePaths( - s *Site, - p page.Page, - pm *pageMeta) (pagePaths, error) { - targetPathDescriptor, err := createTargetPathDescriptor(s, p, pm) +func newPagePaths(ps *pageState) (pagePaths, error) { + s := ps.s + pm := ps.m + + targetPathDescriptor, err := createTargetPathDescriptor(ps) if err != nil { return pagePaths{}, err } - outputFormats := pm.outputFormats() - if len(outputFormats) == 0 { - return pagePaths{}, nil - } + var outputFormats output.Formats + + if ps.m.isStandalone() { + outputFormats = output.Formats{ps.m.standaloneOutputFormat} + } else { + outputFormats = pm.outputFormats() + if len(outputFormats) == 0 { + return pagePaths{}, nil + } - if pm.noRender() { - outputFormats = outputFormats[:1] + if pm.noRender() { + outputFormats = outputFormats[:1] + } } pageOutputFormats := make(page.OutputFormats, len(outputFormats)) @@ -102,46 +107,35 @@ func (l pagePaths) OutputFormats() page.OutputFormats { return l.outputFormats } -func createTargetPathDescriptor(s *Site, p page.Page, pm *pageMeta) (page.TargetPathDescriptor, error) { - var ( - dir string - baseName string - contentBaseName string - ) - +func createTargetPathDescriptor(p *pageState) (page.TargetPathDescriptor, error) { + s := p.s d := s.Deps - var classifier files.ContentClass - - if !p.File().IsZero() { - dir = p.File().Dir() - baseName = p.File().TranslationBaseName() - contentBaseName = p.File().ContentBaseName() - classifier = p.File().FileInfo().Meta().Classifier - } + pm := p.m + alwaysInSubDir := p.Kind() == kinds.KindSitemap - if classifier == files.ContentClassLeaf { - // See 
https://github.com/gohugoio/hugo/issues/4870 - // A leaf bundle - dir = strings.TrimSuffix(dir, contentBaseName+helpers.FilePathSeparator) - baseName = contentBaseName + pageInfoPage := p.PathInfo() + pageInfoCurrentSection := p.CurrentSection().PathInfo() + if p.s.Conf.DisablePathToLower() { + pageInfoPage = pageInfoPage.Unmormalized() + pageInfoCurrentSection = pageInfoCurrentSection.Unmormalized() } - alwaysInSubDir := p.Kind() == kinds.KindSitemap - desc := page.TargetPathDescriptor{ PathSpec: d.PathSpec, Kind: p.Kind(), - Sections: p.SectionsEntries(), + Path: pageInfoPage, + Section: pageInfoCurrentSection, UglyURLs: s.h.Conf.IsUglyURLs(p.Section()), ForcePrefix: s.h.Conf.IsMultihost() || alwaysInSubDir, - Dir: dir, URL: pm.urlPaths.URL, } if pm.Slug() != "" { desc.BaseName = pm.Slug() + } else if pm.isStandalone() && pm.standaloneOutputFormat.BaseName != "" { + desc.BaseName = pm.standaloneOutputFormat.BaseName } else { - desc.BaseName = baseName + desc.BaseName = pageInfoPage.BaseNameNoIdentifier() } desc.PrefixFilePath = s.getLanguageTargetPathLang(alwaysInSubDir) @@ -162,10 +156,10 @@ func createTargetPathDescriptor(s *Site, p page.Page, pm *pageMeta) (page.Target desc.ExpandedPermalink = opath - if !p.File().IsZero() { + if p.File() != nil { s.Log.Debugf("Set expanded permalink path for %s %s to %#v", p.Kind(), p.File().Path(), opath) } else { - s.Log.Debugf("Set expanded permalink path for %s in %v to %#v", p.Kind(), desc.Sections, opath) + s.Log.Debugf("Set expanded permalink path for %s in %v to %#v", p.Kind(), desc.Section.Path(), opath) } } diff --git a/hugolib/page__per_output.go b/hugolib/page__per_output.go index e806ca339..3d86cdece 100644 --- a/hugolib/page__per_output.go +++ b/hugolib/page__per_output.go @@ -16,13 +16,11 @@ package hugolib import ( "bytes" "context" + "errors" "fmt" "html/template" "strings" "sync" - "unicode/utf8" - - "errors" "github.com/gohugoio/hugo/common/text" "github.com/gohugoio/hugo/common/types/hstring" @@ -37,8 
+35,6 @@ import ( "github.com/gohugoio/hugo/markup/converter" - "github.com/gohugoio/hugo/lazy" - bp "github.com/gohugoio/hugo/bufferpool" "github.com/gohugoio/hugo/tpl" @@ -70,235 +66,11 @@ var ( } ) -var pageContentOutputDependenciesID = identity.KeyValueIdentity{Key: "pageOutput", Value: "dependencies"} - -func newPageContentOutput(p *pageState, po *pageOutput) (*pageContentOutput, error) { - parent := p.init - - var dependencyTracker identity.Manager - if p.s.watching() { - dependencyTracker = identity.NewManager(pageContentOutputDependenciesID) - } - +func newPageContentOutput(po *pageOutput) (*pageContentOutput, error) { cp := &pageContentOutput{ - dependencyTracker: dependencyTracker, - p: p, - f: po.f, - renderHooks: &renderHooks{}, - } - - initToC := func(ctx context.Context) (err error) { - if p.cmap == nil { - // Nothing to do. - return nil - } - - if err := po.cp.initRenderHooks(); err != nil { - return err - } - - f := po.f - cp.contentPlaceholders, err = p.shortcodeState.prepareShortcodesForPage(ctx, p, f) - if err != nil { - return err - } - - ctxCallback := func(cp2 *pageContentOutput) { - cp.p.cmap.hasNonMarkdownShortcode = cp.p.cmap.hasNonMarkdownShortcode || cp2.p.cmap.hasNonMarkdownShortcode - // Merge content placeholders - for k, v := range cp2.contentPlaceholders { - cp.contentPlaceholders[k] = v - } - - if p.s.watching() { - for _, s := range cp2.p.shortcodeState.shortcodes { - for _, templ := range s.templs { - dependencyTracker.Add(templ.(identity.Manager)) - } - } - } - - // Transfer shortcode names so HasShortcode works for shortcodes from included pages. 
- cp.p.shortcodeState.transferNames(cp2.p.shortcodeState) - if cp2.p.pageOutputTemplateVariationsState.Load() == 2 { - cp.p.pageOutputTemplateVariationsState.Store(2) - } - } - - ctx = tpl.SetCallbackFunctionInContext(ctx, ctxCallback) - - var hasVariants bool - cp.workContent, hasVariants, err = p.contentToRender(ctx, p.source.parsed, p.cmap, cp.contentPlaceholders) - if err != nil { - return err - } - if hasVariants { - p.pageOutputTemplateVariationsState.Store(2) - } - - isHTML := cp.p.m.markup == "html" - - if !isHTML { - createAndSetToC := func(tocProvider converter.TableOfContentsProvider) { - cfg := p.s.ContentSpec.Converters.GetMarkupConfig() - cp.tableOfContents = tocProvider.TableOfContents() - cp.tableOfContentsHTML = template.HTML( - cp.tableOfContents.ToHTML( - cfg.TableOfContents.StartLevel, - cfg.TableOfContents.EndLevel, - cfg.TableOfContents.Ordered, - ), - ) - } - // If the converter supports doing the parsing separately, we do that. - parseResult, ok, err := po.contentRenderer.ParseContent(ctx, cp.workContent) - if err != nil { - return err - } - if ok { - // This is Goldmark. - // Store away the parse result for later use. - createAndSetToC(parseResult) - cp.astDoc = parseResult.Doc() - - return nil - } - - // This is Asciidoctor etc. - r, err := po.contentRenderer.ParseAndRenderContent(ctx, cp.workContent, true) - if err != nil { - return err - } - - cp.workContent = r.Bytes() - - if tocProvider, ok := r.(converter.TableOfContentsProvider); ok { - createAndSetToC(tocProvider) - } else { - tmpContent, tmpTableOfContents := helpers.ExtractTOC(cp.workContent) - cp.tableOfContentsHTML = helpers.BytesToHTML(tmpTableOfContents) - cp.tableOfContents = tableofcontents.Empty - cp.workContent = tmpContent - } - } - - return nil - + po: po, + renderHooks: &renderHooks{}, } - - initContent := func(ctx context.Context) (err error) { - - p.s.h.IncrContentRender() - - if p.cmap == nil { - // Nothing to do. 
- return nil - } - - if cp.astDoc != nil { - // The content is parsed, but not rendered. - r, ok, err := po.contentRenderer.RenderContent(ctx, cp.workContent, cp.astDoc) - if err != nil { - return err - } - if !ok { - return errors.New("invalid state: astDoc is set but RenderContent returned false") - } - - cp.workContent = r.Bytes() - } - - if p.cmap.hasNonMarkdownShortcode || cp.placeholdersEnabled { - // There are one or more replacement tokens to be replaced. - var hasShortcodeVariants bool - tokenHandler := func(ctx context.Context, token string) ([]byte, error) { - if token == tocShortcodePlaceholder { - // The Page's TableOfContents was accessed in a shortcode. - if cp.tableOfContentsHTML == "" { - cp.p.s.initInit(ctx, cp.initToC, cp.p) - } - return []byte(cp.tableOfContentsHTML), nil - } - renderer, found := cp.contentPlaceholders[token] - if found { - repl, more, err := renderer.renderShortcode(ctx) - if err != nil { - return nil, err - } - hasShortcodeVariants = hasShortcodeVariants || more - return repl, nil - } - // This should never happen. - return nil, fmt.Errorf("unknown shortcode token %q", token) - } - - cp.workContent, err = expandShortcodeTokens(ctx, cp.workContent, tokenHandler) - if err != nil { - return err - } - if hasShortcodeVariants { - p.pageOutputTemplateVariationsState.Store(2) - } - } - - if cp.p.source.hasSummaryDivider { - isHTML := cp.p.m.markup == "html" - if isHTML { - src := p.source.parsed.Input() - - // Use the summary sections as they are provided by the user. 
- if p.source.posSummaryEnd != -1 { - cp.summary = helpers.BytesToHTML(src[p.source.posMainContent:p.source.posSummaryEnd]) - } - - if cp.p.source.posBodyStart != -1 { - cp.workContent = src[cp.p.source.posBodyStart:] - } - - } else { - summary, content, err := splitUserDefinedSummaryAndContent(cp.p.m.markup, cp.workContent) - if err != nil { - cp.p.s.Log.Errorf("Failed to set user defined summary for page %q: %s", cp.p.pathOrTitle(), err) - } else { - cp.workContent = content - cp.summary = helpers.BytesToHTML(summary) - } - } - } else if cp.p.m.summary != "" { - b, err := po.contentRenderer.ParseAndRenderContent(ctx, []byte(cp.p.m.summary), false) - if err != nil { - return err - } - html := cp.p.s.ContentSpec.TrimShortHTML(b.Bytes()) - cp.summary = helpers.BytesToHTML(html) - } - - cp.content = helpers.BytesToHTML(cp.workContent) - - return nil - } - - cp.initToC = parent.Branch(func(ctx context.Context) (any, error) { - return nil, initToC(ctx) - }) - - // There may be recursive loops in shortcodes and render hooks. - cp.initMain = cp.initToC.BranchWithTimeout(p.s.conf.C.Timeout, func(ctx context.Context) (any, error) { - return nil, initContent(ctx) - }) - - cp.initPlain = cp.initMain.Branch(func(context.Context) (any, error) { - cp.plain = tpl.StripHTML(string(cp.content)) - cp.plainWords = strings.Fields(cp.plain) - cp.setWordCounts(p.m.isCJKLanguage) - - if err := cp.setAutoSummary(); err != nil { - return err, nil - } - - return nil, nil - }) - return cp, nil } @@ -309,86 +81,51 @@ type renderHooks struct { // pageContentOutput represents the Page content for a given output format. type pageContentOutput struct { - f output.Format - - p *pageState - - // Lazy load dependencies - initToC *lazy.Init - initMain *lazy.Init - initPlain *lazy.Init + po *pageOutput - placeholdersEnabled bool - placeholdersEnabledInit sync.Once + contentRenderedVersion int // Incremented on reset. + contentRendered bool // Set on content render. // Renders Markdown hooks. 
renderHooks *renderHooks +} - workContent []byte - dependencyTracker identity.Manager // Set in server mode. - - // Temporary storage of placeholders mapped to their content. - // These are shortcodes etc. Some of these will need to be replaced - // after any markup is rendered, so they share a common prefix. - contentPlaceholders map[string]shortcodeRenderer - - // Content sections - content template.HTML - summary template.HTML - tableOfContents *tableofcontents.Fragments - tableOfContentsHTML template.HTML - // For Goldmark we split Parse and Render. - astDoc any - - truncated bool - - plainWords []string - plain string - fuzzyWordCount int - wordCount int - readingTime int +func (pco *pageContentOutput) trackDependency(idp identity.IdentityProvider) { + pco.po.p.dependencyManagerOutput.AddIdentity(idp.GetIdentity()) } -func (p *pageContentOutput) trackDependency(id identity.Provider) { - if p.dependencyTracker != nil { - p.dependencyTracker.Add(id) +func (pco *pageContentOutput) Reset() { + if pco == nil { + return } - + pco.contentRenderedVersion++ + pco.contentRendered = false + pco.renderHooks = &renderHooks{} } -func (p *pageContentOutput) Reset() { - if p.dependencyTracker != nil { - p.dependencyTracker.Reset() - } - p.initToC.Reset() - p.initMain.Reset() - p.initPlain.Reset() - p.renderHooks = &renderHooks{} +func (pco *pageContentOutput) Fragments(ctx context.Context) *tableofcontents.Fragments { + return pco.po.p.content.mustContentToC(ctx, pco).tableOfContents } -func (p *pageContentOutput) Fragments(ctx context.Context) *tableofcontents.Fragments { - p.p.s.initInit(ctx, p.initToC, p.p) - if p.tableOfContents == nil { - return tableofcontents.Empty +func (pco *pageContentOutput) RenderShortcodes(ctx context.Context) (template.HTML, error) { + content := pco.po.p.content + source, err := content.contentSource() + if err != nil { + return "", err + } + ct, err := content.contentToC(ctx, pco) + if err != nil { + return "", err } - return p.tableOfContents 
-} -func (p *pageContentOutput) RenderShortcodes(ctx context.Context) (template.HTML, error) { - p.p.s.initInit(ctx, p.initToC, p.p) - source := p.p.source.parsed.Input() - renderedShortcodes := p.contentPlaceholders var insertPlaceholders bool var hasVariants bool - var cb func(*pageContentOutput) - if v := tpl.GetCallbackFunctionFromContext(ctx); v != nil { - if fn, ok := v.(func(*pageContentOutput)); ok { - insertPlaceholders = true - cb = fn - } + cb := setGetContentCallbackInContext.Get(ctx) + if cb != nil { + insertPlaceholders = true } c := make([]byte, 0, len(source)+(len(source)/10)) - for _, it := range p.p.cmap.items { + for _, it := range content.parseInfo.itemsStep2 { switch v := it.(type) { case pageparser.Item: c = append(c, source[v.Pos():v.Pos()+len(v.Val(source))]...) @@ -397,7 +134,7 @@ func (p *pageContentOutput) RenderShortcodes(ctx context.Context) (template.HTML case *shortcode: if !insertPlaceholders || !v.insertPlaceholder() { // Insert the rendered shortcode. - renderedShortcode, found := renderedShortcodes[v.placeholder] + renderedShortcode, found := ct.contentPlaceholders[v.placeholder] if !found { // This should never happen. 
panic(fmt.Sprintf("rendered shortcode %q not found", v.placeholder)) @@ -421,73 +158,78 @@ func (p *pageContentOutput) RenderShortcodes(ctx context.Context) (template.HTML } if hasVariants { - p.p.pageOutputTemplateVariationsState.Store(2) + pco.po.p.pageOutputTemplateVariationsState.Add(1) } if cb != nil { - cb(p) + cb(pco, ct) } return helpers.BytesToHTML(c), nil } -func (p *pageContentOutput) TableOfContents(ctx context.Context) template.HTML { - p.p.s.initInit(ctx, p.initToC, p.p) - return p.tableOfContentsHTML +func (pco *pageContentOutput) Content(ctx context.Context) (any, error) { + r, err := pco.po.p.content.contentRendered(ctx, pco) + return r.content, err } -func (p *pageContentOutput) Content(ctx context.Context) (any, error) { - p.p.s.initInit(ctx, p.initMain, p.p) - return p.content, nil +func (pco *pageContentOutput) TableOfContents(ctx context.Context) template.HTML { + return pco.po.p.content.mustContentToC(ctx, pco).tableOfContentsHTML } -func (p *pageContentOutput) FuzzyWordCount(ctx context.Context) int { - p.p.s.initInit(ctx, p.initPlain, p.p) - return p.fuzzyWordCount +func (p *pageContentOutput) Len(ctx context.Context) int { + return len(p.mustContentRendered(ctx).content) } -func (p *pageContentOutput) Len(ctx context.Context) int { - p.p.s.initInit(ctx, p.initMain, p.p) - return len(p.content) +func (pco *pageContentOutput) mustContentRendered(ctx context.Context) contentSummary { + r, err := pco.po.p.content.contentRendered(ctx, pco) + if err != nil { + pco.fail(err) + } + return r +} + +func (pco *pageContentOutput) mustContentPlain(ctx context.Context) contentPlainPlainWords { + r, err := pco.po.p.content.contentPlain(ctx, pco) + if err != nil { + pco.fail(err) + } + return r } -func (p *pageContentOutput) Plain(ctx context.Context) string { - p.p.s.initInit(ctx, p.initPlain, p.p) - return p.plain +func (pco *pageContentOutput) fail(err error) { + pco.po.p.s.h.FatalError(pco.po.p.wrapError(err)) } -func (p *pageContentOutput) 
PlainWords(ctx context.Context) []string { - p.p.s.initInit(ctx, p.initPlain, p.p) - return p.plainWords +func (pco *pageContentOutput) Plain(ctx context.Context) string { + return pco.mustContentPlain(ctx).plain } -func (p *pageContentOutput) ReadingTime(ctx context.Context) int { - p.p.s.initInit(ctx, p.initPlain, p.p) - return p.readingTime +func (pco *pageContentOutput) PlainWords(ctx context.Context) []string { + return pco.mustContentPlain(ctx).plainWords } -func (p *pageContentOutput) Summary(ctx context.Context) template.HTML { - p.p.s.initInit(ctx, p.initMain, p.p) - if !p.p.source.hasSummaryDivider { - p.p.s.initInit(ctx, p.initPlain, p.p) - } - return p.summary +func (pco *pageContentOutput) ReadingTime(ctx context.Context) int { + return pco.mustContentPlain(ctx).readingTime } -func (p *pageContentOutput) Truncated(ctx context.Context) bool { - if p.p.truncated { - return true - } - p.p.s.initInit(ctx, p.initPlain, p.p) - return p.truncated +func (pco *pageContentOutput) WordCount(ctx context.Context) int { + return pco.mustContentPlain(ctx).wordCount +} + +func (pco *pageContentOutput) FuzzyWordCount(ctx context.Context) int { + return pco.mustContentPlain(ctx).fuzzyWordCount +} + +func (pco *pageContentOutput) Summary(ctx context.Context) template.HTML { + return pco.mustContentPlain(ctx).summary } -func (p *pageContentOutput) WordCount(ctx context.Context) int { - p.p.s.initInit(ctx, p.initPlain, p.p) - return p.wordCount +func (pco *pageContentOutput) Truncated(ctx context.Context) bool { + return pco.mustContentPlain(ctx).summaryTruncated } -func (p *pageContentOutput) RenderString(ctx context.Context, args ...any) (template.HTML, error) { +func (pco *pageContentOutput) RenderString(ctx context.Context, args ...any) (template.HTML, error) { if len(args) < 1 || len(args) > 2 { return "", errors.New("want 1 or 2 arguments") } @@ -523,71 +265,67 @@ func (p *pageContentOutput) RenderString(ctx context.Context, args ...any) (temp return "", err } - if 
err = p.initRenderHooks(); err != nil { + if err = pco.initRenderHooks(); err != nil { return "", err } - conv := p.p.getContentConverter() - if opts.Markup != "" && opts.Markup != p.p.m.markup { + conv := pco.po.p.getContentConverter() + if opts.Markup != "" && opts.Markup != pco.po.p.m.markup { var err error - // TODO(bep) consider cache - conv, err = p.p.m.newContentConverter(p.p, opts.Markup) + conv, err = pco.po.p.m.newContentConverter(pco.po.p, opts.Markup) if err != nil { - return "", p.p.wrapError(err) + return "", pco.po.p.wrapError(err) } } var rendered []byte + parseInfo := &contentParseInfo{ + pid: pco.po.p.pid, + } + if pageparser.HasShortcode(contentToRender) { + contentToRenderb := []byte(contentToRender) // String contains a shortcode. - parsed, err := pageparser.ParseMain(strings.NewReader(contentToRender), pageparser.Config{}) + parseInfo.itemsStep1, err = pageparser.ParseBytesMain(contentToRenderb, pageparser.Config{}) if err != nil { return "", err } - pm := &pageContentMap{ - items: make([]any, 0, 20), - } - s := newShortcodeHandler(p.p, p.p.s) - - if err := p.p.mapContentForResult( - parsed, - s, - pm, - opts.Markup, - nil, - ); err != nil { + + s := newShortcodeHandler(pco.po.p.pathOrTitle(), pco.po.p.s) + if err := parseInfo.mapItems(contentToRenderb, s); err != nil { return "", err } - placeholders, err := s.prepareShortcodesForPage(ctx, p.p, p.f) + placeholders, err := s.prepareShortcodesForPage(ctx, pco.po.p, pco.po.f, true) if err != nil { return "", err } - contentToRender, hasVariants, err := p.p.contentToRender(ctx, parsed, pm, placeholders) + contentToRender, hasVariants, err := parseInfo.contentToRender(ctx, contentToRenderb, placeholders) if err != nil { return "", err } if hasVariants { - p.p.pageOutputTemplateVariationsState.Store(2) + pco.po.p.pageOutputTemplateVariationsState.Add(1) } - b, err := p.renderContentWithConverter(ctx, conv, contentToRender, false) + b, err := pco.renderContentWithConverter(ctx, conv, 
contentToRender, false) if err != nil { - return "", p.p.wrapError(err) + return "", pco.po.p.wrapError(err) } rendered = b.Bytes() - if pm.hasNonMarkdownShortcode || p.placeholdersEnabled { + if parseInfo.hasNonMarkdownShortcode { var hasShortcodeVariants bool tokenHandler := func(ctx context.Context, token string) ([]byte, error) { if token == tocShortcodePlaceholder { - // The Page's TableOfContents was accessed in a shortcode. - if p.tableOfContentsHTML == "" { - p.p.s.initInit(ctx, p.initToC, p.p) + toc, err := pco.po.p.content.contentToC(ctx, pco) + if err != nil { + return nil, err } - return []byte(p.tableOfContentsHTML), nil + // The Page's TableOfContents was accessed in a shortcode. + return []byte(toc.tableOfContentsHTML), nil } renderer, found := placeholders[token] if found { @@ -607,17 +345,17 @@ func (p *pageContentOutput) RenderString(ctx context.Context, args ...any) (temp return "", err } if hasShortcodeVariants { - p.p.pageOutputTemplateVariationsState.Store(2) + pco.po.p.pageOutputTemplateVariationsState.Add(1) } } // We need a consolidated view in $page.HasShortcode - p.p.shortcodeState.transferNames(s) + pco.po.p.content.shortcodeState.transferNames(s) } else { - c, err := p.renderContentWithConverter(ctx, conv, []byte(contentToRender), false) + c, err := pco.renderContentWithConverter(ctx, conv, []byte(contentToRender), false) if err != nil { - return "", p.p.wrapError(err) + return "", pco.po.p.wrapError(err) } rendered = c.Bytes() @@ -626,48 +364,41 @@ func (p *pageContentOutput) RenderString(ctx context.Context, args ...any) (temp if opts.Display == "inline" { // We may have to rethink this in the future when we get other // renderers. 
- rendered = p.p.s.ContentSpec.TrimShortHTML(rendered) + rendered = pco.po.p.s.ContentSpec.TrimShortHTML(rendered) } return template.HTML(string(rendered)), nil } -func (p *pageContentOutput) RenderWithTemplateInfo(ctx context.Context, info tpl.Info, layout ...string) (template.HTML, error) { - p.p.addDependency(info) - return p.Render(ctx, layout...) -} - -func (p *pageContentOutput) Render(ctx context.Context, layout ...string) (template.HTML, error) { +func (pco *pageContentOutput) Render(ctx context.Context, layout ...string) (template.HTML, error) { if len(layout) == 0 { return "", errors.New("no layout given") } - templ, found, err := p.p.resolveTemplate(layout...) + templ, found, err := pco.po.p.resolveTemplate(layout...) if err != nil { - return "", p.p.wrapError(err) + return "", pco.po.p.wrapError(err) } if !found { return "", nil } - p.p.addDependency(templ.(tpl.Info)) - // Make sure to send the *pageState and not the *pageContentOutput to the template. - res, err := executeToString(ctx, p.p.s.Tmpl(), templ, p.p) + res, err := executeToString(ctx, pco.po.p.s.Tmpl(), templ, pco.po.p) if err != nil { - return "", p.p.wrapError(fmt.Errorf("failed to execute template %s: %w", templ.Name(), err)) + return "", pco.po.p.wrapError(fmt.Errorf("failed to execute template %s: %w", templ.Name(), err)) } return template.HTML(res), nil } -func (p *pageContentOutput) initRenderHooks() error { - if p == nil { +func (pco *pageContentOutput) initRenderHooks() error { + if pco == nil { return nil } - p.renderHooks.init.Do(func() { - if p.p.pageOutputTemplateVariationsState.Load() == 0 { - p.p.pageOutputTemplateVariationsState.Store(1) + pco.renderHooks.init.Do(func() { + if pco.po.p.pageOutputTemplateVariationsState.Load() == 0 { + pco.po.p.pageOutputTemplateVariationsState.Store(1) } type cacheKey struct { @@ -680,14 +411,15 @@ func (p *pageContentOutput) initRenderHooks() error { var renderCacheMu sync.Mutex resolvePosition := func(ctx any) text.Position { + source := 
pco.po.p.content.mustSource() var offset int switch v := ctx.(type) { case hooks.CodeblockContext: - offset = bytes.Index(p.p.source.parsed.Input(), []byte(v.Inner())) + offset = bytes.Index(source, []byte(v.Inner())) } - pos := p.p.posFromInput(p.p.source.parsed.Input(), offset) + pos := pco.po.p.posFromInput(source, offset) if pos.LineNumber > 0 { // Move up to the code fence delimiter. @@ -698,16 +430,16 @@ func (p *pageContentOutput) initRenderHooks() error { return pos } - p.renderHooks.getRenderer = func(tp hooks.RendererType, id any) any { + pco.renderHooks.getRenderer = func(tp hooks.RendererType, id any) any { renderCacheMu.Lock() defer renderCacheMu.Unlock() - key := cacheKey{tp: tp, id: id, f: p.f} + key := cacheKey{tp: tp, id: id, f: pco.po.f} if r, ok := renderCache[key]; ok { return r } - layoutDescriptor := p.p.getLayoutDescriptor() + layoutDescriptor := pco.po.p.getLayoutDescriptor() layoutDescriptor.RenderingHook = true layoutDescriptor.LayoutOverride = false layoutDescriptor.Layout = "" @@ -733,19 +465,19 @@ func (p *pageContentOutput) initRenderHooks() error { } getHookTemplate := func(f output.Format) (tpl.Template, bool) { - templ, found, err := p.p.s.Tmpl().LookupLayout(layoutDescriptor, f) + templ, found, err := pco.po.p.s.Tmpl().LookupLayout(layoutDescriptor, f) if err != nil { panic(err) } return templ, found } - templ, found1 := getHookTemplate(p.f) + templ, found1 := getHookTemplate(pco.po.f) - if p.p.reusePageOutputContent() { + if pco.po.p.reusePageOutputContent() { // Check if some of the other output formats would give a different template. 
- for _, f := range p.p.s.renderFormats { - if f.Name == p.f.Name { + for _, f := range pco.po.p.s.renderFormats { + if f.Name == pco.po.f.Name { continue } templ2, found2 := getHookTemplate(f) @@ -757,7 +489,7 @@ func (p *pageContentOutput) initRenderHooks() error { } if templ != templ2 { - p.p.pageOutputTemplateVariationsState.Store(2) + pco.po.p.pageOutputTemplateVariationsState.Add(1) break } } @@ -765,8 +497,8 @@ func (p *pageContentOutput) initRenderHooks() error { } if !found1 { if tp == hooks.CodeBlockRendererType { - // No user provided tempplate for code blocks, so we use the native Go code version -- which is also faster. - r := p.p.s.ContentSpec.Converters.GetHighlighter() + // No user provided template for code blocks, so we use the native Go version -- which is also faster. + r := pco.po.p.s.ContentSpec.Converters.GetHighlighter() renderCache[key] = r return r } @@ -774,8 +506,7 @@ func (p *pageContentOutput) initRenderHooks() error { } r := hookRendererTemplate{ - templateHandler: p.p.s.Tmpl(), - SearchProvider: templ.(identity.SearchProvider), + templateHandler: pco.po.p.s.Tmpl(), templ: templ, resolvePosition: resolvePosition, } @@ -787,31 +518,11 @@ func (p *pageContentOutput) initRenderHooks() error { return nil } -func (p *pageContentOutput) setAutoSummary() error { - if p.p.source.hasSummaryDivider || p.p.m.summary != "" { - return nil - } - - var summary string - var truncated bool - - if p.p.m.isCJKLanguage { - summary, truncated = p.p.s.ContentSpec.TruncateWordsByRune(p.plainWords) - } else { - summary, truncated = p.p.s.ContentSpec.TruncateWordsToWholeSentence(p.plain) - } - p.summary = template.HTML(summary) - - p.truncated = truncated - - return nil -} - -func (cp *pageContentOutput) getContentConverter() (converter.Converter, error) { - if err := cp.initRenderHooks(); err != nil { +func (pco *pageContentOutput) getContentConverter() (converter.Converter, error) { + if err := pco.initRenderHooks(); err != nil { return nil, err } - return 
cp.p.getContentConverter(), nil + return pco.po.p.getContentConverter(), nil } func (cp *pageContentOutput) ParseAndRenderContent(ctx context.Context, content []byte, renderTOC bool) (converter.ResultRender, error) { @@ -822,8 +533,8 @@ func (cp *pageContentOutput) ParseAndRenderContent(ctx context.Context, content return cp.renderContentWithConverter(ctx, c, content, renderTOC) } -func (cp *pageContentOutput) ParseContent(ctx context.Context, content []byte) (converter.ResultParse, bool, error) { - c, err := cp.getContentConverter() +func (pco *pageContentOutput) ParseContent(ctx context.Context, content []byte) (converter.ResultParse, bool, error) { + c, err := pco.getContentConverter() if err != nil { return nil, false, err } @@ -835,14 +546,14 @@ func (cp *pageContentOutput) ParseContent(ctx context.Context, content []byte) ( Ctx: ctx, Src: content, RenderTOC: true, - GetRenderer: cp.renderHooks.getRenderer, + GetRenderer: pco.renderHooks.getRenderer, } r, err := p.Parse(rctx) return r, ok, err - } -func (cp *pageContentOutput) RenderContent(ctx context.Context, content []byte, doc any) (converter.ResultRender, bool, error) { - c, err := cp.getContentConverter() + +func (pco *pageContentOutput) RenderContent(ctx context.Context, content []byte, doc any) (converter.ResultRender, bool, error) { + c, err := pco.getContentConverter() if err != nil { return nil, false, err } @@ -854,75 +565,23 @@ func (cp *pageContentOutput) RenderContent(ctx context.Context, content []byte, Ctx: ctx, Src: content, RenderTOC: true, - GetRenderer: cp.renderHooks.getRenderer, + GetRenderer: pco.renderHooks.getRenderer, } r, err := p.Render(rctx, doc) - if err == nil { - if ids, ok := r.(identity.IdentitiesProvider); ok { - for _, v := range ids.GetIdentities() { - cp.trackDependency(v) - } - } - } - return r, ok, err } -func (cp *pageContentOutput) renderContentWithConverter(ctx context.Context, c converter.Converter, content []byte, renderTOC bool) (converter.ResultRender, error) { 
+func (pco *pageContentOutput) renderContentWithConverter(ctx context.Context, c converter.Converter, content []byte, renderTOC bool) (converter.ResultRender, error) { r, err := c.Convert( converter.RenderContext{ Ctx: ctx, Src: content, RenderTOC: renderTOC, - GetRenderer: cp.renderHooks.getRenderer, + GetRenderer: pco.renderHooks.getRenderer, }) - - if err == nil { - if ids, ok := r.(identity.IdentitiesProvider); ok { - for _, v := range ids.GetIdentities() { - cp.trackDependency(v) - } - } - } - return r, err } -func (p *pageContentOutput) setWordCounts(isCJKLanguage bool) { - if isCJKLanguage { - p.wordCount = 0 - for _, word := range p.plainWords { - runeCount := utf8.RuneCountInString(word) - if len(word) == runeCount { - p.wordCount++ - } else { - p.wordCount += runeCount - } - } - } else { - p.wordCount = helpers.TotalWords(p.plain) - } - - // TODO(bep) is set in a test. Fix that. - if p.fuzzyWordCount == 0 { - p.fuzzyWordCount = (p.wordCount + 100) / 100 * 100 - } - - if isCJKLanguage { - p.readingTime = (p.wordCount + 500) / 501 - } else { - p.readingTime = (p.wordCount + 212) / 213 - } -} - -// A callback to signal that we have inserted a placeholder into the rendered -// content. This avoids doing extra replacement work. -func (p *pageContentOutput) enablePlaceholders() { - p.placeholdersEnabledInit.Do(func() { - p.placeholdersEnabled = true - }) -} - // these will be shifted out when rendering a given output format. 
type pagePerOutputProviders interface { targetPather diff --git a/hugolib/page__tree.go b/hugolib/page__tree.go index 8b02667f1..e54d596bc 100644 --- a/hugolib/page__tree.go +++ b/hugolib/page__tree.go @@ -14,169 +14,121 @@ package hugolib import ( - "path" + "context" + "fmt" "strings" + "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/common/types" + "github.com/gohugoio/hugo/hugolib/doctree" "github.com/gohugoio/hugo/resources/kinds" "github.com/gohugoio/hugo/resources/page" ) +// pageTree holds the treen navigational method for a Page. type pageTree struct { p *pageState } -func (pt pageTree) IsAncestor(other any) (bool, error) { - if pt.p == nil { - return false, nil - } - - tp, ok := other.(treeRefProvider) +func (pt pageTree) IsAncestor(other any) bool { + n, ok := other.(contentNodeI) if !ok { - return false, nil + return false } - ref1, ref2 := pt.p.getTreeRef(), tp.getTreeRef() - if ref1 != nil && ref2 != nil && ref1.key == ref2.key { - return false, nil + if n.Path() == pt.p.Path() { + return false } - if ref1 != nil && ref1.key == "/" { - return true, nil - } - - if ref1 == nil || ref2 == nil { - if ref1 == nil { - // A 404 or other similar standalone page. 
- return false, nil - } + return strings.HasPrefix(n.Path(), paths.AddTrailingSlash(pt.p.Path())) +} - return ref1.n.p.IsHome(), nil +func (pt pageTree) IsDescendant(other any) bool { + n, ok := other.(contentNodeI) + if !ok { + return false } - if strings.HasPrefix(ref2.key, ref1.key) { - return true, nil + if n.Path() == pt.p.Path() { + return false } - return strings.HasPrefix(ref2.key, ref1.key+cmBranchSeparator), nil + return strings.HasPrefix(pt.p.Path(), paths.AddTrailingSlash(n.Path())) } func (pt pageTree) CurrentSection() page.Page { - p := pt.p - - if p.IsHome() || p.IsSection() { - return p + if kinds.IsBranch(pt.p.Kind()) { + return pt.p } - return p.Parent() -} - -func (pt pageTree) IsDescendant(other any) (bool, error) { - if pt.p == nil { - return false, nil + dir := pt.p.m.pathInfo.Dir() + if dir == "/" { + return pt.p.s.home } - tp, ok := other.(treeRefProvider) - if !ok { - return false, nil + _, n := pt.p.s.pageMap.treePages.LongestPrefix(dir, true, func(n contentNodeI) bool { return n.isContentNodeBranch() }) + if n != nil { + return n.(page.Page) } - ref1, ref2 := pt.p.getTreeRef(), tp.getTreeRef() - if ref1 != nil && ref2 != nil && ref1.key == ref2.key { - return false, nil - } + panic(fmt.Sprintf("CurrentSection not found for %q in lang %s", pt.p.Path(), pt.p.Lang())) +} - if ref2 != nil && ref2.key == "/" { - return true, nil +func (pt pageTree) FirstSection() page.Page { + s := pt.p.m.pathInfo.Dir() + if s == "/" { + return pt.p.s.home } - if ref1 == nil || ref2 == nil { - if ref2 == nil { - // A 404 or other similar standalone page. 
- return false, nil + for { + k, n := pt.p.s.pageMap.treePages.LongestPrefix(s, true, func(n contentNodeI) bool { return n.isContentNodeBranch() }) + if n == nil { + return nil } - return ref2.n.p.IsHome(), nil - } - - if strings.HasPrefix(ref1.key, ref2.key) { - return true, nil - } - - return strings.HasPrefix(ref1.key, ref2.key+cmBranchSeparator), nil -} + // /blog + if strings.Count(k, "/") < 2 { + return n.(page.Page) + } -func (pt pageTree) FirstSection() page.Page { - ref := pt.p.getTreeRef() - if ref == nil { - return pt.p.s.home - } - key := ref.key + if s == "" { + return nil + } - if !ref.isSection() { - key = path.Dir(key) - } + s = paths.Dir(s) - _, b := ref.m.getFirstSection(key) - if b == nil { - return nil } - return b.p } -func (pt pageTree) InSection(other any) (bool, error) { +func (pt pageTree) InSection(other any) bool { if pt.p == nil || types.IsNil(other) { - return false, nil + return false } - tp, ok := other.(treeRefProvider) + p, ok := other.(page.Page) if !ok { - return false, nil + return false } - ref1, ref2 := pt.p.getTreeRef(), tp.getTreeRef() - - if ref1 == nil || ref2 == nil { - if ref1 == nil { - // A 404 or other similar standalone page. 
- return false, nil - } - return ref1.n.p.IsHome(), nil - } - - s1, _ := ref1.getCurrentSection() - s2, _ := ref2.getCurrentSection() - - return s1 == s2, nil -} - -func (pt pageTree) Page() page.Page { - return pt.p + return pt.CurrentSection() == p.CurrentSection() } func (pt pageTree) Parent() page.Page { - p := pt.p - - if p.parent != nil { - return p.parent - } - if pt.p.IsHome() { return nil } - tree := p.getTreeRef() + dir := pt.p.m.pathInfo.ContainerDir() - if tree == nil || pt.p.Kind() == kinds.KindTaxonomy { + if dir == "" { return pt.p.s.home } - _, b := tree.getSection() - if b == nil { - return nil + _, n := pt.p.s.pageMap.treePages.LongestPrefix(dir, true, nil) + if n != nil { + return n.(page.Page) } - - return b.p + return nil } func (pt pageTree) Ancestors() page.Pages { @@ -190,9 +142,57 @@ func (pt pageTree) Ancestors() page.Pages { } func (pt pageTree) Sections() page.Pages { - if pt.p.bucket == nil { + var ( + pages page.Pages + currentBranchPrefix string + s = pt.p.Path() + prefix = paths.AddTrailingSlash(s) + tree = pt.p.s.pageMap.treePages + ) + + w := &doctree.NodeShiftTreeWalker[contentNodeI]{ + Tree: tree, + Prefix: prefix, + } + w.Handle = func(ss string, n contentNodeI, match doctree.DimensionFlag) (bool, error) { + if !n.isContentNodeBranch() { + return false, nil + } + if currentBranchPrefix == "" || !strings.HasPrefix(ss, currentBranchPrefix) { + if p, ok := n.(*pageState); ok && p.IsSection() && p.m.shouldList(false) && p.Parent() == pt.p { + pages = append(pages, p) + } else { + w.SkipPrefix(ss + "/") + } + } + currentBranchPrefix = ss + "/" + return false, nil + } + + if err := w.Walk(context.Background()); err != nil { + panic(err) + } + + page.SortByDefault(pages) + return pages +} + +func (pt pageTree) Page() page.Page { + return pt.p +} + +func (p pageTree) SectionsEntries() []string { + sp := p.SectionsPath() + if sp == "/" { + return nil + } + entries := strings.Split(sp[1:], "/") + if len(entries) == 0 { return nil } + 
return entries +} - return pt.p.bucket.getSections() +func (p pageTree) SectionsPath() string { + return p.CurrentSection().Path() } diff --git a/hugolib/page_test.go b/hugolib/page_test.go index ca6164d2c..f5ff95f3c 100644 --- a/hugolib/page_test.go +++ b/hugolib/page_test.go @@ -447,6 +447,44 @@ func TestPageWithDelimiterForMarkdownThatCrossesBorder(t *testing.T) { } } +func TestPageDatesTerms(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +baseURL = "http://example.com/" +-- content/p1.md -- +--- +title: p1 +date: 2022-01-15 +lastMod: 2022-01-16 +tags: ["a", "b"] +categories: ["c", "d"] +--- +p1 +-- content/p2.md -- +--- +title: p2 +date: 2017-01-16 +lastMod: 2017-01-17 +tags: ["a", "c"] +categories: ["c", "e"] +--- +p2 +-- layouts/_default/list.html -- +{{ .Title }}|Date: {{ .Date.Format "2006-01-02" }}|Lastmod: {{ .Lastmod.Format "2006-01-02" }}| + +` + b := Test(t, files) + + b.AssertFileContent("public/categories/index.html", "Categories|Date: 2022-01-15|Lastmod: 2022-01-16|") + b.AssertFileContent("public/categories/c/index.html", "C|Date: 2022-01-15|Lastmod: 2022-01-16|") + b.AssertFileContent("public/categories/e/index.html", "E|Date: 2017-01-16|Lastmod: 2017-01-17|") + b.AssertFileContent("public/tags/index.html", "Tags|Date: 2022-01-15|Lastmod: 2022-01-16|") + b.AssertFileContent("public/tags/a/index.html", "A|Date: 2022-01-15|Lastmod: 2022-01-16|") + b.AssertFileContent("public/tags/c/index.html", "C|Date: 2017-01-16|Lastmod: 2017-01-17|") +} + func TestPageDatesAllKinds(t *testing.T) { t.Parallel() @@ -469,10 +507,12 @@ categories: ["cool stuff"] s := b.H.Sites[0] checkDate := func(t time.Time, msg string) { + b.Helper() b.Assert(t.Year(), qt.Equals, 2017, qt.Commentf(msg)) } checkDated := func(d resource.Dated, msg string) { + b.Helper() checkDate(d.Date(), "date: "+msg) checkDate(d.Lastmod(), "lastmod: "+msg) } @@ -533,10 +573,10 @@ date: 2012-01-12 b.Assert(p.Lastmod().Year(), qt.Equals, year) } - checkDate(s.getPage("/"), 2018) - 
checkDate(s.getPage("/no-index"), 2017) - b.Assert(s.getPage("/with-index-no-date").Date().IsZero(), qt.Equals, true) - checkDate(s.getPage("/with-index-date"), 2018) + checkDate(s.getPageOldVersion("/"), 2018) + checkDate(s.getPageOldVersion("/no-index"), 2017) + b.Assert(s.getPageOldVersion("/with-index-no-date").Date().IsZero(), qt.Equals, true) + checkDate(s.getPageOldVersion("/with-index-date"), 2018) b.Assert(s.Site().LastChange().Year(), qt.Equals, 2018) } @@ -713,6 +753,91 @@ func TestPageWithMoreTag(t *testing.T) { testAllMarkdownEnginesForPages(t, assertFunc, nil, simplePageWithSummaryDelimiterSameLine) } +func TestSummaryInFrontMatter(t *testing.T) { + t.Parallel() + Test(t, ` +-- hugo.toml -- +-- content/simple.md -- +--- +title: Simple +summary: "Front **matter** summary" +--- +Simple Page +-- layouts/_default/single.html -- +Summary: {{ .Summary }}|Truncated: {{ .Truncated }}| + +`).AssertFileContent("public/simple/index.html", "Summary: Front <strong>matter</strong> summary|", "Truncated: false") +} + +func TestSummaryManualSplit(t *testing.T) { + t.Parallel() + Test(t, ` +-- hugo.toml -- +-- content/simple.md -- +--- +title: Simple +--- +This is **summary**. +<!--more--> +This is **content**. +-- layouts/_default/single.html -- +Summary: {{ .Summary }}|Truncated: {{ .Truncated }}| +Content: {{ .Content }}| + +`).AssertFileContent("public/simple/index.html", + "Summary: <p>This is <strong>summary</strong>.</p>|", + "Truncated: true|", + "Content: <p>This is <strong>summary</strong>.</p>\n<p>This is <strong>content</strong>.</p>|", + ) +} + +func TestSummaryManualSplitHTML(t *testing.T) { + t.Parallel() + Test(t, ` +-- hugo.toml -- +-- content/simple.html -- +--- +title: Simple +--- +<div> +This is <b>summary</b>. +</div> +<!--more--> +<div> +This is <b>content</b>. 
+</div> +-- layouts/_default/single.html -- +Summary: {{ .Summary }}|Truncated: {{ .Truncated }}| +Content: {{ .Content }}| + +`).AssertFileContent("public/simple/index.html", "Summary: <div>\nThis is <b>summary</b>.\n</div>\n|Truncated: true|\nContent: \n\n<div>\nThis is <b>content</b>.\n</div>|") +} + +func TestSummaryAuto(t *testing.T) { + t.Parallel() + Test(t, ` +-- hugo.toml -- +summaryLength = 10 +-- content/simple.md -- +--- +title: Simple +--- +This is **summary**. +This is **more summary**. +This is *even more summary**. +This is **more summary**. + +This is **content**. +-- layouts/_default/single.html -- +Summary: {{ .Summary }}|Truncated: {{ .Truncated }}| +Content: {{ .Content }}| + +`).AssertFileContent("public/simple/index.html", + "Summary: This is summary. This is more summary. This is even more summary*.|", + "Truncated: true|", + "Content: <p>This is <strong>summary</strong>.") +} + // #2973 func TestSummaryWithHTMLTagsOnNextLine(t *testing.T) { assertFunc := func(t *testing.T, ext string, pages page.Pages) { @@ -1190,26 +1315,89 @@ func TestPagePaths(t *testing.T) { } func TestTranslationKey(t *testing.T) { - t.Parallel() - c := qt.New(t) - cfg, fs := newTestCfg() - configs, err := loadTestConfigFromProvider(cfg) - c.Assert(err, qt.IsNil) + files := ` +-- hugo.toml -- +disableKinds = ["taxonomy", "term"] +defaultContentLanguage = "en" +defaultContentLanguageInSubdir = true +[languages] +[languages.en] +weight = 1 +[languages.nn] +weight = 2 +-- content/sect/p1.en.md -- +--- +translationkey: "adfasdf" +title: "p1 en" +--- +-- content/sect/p1.nn.md -- +--- +translationkey: "adfasdf" +title: "p1 nn" +--- +-- layouts/_default/single.html -- +Title: {{ .Title }}|TranslationKey: {{ .TranslationKey }}| +Translations: {{ range .Translations }}{{ .Language.Lang }}|{{ end }}| +AllTranslations: {{ range .AllTranslations }}{{ .Language.Lang }}|{{ end }}| - writeSource(t, fs, filepath.Join("content", filepath.FromSlash("sect/simple.no.md")), "---\ntitle: 
\"A1\"\ntranslationKey: \"k1\"\n---\nContent\n") - writeSource(t, fs, filepath.Join("content", filepath.FromSlash("sect/simple.en.md")), "---\ntitle: \"A2\"\n---\nContent\n") +` + b := Test(t, files) + b.AssertFileContent("public/en/sect/p1/index.html", + "TranslationKey: adfasdf|", + "AllTranslations: en|nn||", + "Translations: nn||", + ) - s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Configs: configs}, BuildCfg{SkipRender: true}) + b.AssertFileContent("public/nn/sect/p1/index.html", + "TranslationKey: adfasdf|", + "Translations: en||", + "AllTranslations: en|nn||", + ) +} - c.Assert(len(s.RegularPages()), qt.Equals, 2) +// Issue #11540. +func TestTranslationKeyResourceSharing(t *testing.T) { + files := ` +-- hugo.toml -- +disableKinds = ["taxonomy", "term"] +defaultContentLanguage = "en" +defaultContentLanguageInSubdir = true +[languages] +[languages.en] +weight = 1 +[languages.nn] +weight = 2 +-- content/sect/mybundle_en/index.en.md -- +--- +translationkey: "adfasdf" +title: "mybundle en" +--- +-- content/sect/mybundle_en/f1.txt -- +f1.en +-- content/sect/mybundle_en/f2.txt -- +f2.en +-- content/sect/mybundle_nn/index.nn.md -- +--- +translationkey: "adfasdf" +title: "mybundle nn" +--- +-- content/sect/mybundle_nn/f2.nn.txt -- +f2.nn +-- layouts/_default/single.html -- +Title: {{ .Title }}|TranslationKey: {{ .TranslationKey }}| +Resources: {{ range .Resources }}{{ .RelPermalink }}|{{ .Content }}|{{ end }}| - home := s.Home() - c.Assert(home, qt.Not(qt.IsNil)) - c.Assert(home.TranslationKey(), qt.Equals, "home") - c.Assert(s.RegularPages()[0].TranslationKey(), qt.Equals, "page/k1") - p2 := s.RegularPages()[1] +` + b := Test(t, files) + b.AssertFileContent("public/en/sect/mybundle_en/index.html", + "TranslationKey: adfasdf|", + "Resources: /en/sect/mybundle_en/f1.txt|f1.en|/en/sect/mybundle_en/f2.txt|f2.en||", + ) - c.Assert(p2.TranslationKey(), qt.Equals, "page/sect/simple") + b.AssertFileContent("public/nn/sect/mybundle_nn/index.html", + "TranslationKey: 
adfasdf|", + "Title: mybundle nn|TranslationKey: adfasdf|\nResources: /en/sect/mybundle_en/f1.txt|f1.en|/nn/sect/mybundle_nn/f2.nn.txt|f2.nn||", + ) } func TestChompBOM(t *testing.T) { @@ -1383,12 +1571,6 @@ Content:{{ .Content }} ) } -// https://github.com/gohugoio/hugo/issues/5781 -func TestPageWithZeroFile(t *testing.T) { - newTestSitesBuilder(t).WithLogger(loggers.NewDefault()).WithSimpleConfigFile(). - WithTemplatesAdded("index.html", "{{ .File.Filename }}{{ with .File }}{{ .Dir }}{{ end }}").Build(BuildCfg{}) -} - func TestHomePageWithNoTitle(t *testing.T) { b := newTestSitesBuilder(t).WithConfigFile("toml", ` title = "Site Title" @@ -1499,93 +1681,45 @@ func TestShouldBuildWithClock(t *testing.T) { } } -// "dot" in path: #1885 and #2110 -// disablePathToLower regression: #3374 -func TestPathIssues(t *testing.T) { - for _, disablePathToLower := range []bool{false, true} { - for _, uglyURLs := range []bool{false, true} { - disablePathToLower := disablePathToLower - uglyURLs := uglyURLs - t.Run(fmt.Sprintf("disablePathToLower=%t,uglyURLs=%t", disablePathToLower, uglyURLs), func(t *testing.T) { - t.Parallel() - cfg, fs := newTestCfg() - c := qt.New(t) - - cfg.Set("permalinks", map[string]string{ - "post": ":section/:title", - }) - - cfg.Set("uglyURLs", uglyURLs) - cfg.Set("disablePathToLower", disablePathToLower) - cfg.Set("paginate", 1) - th, configs := newTestHelperFromProvider(cfg, fs, t) - - writeSource(t, fs, filepath.Join("layouts", "_default", "single.html"), "<html><body>{{.Content}}</body></html>") - writeSource(t, fs, filepath.Join("layouts", "_default", "list.html"), - "<html><body>P{{.Paginator.PageNumber}}|URL: {{.Paginator.URL}}|{{ if .Paginator.HasNext }}Next: {{.Paginator.Next.URL }}{{ end }}</body></html>") - - for i := 0; i < 3; i++ { - writeSource(t, fs, filepath.Join("content", "post", fmt.Sprintf("doc%d.md", i)), - fmt.Sprintf(`--- -title: "test%d.dot" -tags: -- ".net" ---- -# doc1 -*some content*`, i)) - } - - writeSource(t, fs, 
filepath.Join("content", "Blog", "Blog1.md"), - fmt.Sprintf(`--- -title: "testBlog" -tags: -- "Blog" ---- -# doc1 -*some blog content*`)) - - s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Configs: configs}, BuildCfg{}) - - c.Assert(len(s.RegularPages()), qt.Equals, 4) - - pathFunc := func(s string) string { - if uglyURLs { - return strings.Replace(s, "/index.html", ".html", 1) - } - return s - } - - blog := "blog" - - if disablePathToLower { - blog = "Blog" - } - - th.assertFileContent(pathFunc("public/"+blog+"/"+blog+"1/index.html"), "some blog content") - - th.assertFileContent(pathFunc("public/post/test0.dot/index.html"), "some content") - - if uglyURLs { - th.assertFileContent("public/post/page/1.html", `canonical" href="/post.html"`) - th.assertFileContent("public/post.html", `<body>P1|URL: /post.html|Next: /post/page/2.html</body>`) - th.assertFileContent("public/post/page/2.html", `<body>P2|URL: /post/page/2.html|Next: /post/page/3.html</body>`) - } else { - th.assertFileContent("public/post/page/1/index.html", `canonical" href="/post/"`) - th.assertFileContent("public/post/index.html", `<body>P1|URL: /post/|Next: /post/page/2/</body>`) - th.assertFileContent("public/post/page/2/index.html", `<body>P2|URL: /post/page/2/|Next: /post/page/3/</body>`) - th.assertFileContent("public/tags/.net/index.html", `<body>P1|URL: /tags/.net/|Next: /tags/.net/page/2/</body>`) - - } - - p := s.RegularPages()[0] - if uglyURLs { - c.Assert(p.RelPermalink(), qt.Equals, "/post/test0.dot.html") - } else { - c.Assert(p.RelPermalink(), qt.Equals, "/post/test0.dot/") - } - }) - } - } +// See https://github.com/gohugoio/hugo/issues/9171 +// We redefined disablePathToLower in v0.121.0. +func TestPagePathDisablePathToLower(t *testing.T) { + files := ` +-- hugo.toml -- +baseURL = "http://example.com" +disablePathToLower = true +[permalinks] +sect2 = "/:section/:filename/" +sect3 = "/:section/:title/" +-- content/sect/p1.md -- +--- +title: "Page1" +--- +p1. 
+-- content/sect/p2.md -- +--- +title: "Page2" +slug: "PaGe2" +--- +p2. +-- content/sect2/PaGe3.md -- +--- +title: "Page3" +--- +-- content/seCt3/p4.md -- +--- +title: "Pag.E4" +slug: "PaGe4" +--- +p4. +-- layouts/_default/single.html -- +Single: {{ .Title}}|{{ .RelPermalink }}|{{ .Path }}| +` + b := Test(t, files) + b.AssertFileContent("public/sect/p1/index.html", "Single: Page1|/sect/p1/|/sect/p1") + b.AssertFileContent("public/sect/PaGe2/index.html", "Single: Page2|/sect/PaGe2/|/sect/p2") + b.AssertFileContent("public/sect2/page3/index.html", "Single: Page3|/sect2/page3/|/sect2/page3|") + b.AssertFileContent("public/sect3/Pag.E4/index.html", "Single: Pag.E4|/sect3/Pag.E4/|/sect3/p4|") } // https://github.com/gohugoio/hugo/issues/4675 @@ -1711,50 +1845,6 @@ title: Scratch Me! b.AssertFileContent("public/scratchme/index.html", "C: cv") } -func TestScratchRebuild(t *testing.T) { - t.Parallel() - - files := ` --- config.toml -- --- content/p1.md -- ---- -title: "p1" ---- -{{< scratchme >}} --- layouts/shortcodes/foo.html -- -notused --- layouts/shortcodes/scratchme.html -- -{{ .Page.Scratch.Set "scratch" "foo" }} -{{ .Page.Store.Set "scratch" "bar" }} --- layouts/_default/single.html -- -{{ .Content }} -Scratch: {{ .Scratch.Get "scratch" }}| -Store: {{ .Store.Get "scratch" }}| -` - - b := NewIntegrationTestBuilder( - IntegrationTestConfig{ - T: t, - TxtarString: files, - Running: true, - }, - ).Build() - - b.AssertFileContent("public/p1/index.html", ` -Scratch: foo| -Store: bar| - `) - - b.EditFiles("layouts/shortcodes/foo.html", "edit") - - b.Build() - - b.AssertFileContent("public/p1/index.html", ` -Scratch: | -Store: bar| - `) -} - func TestPageParam(t *testing.T) { t.Parallel() @@ -1879,27 +1969,6 @@ Link with URL as text `) } -func TestPageCaseIssues(t *testing.T) { - t.Parallel() - - b := newTestSitesBuilder(t) - b.WithConfigFile("toml", `defaultContentLanguage = "no" -[languages] -[languages.NO] -title = "Norsk" -`) - b.WithContent("a/B/C/Page1.md", 
"---\ntitle: Page1\n---") - b.WithTemplates("index.html", ` -{{ $p1 := site.GetPage "a/B/C/Page1" }} -Lang: {{ .Lang }} -Page1: {{ $p1.Path }} -`) - - b.Build(BuildCfg{}) - - b.AssertFileContent("public/index.html", "Lang: no", filepath.FromSlash("Page1: a/B/C/Page1.md")) -} - func TestPageHashString(t *testing.T) { files := ` -- config.toml -- @@ -1930,6 +1999,8 @@ title: "p2" p2 := b.H.Sites[0].RegularPages()[1] sites := p1.Sites() + b.Assert(p1, qt.Not(qt.Equals), p2) + b.Assert(identity.HashString(p1), qt.Not(qt.Equals), identity.HashString(p2)) b.Assert(identity.HashString(sites[0]), qt.Not(qt.Equals), identity.HashString(sites[1])) } diff --git a/hugolib/page_unwrap.go b/hugolib/page_unwrap.go index c3e1ce8dd..c22ff2174 100644 --- a/hugolib/page_unwrap.go +++ b/hugolib/page_unwrap.go @@ -16,6 +16,7 @@ package hugolib import ( "fmt" + "github.com/gohugoio/hugo/common/types" "github.com/gohugoio/hugo/resources/page" ) @@ -31,6 +32,8 @@ func unwrapPage(in any) (page.Page, error) { return v, nil case pageWrapper: return v.page(), nil + case types.Unwrapper: + return unwrapPage(v.Unwrapv()) case page.Page: return v, nil case nil: diff --git a/hugolib/pagebundler_test.go b/hugolib/pagebundler_test.go index 64d329832..123d752e0 100644 --- a/hugolib/pagebundler_test.go +++ b/hugolib/pagebundler_test.go @@ -15,21 +15,14 @@ package hugolib import ( "fmt" - "io" "os" - "path" "path/filepath" - "regexp" + "testing" "github.com/gohugoio/hugo/common/loggers" - "strings" - "testing" - "github.com/gohugoio/hugo/config" - "github.com/gohugoio/hugo/hugofs/files" - "github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/hugofs" @@ -44,454 +37,180 @@ import ( qt "github.com/frankban/quicktest" ) -func TestPageBundlerSiteRegular(t *testing.T) { - c := qt.New(t) - baseBaseURL := "https://example.com" - - for _, baseURLPath := range []string{"", "/hugo"} { - for _, canonify := range []bool{false, true} { - for _, ugly := range []bool{false, true} { - baseURLPathId := 
baseURLPath - if baseURLPathId == "" { - baseURLPathId = "NONE" - } - ugly := ugly - canonify := canonify - c.Run(fmt.Sprintf("ugly=%t,canonify=%t,path=%s", ugly, canonify, baseURLPathId), - func(c *qt.C) { - c.Parallel() - baseURL := baseBaseURL + baseURLPath - relURLBase := baseURLPath - if canonify { - relURLBase = "" - } - fs, cfg := newTestBundleSources(c) - cfg.Set("baseURL", baseURL) - cfg.Set("canonifyURLs", canonify) - cfg.Set("defaultContentLanguageInSubdir", false) - - cfg.Set("permalinks", map[string]string{ - "a": ":sections/:filename", - "b": ":year/:slug/", - "c": ":sections/:slug", - "/": ":filename/", - }) - - cfg.Set("outputFormats", map[string]any{ - "CUSTOMO": map[string]any{ - "mediaType": "text/html", - "baseName": "cindex", - "path": "cpath", - "permalinkable": true, - }, - }) - - cfg.Set("outputs", map[string]any{ - "home": []string{"HTML", "CUSTOMO"}, - "page": []string{"HTML", "CUSTOMO"}, - "section": []string{"HTML", "CUSTOMO"}, - }) - - cfg.Set("uglyURLs", ugly) - configs, err := loadTestConfigFromProvider(cfg) - - c.Assert(err, qt.IsNil) - - b := newTestSitesBuilderFromDepsCfg(c, deps.DepsCfg{Fs: fs, Configs: configs}).WithNothingAdded() - - b.Build(BuildCfg{}) - - s := b.H.Sites[0] - - c.Assert(len(s.RegularPages()), qt.Equals, 8) - - singlePage := s.getPage(kinds.KindPage, "a/1.md") - c.Assert(singlePage.BundleType(), qt.Equals, files.ContentClass("")) - - c.Assert(singlePage, qt.Not(qt.IsNil)) - c.Assert(s.getPage("page", "a/1"), qt.Equals, singlePage) - c.Assert(s.getPage("page", "1"), qt.Equals, singlePage) - - c.Assert(content(singlePage), qt.Contains, "TheContent") - - relFilename := func(basePath, outBase string) (string, string) { - rel := basePath - if ugly { - rel = strings.TrimSuffix(basePath, "/") + ".html" - } - - var filename string - if !ugly { - filename = path.Join(basePath, outBase) - } else { - filename = rel - } - - rel = fmt.Sprintf("%s%s", relURLBase, rel) - - return rel, filename - } - - // Check both output 
formats - rel, filename := relFilename("/a/1/", "index.html") - b.AssertFileContent(filepath.Join("public", filename), - "TheContent", - "Single RelPermalink: "+rel, - ) - - rel, filename = relFilename("/cpath/a/1/", "cindex.html") - - b.AssertFileContent(filepath.Join("public", filename), - "TheContent", - "Single RelPermalink: "+rel, - ) - - b.AssertFileContent(filepath.FromSlash("public/images/hugo-logo.png"), "content") - - // This should be just copied to destination. - b.AssertFileContent(filepath.FromSlash("public/assets/pic1.png"), "content") - - leafBundle1 := s.getPage(kinds.KindPage, "b/my-bundle/index.md") - c.Assert(leafBundle1, qt.Not(qt.IsNil)) - c.Assert(leafBundle1.BundleType(), qt.Equals, files.ContentClassLeaf) - c.Assert(leafBundle1.Section(), qt.Equals, "b") - sectionB := s.getPage(kinds.KindSection, "b") - c.Assert(sectionB, qt.Not(qt.IsNil)) - home := s.Home() - c.Assert(home.BundleType(), qt.Equals, files.ContentClassBranch) - - // This is a root bundle and should live in the "home section" - // See https://github.com/gohugoio/hugo/issues/4332 - rootBundle := s.getPage(kinds.KindPage, "root") - c.Assert(rootBundle, qt.Not(qt.IsNil)) - c.Assert(rootBundle.Parent().IsHome(), qt.Equals, true) - if !ugly { - b.AssertFileContent(filepath.FromSlash("public/root/index.html"), "Single RelPermalink: "+relURLBase+"/root/") - b.AssertFileContent(filepath.FromSlash("public/cpath/root/cindex.html"), "Single RelPermalink: "+relURLBase+"/cpath/root/") - } - - leafBundle2 := s.getPage(kinds.KindPage, "a/b/index.md") - c.Assert(leafBundle2, qt.Not(qt.IsNil)) - unicodeBundle := s.getPage(kinds.KindPage, "c/bundle/index.md") - c.Assert(unicodeBundle, qt.Not(qt.IsNil)) - - pageResources := leafBundle1.Resources().ByType(pageResourceType) - c.Assert(len(pageResources), qt.Equals, 2) - firstPage := pageResources[0].(page.Page) - secondPage := pageResources[1].(page.Page) - - c.Assert(firstPage.File().Filename(), qt.Equals, 
filepath.FromSlash("/work/base/b/my-bundle/1.md")) - c.Assert(content(firstPage), qt.Contains, "TheContent") - c.Assert(len(leafBundle1.Resources()), qt.Equals, 6) - - // Verify shortcode in bundled page - c.Assert(content(secondPage), qt.Contains, filepath.FromSlash("MyShort in b/my-bundle/2.md")) - - // https://github.com/gohugoio/hugo/issues/4582 - c.Assert(firstPage.Parent(), qt.Equals, leafBundle1) - c.Assert(secondPage.Parent(), qt.Equals, leafBundle1) - - c.Assert(pageResources.GetMatch("1*"), qt.Equals, firstPage) - c.Assert(pageResources.GetMatch("2*"), qt.Equals, secondPage) - c.Assert(pageResources.GetMatch("doesnotexist*"), qt.IsNil) - - imageResources := leafBundle1.Resources().ByType("image") - c.Assert(len(imageResources), qt.Equals, 3) - - c.Assert(leafBundle1.OutputFormats().Get("CUSTOMO"), qt.Not(qt.IsNil)) - - relPermalinker := func(s string) string { - return fmt.Sprintf(s, relURLBase) - } - - permalinker := func(s string) string { - return fmt.Sprintf(s, baseURL) - } - - if ugly { - b.AssertFileContent("public/2017/pageslug.html", - relPermalinker("Single RelPermalink: %s/2017/pageslug.html"), - permalinker("Single Permalink: %s/2017/pageslug.html"), - relPermalinker("Sunset RelPermalink: %s/2017/pageslug/sunset1.jpg"), - permalinker("Sunset Permalink: %s/2017/pageslug/sunset1.jpg")) - } else { - b.AssertFileContent("public/2017/pageslug/index.html", - relPermalinker("Sunset RelPermalink: %s/2017/pageslug/sunset1.jpg"), - permalinker("Sunset Permalink: %s/2017/pageslug/sunset1.jpg")) - - b.AssertFileContent("public/cpath/2017/pageslug/cindex.html", - relPermalinker("Single RelPermalink: %s/cpath/2017/pageslug/"), - relPermalinker("Short Sunset RelPermalink: %s/cpath/2017/pageslug/sunset2.jpg"), - relPermalinker("Sunset RelPermalink: %s/cpath/2017/pageslug/sunset1.jpg"), - permalinker("Sunset Permalink: %s/cpath/2017/pageslug/sunset1.jpg"), - ) - } - - b.AssertFileContent(filepath.FromSlash("public/2017/pageslug/c/logo.png"), "content") - 
b.AssertFileContent(filepath.FromSlash("public/cpath/2017/pageslug/c/logo.png"), "content") - c.Assert(b.CheckExists("public/cpath/cpath/2017/pageslug/c/logo.png"), qt.Equals, false) - - // Custom media type defined in site config. - c.Assert(len(leafBundle1.Resources().ByType("bepsays")), qt.Equals, 1) - - if ugly { - b.AssertFileContent(filepath.FromSlash("public/2017/pageslug.html"), - "TheContent", - relPermalinker("Sunset RelPermalink: %s/2017/pageslug/sunset1.jpg"), - permalinker("Sunset Permalink: %s/2017/pageslug/sunset1.jpg"), - "Thumb Width: 123", - "Thumb Name: my-sunset-1", - relPermalinker("Short Sunset RelPermalink: %s/2017/pageslug/sunset2.jpg"), - "Short Thumb Width: 56", - "1: Image Title: Sunset Galore 1", - "1: Image Params: map[myparam:My Sunny Param]", - relPermalinker("1: Image RelPermalink: %s/2017/pageslug/sunset1.jpg"), - "2: Image Title: Sunset Galore 2", - "2: Image Params: map[myparam:My Sunny Param]", - "1: Image myParam: Lower: My Sunny Param Caps: My Sunny Param", - "0: Page Title: Bundle Galore", - ) - - // https://github.com/gohugoio/hugo/issues/5882 - b.AssertFileContent( - filepath.FromSlash("public/2017/pageslug.html"), "0: Page RelPermalink: |") - - b.AssertFileContent(filepath.FromSlash("public/cpath/2017/pageslug.html"), "TheContent") - - // 은행 - b.AssertFileContent(filepath.FromSlash("public/c/은행/logo-은행.png"), "은행 PNG") - - } else { - b.AssertFileContent(filepath.FromSlash("public/2017/pageslug/index.html"), "TheContent") - b.AssertFileContent(filepath.FromSlash("public/cpath/2017/pageslug/cindex.html"), "TheContent") - b.AssertFileContent(filepath.FromSlash("public/2017/pageslug/index.html"), "Single Title") - b.AssertFileContent(filepath.FromSlash("public/root/index.html"), "Single Title") - - } - }) - } - } - } -} - -func TestPageBundlerSiteMultilingual(t *testing.T) { +func TestPageBundlerBundleInRoot(t *testing.T) { t.Parallel() + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["taxonomy", 
"term"] +-- content/root/index.md -- +--- +title: "Root" +--- +-- layouts/_default/single.html -- +Basic: {{ .Title }}|{{ .Kind }}|{{ .BundleType }}|{{ .RelPermalink }}| +Tree: Section: {{ .Section }}|CurrentSection: {{ .CurrentSection.RelPermalink }}|Parent: {{ .Parent.RelPermalink }}|FirstSection: {{ .FirstSection.RelPermalink }} +` + b := Test(t, files) - for _, ugly := range []bool{false, true} { - ugly := ugly - t.Run(fmt.Sprintf("ugly=%t", ugly), - func(t *testing.T) { - t.Parallel() - c := qt.New(t) - fs, cfg := newTestBundleSourcesMultilingual(t) - cfg.Set("uglyURLs", ugly) - configs, err := loadTestConfigFromProvider(cfg) - c.Assert(err, qt.IsNil) - - b := newTestSitesBuilderFromDepsCfg(t, deps.DepsCfg{Fs: fs, Configs: configs}).WithNothingAdded() - b.Build(BuildCfg{}) - - sites := b.H - - c.Assert(len(sites.Sites), qt.Equals, 2) - - s := sites.Sites[0] - - c.Assert(len(s.RegularPages()), qt.Equals, 8) - c.Assert(len(s.Pages()), qt.Equals, 16) - // dumpPages(s.AllPages()...) - - c.Assert(len(s.AllPages()), qt.Equals, 31) - - bundleWithSubPath := s.getPage(kinds.KindPage, "lb/index") - c.Assert(bundleWithSubPath, qt.Not(qt.IsNil)) - - // See https://github.com/gohugoio/hugo/issues/4312 - // Before that issue: - // A bundle in a/b/index.en.md - // a/b/index.en.md => OK - // a/b/index => OK - // index.en.md => ambiguous, but OK. - // With bundles, the file name has little meaning, the folder it lives in does. So this should also work: - // a/b - // and probably also just b (aka "my-bundle") - // These may also be translated, so we also need to test that. 
- // "bf", "my-bf-bundle", "index.md + nn - bfBundle := s.getPage(kinds.KindPage, "bf/my-bf-bundle/index") - c.Assert(bfBundle, qt.Not(qt.IsNil)) - c.Assert(bfBundle.Language().Lang, qt.Equals, "en") - c.Assert(s.getPage(kinds.KindPage, "bf/my-bf-bundle/index.md"), qt.Equals, bfBundle) - c.Assert(s.getPage(kinds.KindPage, "bf/my-bf-bundle"), qt.Equals, bfBundle) - c.Assert(s.getPage(kinds.KindPage, "my-bf-bundle"), qt.Equals, bfBundle) - - nnSite := sites.Sites[1] - c.Assert(len(nnSite.RegularPages()), qt.Equals, 7) - - bfBundleNN := nnSite.getPage(kinds.KindPage, "bf/my-bf-bundle/index") - c.Assert(bfBundleNN, qt.Not(qt.IsNil)) - c.Assert(bfBundleNN.Language().Lang, qt.Equals, "nn") - c.Assert(nnSite.getPage(kinds.KindPage, "bf/my-bf-bundle/index.nn.md"), qt.Equals, bfBundleNN) - c.Assert(nnSite.getPage(kinds.KindPage, "bf/my-bf-bundle"), qt.Equals, bfBundleNN) - c.Assert(nnSite.getPage(kinds.KindPage, "my-bf-bundle"), qt.Equals, bfBundleNN) - - // See https://github.com/gohugoio/hugo/issues/4295 - // Every resource should have its Name prefixed with its base folder. 
- cBundleResources := bundleWithSubPath.Resources().Match("c/**") - c.Assert(len(cBundleResources), qt.Equals, 4) - bundlePage := bundleWithSubPath.Resources().GetMatch("c/page*") - c.Assert(bundlePage, qt.Not(qt.IsNil)) - - bcBundleNN, _ := nnSite.getPageNew(nil, "bc") - c.Assert(bcBundleNN, qt.Not(qt.IsNil)) - bcBundleEN, _ := s.getPageNew(nil, "bc") - c.Assert(bcBundleNN.Language().Lang, qt.Equals, "nn") - c.Assert(bcBundleEN.Language().Lang, qt.Equals, "en") - c.Assert(len(bcBundleNN.Resources()), qt.Equals, 3) - c.Assert(len(bcBundleEN.Resources()), qt.Equals, 3) - b.AssertFileContent("public/en/bc/data1.json", "data1") - b.AssertFileContent("public/en/bc/data2.json", "data2") - b.AssertFileContent("public/en/bc/logo-bc.png", "logo") - b.AssertFileContent("public/nn/bc/data1.nn.json", "data1.nn") - b.AssertFileContent("public/nn/bc/data2.json", "data2") - b.AssertFileContent("public/nn/bc/logo-bc.png", "logo") - }) - } + b.AssertFileContent("public/root/index.html", + "Basic: Root|page|leaf|/root/|", + "Tree: Section: |CurrentSection: /|Parent: /|FirstSection: /", + ) } -func TestMultilingualDisableLanguage(t *testing.T) { +func TestPageBundlerShortcodeInBundledPage(t *testing.T) { t.Parallel() + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["taxonomy", "term"] +-- content/section/mybundle/index.md -- +--- +title: "Mybundle" +--- +-- content/section/mybundle/p1.md -- +--- +title: "P1" +--- - c := qt.New(t) - fs, cfg := newTestBundleSourcesMultilingual(t) - cfg.Set("disableLanguages", []string{"nn"}) - configs, err := loadTestConfigFromProvider(cfg) - c.Assert(err, qt.IsNil) +P1 content. 
- b := newTestSitesBuilderFromDepsCfg(t, deps.DepsCfg{Fs: fs, Configs: configs}).WithNothingAdded() - b.Build(BuildCfg{}) - sites := b.H +{{< myShort >}} - c.Assert(len(sites.Sites), qt.Equals, 1) +-- layouts/_default/single.html -- +Bundled page: {{ .RelPermalink}}|{{ with .Resources.Get "p1.md" }}Title: {{ .Title }}|Content: {{ .Content }}{{ end }}| +-- layouts/shortcodes/myShort.html -- +MyShort. - s := sites.Sites[0] +` + b := Test(t, files) - c.Assert(len(s.RegularPages()), qt.Equals, 8) - c.Assert(len(s.Pages()), qt.Equals, 16) - // No nn pages - c.Assert(len(s.AllPages()), qt.Equals, 16) - s.pageMap.withEveryBundlePage(func(p *pageState) bool { - c.Assert(p.Language().Lang != "nn", qt.Equals, true) - return false - }) + b.AssertFileContent("public/section/mybundle/index.html", + "Bundled page: /section/mybundle/|Title: P1|Content: <p>P1 content.</p>\nMyShort.", + ) } -func TestPageBundlerSiteWitSymbolicLinksInContent(t *testing.T) { - skipSymlink(t) - - wd, _ := os.Getwd() - defer func() { - os.Chdir(wd) - }() - - c := qt.New(t) - - // We need to use the OS fs for this. - workingDir, clean, err := htesting.CreateTempDir(hugofs.Os, "hugosym") - c.Assert(err, qt.IsNil) - cfg := config.New() - cfg.Set("workingDir", workingDir) - cfg.Set("publishDir", "public") - fs := hugofs.NewFromOld(hugofs.Os, cfg) - - contentDirName := "content" - - contentDir := filepath.Join(workingDir, contentDirName) - c.Assert(os.MkdirAll(filepath.Join(contentDir, "a"), 0777), qt.IsNil) - - for i := 1; i <= 3; i++ { - c.Assert(os.MkdirAll(filepath.Join(workingDir, fmt.Sprintf("symcontent%d", i)), 0777), qt.IsNil) - } - - c.Assert(os.MkdirAll(filepath.Join(workingDir, "symcontent2", "a1"), 0777), qt.IsNil) - - // Symlinked sections inside content. 
- os.Chdir(contentDir) - for i := 1; i <= 3; i++ { - c.Assert(os.Symlink(filepath.FromSlash(fmt.Sprintf(("../symcontent%d"), i)), fmt.Sprintf("symbolic%d", i)), qt.IsNil) - } - - c.Assert(os.Chdir(filepath.Join(contentDir, "a")), qt.IsNil) - - // Create a symlink to one single content file - c.Assert(os.Symlink(filepath.FromSlash("../../symcontent2/a1/page.md"), "page_s.md"), qt.IsNil) - - c.Assert(os.Chdir(filepath.FromSlash("../../symcontent3")), qt.IsNil) +func TestPageBundlerResourceMultipleOutputFormatsWithDifferentPaths(t *testing.T) { + t.Parallel() + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["taxonomy", "term"] +[outputformats] +[outputformats.cpath] +mediaType = "text/html" +path = "cpath" +-- content/section/mybundle/index.md -- +--- +title: "My Bundle" +outputs: ["html", "cpath"] +--- +-- content/section/mybundle/hello.txt -- +Hello. +-- content/section/mybundle/p1.md -- +--- +title: "P1" +--- +P1. + +{{< hello >}} + +-- layouts/shortcodes/hello.html -- +Hello HTML. +-- layouts/_default/single.html -- +Basic: {{ .Title }}|{{ .Kind }}|{{ .BundleType }}|{{ .RelPermalink }}| +Resources: {{ range .Resources }}RelPermalink: {{ .RelPermalink }}|Content: {{ .Content }}|{{ end }}| +-- layouts/shortcodes/hello.cpath -- +Hello CPATH. +-- layouts/_default/single.cpath -- +Basic: {{ .Title }}|{{ .Kind }}|{{ .BundleType }}|{{ .RelPermalink }}| +Resources: {{ range .Resources }}RelPermalink: {{ .RelPermalink }}|Content: {{ .Content }}|{{ end }}| +` - // Create a circular symlink. Will print some warnings. 
- c.Assert(os.Symlink(filepath.Join("..", contentDirName), filepath.FromSlash("circus")), qt.IsNil) + b := Test(t, files) - c.Assert(os.Chdir(workingDir), qt.IsNil) + b.AssertFileContent("public/section/mybundle/index.html", + "Basic: My Bundle|page|leaf|/section/mybundle/|", + "Resources: RelPermalink: |Content: <p>P1.</p>\nHello HTML.\n|RelPermalink: /section/mybundle/hello.txt|Content: Hello.||", + ) - defer clean() + b.AssertFileContent("public/cpath/section/mybundle/index.html", "Basic: My Bundle|page|leaf|/section/mybundle/|\nResources: RelPermalink: |Content: <p>P1.</p>\nHello CPATH.\n|RelPermalink: /section/mybundle/hello.txt|Content: Hello.||") +} - cfg.Set("workingDir", workingDir) - cfg.Set("contentDir", contentDirName) - cfg.Set("baseURL", "https://example.com") - configs, err := loadTestConfigFromProvider(cfg) - c.Assert(err, qt.IsNil) +func TestPageBundlerMultilingualTextResource(t *testing.T) { + t.Parallel() - layout := `{{ .Title }}|{{ .Content }}` - pageContent := `--- -slug: %s -date: 2017-10-09 + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["taxonomy", "term"] +defaultContentLanguage = "en" +defaultContentLanguageInSubdir = true +[languages] +[languages.en] +weight = 1 +[languages.nn] +weight = 2 +-- content/mybundle/index.md -- +--- +title: "My Bundle" +--- +-- content/mybundle/index.nn.md -- +--- +title: "My Bundle NN" --- +-- content/mybundle/f1.txt -- +F1 +-- content/mybundle/f2.txt -- +F2 +-- content/mybundle/f2.nn.txt -- +F2 nn. +-- layouts/_default/single.html -- +{{ .Title }}|{{ .RelPermalink }}|{{ .Lang }}| +Resources: {{ range .Resources }}RelPermalink: {{ .RelPermalink }}|Content: {{ .Content }}|{{ end }}| -TheContent. 
` + b := Test(t, files) - b := newTestSitesBuilderFromDepsCfg(t, deps.DepsCfg{ - Fs: fs, - Configs: configs, - }) - - b.WithTemplates( - "_default/single.html", layout, - "_default/list.html", layout, - ) - - b.WithContent( - "a/regular.md", fmt.Sprintf(pageContent, "a1"), - ) + b.AssertFileContent("public/en/mybundle/index.html", "My Bundle|/en/mybundle/|en|\nResources: RelPermalink: /en/mybundle/f1.txt|Content: F1|RelPermalink: /en/mybundle/f2.txt|Content: F2||") + b.AssertFileContent("public/nn/mybundle/index.html", "My Bundle NN|/nn/mybundle/|nn|\nResources: RelPermalink: /en/mybundle/f1.txt|Content: F1|RelPermalink: /nn/mybundle/f2.nn.txt|Content: F2 nn.||") +} - b.WithSourceFile( - "symcontent1/s1.md", fmt.Sprintf(pageContent, "s1"), - "symcontent1/s2.md", fmt.Sprintf(pageContent, "s2"), - // Regular files inside symlinked folder. - "symcontent1/s1.md", fmt.Sprintf(pageContent, "s1"), - "symcontent1/s2.md", fmt.Sprintf(pageContent, "s2"), - - // A bundle - "symcontent2/a1/index.md", fmt.Sprintf(pageContent, ""), - "symcontent2/a1/page.md", fmt.Sprintf(pageContent, "page"), - "symcontent2/a1/logo.png", "image", - - // Assets - "symcontent3/s1.png", "image", - "symcontent3/s2.png", "image", - ) +func TestMultilingualDisableLanguage(t *testing.T) { + t.Parallel() - b.Build(BuildCfg{}) - s := b.H.Sites[0] + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["taxonomy", "term"] +defaultContentLanguage = "en" +defaultContentLanguageInSubdir = true +disableLanguages = ["nn"] +[languages] +[languages.en] +weight = 1 +[languages.nn] +weight = 2 +-- content/p1.md -- +--- +title: "P1" +--- +P1 +-- content/p1.nn.md -- +--- +title: "P1nn" +--- +P1nn +-- layouts/_default/single.html -- +{{ .Title }}|{{ .Content }}|{{ .Lang }}| - c.Assert(len(s.RegularPages()), qt.Equals, 7) - a1Bundle := s.getPage(kinds.KindPage, "symbolic2/a1/index.md") - c.Assert(a1Bundle, qt.Not(qt.IsNil)) - c.Assert(len(a1Bundle.Resources()), qt.Equals, 2) - 
c.Assert(len(a1Bundle.Resources().ByType(pageResourceType)), qt.Equals, 1) +` + b := Test(t, files) - b.AssertFileContent(filepath.FromSlash("public/a/page/index.html"), "TheContent") - b.AssertFileContent(filepath.FromSlash("public/symbolic1/s1/index.html"), "TheContent") - b.AssertFileContent(filepath.FromSlash("public/symbolic2/a1/index.html"), "TheContent") + b.AssertFileContent("public/en/p1/index.html", "P1|<p>P1</p>\n|en|") + b.AssertFileExists("public/public/nn/p1/index.html", false) + b.Assert(len(b.H.Sites), qt.Equals, 1) } func TestPageBundlerHeadless(t *testing.T) { @@ -544,10 +263,10 @@ HEADLESS {{< myShort >}} c.Assert(len(s.RegularPages()), qt.Equals, 1) - regular := s.getPage(kinds.KindPage, "a/index") + regular := s.getPageOldVersion(kinds.KindPage, "a/index") c.Assert(regular.RelPermalink(), qt.Equals, "/s1/") - headless := s.getPage(kinds.KindPage, "b/index") + headless := s.getPageOldVersion(kinds.KindPage, "b/index") c.Assert(headless, qt.Not(qt.IsNil)) c.Assert(headless.Title(), qt.Equals, "Headless Bundle in Topless Bar") c.Assert(headless.RelPermalink(), qt.Equals, "") @@ -576,6 +295,7 @@ HEADLESS {{< myShort >}} // No headless bundles here, please. // https://github.com/gohugoio/hugo/issues/6492 c.Assert(s.RegularPages(), qt.HasLen, 1) + c.Assert(s.Pages(), qt.HasLen, 4) c.Assert(s.home.RegularPages(), qt.HasLen, 1) c.Assert(s.home.Pages(), qt.HasLen, 1) } @@ -686,7 +406,6 @@ Single content. b.Build(BuildCfg{}) b.AssertFileContent("public/nn/mybundle/data.yaml", "data nn") - b.AssertFileContent("public/nn/mybundle/forms.yaml", "forms en") b.AssertFileContent("public/mybundle/data.yaml", "data en") b.AssertFileContent("public/mybundle/forms.yaml", "forms en") @@ -701,293 +420,113 @@ Single content. 
b.AssertFileContent("public/section-not-bundle/single/index.html", "Section Single", "|<p>Single content.</p>") } -func newTestBundleSources(t testing.TB) (*hugofs.Fs, config.Provider) { - cfg, fs := newTestCfgBasic() - c := qt.New(t) - - workDir := "/work" - cfg.Set("workingDir", workDir) - cfg.Set("contentDir", "base") - cfg.Set("baseURL", "https://example.com") - cfg.Set("mediaTypes", map[string]any{ - "bepsays/bep": map[string]any{ - "suffixes": []string{"bep"}, - }, - }) - - pageContent := `--- -title: "Bundle Galore" -slug: pageslug -date: 2017-10-09 ---- - -TheContent. -` - - pageContentShortcode := `--- -title: "Bundle Galore" -slug: pageslug -date: 2017-10-09 ---- - -TheContent. - -{{< myShort >}} -` - - pageWithImageShortcodeAndResourceMetadataContent := `--- -title: "Bundle Galore" -slug: pageslug -date: 2017-10-09 -resources: -- src: "*.jpg" - name: "my-sunset-:counter" - title: "Sunset Galore :counter" - params: - myParam: "My Sunny Param" ---- - -TheContent. - -{{< myShort >}} -` - - pageContentNoSlug := `--- -title: "Bundle Galore #2" -date: 2017-10-09 ---- - -TheContent. -` - - singleLayout := ` -Single Title: {{ .Title }} -Single RelPermalink: {{ .RelPermalink }} -Single Permalink: {{ .Permalink }} -Content: {{ .Content }} -{{ $sunset := .Resources.GetMatch "my-sunset-1*" }} -{{ with $sunset }} -Sunset RelPermalink: {{ .RelPermalink }} -Sunset Permalink: {{ .Permalink }} -{{ $thumb := .Fill "123x123" }} -Thumb Width: {{ $thumb.Width }} -Thumb Name: {{ $thumb.Name }} -Thumb Title: {{ $thumb.Title }} -Thumb RelPermalink: {{ $thumb.RelPermalink }} -{{ end }} -{{ $types := slice "image" "page" }} -{{ range $types }} -{{ $typeTitle := . | title }} -{{ range $i, $e := $.Resources.ByType . 
}} -{{ $i }}: {{ $typeTitle }} Title: {{ .Title }} -{{ $i }}: {{ $typeTitle }} Name: {{ .Name }} -{{ $i }}: {{ $typeTitle }} RelPermalink: {{ .RelPermalink }}| -{{ $i }}: {{ $typeTitle }} Params: {{ printf "%v" .Params }} -{{ $i }}: {{ $typeTitle }} myParam: Lower: {{ .Params.myparam }} Caps: {{ .Params.MYPARAM }} -{{ end }} -{{ end }} -` +func TestBundledResourcesMultilingualDuplicateResourceFiles(t *testing.T) { + t.Parallel() - myShort := ` -MyShort in {{ .Page.File.Path }}: -{{ $sunset := .Page.Resources.GetMatch "my-sunset-2*" }} -{{ with $sunset }} -Short Sunset RelPermalink: {{ .RelPermalink }} -{{ $thumb := .Fill "56x56" }} -Short Thumb Width: {{ $thumb.Width }} -{{ end }} + files := ` +-- hugo.toml -- +baseURL = "https://example.com/" +[markup] +[markup.goldmark] +duplicateResourceFiles = true +[languages] +[languages.en] +weight = 1 +[languages.en.permalinks] +"/" = "/enpages/:slug/" +[languages.nn] +weight = 2 +[languages.nn.permalinks] +"/" = "/nnpages/:slug/" +-- content/mybundle/index.md -- +--- +title: "My Bundle" +--- +{{< getresource "f1.txt" >}} +{{< getresource "f2.txt" >}} +-- content/mybundle/index.nn.md -- +--- +title: "My Bundle NN" +--- +{{< getresource "f1.txt" >}} +f2.nn.txt is the original name. +{{< getresource "f2.nn.txt" >}} +{{< getresource "f2.txt" >}} +{{< getresource "sub/f3.txt" >}} +-- content/mybundle/f1.txt -- +F1 en. +-- content/mybundle/sub/f3.txt -- +F1 en. +-- content/mybundle/f2.txt -- +F2 en. +-- content/mybundle/f2.nn.txt -- +F2 nn. 
+-- layouts/shortcodes/getresource.html -- +{{ $r := .Page.Resources.Get (.Get 0)}} +Resource: {{ (.Get 0) }}|{{ with $r }}{{ .RelPermalink }}|{{ .Content }}|{{ else }}Not found.{{ end}} +-- layouts/_default/single.html -- +{{ .Title }}|{{ .RelPermalink }}|{{ .Lang }}|{{ .Content }}| ` - - listLayout := `{{ .Title }}|{{ .Content }}` - - writeSource(t, fs, filepath.Join(workDir, "layouts", "_default", "single.html"), singleLayout) - writeSource(t, fs, filepath.Join(workDir, "layouts", "_default", "list.html"), listLayout) - writeSource(t, fs, filepath.Join(workDir, "layouts", "shortcodes", "myShort.html"), myShort) - writeSource(t, fs, filepath.Join(workDir, "layouts", "shortcodes", "myShort.customo"), myShort) - - writeSource(t, fs, filepath.Join(workDir, "base", "_index.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "_1.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "_1.png"), pageContent) - - writeSource(t, fs, filepath.Join(workDir, "base", "images", "hugo-logo.png"), "content") - writeSource(t, fs, filepath.Join(workDir, "base", "a", "2.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "a", "1.md"), pageContent) - - writeSource(t, fs, filepath.Join(workDir, "base", "a", "b", "index.md"), pageContentNoSlug) - writeSource(t, fs, filepath.Join(workDir, "base", "a", "b", "ab1.md"), pageContentNoSlug) - - // Mostly plain static assets in a folder with a page in a sub folder thrown in. 
- writeSource(t, fs, filepath.Join(workDir, "base", "assets", "pic1.png"), "content") - writeSource(t, fs, filepath.Join(workDir, "base", "assets", "pic2.png"), "content") - writeSource(t, fs, filepath.Join(workDir, "base", "assets", "pages", "mypage.md"), pageContent) - - // Bundle - writeSource(t, fs, filepath.Join(workDir, "base", "b", "my-bundle", "index.md"), pageWithImageShortcodeAndResourceMetadataContent) - writeSource(t, fs, filepath.Join(workDir, "base", "b", "my-bundle", "1.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "b", "my-bundle", "2.md"), pageContentShortcode) - writeSource(t, fs, filepath.Join(workDir, "base", "b", "my-bundle", "custom-mime.bep"), "bepsays") - writeSource(t, fs, filepath.Join(workDir, "base", "b", "my-bundle", "c", "logo.png"), "content") - - // Bundle with 은행 slug - // See https://github.com/gohugoio/hugo/issues/4241 - writeSource(t, fs, filepath.Join(workDir, "base", "c", "bundle", "index.md"), `--- -title: "은행 은행" -slug: 은행 -date: 2017-10-09 ---- - -Content for 은행. 
+ b := Test(t, files) + + // helpers.PrintFs(b.H.Fs.PublishDir, "", os.Stdout) + b.AssertFileContent("public/nn/nnpages/my-bundle-nn/index.html", ` +My Bundle NN +Resource: f1.txt|/nn/nnpages/my-bundle-nn/f1.txt| +Resource: f2.txt|/nn/nnpages/my-bundle-nn/f2.nn.txt|F2 nn.| +Resource: f2.nn.txt|/nn/nnpages/my-bundle-nn/f2.nn.txt|F2 nn.| +Resource: sub/f3.txt|/nn/nnpages/my-bundle-nn/sub/f3.txt|F1 en.| `) - // Bundle in root - writeSource(t, fs, filepath.Join(workDir, "base", "root", "index.md"), pageWithImageShortcodeAndResourceMetadataContent) - writeSource(t, fs, filepath.Join(workDir, "base", "root", "1.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "root", "c", "logo.png"), "content") + b.AssertFileContent("public/enpages/my-bundle/f2.txt", "F2 en.") + b.AssertFileContent("public/nn/nnpages/my-bundle-nn/f2.nn.txt", "F2 nn") - writeSource(t, fs, filepath.Join(workDir, "base", "c", "bundle", "logo-은행.png"), "은행 PNG") - - // Write a real image into one of the bundle above. 
- src, err := os.Open("testdata/sunset.jpg") - c.Assert(err, qt.IsNil) - - // We need 2 to test https://github.com/gohugoio/hugo/issues/4202 - out, err := fs.Source.Create(filepath.Join(workDir, "base", "b", "my-bundle", "sunset1.jpg")) - c.Assert(err, qt.IsNil) - out2, err := fs.Source.Create(filepath.Join(workDir, "base", "b", "my-bundle", "sunset2.jpg")) - c.Assert(err, qt.IsNil) - - _, err = io.Copy(out, src) - c.Assert(err, qt.IsNil) - out.Close() - src.Seek(0, 0) - _, err = io.Copy(out2, src) - out2.Close() - src.Close() - c.Assert(err, qt.IsNil) - - return fs, cfg -} - -func newTestBundleSourcesMultilingual(t *testing.T) (*hugofs.Fs, config.Provider) { - cfg, fs := newTestCfgBasic() - - workDir := "/work" - cfg.Set("workingDir", workDir) - cfg.Set("contentDir", "base") - cfg.Set("baseURL", "https://example.com") - cfg.Set("defaultContentLanguage", "en") - - langConfig := map[string]any{ - "en": map[string]any{ - "weight": 1, - "languageName": "English", - }, - "nn": map[string]any{ - "weight": 2, - "languageName": "Nynorsk", - }, - } - - cfg.Set("languages", langConfig) - - pageContent := `--- -slug: pageslug -date: 2017-10-09 ---- - -TheContent. 
-` + b.AssertFileContent("public/enpages/my-bundle/index.html", ` +Resource: f1.txt|/enpages/my-bundle/f1.txt|F1 en.| +Resource: f2.txt|/enpages/my-bundle/f2.txt|F2 en.| +`) + b.AssertFileContent("public/enpages/my-bundle/f1.txt", "F1 en.") - layout := `{{ .Title }}|{{ .Content }}|Lang: {{ .Site.Language.Lang }}` - - writeSource(t, fs, filepath.Join(workDir, "layouts", "_default", "single.html"), layout) - writeSource(t, fs, filepath.Join(workDir, "layouts", "_default", "list.html"), layout) - - writeSource(t, fs, filepath.Join(workDir, "base", "1s", "mypage.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "1s", "mypage.nn.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "1s", "mylogo.png"), "content") - - writeSource(t, fs, filepath.Join(workDir, "base", "bb", "_index.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "bb", "_index.nn.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "bb", "en.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "bb", "_1.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "bb", "_1.nn.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "bb", "a.png"), "content") - writeSource(t, fs, filepath.Join(workDir, "base", "bb", "b.png"), "content") - writeSource(t, fs, filepath.Join(workDir, "base", "bb", "b.nn.png"), "content") - writeSource(t, fs, filepath.Join(workDir, "base", "bb", "c.nn.png"), "content") - writeSource(t, fs, filepath.Join(workDir, "base", "bb", "b", "d.nn.png"), "content") - - writeSource(t, fs, filepath.Join(workDir, "base", "bc", "_index.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "bc", "_index.nn.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "bc", "page.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "bc", "logo-bc.png"), "logo") - writeSource(t, fs, filepath.Join(workDir, "base", "bc", "page.nn.md"), 
pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "bc", "data1.json"), "data1") - writeSource(t, fs, filepath.Join(workDir, "base", "bc", "data2.json"), "data2") - writeSource(t, fs, filepath.Join(workDir, "base", "bc", "data1.nn.json"), "data1.nn") - - writeSource(t, fs, filepath.Join(workDir, "base", "bd", "index.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "bd", "page.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "bd", "page.nn.md"), pageContent) - - writeSource(t, fs, filepath.Join(workDir, "base", "be", "_index.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "be", "page.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "be", "page.nn.md"), pageContent) - - // Bundle leaf, multilingual - writeSource(t, fs, filepath.Join(workDir, "base", "lb", "index.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "lb", "index.nn.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "lb", "1.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "lb", "2.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "lb", "2.nn.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "lb", "c", "page.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "lb", "c", "logo.png"), "content") - writeSource(t, fs, filepath.Join(workDir, "base", "lb", "c", "logo.nn.png"), "content") - writeSource(t, fs, filepath.Join(workDir, "base", "lb", "c", "one.png"), "content") - writeSource(t, fs, filepath.Join(workDir, "base", "lb", "c", "d", "deep.png"), "content") - - // Translated bundle in some sensible sub path. 
- writeSource(t, fs, filepath.Join(workDir, "base", "bf", "my-bf-bundle", "index.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "bf", "my-bf-bundle", "index.nn.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "bf", "my-bf-bundle", "page.md"), pageContent) - - return fs, cfg + // Should be duplicated to the nn bundle. + b.AssertFileContent("public/nn/nnpages/my-bundle-nn/f1.txt", "F1 en.") } // https://github.com/gohugoio/hugo/issues/5858 func TestBundledResourcesWhenMultipleOutputFormats(t *testing.T) { t.Parallel() - b := newTestSitesBuilder(t).Running().WithConfigFile("toml", ` + files := ` +-- hugo.toml -- baseURL = "https://example.org" +disableKinds = ["taxonomy", "term"] +disableLiveReload = true [outputs] - # This looks odd, but it triggers the behaviour in #5858 - # The total output formats list gets sorted, so CSS before HTML. - home = [ "CSS" ] - -`) - b.WithContent("mybundle/index.md", ` +# This looks odd, but it triggers the behaviour in #5858 +# The total output formats list gets sorted, so CSS before HTML. +home = [ "CSS" ] +-- content/mybundle/index.md -- --- title: Page -date: 2017-01-15 --- -`, - "mybundle/data.json", "MyData", - ) +-- content/mybundle/data.json -- +MyData +-- layouts/_default/single.html -- +{{ range .Resources }} +{{ .ResourceType }}|{{ .Title }}| +{{ end }} +` - b.CreateSites().Build(BuildCfg{}) + b := TestRunning(t, files) b.AssertFileContent("public/mybundle/data.json", "MyData") - // Change the bundled JSON file and make sure it gets republished. 
- b.EditFiles("content/mybundle/data.json", "My changed data") - - b.Build(BuildCfg{}) + b.EditFileReplaceAll("content/mybundle/data.json", "MyData", "My changed data").Build() b.AssertFileContent("public/mybundle/data.json", "My changed data") } +// https://github.com/gohugoio/hugo/issues/5858 + // https://github.com/gohugoio/hugo/issues/4870 func TestBundleSlug(t *testing.T) { t.Parallel() @@ -1016,191 +555,45 @@ slug: %s c.Assert(b.CheckExists("public/about/services2/this-is-another-slug/index.html"), qt.Equals, true) } -func TestBundleMisc(t *testing.T) { - config := ` -baseURL = "https://example.com" -defaultContentLanguage = "en" -defaultContentLanguageInSubdir = true -ignoreFiles = ["README\\.md", "content/en/ignore"] - -[Languages] -[Languages.en] -weight = 99999 -contentDir = "content/en" -[Languages.nn] -weight = 20 -contentDir = "content/nn" -[Languages.sv] -weight = 30 -contentDir = "content/sv" -[Languages.nb] -weight = 40 -contentDir = "content/nb" - -` - - const pageContent = `--- -title: %q ---- -` - createPage := func(s string) string { - return fmt.Sprintf(pageContent, s) - } - - b := newTestSitesBuilder(t).WithConfigFile("toml", config) - b.WithLogger(loggers.NewDefault()) - - b.WithTemplates("_default/list.html", `{{ range .Site.Pages }} -{{ .Kind }}|{{ .Path }}|{{ with .CurrentSection }}CurrentSection: {{ .Path }}{{ end }}|{{ .RelPermalink }}{{ end }} -`) - - b.WithTemplates("_default/single.html", `Single: {{ .Title }}`) - - b.WithContent("en/sect1/sect2/_index.md", createPage("en: Sect 2")) - b.WithContent("en/sect1/sect2/page.md", createPage("en: Page")) - b.WithContent("en/sect1/sect2/data-branch.json", "mydata") - b.WithContent("nn/sect1/sect2/page.md", createPage("nn: Page")) - b.WithContent("nn/sect1/sect2/data-branch.json", "my nn data") - - // En only - b.WithContent("en/enonly/myen.md", createPage("en: Page")) - b.WithContent("en/enonly/myendata.json", "mydata") - - // Leaf - - b.WithContent("nn/b1/index.md", createPage("nn: leaf")) - 
b.WithContent("en/b1/index.md", createPage("en: leaf")) - b.WithContent("sv/b1/index.md", createPage("sv: leaf")) - b.WithContent("nb/b1/index.md", createPage("nb: leaf")) - - // Should be ignored - b.WithContent("en/ignore/page.md", createPage("en: ignore")) - b.WithContent("en/README.md", createPage("en: ignore")) - - // Both leaf and branch bundle in same dir - b.WithContent("en/b2/index.md", `--- -slug: leaf ---- -`) - b.WithContent("en/b2/_index.md", createPage("en: branch")) - - b.WithContent("en/b1/data1.json", "en: data") - b.WithContent("sv/b1/data1.json", "sv: data") - b.WithContent("sv/b1/data2.json", "sv: data2") - b.WithContent("nb/b1/data2.json", "nb: data2") - - b.WithContent("en/b3/_index.md", createPage("en: branch")) - b.WithContent("en/b3/p1.md", createPage("en: page")) - b.WithContent("en/b3/data1.json", "en: data") - - b.Build(BuildCfg{}) - - b.AssertFileContent("public/en/index.html", - filepath.FromSlash("section|sect1/sect2/_index.md|CurrentSection: sect1/sect2/_index.md"), - "myen.md|CurrentSection: enonly") - - b.AssertFileContentFn("public/en/index.html", func(s string) bool { - // Check ignored files - return !regexp.MustCompile("README|ignore").MatchString(s) - }) - - b.AssertFileContent("public/nn/index.html", filepath.FromSlash("page|sect1/sect2/page.md|CurrentSection: sect1")) - b.AssertFileContentFn("public/nn/index.html", func(s string) bool { - return !strings.Contains(s, "enonly") - }) - - // Check order of inherited data file - b.AssertFileContent("public/nb/b1/data1.json", "en: data") // Default content - b.AssertFileContent("public/nn/b1/data2.json", "sv: data") // First match - - b.AssertFileContent("public/en/enonly/myen/index.html", "Single: en: Page") - b.AssertFileContent("public/en/enonly/myendata.json", "mydata") - - c := qt.New(t) - c.Assert(b.CheckExists("public/sv/enonly/myen/index.html"), qt.Equals, false) - - // Both leaf and branch bundle in same dir - // We log a warning about it, but we keep both. 
- b.AssertFileContent("public/en/b2/index.html", - "/en/b2/leaf/", - filepath.FromSlash("section|sect1/sect2/_index.md|CurrentSection: sect1/sect2/_index.md")) -} - -// Issue 6136 +// See #11663 func TestPageBundlerPartialTranslations(t *testing.T) { - config := ` -baseURL = "https://example.org" + t.Parallel() + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["taxonomy", "term"] defaultContentLanguage = "en" defaultContentLanguageInSubDir = true -disableKinds = ["taxonomy", "term"] [languages] [languages.nn] -languageName = "Nynorsk" weight = 2 -title = "Tittel på Nynorsk" [languages.en] -title = "Title in English" -languageName = "English" weight = 1 -` - - pageContent := func(id string) string { - return fmt.Sprintf(` +-- content/section/mybundle/index.md -- --- -title: %q +title: "Mybundle" +--- +-- content/section/mybundle/bundledpage.md -- +--- +title: "Bundled page en" +--- +-- content/section/mybundle/bundledpage.nn.md -- +--- +title: "Bundled page nn" --- -`, id) - } - - dataContent := func(id string) string { - return id - } - - b := newTestSitesBuilder(t).WithConfigFile("toml", config) - - b.WithContent("blog/sect1/_index.nn.md", pageContent("s1.nn")) - b.WithContent("blog/sect1/data.json", dataContent("s1.data")) - - b.WithContent("blog/sect1/b1/index.nn.md", pageContent("s1.b1.nn")) - b.WithContent("blog/sect1/b1/data.json", dataContent("s1.b1.data")) - - b.WithContent("blog/sect2/_index.md", pageContent("s2")) - b.WithContent("blog/sect2/data.json", dataContent("s2.data")) - - b.WithContent("blog/sect2/b1/index.md", pageContent("s2.b1")) - b.WithContent("blog/sect2/b1/data.json", dataContent("s2.b1.data")) - - b.WithContent("blog/sect2/b2/index.md", pageContent("s2.b2")) - b.WithContent("blog/sect2/b2/bp.md", pageContent("s2.b2.bundlecontent")) - - b.WithContent("blog/sect2/b3/index.md", pageContent("s2.b3")) - b.WithContent("blog/sect2/b3/bp.nn.md", pageContent("s2.b3.bundlecontent.nn")) - 
b.WithContent("blog/sect2/b4/index.nn.md", pageContent("s2.b4")) - b.WithContent("blog/sect2/b4/bp.nn.md", pageContent("s2.b4.bundlecontent.nn")) +-- layouts/_default/single.html -- +Bundled page: {{ .RelPermalink}}|Len resources: {{ len .Resources }}| - b.WithTemplates("index.html", ` -Num Pages: {{ len .Site.Pages }} -{{ range .Site.Pages }} -{{ .Kind }}|{{ .RelPermalink }}|Content: {{ .Title }}|Resources: {{ range .Resources }}R: {{ .Title }}|{{ .Content }}|{{ end -}} -{{ end }} -`) - b.Build(BuildCfg{}) +` + b := Test(t, files) - b.AssertFileContent("public/nn/index.html", - "Num Pages: 6", - "page|/nn/blog/sect1/b1/|Content: s1.b1.nn|Resources: R: data.json|s1.b1.data|", - "page|/nn/blog/sect2/b3/|Content: s2.b3|Resources: R: s2.b3.bundlecontent.nn|", - "page|/nn/blog/sect2/b4/|Content: s2.b4|Resources: R: s2.b4.bundlecontent.nn", + b.AssertFileContent("public/en/section/mybundle/index.html", + "Bundled page: /en/section/mybundle/|Len resources: 1|", ) - b.AssertFileContent("public/en/index.html", - "Num Pages: 6", - "section|/en/blog/sect2/|Content: s2|Resources: R: data.json|s2.data|", - "page|/en/blog/sect2/b1/|Content: s2.b1|Resources: R: data.json|s2.b1.data|", - "page|/en/blog/sect2/b2/|Content: s2.b2|Resources: R: s2.b2.bundlecontent|", - ) + b.AssertFileExists("public/nn/section/mybundle/index.html", false) } // #6208 @@ -1329,7 +722,7 @@ func TestPageBundlerHome(t *testing.T) { cfg.Set("publishDir", "public") fs := hugofs.NewFromOld(hugofs.Os, cfg) - os.MkdirAll(filepath.Join(workDir, "content"), 0777) + os.MkdirAll(filepath.Join(workDir, "content"), 0o777) defer clean() diff --git a/hugolib/pagecollections.go b/hugolib/pagecollections.go index 26da4905e..8e05ad7e6 100644 --- a/hugolib/pagecollections.go +++ b/hugolib/pagecollections.go @@ -1,4 +1,4 @@ -// Copyright 2019 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. 
// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -18,91 +18,65 @@ import ( "path" "path/filepath" "strings" - "sync" - - "github.com/gohugoio/hugo/common/paths" + "github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/hugofs/files" - "github.com/gohugoio/hugo/helpers" + "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/resources/kinds" "github.com/gohugoio/hugo/resources/page" ) -// PageCollections contains the page collections for a site. -type PageCollections struct { +// pageFinder provides ways to find a Page in a Site. +type pageFinder struct { pageMap *pageMap - - // Lazy initialized page collections - pages *lazyPagesFactory - regularPages *lazyPagesFactory - allPages *lazyPagesFactory - allRegularPages *lazyPagesFactory -} - -// Pages returns all pages. -// This is for the current language only. -func (c *PageCollections) Pages() page.Pages { - return c.pages.get() -} - -// RegularPages returns all the regular pages. -// This is for the current language only. -func (c *PageCollections) RegularPages() page.Pages { - return c.regularPages.get() -} - -// AllPages returns all pages for all languages. -func (c *PageCollections) AllPages() page.Pages { - return c.allPages.get() } -// AllRegularPages returns all regular pages for all languages. 
-func (c *PageCollections) AllRegularPages() page.Pages { - return c.allRegularPages.get() -} - -type lazyPagesFactory struct { - pages page.Pages - - init sync.Once - factory page.PagesFactory -} - -func (l *lazyPagesFactory) get() page.Pages { - l.init.Do(func() { - l.pages = l.factory() - }) - return l.pages -} - -func newLazyPagesFactory(factory page.PagesFactory) *lazyPagesFactory { - return &lazyPagesFactory{factory: factory} -} - -func newPageCollections(m *pageMap) *PageCollections { +func newPageFinder(m *pageMap) *pageFinder { if m == nil { panic("must provide a pageMap") } + c := &pageFinder{pageMap: m} + return c +} - c := &PageCollections{pageMap: m} +// getPageRef resolves a Page from ref/relRef, with a slightly more comprehensive +// search path than getPage. +func (c *pageFinder) getPageRef(context page.Page, ref string) (page.Page, error) { + n, err := c.getContentNode(context, true, ref) + if err != nil { + return nil, err + } - c.pages = newLazyPagesFactory(func() page.Pages { - return m.createListAllPages() - }) + if p, ok := n.(page.Page); ok { + return p, nil + } + return nil, nil +} - c.regularPages = newLazyPagesFactory(func() page.Pages { - return c.findPagesByKindIn(kinds.KindPage, c.pages.get()) - }) +func (c *pageFinder) getPage(context page.Page, ref string) (page.Page, error) { + n, err := c.getContentNode(context, false, filepath.ToSlash(ref)) + if err != nil { + return nil, err + } + if p, ok := n.(page.Page); ok { + return p, nil + } + return nil, nil +} - return c +// Only used in tests. +func (c *pageFinder) getPageOldVersion(kind string, sections ...string) page.Page { + refs := append([]string{kind}, path.Join(sections...)) + p, _ := c.getPageForRefs(refs...) + return p } // This is an adapter func for the old API with Kind as first argument. // This is invoked when you do .Site.GetPage. We drop the Kind and fails // if there are more than 2 arguments, which would be ambiguous. 
-func (c *PageCollections) getPageOldVersion(ref ...string) (page.Page, error) { +func (c *pageFinder) getPageForRefs(ref ...string) (page.Page, error) { var refs []string for _, r := range ref { // A common construct in the wild is @@ -141,201 +115,156 @@ func (c *PageCollections) getPageOldVersion(ref ...string) (page.Page, error) { key = "/" + key } - return c.getPageNew(nil, key) + return c.getPage(nil, key) } -// Only used in tests. -func (c *PageCollections) getPage(typ string, sections ...string) page.Page { - refs := append([]string{typ}, path.Join(sections...)) - p, _ := c.getPageOldVersion(refs...) - return p -} +const defaultContentExt = ".md" -// getPageRef resolves a Page from ref/relRef, with a slightly more comprehensive -// search path than getPageNew. -func (c *PageCollections) getPageRef(context page.Page, ref string) (page.Page, error) { - n, err := c.getContentNode(context, true, ref) - if err != nil || n == nil || n.p == nil { - return nil, err - } - return n.p, nil -} - -func (c *PageCollections) getPageNew(context page.Page, ref string) (page.Page, error) { - n, err := c.getContentNode(context, false, ref) - if err != nil || n == nil || n.p == nil { - return nil, err - } - return n.p, nil -} - -func (c *PageCollections) getSectionOrPage(ref string) (*contentNode, string) { - var n *contentNode - - pref := helpers.AddTrailingSlash(ref) - s, v, found := c.pageMap.sections.LongestPrefix(pref) - - if found { - n = v.(*contentNode) +func (c *pageFinder) getContentNode(context page.Page, isReflink bool, ref string) (contentNodeI, error) { + inRef := ref + if ref == "" { + ref = "/" } - if found && s == pref { - // A section - return n, "" + if paths.HasExt(ref) { + return c.getContentNodeForRef(context, isReflink, true, inRef, ref) } - m := c.pageMap - - filename := strings.TrimPrefix(strings.TrimPrefix(ref, s), "/") - langSuffix := "." + m.s.Lang() - - // Trim both extension and any language code. 
- name := paths.PathNoExt(filename) - name = strings.TrimSuffix(name, langSuffix) - - // These are reserved bundle names and will always be stored by their owning - // folder name. - name = strings.TrimSuffix(name, "/index") - name = strings.TrimSuffix(name, "/_index") + var refs []string - if !found { - return nil, name + // We are always looking for a content file and having an extension greatly simplifies the code that follows, + // even in the case where the extension does not match this one. + if ref == "/" { + refs = append(refs, "/_index"+defaultContentExt) + } else if strings.HasSuffix(ref, "/index") { + refs = append(refs, ref+"/index"+defaultContentExt) + refs = append(refs, ref+defaultContentExt) + } else { + refs = append(refs, ref+defaultContentExt) } - // Check if it's a section with filename provided. - if !n.p.File().IsZero() && n.p.File().LogicalName() == filename { - return n, name + for _, ref := range refs { + n, err := c.getContentNodeForRef(context, isReflink, false, inRef, ref) + if n != nil || err != nil { + return n, err + } } - return m.getPage(s, name), name + return nil, nil } -// For Ref/Reflink and .Site.GetPage do simple name lookups for the potentially ambiguous myarticle.md and /myarticle.md, -// but not when we get ./myarticle*, section/myarticle. -func shouldDoSimpleLookup(ref string) bool { - if ref[0] == '.' { - return false - } +func (c *pageFinder) getContentNodeForRef(context page.Page, isReflink, hadExtension bool, inRef, ref string) (contentNodeI, error) { + s := c.pageMap.s + contentPathParser := s.Conf.PathParser() - slashCount := strings.Count(ref, "/") + if context != nil && !strings.HasPrefix(ref, "/") { + // Try the page-relative path first. 
+ // Branch pages: /mysection, "./mypage" => /mysection/mypage + // Regular pages: /mysection/mypage.md, Path=/mysection/mypage, "./someotherpage" => /mysection/mypage/../someotherpage + // Regular leaf bundles: /mysection/mypage/index.md, Path=/mysection/mypage, "./someotherpage" => /mysection/mypage/../someotherpage + // Given the above, for regular pages we use the containing folder. + var baseDir string + if pi := context.PathInfo(); pi != nil { + if pi.IsBranchBundle() || (hadExtension) { + baseDir = pi.Dir() + } else { + baseDir = pi.ContainerDir() + } + } - if slashCount > 1 { - return false - } + rel := path.Join(baseDir, inRef) - return slashCount == 0 || ref[0] == '/' -} + if !hadExtension && !paths.HasExt(rel) { + // See comment above. + rel += defaultContentExt + } -func (c *PageCollections) getContentNode(context page.Page, isReflink bool, ref string) (*contentNode, error) { - ref = filepath.ToSlash(strings.ToLower(strings.TrimSpace(ref))) + relPath := contentPathParser.Parse(files.ComponentFolderContent, rel) - if ref == "" { - ref = "/" - } - - inRef := ref - navUp := strings.HasPrefix(ref, "..") - var doSimpleLookup bool - if isReflink || context == nil { - doSimpleLookup = shouldDoSimpleLookup(ref) - } + n, err := c.getContentNodeFromPath(relPath, ref) + if n != nil || err != nil { + return n, err + } - if context != nil && !strings.HasPrefix(ref, "/") { - // Try the page-relative path. - var base string - if context.File().IsZero() { - base = context.SectionsPath() - } else { - meta := context.File().FileInfo().Meta() - base = filepath.ToSlash(filepath.Dir(meta.Path)) - if meta.Classifier == files.ContentClassLeaf { - // Bundles are stored in subfolders e.g. blog/mybundle/index.md, - // so if the user has not explicitly asked to go up, - // look on the "blog" level. 
- if !navUp { - base = path.Dir(base) - } + if hadExtension && context.File() != nil { + if n, err := c.getContentNodeFromRefReverseLookup(inRef, context.File().FileInfo()); n != nil || err != nil { + return n, err } } - ref = path.Join("/", strings.ToLower(base), ref) + } - if !strings.HasPrefix(ref, "/") { - ref = "/" + ref + if strings.HasPrefix(ref, ".") { + // Page relative, no need to look further. + return nil, nil } - m := c.pageMap + refPath := contentPathParser.Parse(files.ComponentFolderContent, ref) - // It's either a section, a page in a section or a taxonomy node. - // Start with the most likely: - n, name := c.getSectionOrPage(ref) - if n != nil { - return n, nil - } + n, err := c.getContentNodeFromPath(refPath, ref) - if !strings.HasPrefix(inRef, "/") { - // Many people will have "post/foo.md" in their content files. - if n, _ := c.getSectionOrPage("/" + inRef); n != nil { - return n, nil - } + if n != nil || err != nil { + return n, err } - // Check if it's a taxonomy node - pref := helpers.AddTrailingSlash(ref) - s, v, found := m.taxonomies.LongestPrefix(pref) - - if found { - if !m.onSameLevel(pref, s) { - return nil, nil + if hadExtension && s.home != nil && s.home.File() != nil { + if n, err := c.getContentNodeFromRefReverseLookup(inRef, s.home.File().FileInfo()); n != nil || err != nil { + return n, err } - return v.(*contentNode), nil } - getByName := func(s string) (*contentNode, error) { - n := m.pageReverseIndex.Get(s) - if n != nil { - if n == ambiguousContentNode { - return nil, fmt.Errorf("page reference %q is ambiguous", ref) - } - return n, nil + var doSimpleLookup bool + if isReflink || context == nil { + slashCount := strings.Count(inRef, "/") + if slashCount <= 1 { + doSimpleLookup = slashCount == 0 || ref[0] == '/' } + } + if !doSimpleLookup { return nil, nil } - var module string - if context != nil && !context.File().IsZero() { - module = context.File().FileInfo().Meta().Module + n = 
c.pageMap.pageReverseIndex.Get(refPath.BaseNameNoIdentifier()) + if n == ambiguousContentNode { + return nil, fmt.Errorf("page reference %q is ambiguous", inRef) } - if module == "" && !c.pageMap.s.home.File().IsZero() { - module = c.pageMap.s.home.File().FileInfo().Meta().Module - } + return n, nil +} - if module != "" { - n, err := getByName(module + ref) - if err != nil { - return nil, err - } - if n != nil { - return n, nil - } +func (c *pageFinder) getContentNodeFromRefReverseLookup(ref string, fi hugofs.FileMetaInfo) (contentNodeI, error) { + s := c.pageMap.s + meta := fi.Meta() + dir := meta.Filename + if !fi.IsDir() { + dir = filepath.Dir(meta.Filename) } - if !doSimpleLookup { - return nil, nil + realFilename := filepath.Join(dir, ref) + + pcs, err := s.BaseFs.Content.ReverseLookup(realFilename) + if err != nil { + return nil, err } - // Ref/relref supports this potentially ambiguous lookup. - return getByName(path.Base(name)) + // There may be multiple matches, but we will only use the first one. 
+ for _, pc := range pcs { + pi := s.Conf.PathParser().Parse(pc.Component, pc.Path) + if n := c.pageMap.treePages.Get(pi.Base()); n != nil { + return n, nil + } + } + return nil, nil } -func (*PageCollections) findPagesByKindIn(kind string, inPages page.Pages) page.Pages { - var pages page.Pages - for _, p := range inPages { - if p.Kind() == kind { - pages = append(pages, p) - } +func (c *pageFinder) getContentNodeFromPath(refPath *paths.Path, ref string) (contentNodeI, error) { + s := refPath.Base() + + n := c.pageMap.treePages.Get(s) + if n != nil { + return n, nil } - return pages + + return nil, nil } diff --git a/hugolib/pagecollections_test.go b/hugolib/pagecollections_test.go index b11fc9899..8fd4f0739 100644 --- a/hugolib/pagecollections_test.go +++ b/hugolib/pagecollections_test.go @@ -63,12 +63,12 @@ func BenchmarkGetPage(b *testing.B) { b.ResetTimer() for i := 0; i < b.N; i++ { - home, _ := s.getPageNew(nil, "/") + home, _ := s.getPage(nil, "/") if home == nil { b.Fatal("Home is nil") } - p, _ := s.getPageNew(nil, pagePaths[i]) + p, _ := s.getPage(nil, pagePaths[i]) if p == nil { b.Fatal("Section is nil") } @@ -107,7 +107,7 @@ func TestBenchmarkGetPageRegular(t *testing.T) { for i := 0; i < 10; i++ { pp := path.Join("/", fmt.Sprintf("sect%d", i), fmt.Sprintf("page%d.md", i)) - page, _ := s.getPageNew(nil, pp) + page, _ := s.getPage(nil, pp) c.Assert(page, qt.Not(qt.IsNil), qt.Commentf(pp)) } } @@ -127,7 +127,7 @@ func BenchmarkGetPageRegular(b *testing.B) { b.ResetTimer() for i := 0; i < b.N; i++ { - page, _ := s.getPageNew(nil, pagePaths[i]) + page, _ := s.getPage(nil, pagePaths[i]) c.Assert(page, qt.Not(qt.IsNil)) } }) @@ -147,7 +147,7 @@ func BenchmarkGetPageRegular(b *testing.B) { b.ResetTimer() for i := 0; i < b.N; i++ { - page, _ := s.getPageNew(pages[i], pagePaths[i]) + page, _ := s.getPage(pages[i], pagePaths[i]) c.Assert(page, qt.Not(qt.IsNil)) } }) @@ -226,7 +226,7 @@ func TestGetPage(t *testing.T) { s := buildSingleSite(t, deps.DepsCfg{Fs: fs, 
Configs: configs}, BuildCfg{SkipRender: true}) - sec3, err := s.getPageNew(nil, "/sect3") + sec3, err := s.getPage(nil, "/sect3") c.Assert(err, qt.IsNil) c.Assert(sec3, qt.Not(qt.IsNil)) @@ -313,15 +313,36 @@ func TestGetPage(t *testing.T) { } } - // test new internal Site.getPageNew + // test new internal Site.getPage for _, ref := range test.pathVariants { - page2, err := s.getPageNew(test.context, ref) + page2, err := s.getPage(test.context, ref) test.check(page2, err, errorMsg, c) } }) } } +// #11664 +func TestGetPageIndexIndex(t *testing.T) { + files := ` +-- hugo.toml -- +disableKinds = ["taxonomy", "term"] +-- content/mysect/index/index.md -- +--- +title: "Mysect Index" +--- +-- layouts/index.html -- +GetPage 1: {{ with site.GetPage "mysect/index/index.md" }}{{ .Title }}|{{ .RelPermalink }}|{{ .Path }}{{ end }}| +GetPage 2: {{ with site.GetPage "mysect/index" }}{{ .Title }}|{{ .RelPermalink }}|{{ .Path }}{{ end }}| +` + + b := Test(t, files) + b.AssertFileContent("public/index.html", + "GetPage 1: Mysect Index|/mysect/index/|/mysect/index|", + "GetPage 2: Mysect Index|/mysect/index/|/mysect/index|", + ) +} + // https://github.com/gohugoio/hugo/issues/6034 func TestGetPageRelative(t *testing.T) { b := newTestSitesBuilder(t) @@ -348,6 +369,172 @@ NOT FOUND b.AssertFileContent("public/who/index.html", `NOT FOUND`) } +func TestGetPageIssue11883(t *testing.T) { + files := ` +-- hugo.toml -- +-- p1/index.md -- +--- +title: p1 +--- +-- p1/p1.xyz -- +xyz. +-- layouts/index.html -- +Home. {{ with .Page.GetPage "p1.xyz" }}{{ else }}OK 1{{ end }} {{ with .Site.GetPage "p1.xyz" }}{{ else }}OK 2{{ end }} +` + + b := Test(t, files) + b.AssertFileContent("public/index.html", "Home. 
OK 1 OK 2") +} + +func TestGetPageBundleToRegular(t *testing.T) { + files := ` +-- hugo.toml -- +-- content/s1/p1/index.md -- +--- +title: p1 +--- +-- content/s1/p2.md -- +--- +title: p2 +--- +-- layouts/_default/single.html -- +{{ with .GetPage "p2" }} + OK: {{ .LinkTitle }} +{{ else }} + Unable to get p2. +{{ end }} +` + + b := Test(t, files) + b.AssertFileContent("public/s1/p1/index.html", "OK: p2") + b.AssertFileContent("public/s1/p2/index.html", "OK: p2") +} + +func TestPageGetPageVariations(t *testing.T) { + files := ` +-- hugo.toml -- +-- content/s1/p1/index.md -- +--- +title: p1 +--- +-- content/s1/p2.md -- +--- +title: p2 +--- +-- content/s2/p3/index.md -- +--- +title: p3 +--- +-- content/p2.md -- +--- +title: p2_root +--- +-- layouts/index.html -- +/s1/p2.md: {{ with .GetPage "/s1/p2.md" }}{{ .Title }}{{ end }}| +/s1/p2: {{ with .GetPage "/s1/p2" }}{{ .Title }}{{ end }}| +/s1/p1/index.md: {{ with .GetPage "/s1/p1/index.md" }}{{ .Title }}{{ end }}| +/s1/p1: {{ with .GetPage "/s1/p1" }}{{ .Title }}{{ end }}| +-- layouts/_default/single.html -- +../p2: {{ with .GetPage "../p2" }}{{ .Title }}{{ end }}| +../p2.md: {{ with .GetPage "../p2.md" }}{{ .Title }}{{ end }}| +p1/index.md: {{ with .GetPage "p1/index.md" }}{{ .Title }}{{ end }}| +../s2/p3/index.md: {{ with .GetPage "../s2/p3/index.md" }}{{ .Title }}{{ end }}| +` + + b := Test(t, files) + + b.AssertFileContent("public/index.html", ` +/s1/p2.md: p2| +/s1/p2: p2| +/s1/p1/index.md: p1| +/s1/p1: p1| +`) + + b.AssertFileContent("public/s1/p1/index.html", ` +../p2: p2_root| +../p2.md: p2| + +`) + + b.AssertFileContent("public/s1/p2/index.html", ` +../p2: p2_root| +../p2.md: p2_root| +p1/index.md: p1| +../s2/p3/index.md: p3| + +`) +} + +func TestPageGetPageMountsReverseLookup(t *testing.T) { + tempDir := t.TempDir() + + files := ` +-- README.md -- +--- +title: README +--- +-- blog/b1.md -- +--- +title: b1 +--- +{{< ref "../docs/d1.md" >}} +-- blog/b2/index.md -- +--- +title: b2 +--- +{{< ref "../../docs/d1.md" 
>}} +-- docs/d1.md -- +--- +title: d1 +--- +-- hugo.toml -- +baseURL = "https://example.com/" +[module] +[[module.mounts]] +source = "layouts" +target = "layouts" +[[module.mounts]] +source = "README.md" +target = "content/_index.md" +[[module.mounts]] +source = "blog" +target = "content/posts" +[[module.mounts]] +source = "docs" +target = "content/mydocs" +-- layouts/shortcodes/ref.html -- +{{ $ref := .Get 0 }} +.Page.GetPage({{ $ref }}).Title: {{ with .Page.GetPage $ref }}{{ .Title }}{{ end }}| +-- layouts/index.html -- +Home. +/blog/b1.md: {{ with .GetPage "/blog/b1.md" }}{{ .Title }}{{ end }}| +/blog/b2/index.md: {{ with .GetPage "/blog/b2/index.md" }}{{ .Title }}{{ end }}| +/docs/d1.md: {{ with .GetPage "/docs/d1.md" }}{{ .Title }}{{ end }}| +/README.md: {{ with .GetPage "/README.md" }}{{ .Title }}{{ end }}| +-- layouts/_default/single.html -- +Single. +/README.md: {{ with .GetPage "/README.md" }}{{ .Title }}{{ end }}| +{{ .Content }} + + +` + b := Test(t, files, TestOptWithConfig(func(cfg *IntegrationTestConfig) { cfg.WorkingDir = tempDir })) + + b.AssertFileContent("public/index.html", + ` +/blog/b1.md: b1| +/blog/b2/index.md: b2| +/docs/d1.md: d1| +/README.md: README +`, + ) + + b.AssertFileContent("public/mydocs/d1/index.html", `README.md: README|`) + + b.AssertFileContent("public/posts/b1/index.html", `.Page.GetPage(../docs/d1.md).Title: d1|`) + b.AssertFileContent("public/posts/b2/index.html", `.Page.GetPage(../../docs/d1.md).Title: d1|`) +} + // https://github.com/gohugoio/hugo/issues/7016 func TestGetPageMultilingual(t *testing.T) { b := newTestSitesBuilder(t) @@ -386,15 +573,6 @@ NOT FOUND b.AssertFileContent("public/en/index.html", `NOT FOUND`) } -func TestShouldDoSimpleLookup(t *testing.T) { - c := qt.New(t) - - c.Assert(shouldDoSimpleLookup("foo.md"), qt.Equals, true) - c.Assert(shouldDoSimpleLookup("/foo.md"), qt.Equals, true) - c.Assert(shouldDoSimpleLookup("./foo.md"), qt.Equals, false) - c.Assert(shouldDoSimpleLookup("docs/foo.md"), qt.Equals, 
false) -} - func TestRegularPagesRecursive(t *testing.T) { b := newTestSitesBuilder(t) @@ -449,5 +627,4 @@ RegularPagesRecursive: {{ range .RegularPagesRecursive }}{{ .Kind }}:{{ .RelPerm }).Build() b.AssertFileContent("public/index.html", `RegularPagesRecursive: page:/p1/|page:/post/p2/||End.`) - } diff --git a/hugolib/pages_capture.go b/hugolib/pages_capture.go index c57c707de..acdc674e6 100644 --- a/hugolib/pages_capture.go +++ b/hugolib/pages_capture.go @@ -1,4 +1,4 @@ -// Copyright 2019 The Hugo Authors. All rights reserved. +// Copyright 2021 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -15,190 +15,188 @@ package hugolib import ( "context" + "errors" "fmt" - pth "path" + "os" "path/filepath" - "reflect" - - "github.com/gohugoio/hugo/common/herrors" - "github.com/gohugoio/hugo/common/loggers" - "github.com/gohugoio/hugo/common/maps" - + "strings" + "sync" + "sync/atomic" + "time" + + "github.com/bep/logg" + "github.com/gohugoio/hugo/common/paths" + "github.com/gohugoio/hugo/common/rungroup" + "github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/parser/pageparser" - - "github.com/gohugoio/hugo/hugofs/files" + "github.com/spf13/afero" "github.com/gohugoio/hugo/source" + "github.com/gohugoio/hugo/common/loggers" "github.com/gohugoio/hugo/hugofs" - "github.com/spf13/afero" -) - -const ( - walkIsRootFileMetaKey = "walkIsRootFileMetaKey" ) func newPagesCollector( + ctx context.Context, + h *HugoSites, sp *source.SourceSpec, - contentMap *pageMaps, logger loggers.Logger, - contentTracker *contentChangeMap, - proc pagesCollectorProcessorProvider, filenames ...string) *pagesCollector { + infoLogger logg.LevelLogger, + m *pageMap, + ids []pathChange, +) *pagesCollector { return &pagesCollector{ - fs: sp.SourceFs, - contentMap: contentMap, - proc: proc, + ctx: ctx, + h: h, + fs: sp.BaseFs.Content.Fs, + m: m, sp: sp, logger: 
logger, - filenames: filenames, - tracker: contentTracker, + infoLogger: infoLogger, + ids: ids, + seenDirs: make(map[string]bool), } } -type contentDirKey struct { - dirname string - filename string - tp bundleDirType -} - -type fileinfoBundle struct { - header hugofs.FileMetaInfo - resources []hugofs.FileMetaInfo -} - -func (b *fileinfoBundle) containsResource(name string) bool { - for _, r := range b.resources { - if r.Name() == name { - return true - } - } - - return false -} - -type pageBundles map[string]*fileinfoBundle - type pagesCollector struct { - sp *source.SourceSpec - fs afero.Fs - logger loggers.Logger + ctx context.Context + h *HugoSites + sp *source.SourceSpec + logger loggers.Logger + infoLogger logg.LevelLogger - contentMap *pageMaps + m *pageMap - // Ordered list (bundle headers first) used in partial builds. - filenames []string + fs afero.Fs - // Content files tracker used in partial builds. - tracker *contentChangeMap + // List of paths that have changed. Used in partial builds. + ids []pathChange + seenDirs map[string]bool - proc pagesCollectorProcessorProvider + g rungroup.Group[hugofs.FileMetaInfo] } -// isCascadingEdit returns whether the dir represents a cascading edit. -// That is, if a front matter cascade section is removed, added or edited. -// If this is the case we must re-evaluate its descendants. -func (c *pagesCollector) isCascadingEdit(dir contentDirKey) (bool, string) { - // This is either a section or a taxonomy node. Find it. - prefix := cleanTreeKey(dir.dirname) - - section := "/" - var isCascade bool - - c.contentMap.walkBranchesPrefix(prefix, func(s string, n *contentNode) bool { - if n.fi == nil || dir.filename != n.fi.Meta().Filename { - return false - } - - f, err := n.fi.Meta().Open() - if err != nil { - // File may have been removed, assume a cascading edit. - // Some false positives is not too bad. 
- isCascade = true - return true - } - - pf, err := pageparser.ParseFrontMatterAndContent(f) - f.Close() - if err != nil { - isCascade = true - return true - } - - if n.p == nil || n.p.bucket == nil { - return true - } +func (c *pagesCollector) copyFile(fim hugofs.FileMetaInfo) error { + meta := fim.Meta() + f, err := meta.Open() + if err != nil { + return fmt.Errorf("copyFile: failed to open: %w", err) + } - section = s + s := c.m.s - maps.PrepareParams(pf.FrontMatter) - cascade1, ok := pf.FrontMatter["cascade"] - hasCascade := n.p.bucket.cascade != nil && len(n.p.bucket.cascade) > 0 - if !ok { - isCascade = hasCascade + target := filepath.Join(s.PathSpec.GetTargetLanguageBasePath(), meta.PathInfo.Path()) - return true - } - - if !hasCascade { - isCascade = true - return true - } + defer f.Close() - for _, v := range n.p.bucket.cascade { - isCascade = !reflect.DeepEqual(cascade1, v) - if isCascade { - break - } - } + fs := s.PublishFsStatic - return true - }) + s.PathSpec.ProcessingStats.Incr(&s.PathSpec.ProcessingStats.Files) - return isCascade, section + return helpers.WriteToDisk(filepath.Clean(target), f, fs) } -// Collect. +// Collect collects content by walking the file system and storing +// it in the content tree. +// It may be restricted by filenames set on the collector (partial build). 
func (c *pagesCollector) Collect() (collectErr error) { - c.proc.Start(context.Background()) + var ( + numWorkers = c.h.numWorkers + numFilesProcessedTotal atomic.Uint64 + numFilesProcessedLast uint64 + fileBatchTimer = time.Now() + fileBatchTimerMu sync.Mutex + ) + + l := c.infoLogger.WithField("substep", "collect") + + logFilesProcessed := func(force bool) { + fileBatchTimerMu.Lock() + if force || time.Since(fileBatchTimer) > 3*time.Second { + numFilesProcessedBatch := numFilesProcessedTotal.Load() - numFilesProcessedLast + numFilesProcessedLast = numFilesProcessedTotal.Load() + loggers.TimeTrackf(l, fileBatchTimer, + logg.Fields{ + logg.Field{Name: "files", Value: numFilesProcessedBatch}, + logg.Field{Name: "files_total", Value: numFilesProcessedTotal.Load()}, + }, + "", + ) + fileBatchTimer = time.Now() + } + fileBatchTimerMu.Unlock() + } + defer func() { - err := c.proc.Wait() - if collectErr == nil { - collectErr = err - } + logFilesProcessed(true) }() - if len(c.filenames) == 0 { - // Collect everything. - collectErr = c.collectDir("", false, nil) - } else { - for _, pm := range c.contentMap.pmaps { - pm.cfg.isRebuild = true - } - dirs := make(map[contentDirKey]bool) - for _, filename := range c.filenames { - dir, btype := c.tracker.resolveAndRemove(filename) - dirs[contentDirKey{dir, filename, btype}] = true - } - - for dir := range dirs { - for _, pm := range c.contentMap.pmaps { - pm.s.ResourceSpec.DeleteBySubstring(dir.dirname) + c.g = rungroup.Run[hugofs.FileMetaInfo](c.ctx, rungroup.Config[hugofs.FileMetaInfo]{ + NumWorkers: numWorkers, + Handle: func(ctx context.Context, fi hugofs.FileMetaInfo) error { + if err := c.m.AddFi(fi); err != nil { + if errors.Is(err, pageparser.ErrPlainHTMLDocumentsNotSupported) { + // Reclassify this as a static file. 
+ if err := c.copyFile(fi); err != nil { + return err + } + } else { + return hugofs.AddFileInfoToError(err, fi, c.fs) + } } + numFilesProcessedTotal.Add(1) + if numFilesProcessedTotal.Load()%1000 == 0 { + logFilesProcessed(false) + } + return nil + }, + }) - switch dir.tp { - case bundleLeaf: - collectErr = c.collectDir(dir.dirname, true, nil) - case bundleBranch: - isCascading, section := c.isCascadingEdit(dir) - - if isCascading { - c.contentMap.deleteSection(section) - } - collectErr = c.collectDir(dir.dirname, !isCascading, nil) - default: + if c.ids == nil { + // Collect everything. + collectErr = c.collectDir(nil, false, nil) + } else { + for _, s := range c.h.Sites { + s.pageMap.cfg.isRebuild = true + } + + for _, id := range c.ids { + if id.p.IsLeafBundle() { + collectErr = c.collectDir( + id.p, + false, + func(fim hugofs.FileMetaInfo) bool { + return true + }, + ) + } else if id.p.IsBranchBundle() { + collectErr = c.collectDir( + id.p, + false, + func(fim hugofs.FileMetaInfo) bool { + if fim.IsDir() { + return true + } + fimp := fim.Meta().PathInfo + if fimp == nil { + return false + } + + return strings.HasPrefix(fimp.Path(), paths.AddTrailingSlash(id.p.Dir())) + }, + ) + } else { // We always start from a directory. 
- collectErr = c.collectDir(dir.dirname, true, func(fim hugofs.FileMetaInfo) bool { - return dir.filename == fim.Meta().Filename + collectErr = c.collectDir(id.p, id.isDir, func(fim hugofs.FileMetaInfo) bool { + if id.delete || id.isDir { + if id.isDir { + return strings.HasPrefix(fim.Meta().PathInfo.Path(), paths.AddTrailingSlash(id.p.Path())) + } + + return id.p.Dir() == fim.Meta().PathInfo.Dir() + } + return id.p.Path() == fim.Meta().PathInfo.Path() }) } @@ -209,160 +207,51 @@ func (c *pagesCollector) Collect() (collectErr error) { } - return -} - -func (c *pagesCollector) isBundleHeader(fi hugofs.FileMetaInfo) bool { - class := fi.Meta().Classifier - return class == files.ContentClassLeaf || class == files.ContentClassBranch -} - -func (c *pagesCollector) getLang(fi hugofs.FileMetaInfo) string { - lang := fi.Meta().Lang - if lang != "" { - return lang - } - return c.sp.Cfg.DefaultContentLanguage() -} - -func (c *pagesCollector) addToBundle(info hugofs.FileMetaInfo, btyp bundleDirType, bundles pageBundles) error { - getBundle := func(lang string) *fileinfoBundle { - return bundles[lang] + werr := c.g.Wait() + if collectErr == nil { + collectErr = werr } - cloneBundle := func(lang string) *fileinfoBundle { - // Every bundled content file needs a content file header. - // Use the default content language if found, else just - // pick one. - var ( - source *fileinfoBundle - found bool - ) - - source, found = bundles[c.sp.Cfg.DefaultContentLanguage()] - if !found { - for _, b := range bundles { - source = b - break - } - } - - if source == nil { - panic(fmt.Sprintf("no source found, %d", len(bundles))) - } - - clone := c.cloneFileInfo(source.header) - clone.Meta().Lang = lang - - return &fileinfoBundle{ - header: clone, - } - } + return +} - lang := c.getLang(info) - bundle := getBundle(lang) - isBundleHeader := c.isBundleHeader(info) - if bundle != nil && isBundleHeader { - // index.md file inside a bundle, see issue 6208. 
- info.Meta().Classifier = files.ContentClassContent - isBundleHeader = false - } - classifier := info.Meta().Classifier - isContent := classifier == files.ContentClassContent - if bundle == nil { - if isBundleHeader { - bundle = &fileinfoBundle{header: info} - bundles[lang] = bundle +func (c *pagesCollector) collectDir(dirPath *paths.Path, isDir bool, inFilter func(fim hugofs.FileMetaInfo) bool) error { + var dpath string + if dirPath != nil { + if isDir { + dpath = filepath.FromSlash(dirPath.Path()) } else { - if btyp == bundleBranch { - // No special logic for branch bundles. - // Every language needs its own _index.md file. - // Also, we only clone bundle headers for lonesome, bundled, - // content files. - return c.handleFiles(info) - } - - if isContent { - bundle = cloneBundle(lang) - bundles[lang] = bundle - } + dpath = filepath.FromSlash(dirPath.Dir()) } } - if !isBundleHeader && bundle != nil { - bundle.resources = append(bundle.resources, info) - } - - if classifier == files.ContentClassFile { - translations := info.Meta().Translations - - for lang, b := range bundles { - if !stringSliceContains(lang, translations...) && !b.containsResource(info.Name()) { - - // Clone and add it to the bundle. - clone := c.cloneFileInfo(info) - clone.Meta().Lang = lang - b.resources = append(b.resources, clone) - } - } + if c.seenDirs[dpath] { + return nil } + c.seenDirs[dpath] = true - return nil -} - -func (c *pagesCollector) cloneFileInfo(fi hugofs.FileMetaInfo) hugofs.FileMetaInfo { - return hugofs.NewFileMetaInfo(fi, hugofs.NewFileMeta()) -} - -func (c *pagesCollector) collectDir(dirname string, partial bool, inFilter func(fim hugofs.FileMetaInfo) bool) error { - fi, err := c.fs.Stat(dirname) + root, err := c.fs.Stat(dpath) if err != nil { - if herrors.IsNotExist(err) { - // May have been deleted. 
+ if os.IsNotExist(err) { return nil } return err } - handleDir := func( - btype bundleDirType, - dir hugofs.FileMetaInfo, - path string, - readdir []hugofs.FileMetaInfo) error { - if btype > bundleNot && c.tracker != nil { - c.tracker.add(path, btype) - } - - if btype == bundleBranch { - if err := c.handleBundleBranch(readdir); err != nil { - return err - } - // A branch bundle is only this directory level, so keep walking. - return nil - } else if btype == bundleLeaf { - if err := c.handleBundleLeaf(dir, path, readdir); err != nil { - return err - } - - return nil - } - - if err := c.handleFiles(readdir...); err != nil { - return err - } + rootm := root.(hugofs.FileMetaInfo) - return nil + if err := c.collectDirDir(dpath, rootm, inFilter); err != nil { + return err } - filter := func(fim hugofs.FileMetaInfo) bool { - if fim.Meta().SkipDir { - return false - } + return nil +} +func (c *pagesCollector) collectDirDir(path string, root hugofs.FileMetaInfo, inFilter func(fim hugofs.FileMetaInfo) bool) error { + filter := func(fim hugofs.FileMetaInfo) bool { if c.sp.IgnoreFile(fim.Meta().Filename) { return false } - if inFilter != nil { return inFilter(fim) } @@ -370,83 +259,63 @@ func (c *pagesCollector) collectDir(dirname string, partial bool, inFilter func( } preHook := func(dir hugofs.FileMetaInfo, path string, readdir []hugofs.FileMetaInfo) ([]hugofs.FileMetaInfo, error) { - var btype bundleDirType - filtered := readdir[:0] for _, fi := range readdir { if filter(fi) { filtered = append(filtered, fi) - - if c.tracker != nil { - // Track symlinks. - c.tracker.addSymbolicLinkMapping(fi) - } } } - walkRoot := dir.Meta().IsRootFile readdir = filtered + if len(readdir) == 0 { + return nil, nil + } - // We merge language directories, so there can be duplicates, but they - // will be ordered, most important first. - var duplicates []int - seen := make(map[string]bool) - - for i, fi := range readdir { - + // Pick the first regular file. 
+ var first hugofs.FileMetaInfo + for _, fi := range readdir { if fi.IsDir() { continue } + first = fi + break + } - meta := fi.Meta() - meta.IsRootFile = walkRoot - class := meta.Classifier - translationBase := meta.TranslationBaseNameWithExt - key := pth.Join(meta.Lang, translationBase) - - if seen[key] { - duplicates = append(duplicates, i) - continue - } - seen[key] = true + if first == nil { + // Only dirs, keep walking. + return readdir, nil + } - var thisBtype bundleDirType + // Any bundle file will always be first. + firstPi := first.Meta().PathInfo + if firstPi == nil { + panic(fmt.Sprintf("collectDirDir: no path info for %q", first.Meta().Filename)) + } - switch class { - case files.ContentClassLeaf: - thisBtype = bundleLeaf - case files.ContentClassBranch: - thisBtype = bundleBranch + if firstPi.IsLeafBundle() { + if err := c.handleBundleLeaf(dir, first, path, readdir); err != nil { + return nil, err } + return nil, filepath.SkipDir + } - // Folders with both index.md and _index.md type of files have - // undefined behaviour and can never work. - // The branch variant will win because of sort order, but log - // a warning about it. - if thisBtype > bundleNot && btype > bundleNot && thisBtype != btype { - c.logger.Warnf("Content directory %q have both index.* and _index.* files, pick one.", dir.Meta().Filename) - // Reclassify it so it will be handled as a content file inside the - // section, which is in line with the <= 0.55 behaviour. - meta.Classifier = files.ContentClassContent - } else if thisBtype > bundleNot { - btype = thisBtype + for _, fi := range readdir { + if fi.IsDir() { + continue } - } - - if len(duplicates) > 0 { - for i := len(duplicates) - 1; i >= 0; i-- { - idx := duplicates[i] - readdir = append(readdir[:idx], readdir[idx+1:]...) 
+ meta := fi.Meta() + pi := meta.PathInfo + if pi == nil { + panic(fmt.Sprintf("no path info for %q", meta.Filename)) } - } - err := handleDir(btype, dir, path, readdir) - if err != nil { - return nil, err - } + if meta.Lang == "" { + panic("lang not set") + } - if btype == bundleLeaf || partial { - return nil, filepath.SkipDir + if err := c.g.Enqueue(fi); err != nil { + return nil, err + } } // Keep walking. @@ -454,126 +323,56 @@ func (c *pagesCollector) collectDir(dirname string, partial bool, inFilter func( } var postHook hugofs.WalkHook - if c.tracker != nil { - postHook = func(dir hugofs.FileMetaInfo, path string, readdir []hugofs.FileMetaInfo) ([]hugofs.FileMetaInfo, error) { - if c.tracker == nil { - // Nothing to do. - return readdir, nil - } - - return readdir, nil - } - } - - wfn := func(path string, info hugofs.FileMetaInfo, err error) error { - if err != nil { - return err - } + wfn := func(path string, fi hugofs.FileMetaInfo) error { return nil } - fim := fi.(hugofs.FileMetaInfo) - // Make sure the pages in this directory gets re-rendered, - // even in fast render mode. - fim.Meta().IsRootFile = true - - w := hugofs.NewWalkway(hugofs.WalkwayConfig{ - Fs: c.fs, - Logger: c.logger, - Root: dirname, - Info: fim, - HookPre: preHook, - HookPost: postHook, - WalkFn: wfn, - }) + w := hugofs.NewWalkway( + hugofs.WalkwayConfig{ + Logger: c.logger, + Root: path, + Info: root, + Fs: c.fs, + HookPre: preHook, + HookPost: postHook, + WalkFn: wfn, + }) return w.Walk() } -func (c *pagesCollector) handleBundleBranch(readdir []hugofs.FileMetaInfo) error { - // Maps bundles to its language. 
- bundles := pageBundles{} - - var contentFiles []hugofs.FileMetaInfo - - for _, fim := range readdir { - - if fim.IsDir() { - continue +func (c *pagesCollector) handleBundleLeaf(dir, bundle hugofs.FileMetaInfo, inPath string, readdir []hugofs.FileMetaInfo) error { + bundlePi := bundle.Meta().PathInfo + walk := func(path string, info hugofs.FileMetaInfo) error { + if info.IsDir() { + return nil } - meta := fim.Meta() + pi := info.Meta().PathInfo - switch meta.Classifier { - case files.ContentClassContent: - contentFiles = append(contentFiles, fim) - default: - if err := c.addToBundle(fim, bundleBranch, bundles); err != nil { - return err + if info != bundle { + // Everything inside a leaf bundle is a Resource, + // even the content pages. + // Note that we do allow index.md as page resources, but not in the bundle root. + if !pi.IsLeafBundle() || pi.Dir() != bundlePi.Dir() { + paths.ModifyPathBundleTypeResource(pi) } } - } - - // Make sure the section is created before its pages. - if err := c.proc.Process(bundles); err != nil { - return err - } - - return c.handleFiles(contentFiles...) -} - -func (c *pagesCollector) handleBundleLeaf(dir hugofs.FileMetaInfo, path string, readdir []hugofs.FileMetaInfo) error { - // Maps bundles to its language. - bundles := pageBundles{} - - walk := func(path string, info hugofs.FileMetaInfo, err error) error { - if err != nil { - return err - } - if info.IsDir() { - return nil - } - - return c.addToBundle(info, bundleLeaf, bundles) + return c.g.Enqueue(info) } // Start a new walker from the given path. 
- w := hugofs.NewWalkway(hugofs.WalkwayConfig{ - Root: path, - Fs: c.fs, - Logger: c.logger, - Info: dir, - DirEntries: readdir, - WalkFn: walk, - }) - - if err := w.Walk(); err != nil { - return err - } - - return c.proc.Process(bundles) -} - -func (c *pagesCollector) handleFiles(fis ...hugofs.FileMetaInfo) error { - for _, fi := range fis { - if fi.IsDir() { - continue - } + w := hugofs.NewWalkway( + hugofs.WalkwayConfig{ + Root: inPath, + Fs: c.fs, + Logger: c.logger, + Info: dir, + DirEntries: readdir, + WalkFn: walk, + }) - if err := c.proc.Process(fi); err != nil { - return err - } - } - return nil -} - -func stringSliceContains(k string, values ...string) bool { - for _, v := range values { - if k == v { - return true - } - } - return false + return w.Walk() } diff --git a/hugolib/pages_capture_test.go b/hugolib/pages_capture_test.go deleted file mode 100644 index c771d30ee..000000000 --- a/hugolib/pages_capture_test.go +++ /dev/null @@ -1,78 +0,0 @@ -// Copyright 2019 The Hugo Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package hugolib - -import ( - "context" - "fmt" - "path/filepath" - "testing" - - qt "github.com/frankban/quicktest" - "github.com/gohugoio/hugo/common/loggers" - "github.com/gohugoio/hugo/config" - "github.com/gohugoio/hugo/config/testconfig" - "github.com/gohugoio/hugo/source" - "github.com/spf13/afero" -) - -func TestPagesCapture(t *testing.T) { - - c := qt.New(t) - - afs := afero.NewMemMapFs() - - writeFile := func(filename string) { - c.Assert(afero.WriteFile(afs, filepath.Join("content", filepath.FromSlash(filename)), []byte(fmt.Sprintf("content-%s", filename)), 0755), qt.IsNil) - } - - writeFile("_index.md") - writeFile("logo.png") - writeFile("root.md") - writeFile("blog/index.md") - writeFile("blog/hello.md") - writeFile("blog/images/sunset.png") - writeFile("pages/page1.md") - writeFile("pages/page2.md") - - cfg := config.New() - d := testconfig.GetTestDeps(afs, cfg) - sourceSpec := source.NewSourceSpec(d.PathSpec, nil, d.BaseFs.Content.Fs) - - t.Run("Collect", func(t *testing.T) { - c := qt.New(t) - proc := &testPagesCollectorProcessor{} - coll := newPagesCollector(sourceSpec, nil, loggers.NewDefault(), nil, proc) - c.Assert(coll.Collect(), qt.IsNil) - // 2 bundles, 3 pages. 
- c.Assert(len(proc.items), qt.Equals, 5) - }) - -} - -type testPagesCollectorProcessor struct { - items []any - waitErr error -} - -func (proc *testPagesCollectorProcessor) Process(item any) error { - proc.items = append(proc.items, item) - return nil -} - -func (proc *testPagesCollectorProcessor) Start(ctx context.Context) context.Context { - return ctx -} - -func (proc *testPagesCollectorProcessor) Wait() error { return proc.waitErr } diff --git a/hugolib/pages_language_merge_test.go b/hugolib/pages_language_merge_test.go index 55241d306..8a5d6c184 100644 --- a/hugolib/pages_language_merge_test.go +++ b/hugolib/pages_language_merge_test.go @@ -70,8 +70,8 @@ func TestMergeLanguages(t *testing.T) { c.Assert(len(firstNN.Sites()), qt.Equals, 4) c.Assert(firstNN.Sites().First().Language().Lang, qt.Equals, "en") - nnBundle := nnSite.getPage("page", "bundle") - enBundle := enSite.getPage("page", "bundle") + nnBundle := nnSite.getPageOldVersion("page", "bundle") + enBundle := enSite.getPageOldVersion("page", "bundle") c.Assert(len(enBundle.Resources()), qt.Equals, 6) c.Assert(len(nnBundle.Resources()), qt.Equals, 2) diff --git a/hugolib/pages_process.go b/hugolib/pages_process.go deleted file mode 100644 index b0c04244b..000000000 --- a/hugolib/pages_process.go +++ /dev/null @@ -1,203 +0,0 @@ -// Copyright 2019 The Hugo Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package hugolib - -import ( - "context" - "fmt" - "path/filepath" - - "github.com/gohugoio/hugo/config" - "github.com/gohugoio/hugo/source" - - "github.com/gohugoio/hugo/hugofs/files" - "golang.org/x/sync/errgroup" - - "github.com/gohugoio/hugo/common/herrors" - "github.com/gohugoio/hugo/hugofs" -) - -func newPagesProcessor(h *HugoSites, sp *source.SourceSpec) *pagesProcessor { - procs := make(map[string]pagesCollectorProcessorProvider) - for _, s := range h.Sites { - procs[s.Lang()] = &sitePagesProcessor{ - m: s.pageMap, - errorSender: s.h, - itemChan: make(chan interface{}, config.GetNumWorkerMultiplier()*2), - } - } - return &pagesProcessor{ - procs: procs, - } -} - -type pagesCollectorProcessorProvider interface { - Process(item any) error - Start(ctx context.Context) context.Context - Wait() error -} - -type pagesProcessor struct { - // Per language/Site - procs map[string]pagesCollectorProcessorProvider -} - -func (proc *pagesProcessor) Process(item any) error { - switch v := item.(type) { - // Page bundles mapped to their language. 
- case pageBundles: - for _, vv := range v { - proc.getProcFromFi(vv.header).Process(vv) - } - case hugofs.FileMetaInfo: - proc.getProcFromFi(v).Process(v) - default: - panic(fmt.Sprintf("unrecognized item type in Process: %T", item)) - - } - - return nil -} - -func (proc *pagesProcessor) Start(ctx context.Context) context.Context { - for _, p := range proc.procs { - ctx = p.Start(ctx) - } - return ctx -} - -func (proc *pagesProcessor) Wait() error { - var err error - for _, p := range proc.procs { - if e := p.Wait(); e != nil { - err = e - } - } - return err -} - -func (proc *pagesProcessor) getProcFromFi(fi hugofs.FileMetaInfo) pagesCollectorProcessorProvider { - if p, found := proc.procs[fi.Meta().Lang]; found { - return p - } - return defaultPageProcessor -} - -type nopPageProcessor int - -func (nopPageProcessor) Process(item any) error { - return nil -} - -func (nopPageProcessor) Start(ctx context.Context) context.Context { - return context.Background() -} - -func (nopPageProcessor) Wait() error { - return nil -} - -var defaultPageProcessor = new(nopPageProcessor) - -type sitePagesProcessor struct { - m *pageMap - errorSender herrors.ErrorSender - - ctx context.Context - itemChan chan any - itemGroup *errgroup.Group -} - -func (p *sitePagesProcessor) Process(item any) error { - select { - case <-p.ctx.Done(): - return nil - default: - p.itemChan <- item - } - return nil -} - -func (p *sitePagesProcessor) Start(ctx context.Context) context.Context { - p.itemGroup, ctx = errgroup.WithContext(ctx) - p.ctx = ctx - p.itemGroup.Go(func() error { - for item := range p.itemChan { - if err := p.doProcess(item); err != nil { - return err - } - } - return nil - }) - return ctx -} - -func (p *sitePagesProcessor) Wait() error { - close(p.itemChan) - return p.itemGroup.Wait() -} - -func (p *sitePagesProcessor) copyFile(fim hugofs.FileMetaInfo) error { - meta := fim.Meta() - f, err := meta.Open() - if err != nil { - return fmt.Errorf("copyFile: failed to open: %w", err) - } 
- - s := p.m.s - - target := filepath.Join(s.PathSpec.GetTargetLanguageBasePath(), meta.Path) - - defer f.Close() - - fs := s.PublishFsStatic - - return s.publish(&s.PathSpec.ProcessingStats.Files, target, f, fs) -} - -func (p *sitePagesProcessor) doProcess(item any) error { - m := p.m - switch v := item.(type) { - case *fileinfoBundle: - if err := m.AddFilesBundle(v.header, v.resources...); err != nil { - return err - } - case hugofs.FileMetaInfo: - if p.shouldSkip(v) { - return nil - } - meta := v.Meta() - - classifier := meta.Classifier - switch classifier { - case files.ContentClassContent: - if err := m.AddFilesBundle(v); err != nil { - return err - } - case files.ContentClassFile: - if err := p.copyFile(v); err != nil { - return err - } - default: - panic(fmt.Sprintf("invalid classifier: %q", classifier)) - } - default: - panic(fmt.Sprintf("unrecognized item type in Process: %T", item)) - } - return nil -} - -func (p *sitePagesProcessor) shouldSkip(fim hugofs.FileMetaInfo) bool { - return p.m.s.conf.IsLangDisabled(fim.Meta().Lang) -} diff --git a/hugolib/paths/paths.go b/hugolib/paths/paths.go index 83d5921e0..397dba3f8 100644 --- a/hugolib/paths/paths.go +++ b/hugolib/paths/paths.go @@ -87,12 +87,13 @@ func (p *Paths) AllModules() modules.Modules { } // GetBasePath returns any path element in baseURL if needed. +// The path returned will have a leading, but no trailing slash. func (p *Paths) GetBasePath(isRelativeURL bool) string { if isRelativeURL && p.Cfg.CanonifyURLs() { // The baseURL will be prepended later. 
return "" } - return p.Cfg.BaseURL().BasePath + return p.Cfg.BaseURL().BasePathNoTrailingSlash } func (p *Paths) Lang() string { diff --git a/hugolib/rebuild_test.go b/hugolib/rebuild_test.go new file mode 100644 index 000000000..d3ac5665d --- /dev/null +++ b/hugolib/rebuild_test.go @@ -0,0 +1,1256 @@ +package hugolib + +import ( + "fmt" + "path/filepath" + "strings" + "testing" + "time" + + "github.com/fortytw2/leaktest" + qt "github.com/frankban/quicktest" + "github.com/gohugoio/hugo/common/types" + "github.com/gohugoio/hugo/htesting" + "github.com/gohugoio/hugo/resources/resource_transformers/tocss/dartsass" + "github.com/gohugoio/hugo/resources/resource_transformers/tocss/scss" +) + +const rebuildFilesSimple = ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["term", "taxonomy", "sitemap", "robotstxt", "404"] +disableLiveReload = true +[outputs] +home = ["html"] +section = ["html"] +page = ["html"] +-- content/mysection/_index.md -- +--- +title: "My Section" +--- +-- content/mysection/mysectionbundle/index.md -- +--- +title: "My Section Bundle" +--- +My Section Bundle Content. +-- content/mysection/mysectionbundle/mysectionbundletext.txt -- +My Section Bundle Text 2 Content. +-- content/mysection/mysectionbundle/mysectionbundlecontent.md -- +--- +title: "My Section Bundle Content" +--- +My Section Bundle Content. +-- content/mysection/_index.md -- +--- +title: "My Section" +--- +-- content/mysection/mysectiontext.txt -- +-- content/_index.md -- +--- +title: "Home" +--- +Home Content. +-- content/hometext.txt -- +Home Text Content. 
+-- layouts/_default/single.html -- +Single: {{ .Title }}|{{ .Content }}$ +Resources: {{ range $i, $e := .Resources }}{{ $i }}:{{ .RelPermalink }}|{{ .Content }}|{{ end }}$ +Len Resources: {{ len .Resources }}| +-- layouts/_default/list.html -- +List: {{ .Title }}|{{ .Content }}$ +Len Resources: {{ len .Resources }}| +Resources: {{ range $i, $e := .Resources }}{{ $i }}:{{ .RelPermalink }}|{{ .Content }}|{{ end }}$ +-- layouts/shortcodes/foo.html -- +Foo. + +` + +func TestRebuildEditTextFileInLeafBundle(t *testing.T) { + b := TestRunning(t, rebuildFilesSimple) + b.AssertFileContent("public/mysection/mysectionbundle/index.html", + "Resources: 0:/mysection/mysectionbundle/mysectionbundletext.txt|My Section Bundle Text 2 Content.|1:|<p>My Section Bundle Content.</p>\n|$") + + b.EditFileReplaceAll("content/mysection/mysectionbundle/mysectionbundletext.txt", "Content.", "Content Edited.").Build() + b.AssertFileContent("public/mysection/mysectionbundle/index.html", + "Text 2 Content Edited") + b.AssertRenderCountPage(1) + b.AssertRenderCountContent(1) +} + +func TestRebuildEditTextFileInHomeBundle(t *testing.T) { + b := TestRunning(t, rebuildFilesSimple) + b.AssertFileContent("public/index.html", "Home Content.") + b.AssertFileContent("public/index.html", "Home Text Content.") + + b.EditFileReplaceAll("content/hometext.txt", "Content.", "Content Edited.").Build() + b.AssertFileContent("public/index.html", "Home Content.") + b.AssertFileContent("public/index.html", "Home Text Content Edited.") + b.AssertRenderCountPage(1) + b.AssertRenderCountContent(1) +} + +func TestRebuildEditTextFileInBranchBundle(t *testing.T) { + b := TestRunning(t, rebuildFilesSimple) + b.AssertFileContent("public/mysection/index.html", "My Section") + + b.EditFileReplaceAll("content/mysection/mysectiontext.txt", "Content.", "Content Edited.").Build() + b.AssertFileContent("public/mysection/index.html", "My Section") + b.AssertRenderCountPage(1) + b.AssertRenderCountContent(1) +} + +func 
TestRebuildRenameTextFileInLeafBundle(t *testing.T) { + b := TestRunning(t, rebuildFilesSimple) + b.AssertFileContent("public/mysection/mysectionbundle/index.html", "My Section Bundle Text 2 Content.") + + b.RenameFile("content/mysection/mysectionbundle/mysectionbundletext.txt", "content/mysection/mysectionbundle/mysectionbundletext2.txt").Build() + b.AssertFileContent("public/mysection/mysectionbundle/index.html", "mysectionbundletext2", "My Section Bundle Text 2 Content.") + b.AssertRenderCountPage(3) + b.AssertRenderCountContent(3) +} + +func TestRebuildRenameTextFileInBranchBundle(t *testing.T) { + b := TestRunning(t, rebuildFilesSimple) + b.AssertFileContent("public/mysection/index.html", "My Section") + + b.RenameFile("content/mysection/mysectiontext.txt", "content/mysection/mysectiontext2.txt").Build() + b.AssertFileContent("public/mysection/index.html", "mysectiontext2", "My Section") + b.AssertRenderCountPage(2) + b.AssertRenderCountContent(2) +} + +func TestRebuildRenameTextFileInHomeBundle(t *testing.T) { + b := TestRunning(t, rebuildFilesSimple) + b.AssertFileContent("public/index.html", "Home Text Content.") + + b.RenameFile("content/hometext.txt", "content/hometext2.txt").Build() + b.AssertFileContent("public/index.html", "hometext2", "Home Text Content.") + b.AssertRenderCountPage(2) +} + +func TestRebuildRenameDirectoryWithLeafBundle(t *testing.T) { + b := TestRunning(t, rebuildFilesSimple) + b.RenameDir("content/mysection/mysectionbundle", "content/mysection/mysectionbundlerenamed").Build() + b.AssertFileContent("public/mysection/mysectionbundlerenamed/index.html", "My Section Bundle") + b.AssertRenderCountPage(1) +} + +func TestRebuildRenameDirectoryWithBranchBundle(t *testing.T) { + b := TestRunning(t, rebuildFilesSimple) + b.RenameDir("content/mysection", "content/mysectionrenamed").Build() + b.AssertFileContent("public/mysectionrenamed/index.html", "My Section") + b.AssertFileContent("public/mysectionrenamed/mysectionbundle/index.html", "My 
Section Bundle") + b.AssertFileContent("public/mysectionrenamed/mysectionbundle/mysectionbundletext.txt", "My Section Bundle Text 2 Content.") + b.AssertRenderCountPage(2) +} + +func TestRebuildRenameDirectoryWithRegularPageUsedInHome(t *testing.T) { + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableLiveReload = true +-- content/foo/p1.md -- +--- +title: "P1" +--- +-- layouts/index.html -- +Pages: {{ range .Site.RegularPages }}{{ .RelPermalink }}|{{ end }}$ +` + b := TestRunning(t, files) + + b.AssertFileContent("public/index.html", "Pages: /foo/p1/|$") + + b.RenameDir("content/foo", "content/bar").Build() + + b.AssertFileContent("public/index.html", "Pages: /bar/p1/|$") +} + +func TestRebuildAddRegularFileRegularPageUsedInHomeMultilingual(t *testing.T) { + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableLiveReload = true +[languages] +[languages.en] +weight = 1 +[languages.nn] +weight = 2 +[languages.fr] +weight = 3 +[languages.a] +weight = 4 +[languages.b] +weight = 5 +[languages.c] +weight = 6 +[languages.d] +weight = 7 +[languages.e] +weight = 8 +[languages.f] +weight = 9 +[languages.g] +weight = 10 +[languages.h] +weight = 11 +[languages.i] +weight = 12 +[languages.j] +weight = 13 +-- content/foo/_index.md -- +-- content/foo/data.txt -- +-- content/foo/p1.md -- +-- content/foo/p1.nn.md -- +-- content/foo/p1.fr.md -- +-- content/foo/p1.a.md -- +-- content/foo/p1.b.md -- +-- content/foo/p1.c.md -- +-- content/foo/p1.d.md -- +-- content/foo/p1.e.md -- +-- content/foo/p1.f.md -- +-- content/foo/p1.g.md -- +-- content/foo/p1.h.md -- +-- content/foo/p1.i.md -- +-- content/foo/p1.j.md -- +-- layouts/index.html -- +RegularPages: {{ range .Site.RegularPages }}{{ .RelPermalink }}|{{ end }}$ +` + b := TestRunning(t, files) + + b.AssertFileContent("public/index.html", "RegularPages: /foo/p1/|$") + b.AssertFileContent("public/nn/index.html", "RegularPages: /nn/foo/p1/|$") + b.AssertFileContent("public/i/index.html", "RegularPages: 
/i/foo/p1/|$") + + b.AddFiles("content/foo/p2.md", ``).Build() + + b.AssertFileContent("public/index.html", "RegularPages: /foo/p1/|/foo/p2/|$") + b.AssertFileContent("public/fr/index.html", "RegularPages: /fr/foo/p1/|$") + + b.AddFiles("content/foo/p2.fr.md", ``).Build() + b.AssertFileContent("public/fr/index.html", "RegularPages: /fr/foo/p1/|/fr/foo/p2/|$") + + b.AddFiles("content/foo/p2.i.md", ``).Build() + b.AssertFileContent("public/i/index.html", "RegularPages: /i/foo/p1/|/i/foo/p2/|$") +} + +func TestRebuildRenameDirectoryWithBranchBundleFastRender(t *testing.T) { + recentlyVisited := types.NewEvictingStringQueue(10).Add("/a/b/c/") + b := TestRunning(t, rebuildFilesSimple, func(cfg *IntegrationTestConfig) { cfg.BuildCfg = BuildCfg{RecentlyVisited: recentlyVisited} }) + b.RenameDir("content/mysection", "content/mysectionrenamed").Build() + b.AssertFileContent("public/mysectionrenamed/index.html", "My Section") + b.AssertFileContent("public/mysectionrenamed/mysectionbundle/index.html", "My Section Bundle") + b.AssertFileContent("public/mysectionrenamed/mysectionbundle/mysectionbundletext.txt", "My Section Bundle Text 2 Content.") + b.AssertRenderCountPage(2) +} + +func TestRebuilErrorRecovery(t *testing.T) { + b := TestRunning(t, rebuildFilesSimple) + _, err := b.EditFileReplaceAll("content/mysection/mysectionbundle/index.md", "My Section Bundle Content.", "My Section Bundle Content\n\n\n\n{{< foo }}.").BuildE() + + b.Assert(err, qt.Not(qt.IsNil)) + b.Assert(err.Error(), qt.Contains, filepath.FromSlash(`"/content/mysection/mysectionbundle/index.md:8:9": unrecognized character`)) + + // Fix the error + b.EditFileReplaceAll("content/mysection/mysectionbundle/index.md", "{{< foo }}", "{{< foo >}}").Build() +} + +func TestRebuildScopedToOutputFormat(t *testing.T) { + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["term", "taxonomy", "sitemap", "robotstxt", "404"] +disableLiveReload = true +-- content/p1.md -- +--- +title: "P1" 
+outputs: ["html", "json"] +--- +P1 Content. + +{{< myshort >}} +-- layouts/_default/single.html -- +Single HTML: {{ .Title }}|{{ .Content }}| +-- layouts/_default/single.json -- +Single JSON: {{ .Title }}|{{ .Content }}| +-- layouts/shortcodes/myshort.html -- +My short. +` + b := Test(t, files, TestOptRunning()) + b.AssertRenderCountPage(3) + b.AssertRenderCountContent(1) + b.AssertFileContent("public/p1/index.html", "Single HTML: P1|<p>P1 Content.</p>\n") + b.AssertFileContent("public/p1/index.json", "Single JSON: P1|<p>P1 Content.</p>\n") + b.EditFileReplaceAll("layouts/_default/single.html", "Single HTML", "Single HTML Edited").Build() + b.AssertFileContent("public/p1/index.html", "Single HTML Edited: P1|<p>P1 Content.</p>\n") + b.AssertRenderCountPage(1) + + // Edit shortcode. Note that this is reused across all output formats. + b.EditFileReplaceAll("layouts/shortcodes/myshort.html", "My short", "My short edited").Build() + b.AssertFileContent("public/p1/index.html", "My short edited") + b.AssertFileContent("public/p1/index.json", "My short edited") + b.AssertRenderCountPage(3) // rss (uses .Content) + 2 single pages. +} + +func TestRebuildBaseof(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +title = "Hugo Site" +baseURL = "https://example.com" +disableKinds = ["term", "taxonomy"] +disableLiveReload = true +-- layouts/_default/baseof.html -- +Baseof: {{ .Title }}| +{{ block "main" . 
}}default{{ end }} +-- layouts/index.html -- +{{ define "main" }} +Home: {{ .Title }}|{{ .Content }}| +{{ end }} +` + b := Test(t, files, TestOptRunning()) + b.AssertFileContent("public/index.html", "Baseof: Hugo Site|", "Home: Hugo Site||") + b.EditFileReplaceFunc("layouts/_default/baseof.html", func(s string) string { + return strings.Replace(s, "Baseof", "Baseof Edited", 1) + }).Build() + b.AssertFileContent("public/index.html", "Baseof Edited: Hugo Site|", "Home: Hugo Site||") +} + +func TestRebuildSingleWithBaseof(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +title = "Hugo Site" +baseURL = "https://example.com" +disableKinds = ["term", "taxonomy"] +disableLiveReload = true +-- content/p1.md -- +--- +title: "P1" +--- +P1 Content. +-- layouts/_default/baseof.html -- +Baseof: {{ .Title }}| +{{ block "main" . }}default{{ end }} +-- layouts/index.html -- +Home. +-- layouts/_default/single.html -- +{{ define "main" }} +Single: {{ .Title }}|{{ .Content }}| +{{ end }} +` + b := Test(t, files, TestOptRunning()) + b.AssertFileContent("public/p1/index.html", "Baseof: P1|\n\nSingle: P1|<p>P1 Content.</p>\n|") + b.EditFileReplaceFunc("layouts/_default/single.html", func(s string) string { + return strings.Replace(s, "Single", "Single Edited", 1) + }).Build() + b.AssertFileContent("public/p1/index.html", "Baseof: P1|\n\nSingle Edited: P1|<p>P1 Content.</p>\n|") +} + +func TestRebuildFromString(t *testing.T) { + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["term", "taxonomy", "sitemap", "robotstxt", "404"] +disableLiveReload = true +-- content/p1.md -- +--- +title: "P1" +layout: "l1" +--- +P1 Content. +-- content/p2.md -- +--- +title: "P2" +layout: "l2" +--- +P2 Content. +-- assets/mytext.txt -- +My Text +-- layouts/_default/l1.html -- +{{ $r := partial "get-resource.html" . }} +L1: {{ .Title }}|{{ .Content }}|R: {{ $r.Content }}| +-- layouts/_default/l2.html -- +L2. 
+-- layouts/partials/get-resource.html -- +{{ $mytext := resources.Get "mytext.txt" }} +{{ $txt := printf "Text: %s" $mytext.Content }} +{{ $r := resources.FromString "r.txt" $txt }} +{{ return $r }} + +` + b := TestRunning(t, files) + + b.AssertFileContent("public/p1/index.html", "L1: P1|<p>P1 Content.</p>\n|R: Text: My Text|") + + b.EditFileReplaceAll("assets/mytext.txt", "My Text", "My Text Edited").Build() + + b.AssertFileContent("public/p1/index.html", "L1: P1|<p>P1 Content.</p>\n|R: Text: My Text Edited|") + + b.AssertRenderCountPage(1) +} + +func TestRebuildDeeplyNestedLink(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +baseURL = "https://example.com/" +disableKinds = ["term", "taxonomy", "sitemap", "robotstxt", "404"] +disableLiveReload = true +-- content/s/p1.md -- +--- +title: "P1" +--- +-- content/s/p2.md -- +--- +title: "P2" +--- +-- content/s/p3.md -- +--- +title: "P3" +--- +-- content/s/p4.md -- +--- +title: "P4" +--- +-- content/s/p5.md -- +--- +title: "P5" +--- +-- content/s/p6.md -- +--- +title: "P6" +--- +-- content/s/p7.md -- +--- +title: "P7" +--- +-- layouts/_default/list.html -- +List. +-- layouts/_default/single.html -- +Single. +-- layouts/_default/single.html -- +Next: {{ with .PrevInSection }}{{ .Title }}{{ end }}| +Prev: {{ with .NextInSection }}{{ .Title }}{{ end }}| + + +` + + b := TestRunning(t, files) + + b.AssertFileContent("public/s/p1/index.html", "Next: P2|") + b.EditFileReplaceAll("content/s/p7.md", "P7", "P7 Edited").Build() + b.AssertFileContent("public/s/p6/index.html", "Next: P7 Edited|") +} + +func TestRebuildVariations(t *testing.T) { + // t.Parallel() not supported, see https://github.com/fortytw2/leaktest/issues/4 + // This leaktest seems to be a little bit shaky on Travis. 
+ if !htesting.IsCI() { + defer leaktest.CheckTimeout(t, 10*time.Second)() + } + + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disdableKinds = ["term", "taxonomy"] +disableLiveReload = true +defaultContentLanguage = "nn" +paginate = 20 +[security] +enableInlineShortcodes = true +[languages] +[languages.en] +weight = 1 +[languages.nn] +weight = 2 +-- content/mysect/p1/index.md -- +--- +title: "P1" +--- +P1 Content. +{{< include "mysect/p2" >}} +§§§go { page="mysect/p3" } +hello +§§§ + +{{< foo.inline >}}Foo{{< /foo.inline >}} +-- content/mysect/p2/index.md -- +--- +title: "P2" +--- +P2 Content. +-- content/mysect/p3/index.md -- +--- +title: "P3" +--- +P3 Content. +-- content/mysect/sub/_index.md -- +-- content/mysect/sub/p4/index.md -- +--- +title: "P4" +--- +P4 Content. +-- content/mysect/sub/p5/index.md -- +--- +title: "P5" +lastMod: 2019-03-02 +--- +P5 Content. +-- content/myothersect/_index.md -- +--- +cascade: +- _target: + cascadeparam: "cascadevalue" +--- +-- content/myothersect/sub/_index.md -- +-- content/myothersect/sub/p6/index.md -- +--- +title: "P6" +--- +P6 Content. +-- content/translations/p7.en.md -- +--- +title: "P7 EN" +--- +P7 EN Content. +-- content/translations/p7.nn.md -- +--- +title: "P7 NN" +--- +P7 NN Content. +-- layouts/index.html -- +Home: {{ .Title }}|{{ .Content }}| +RegularPages: {{ range .RegularPages }}{{ .RelPermalink }}|{{ end }}$ +Len RegularPagesRecursive: {{ len .RegularPagesRecursive }} +Site.Lastmod: {{ .Site.Lastmod.Format "2006-01-02" }}| +Paginate: {{ range (.Paginate .Site.RegularPages).Pages }}{{ .RelPermalink }}|{{ .Title }}|{{ end }}$ +-- layouts/_default/single.html -- +Single: {{ .Title }}|{{ .Content }}| +Single Partial Cached: {{ partialCached "pcached" . 
}}| +Page.Lastmod: {{ .Lastmod.Format "2006-01-02" }}| +Cascade param: {{ .Params.cascadeparam }}| +-- layouts/_default/list.html -- +List: {{ .Title }}|{{ .Content }}| +RegularPages: {{ range .RegularPages }}{{ .Title }}|{{ end }}$ +Len RegularPagesRecursive: {{ len .RegularPagesRecursive }} +RegularPagesRecursive: {{ range .RegularPagesRecursive }}{{ .RelPermalink }}|{{ end }}$ +List Partial P1: {{ partial "p1" . }}| +Page.Lastmod: {{ .Lastmod.Format "2006-01-02" }}| +Cascade param: {{ .Params.cascadeparam }}| +-- layouts/partials/p1.html -- +Partial P1. +-- layouts/partials/pcached.html -- +Partial Pcached. +-- layouts/shortcodes/include.html -- +{{ $p := site.GetPage (.Get 0)}} +{{ with $p }} +Shortcode Include: {{ .Title }}| +{{ end }} +Shortcode .Page.Title: {{ .Page.Title }}| +Shortcode Partial P1: {{ partial "p1" . }}| +-- layouts/_default/_markup/render-codeblock.html -- +{{ $p := site.GetPage (.Attributes.page)}} +{{ with $p }} +Codeblock Include: {{ .Title }}| +{{ end }} + + + +` + + b := NewIntegrationTestBuilder( + IntegrationTestConfig{ + T: t, + TxtarString: files, + Running: true, + BuildCfg: BuildCfg{ + testCounters: &buildCounters{}, + }, + // Verbose: true, + // LogLevel: logg.LevelTrace, + }, + ).Build() + + // When running the server, this is done on shutdown. + // Do this here to satisfy the leak detector above. + defer func() { + b.Assert(b.H.Close(), qt.IsNil) + }() + + contentRenderCount := b.counters.contentRenderCounter.Load() + pageRenderCount := b.counters.pageRenderCounter.Load() + + b.Assert(contentRenderCount > 0, qt.IsTrue) + b.Assert(pageRenderCount > 0, qt.IsTrue) + + // Test cases: + // - Edit content file direct + // - Edit content file transitive shortcode + // - Edit content file transitive render hook + // - Rename one languge version of a content file + // - Delete content file, check site.RegularPages and section.RegularPagesRecursive (length) + // - Add content file (see above). 
+ // - Edit shortcode + // - Edit inline shortcode + // - Edit render hook + // - Edit partial used in template + // - Edit partial used in shortcode + // - Edit partial cached. + // - Edit lastMod date in content file, check site.Lastmod. + editFile := func(filename string, replacementFunc func(s string) string) { + b.EditFileReplaceFunc(filename, replacementFunc).Build() + b.Assert(b.counters.contentRenderCounter.Load() < contentRenderCount, qt.IsTrue, qt.Commentf("count %d < %d", b.counters.contentRenderCounter.Load(), contentRenderCount)) + b.Assert(b.counters.pageRenderCounter.Load() < pageRenderCount, qt.IsTrue, qt.Commentf("count %d < %d", b.counters.pageRenderCounter.Load(), pageRenderCount)) + } + + b.AssertFileContent("public/index.html", "RegularPages: $", "Len RegularPagesRecursive: 7", "Site.Lastmod: 2019-03-02") + + b.AssertFileContent("public/mysect/p1/index.html", + "Single: P1|<p>P1 Content.", + "Shortcode Include: P2|", + "Codeblock Include: P3|") + + editFile("content/mysect/p1/index.md", func(s string) string { + return strings.ReplaceAll(s, "P1", "P1 Edited") + }) + + b.AssertFileContent("public/mysect/p1/index.html", "Single: P1 Edited|<p>P1 Edited Content.") + b.AssertFileContent("public/index.html", "RegularPages: $", "Len RegularPagesRecursive: 7", "Paginate: /mysect/sub/p5/|P5|/mysect/p1/|P1 Edited") + b.AssertFileContent("public/mysect/index.html", "RegularPages: P1 Edited|P2|P3|$", "Len RegularPagesRecursive: 5") + + // p2 is included in p1 via shortcode. + editFile("content/mysect/p2/index.md", func(s string) string { + return strings.ReplaceAll(s, "P2", "P2 Edited") + }) + + b.AssertFileContent("public/mysect/p1/index.html", "Shortcode Include: P2 Edited|") + + // p3 is included in p1 via codeblock hook. 
+ editFile("content/mysect/p3/index.md", func(s string) string { + return strings.ReplaceAll(s, "P3", "P3 Edited") + }) + + b.AssertFileContent("public/mysect/p1/index.html", "Codeblock Include: P3 Edited|") + + // Remove a content file in a nested section. + b.RemoveFiles("content/mysect/sub/p4/index.md").Build() + b.AssertFileContent("public/mysect/index.html", "RegularPages: P1 Edited|P2 Edited|P3 Edited", "Len RegularPagesRecursive: 4") + b.AssertFileContent("public/mysect/sub/index.html", "RegularPages: P5|$", "RegularPagesRecursive: 1") + + // Rename one of the translations. + b.AssertFileContent("public/translations/index.html", "RegularPagesRecursive: /translations/p7/") + b.AssertFileContent("public/en/translations/index.html", "RegularPagesRecursive: /en/translations/p7/") + b.RenameFile("content/translations/p7.nn.md", "content/translations/p7rename.nn.md").Build() + b.AssertFileContent("public/translations/index.html", "RegularPagesRecursive: /translations/p7rename/") + b.AssertFileContent("public/en/translations/index.html", "RegularPagesRecursive: /en/translations/p7/") + + // Edit shortcode + editFile("layouts/shortcodes/include.html", func(s string) string { + return s + "\nShortcode Include Edited." + }) + b.AssertFileContent("public/mysect/p1/index.html", "Shortcode Include Edited.") + + // Edit render hook + editFile("layouts/_default/_markup/render-codeblock.html", func(s string) string { + return s + "\nCodeblock Include Edited." + }) + b.AssertFileContent("public/mysect/p1/index.html", "Codeblock Include Edited.") + + // Edit partial p1 + editFile("layouts/partials/p1.html", func(s string) string { + return strings.Replace(s, "Partial P1", "Partial P1 Edited", 1) + }) + b.AssertFileContent("public/mysect/index.html", "List Partial P1: Partial P1 Edited.") + b.AssertFileContent("public/mysect/p1/index.html", "Shortcode Partial P1: Partial P1 Edited.") + + // Edit partial cached. 
+ editFile("layouts/partials/pcached.html", func(s string) string { + return strings.Replace(s, "Partial Pcached", "Partial Pcached Edited", 1) + }) + b.AssertFileContent("public/mysect/p1/index.html", "Pcached Edited.") + + // Edit lastMod date in content file, check site.Lastmod. + editFile("content/mysect/sub/p5/index.md", func(s string) string { + return strings.Replace(s, "2019-03-02", "2020-03-10", 1) + }) + b.AssertFileContent("public/index.html", "Site.Lastmod: 2020-03-10|") + b.AssertFileContent("public/mysect/index.html", "Page.Lastmod: 2020-03-10|") + + // Adjust the date back a few days. + editFile("content/mysect/sub/p5/index.md", func(s string) string { + return strings.Replace(s, "2020-03-10", "2019-03-08", 1) + }) + b.AssertFileContent("public/mysect/index.html", "Page.Lastmod: 2019-03-08|") + b.AssertFileContent("public/index.html", "Site.Lastmod: 2019-03-08|") + + // Check cascade mods. + b.AssertFileContent("public/myothersect/index.html", "Cascade param: cascadevalue|") + b.AssertFileContent("public/myothersect/sub/index.html", "Cascade param: cascadevalue|") + b.AssertFileContent("public/myothersect/sub/p6/index.html", "Cascade param: cascadevalue|") + + editFile("content/myothersect/_index.md", func(s string) string { + return strings.Replace(s, "cascadevalue", "cascadevalue edited", 1) + }) + b.AssertFileContent("public/myothersect/index.html", "Cascade param: cascadevalue edited|") + b.AssertFileContent("public/myothersect/sub/p6/index.html", "Cascade param: cascadevalue edited|") + + // Repurpose the cascadeparam to set the title. + editFile("content/myothersect/_index.md", func(s string) string { + return strings.Replace(s, "cascadeparam:", "title:", 1) + }) + b.AssertFileContent("public/myothersect/sub/index.html", "Cascade param: |", "List: cascadevalue edited|") + + // Revert it. 
+ editFile("content/myothersect/_index.md", func(s string) string { + return strings.Replace(s, "title:", "cascadeparam:", 1) + }) + b.AssertFileContent("public/myothersect/sub/index.html", "Cascade param: cascadevalue edited|", "List: |") +} + +func TestRebuildVariationsJSNoneFingerprinted(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +baseURL = "https://example.com/" +disableKinds = ["term", "taxonomy", "sitemap", "robotsTXT", "404", "rss"] +disableLiveReload = true +-- content/p1/index.md -- +--- +title: "P1" +--- +P1. +-- content/p2/index.md -- +--- +title: "P2" +--- +P2. +-- content/p3/index.md -- +--- +title: "P3" +--- +P3. +-- content/p4/index.md -- +--- +title: "P4" +--- +P4. +-- assets/main.css -- +body { + background: red; +} +-- layouts/default/list.html -- +List. +-- layouts/_default/single.html -- +Single. +{{ $css := resources.Get "main.css" | minify }} +RelPermalink: {{ $css.RelPermalink }}| + +` + + b := TestRunning(t, files) + + b.AssertFileContent("public/p1/index.html", "RelPermalink: /main.min.css|") + b.AssertFileContent("public/main.min.css", "body{background:red}") + + b.EditFileReplaceAll("assets/main.css", "red", "blue") + b.RemoveFiles("content/p2/index.md") + b.RemoveFiles("content/p3/index.md") + b.Build() + + b.AssertFileContent("public/main.min.css", "body{background:blue}") +} + +func TestRebuildVariationsJSInNestedCachedPartialFingerprinted(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +baseURL = "https://example.com/" +disableKinds = ["term", "taxonomy", "sitemap", "robotsTXT", "404", "rss"] +disableLiveReload = true +-- content/p1/index.md -- +--- +title: "P1" +--- +P1. +-- content/p2/index.md -- +--- +title: "P2" +--- +P2. +-- content/p3/index.md -- +--- +title: "P3" +--- +P3. +-- content/p4/index.md -- +--- +title: "P4" +--- +P4. +-- assets/js/main.js -- +console.log("Hello"); +-- layouts/_default/list.html -- +List. {{ partial "head.html" . }}$ +-- layouts/_default/single.html -- +Single. 
{{ partial "head.html" . }}$ +-- layouts/partials/head.html -- +{{ partialCached "js.html" . }}$ +-- layouts/partials/js.html -- +{{ $js := resources.Get "js/main.js" | js.Build | fingerprint }} +RelPermalink: {{ $js.RelPermalink }}| +` + + b := TestRunning(t, files) + + b.AssertFileContent("public/p1/index.html", "/js/main.712a50b59d0f0dedb4e3606eaa3860b1f1a5305f6c42da30a2985e473ba314eb.js") + b.AssertFileContent("public/index.html", "/js/main.712a50b59d0f0dedb4e3606eaa3860b1f1a5305f6c42da30a2985e473ba314eb.js") + + b.EditFileReplaceAll("assets/js/main.js", "Hello", "Hello is Edited").Build() + + for i := 1; i < 5; i++ { + b.AssertFileContent(fmt.Sprintf("public/p%d/index.html", i), "/js/main.6535698cec9a21875f40ae03e96f30c4bee41a01e979224761e270b9034b2424.js") + } + + b.AssertFileContent("public/index.html", "/js/main.6535698cec9a21875f40ae03e96f30c4bee41a01e979224761e270b9034b2424.js") +} + +func TestRebuildVariationsJSInNestedPartialFingerprintedInBase(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +baseURL = "https://example.com/" +disableKinds = ["term", "taxonomy", "sitemap", "robotsTXT", "404", "rss"] +disableLiveReload = true +-- assets/js/main.js -- +console.log("Hello"); +-- layouts/_default/baseof.html -- +Base. {{ partial "common/head.html" . }}$ +{{ block "main" . }}default{{ end }} +-- layouts/_default/list.html -- +{{ define "main" }}main{{ end }} +-- layouts/partials/common/head.html -- +{{ partial "myfiles/js.html" . 
}}$ +-- layouts/partials/myfiles/js.html -- +{{ $js := resources.Get "js/main.js" | js.Build | fingerprint }} +RelPermalink: {{ $js.RelPermalink }}| +` + + b := TestRunning(t, files) + + b.AssertFileContent("public/index.html", "/js/main.712a50b59d0f0dedb4e3606eaa3860b1f1a5305f6c42da30a2985e473ba314eb.js") + + b.EditFileReplaceAll("assets/js/main.js", "Hello", "Hello is Edited").Build() + + b.AssertFileContent("public/index.html", "/js/main.6535698cec9a21875f40ae03e96f30c4bee41a01e979224761e270b9034b2424.js") +} + +func TestRebuildVariationsJSBundled(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["term", "taxonomy", "sitemap", "robotsTXT", "404", "rss"] +disableLiveReload = true +-- content/_index.md -- +--- +title: "Home" +--- +-- content/p1.md -- +--- +title: "P1" +layout: "main" +--- +-- content/p2.md -- +--- +title: "P2" +--- +{{< jsfingerprinted >}} +-- content/p3.md -- +--- +title: "P3" +layout: "plain" +--- +{{< jsfingerprinted >}} +-- content/main.js -- +console.log("Hello"); +-- content/foo.js -- +console.log("Foo"); +-- layouts/index.html -- +Home. +{{ $js := site.Home.Resources.Get "main.js" }} +{{ with $js }} +<script src="{{ .RelPermalink }}"></script> +{{ end }} +-- layouts/_default/single.html -- +Single. Deliberately no .Content in here. 
+-- layouts/_default/plain.html -- +Content: {{ .Content }}| +-- layouts/_default/main.html -- +{{ $js := site.Home.Resources.Get "main.js" }} +{{ with $js }} +<script> +{{ .Content }} +</script> +{{ end }} +-- layouts/shortcodes/jsfingerprinted.html -- +{{ $js := site.Home.Resources.Get "foo.js" | fingerprint }} +<script src="{{ $js.RelPermalink }}"></script> +` + + testCounters := &buildCounters{} + + b := NewIntegrationTestBuilder( + IntegrationTestConfig{ + T: t, + TxtarString: files, + Running: true, + // LogLevel: logg.LevelTrace, + // Verbose: true, + BuildCfg: BuildCfg{ + testCounters: testCounters, + }, + }, + ).Build() + + b.AssertFileContent("public/index.html", `<script src="/main.js"></script>`) + b.AssertFileContent("public/p1/index.html", "<script>\n\"console.log(\\\"Hello\\\");\"\n</script>") + b.AssertFileContent("public/p2/index.html", "Single. Deliberately no .Content in here.") + b.AssertFileContent("public/p3/index.html", "foo.57b4465b908531b43d4e4680ab1063d856b475cb1ae81ad43e0064ecf607bec1.js") + b.AssertRenderCountPage(4) + + // Edit JS file. + b.EditFileReplaceFunc("content/main.js", func(s string) string { + return strings.Replace(s, "Hello", "Hello is Edited", 1) + }).Build() + + b.AssertFileContent("public/p1/index.html", "<script>\n\"console.log(\\\"Hello is Edited\\\");\"\n</script>") + // The p1 (the one inlining the JS) should be rebuilt. + b.AssertRenderCountPage(2) + // But not the content file. + b.AssertRenderCountContent(0) + + // This is included with RelPermalink in a shortcode used in p3, but it's fingerprinted + // so we need to rebuild on change. + b.EditFileReplaceFunc("content/foo.js", func(s string) string { + return strings.Replace(s, "Foo", "Foo Edited", 1) + }).Build() + + // Verify that the hash has changed. 
+ b.AssertFileContent("public/p3/index.html", "foo.3a332a088521231e5fc9bd22f15e0ccf507faa7b373fbff22959005b9a80481c.js") + + b.AssertRenderCountPage(1) + b.AssertRenderCountContent(1) +} + +func TestRebuildEditData(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +disableLiveReload = true +[security] +enableInlineShortcodes=true +-- data/mydata.yaml -- +foo: bar +-- content/_index.md -- +--- +title: "Home" +--- +{{< data "mydata.foo" >}}} +-- content/p1.md -- +--- +title: "P1" +--- + +Foo inline: {{< foo.inline >}}{{ site.Data.mydata.foo }}|{{< /foo.inline >}} +-- layouts/shortcodes/data.html -- +{{ $path := split (.Get 0) "." }} +{{ $data := index site.Data $path }} +Foo: {{ $data }}| +-- layouts/index.html -- +Content: {{ .Content }}| +-- layouts/_default/single.html -- +Single: {{ .Content }}| +` + b := TestRunning(t, files) + + b.AssertFileContent("public/index.html", "Foo: bar|") + b.AssertFileContent("public/p1/index.html", "Foo inline: bar|") + b.EditFileReplaceFunc("data/mydata.yaml", func(s string) string { + return strings.Replace(s, "bar", "bar edited", 1) + }).Build() + b.AssertFileContent("public/index.html", "Foo: bar edited|") + b.AssertFileContent("public/p1/index.html", "Foo inline: bar edited|") +} + +func TestRebuildEditHomeContent(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableLiveReload = true +-- content/_index.md -- +--- +title: "Home" +--- +Home. +-- layouts/index.html -- +Content: {{ .Content }} +` + b := TestRunning(t, files) + + b.AssertFileContent("public/index.html", "Content: <p>Home.</p>") + b.EditFileReplaceAll("content/_index.md", "Home.", "Home").Build() + b.AssertFileContent("public/index.html", "Content: <p>Home</p>") +} + +func TestRebuildVariationsAssetsJSImport(t *testing.T) { + t.Parallel() + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["term", "taxonomy"] +disableLiveReload = true +-- layouts/index.html -- +Home. 
{{ now }} +{{ with (resources.Get "js/main.js" | js.Build | fingerprint) }} +<script>{{ .Content | safeJS }}</script> +{{ end }} +-- assets/js/lib/foo.js -- +export function foo() { + console.log("Foo"); +} +-- assets/js/main.js -- +import { foo } from "./lib/foo.js"; +console.log("Hello"); +foo(); +` + + b := NewIntegrationTestBuilder( + IntegrationTestConfig{ + T: t, + TxtarString: files, + Running: true, + // LogLevel: logg.LevelTrace, + NeedsOsFS: true, + }, + ).Build() + + b.AssertFileContent("public/index.html", "Home.", "Hello", "Foo") + // Edit the imported file. + b.EditFileReplaceAll("assets/js/lib/foo.js", "Foo", "Foo Edited").Build() + b.AssertFileContent("public/index.html", "Home.", "Hello", "Foo Edited") +} + +func TestRebuildVariationsAssetsPostCSSImport(t *testing.T) { + if !htesting.IsCI() { + t.Skip("skip CI only") + } + + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["term", "taxonomy", "sitemap", "rss"] +disableLiveReload = true +-- assets/css/lib/foo.css -- +body { + background: red; +} +-- assets/css/main.css -- +@import "lib/foo.css"; +-- package.json -- +{ + "devDependencies": { + "postcss-cli": "^9.0.1" + } +} +-- content/p1.md -- +--- +title: "P1" +--- +-- content/p2.md -- +--- +title: "P2" +layout: "foo" +--- +{{< fingerprinted >}} +-- content/p3.md -- +--- +title: "P3" +layout: "foo" +--- +{{< notfingerprinted >}} +-- layouts/shortcodes/fingerprinted.html -- +Fingerprinted. +{{ $opts := dict "inlineImports" true "noMap" true }} +{{ with (resources.Get "css/main.css" | postCSS $opts | fingerprint) }} +<style src="{{ .RelPermalink }}"></style> +{{ end }} +-- layouts/shortcodes/notfingerprinted.html -- +Fingerprinted. +{{ $opts := dict "inlineImports" true "noMap" true }} +{{ with (resources.Get "css/main.css" | postCSS $opts) }} +<style src="{{ .RelPermalink }}"></style> +{{ end }} +-- layouts/index.html -- +Home. 
+{{ $opts := dict "inlineImports" true "noMap" true }} +{{ with (resources.Get "css/main.css" | postCSS $opts) }} +<style>{{ .Content | safeCSS }}</style> +{{ end }} +-- layouts/_default/foo.html -- +Foo. +{{ .Title }}|{{ .Content }}| +-- layouts/_default/single.html -- +Single. +{{ $opts := dict "inlineImports" true "noMap" true }} +{{ with (resources.Get "css/main.css" | postCSS $opts) }} +<style src="{{ .RelPermalink }}"></style> +{{ end }} +` + + b := NewIntegrationTestBuilder( + IntegrationTestConfig{ + T: t, + TxtarString: files, + Running: true, + NeedsOsFS: true, + NeedsNpmInstall: true, + // LogLevel: logg.LevelTrace, + }, + ).Build() + + b.AssertFileContent("public/index.html", "Home.", "<style>body {\n\tbackground: red;\n}</style>") + b.AssertFileContent("public/p1/index.html", "Single.", "/css/main.css") + b.AssertRenderCountPage(4) + + // Edit the imported file. + b.EditFileReplaceFunc("assets/css/lib/foo.css", func(s string) string { + return strings.Replace(s, "red", "blue", 1) + }).Build() + + b.AssertRenderCountPage(3) + + b.AssertFileContent("public/index.html", "Home.", "<style>body {\n\tbackground: blue;\n}</style>") +} + +func TestRebuildVariationsAssetsSassImport(t *testing.T) { + if !htesting.IsCI() { + t.Skip("skip CI only") + } + + filesTemplate := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["term", "taxonomy"] +disableLiveReload = true +-- assets/css/lib/foo.scss -- +body { + background: red; +} +-- assets/css/main.scss -- +@import "lib/foo"; +-- layouts/index.html -- +Home. 
+{{ $opts := dict "transpiler" "TRANSPILER" }} +{{ with (resources.Get "css/main.scss" | toCSS $opts) }} +<style>{{ .Content | safeCSS }}</style> +{{ end }} +` + + runTest := func(transpiler string) { + t.Run(transpiler, func(t *testing.T) { + files := strings.Replace(filesTemplate, "TRANSPILER", transpiler, 1) + b := NewIntegrationTestBuilder( + IntegrationTestConfig{ + T: t, + TxtarString: files, + Running: true, + NeedsOsFS: true, + }, + ).Build() + + b.AssertFileContent("public/index.html", "Home.", "background: red") + + // Edit the imported file. + b.EditFileReplaceFunc("assets/css/lib/foo.scss", func(s string) string { + return strings.Replace(s, "red", "blue", 1) + }).Build() + + b.AssertFileContent("public/index.html", "Home.", "background: blue") + }) + } + + if scss.Supports() { + runTest("libsass") + } + + if dartsass.Supports() { + runTest("dartsass") + } +} + +func benchmarkFilesEdit(count int) string { + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disdableKinds = ["term", "taxonomy"] +disableLiveReload = true +-- layouts/_default/single.html -- +Single: {{ .Title }}|{{ .Content }}| +-- layouts/_default/list.html -- +List: {{ .Title }}|{{ .Content }}| +-- content/mysect/_index.md -- +--- +title: "My Sect" +--- + ` + + contentTemplate := ` +--- +title: "P%d" +--- +P%d Content. 
+` + + for i := 0; i < count; i++ { + files += fmt.Sprintf("-- content/mysect/p%d/index.md --\n%s", i, fmt.Sprintf(contentTemplate, i, i)) + } + + return files +} + +func BenchmarkRebuildContentFileChange(b *testing.B) { + files := benchmarkFilesEdit(500) + + cfg := IntegrationTestConfig{ + T: b, + TxtarString: files, + Running: true, + // Verbose: true, + // LogLevel: logg.LevelInfo, + } + builders := make([]*IntegrationTestBuilder, b.N) + + for i := range builders { + builders[i] = NewIntegrationTestBuilder(cfg) + builders[i].Build() + } + + b.ResetTimer() + for i := 0; i < b.N; i++ { + bb := builders[i] + bb.EditFileReplaceFunc("content/mysect/p123/index.md", func(s string) string { + return s + "... Edited" + }).Build() + // fmt.Println(bb.LogString()) + } +} diff --git a/hugolib/rendershortcodes_test.go b/hugolib/rendershortcodes_test.go index c6fa711cc..d0bc0546c 100644 --- a/hugolib/rendershortcodes_test.go +++ b/hugolib/rendershortcodes_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -63,7 +63,7 @@ Fragments: {{ .Fragments.Identifiers }}| HasShortcode Level 1: {{ .HasShortcode "include" }}| HasShortcode Level 2: {{ .HasShortcode "withmarkdown" }}| HasShortcode Level 3: {{ .HasShortcode "level3" }}| -HasSHortcode not found: {{ .HasShortcode "notfound" }}| +HasShortcode not found: {{ .HasShortcode "notfound" }}| Content: {{ .Content }}| ` @@ -79,11 +79,8 @@ Content: {{ .Content }}| "HasShortcode Level 1: true|", "HasShortcode Level 2: true|", "HasShortcode Level 3: true|", - "HasSHortcode not found: false|", + "HasShortcode not found: false|", ) - - // TODO1 more assertions. 
- } func TestRenderShortcodesNestedMultipleOutputFormatTemplates(t *testing.T) { @@ -130,7 +127,6 @@ JSON: {{ .Content }} b.AssertFileContent("public/p1/index.html", "Myshort HTML") b.AssertFileContent("public/p1/index.json", "Myshort JSON") - } func TestRenderShortcodesEditNested(t *testing.T) { @@ -159,27 +155,12 @@ title: "p2" Myshort Original. -- layouts/_default/single.html -- {{ .Content }} - - - ` - - b := NewIntegrationTestBuilder( - IntegrationTestConfig{ - T: t, - TxtarString: files, - Running: true, - }, - ).Build() - + b := TestRunning(t, files) b.AssertFileContent("public/p1/index.html", "Myshort Original.") - b.EditFileReplace("layouts/shortcodes/myshort.html", func(s string) string { - return "Myshort Edited." - }) - b.Build() + b.EditFileReplaceAll("layouts/shortcodes/myshort.html", "Original", "Edited").Build() b.AssertFileContent("public/p1/index.html", "Myshort Edited.") - } func TestRenderShortcodesEditIncludedPage(t *testing.T) { @@ -223,10 +204,9 @@ Myshort Original. b.AssertFileContent("public/p1/index.html", "Original") - b.EditFileReplace("content/p2.md", func(s string) string { + b.EditFileReplaceFunc("content/p2.md", func(s string) string { return strings.Replace(s, "Original", "Edited", 1) }) b.Build() b.AssertFileContent("public/p1/index.html", "Edited") - } diff --git a/hugolib/renderstring_test.go b/hugolib/renderstring_test.go index e0a4cd036..40980bdcb 100644 --- a/hugolib/renderstring_test.go +++ b/hugolib/renderstring_test.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -121,7 +121,6 @@ HasShortcode: foo:{{ .HasShortcode "foo" }}:false ` t.Run("Basic", func(t *testing.T) { - b := NewIntegrationTestBuilder( IntegrationTestConfig{ T: t, @@ -139,11 +138,9 @@ HasShortcode: mark2:true:true HasShortcode: foo:false:false Page Type: *hugolib.pageForShortcode`, ) - }) t.Run("Edit shortcode", func(t *testing.T) { - b := NewIntegrationTestBuilder( IntegrationTestConfig{ T: t, @@ -157,7 +154,6 @@ Page Type: *hugolib.pageForShortcode`, b.AssertFileContent("public/p1/index.html", `Edit shortcode`, ) - }) } @@ -189,7 +185,6 @@ Page Kind: home Has myshort: true Has other: false `) - } func TestRenderStringWithShortcodeIssue10654(t *testing.T) { diff --git a/hugolib/resource_chain_test.go b/hugolib/resource_chain_test.go index 17c3b2f0c..1365db72c 100644 --- a/hugolib/resource_chain_test.go +++ b/hugolib/resource_chain_test.go @@ -36,11 +36,10 @@ func TestResourceChainBasic(t *testing.T) { failIfHandler := func(h http.Handler) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if r.URL.Path == "/fail.jpg" { - http.Error(w, "{ msg: failed }", 501) + http.Error(w, "{ msg: failed }", http.StatusNotImplemented) return } h.ServeHTTP(w, r) - }) } ts := httptest.NewServer( @@ -89,7 +88,7 @@ FAILED REMOTE ERROR DETAILS CONTENT: {{ with $failedImg.Err }}|{{ . }}|{{ with . fs := b.Fs.Source imageDir := filepath.Join("assets", "images") - b.Assert(os.MkdirAll(imageDir, 0777), qt.IsNil) + b.Assert(os.MkdirAll(imageDir, 0o777), qt.IsNil) src, err := os.Open("testdata/sunset.jpg") b.Assert(err, qt.IsNil) out, err := fs.Create(filepath.Join(imageDir, "sunset.jpg")) @@ -101,18 +100,18 @@ FAILED REMOTE ERROR DETAILS CONTENT: {{ with $failedImg.Err }}|{{ . }}|{{ with . 
b.Running() for i := 0; i < 2; i++ { - + b.Logf("Test run %d", i) b.Build(BuildCfg{}) b.AssertFileContent("public/index.html", fmt.Sprintf(` -SUNSET: images/sunset.jpg|/images/sunset.a9bf1d944e19c0f382e0d8f51de690f7d0bc8fa97390c4242a86c3e5c0737e71.jpg|900|90587 -FIT: images/sunset.jpg|/images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x200_fit_q75_box.jpg|200 +SUNSET: /images/sunset.jpg|/images/sunset.a9bf1d944e19c0f382e0d8f51de690f7d0bc8fa97390c4242a86c3e5c0737e71.jpg|900|90587 +FIT: /images/sunset.jpg|/images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x200_fit_q75_box.jpg|200 CSS integrity Data first: sha256-od9YaHw8nMOL8mUy97Sy8sKwMV3N4hI3aVmZXATxH+8= /styles.min.a1df58687c3c9cc38bf26532f7b4b2f2c2b0315dcde212376959995c04f11fef.css CSS integrity Data last: /styles2.min.1cfc52986836405d37f9998a63fd6dd8608e8c410e5e3db1daaa30f78bc273ba.css sha256-HPxSmGg2QF03+ZmKY/1t2GCOjEEOXj2x2qow94vCc7o= -SUNSET REMOTE: sunset_%[1]s.jpg|/sunset_%[1]s.a9bf1d944e19c0f382e0d8f51de690f7d0bc8fa97390c4242a86c3e5c0737e71.jpg|900|90587 -FIT REMOTE: sunset_%[1]s.jpg|/sunset_%[1]s_hu59e56ffff1bc1d8d122b1403d34e039f_0_200x200_fit_q75_box.jpg|200 +SUNSET REMOTE: /sunset_%[1]s.jpg|/sunset_%[1]s.a9bf1d944e19c0f382e0d8f51de690f7d0bc8fa97390c4242a86c3e5c0737e71.jpg|900|90587 +FIT REMOTE: /sunset_%[1]s.jpg|/sunset_%[1]s_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x200_fit_q75_box.jpg|200 REMOTE NOT FOUND: OK LOCAL NOT FOUND: OK PRINT PROTOCOL ERROR DETAILS: Err: error calling resources.GetRemote: Get "gopher://example.org": unsupported protocol scheme "gopher"|| @@ -125,9 +124,9 @@ FAILED REMOTE ERROR DETAILS CONTENT: |failed to fetch remote resource: Not Imple b.AssertFileContent("public/styles.min.a1df58687c3c9cc38bf26532f7b4b2f2c2b0315dcde212376959995c04f11fef.css", "body{background-color:#add8e6}") b.AssertFileContent("public//styles2.min.1cfc52986836405d37f9998a63fd6dd8608e8c410e5e3db1daaa30f78bc273ba.css", "body{background-color:orange}") - b.EditFiles("page1.md", ` + 
b.EditFiles("content/_index.md", ` --- -title: "Page 1 edit" +title: "Home edit" summary: "Edited summary" --- @@ -135,9 +134,6 @@ Edited content. `) - b.Assert(b.Fs.WorkingDirWritable.Remove("public"), qt.IsNil) - b.H.ResourceSpec.ClearCaches() - } } @@ -147,7 +143,9 @@ func TestResourceChainPostProcess(t *testing.T) { rnd := rand.New(rand.NewSource(time.Now().UnixNano())) b := newTestSitesBuilder(t) - b.WithConfigFile("toml", `[minify] + b.WithConfigFile("toml", ` +disableLiveReload = true +[minify] minifyOutput = true [minify.tdewolff] [minify.tdewolff.html] @@ -184,7 +182,7 @@ End.`) b.AssertFileContent("public/index.html", `Start. HELLO: /hello.min.a2d1cb24f24b322a7dad520414c523e9.html|Integrity: md5-otHLJPJLMip9rVIEFMUj6Q==|MediaType: text/html -HELLO2: Name: hello.html|Content: <h1>Hello World!</h1>|Title: hello.html|ResourceType: text +HELLO2: Name: /hello.html|Content: <h1>Hello World!</h1>|Title: /hello.html|ResourceType: text <a href=hugo.rocks>foo</a> <a href="/hello.min.a2d1cb24f24b322a7dad520414c523e9.html" integrity="md5-otHLJPJLMip9rVIEFMUj6Q==">Hello</a> End.`) @@ -317,7 +315,6 @@ func TestResourceChains(t *testing.T) { } http.Error(w, "Not found", http.StatusNotFound) - return })) t.Cleanup(func() { ts.Close() @@ -680,22 +677,6 @@ $color: #333; } } -func TestMultiSiteResource(t *testing.T) { - t.Parallel() - c := qt.New(t) - - b := newMultiSiteTestDefaultBuilder(t) - - b.CreateSites().Build(BuildCfg{}) - - // This build is multilingual, but not multihost. 
There should be only one pipes.txt - b.AssertFileContent("public/fr/index.html", "French Home Page", "String Resource: /blog/text/pipes.txt") - c.Assert(b.CheckExists("public/fr/text/pipes.txt"), qt.Equals, false) - c.Assert(b.CheckExists("public/en/text/pipes.txt"), qt.Equals, false) - b.AssertFileContent("public/en/index.html", "Default Home Page", "String Resource: /blog/text/pipes.txt") - b.AssertFileContent("public/text/pipes.txt", "Hugo Pipes") -} - func TestResourcesMatch(t *testing.T) { t.Parallel() diff --git a/hugolib/rss_test.go b/hugolib/rss_test.go index ba2491c66..0c3c21b90 100644 --- a/hugolib/rss_test.go +++ b/hugolib/rss_test.go @@ -45,7 +45,7 @@ func TestRSSOutput(t *testing.T) { // Section RSS th.assertFileContent(filepath.Join("public", "sect", rssURI), "<?xml", "rss version", "Sects on RSSTest") // Taxonomy RSS - th.assertFileContent(filepath.Join("public", "categories", "hugo", rssURI), "<?xml", "rss version", "hugo on RSSTest") + th.assertFileContent(filepath.Join("public", "categories", "hugo", rssURI), "<?xml", "rss version", "Hugo on RSSTest") // RSS Item Limit content := readWorkingDir(t, fs, filepath.Join("public", rssURI)) diff --git a/hugolib/securitypolicies_test.go b/hugolib/securitypolicies_test.go index 60ef9a7e2..b0d39c697 100644 --- a/hugolib/securitypolicies_test.go +++ b/hugolib/securitypolicies_test.go @@ -101,9 +101,7 @@ func TestSecurityPolicies(t *testing.T) { testVariant(c, cb, `(?s).*python(\.exe)?" is not whitelisted in policy "security\.exec\.allow".*`) } else { testVariant(c, cb, `(?s).*"rst2html(\.py)?" 
is not whitelisted in policy "security\.exec\.allow".*`) - } - }) c.Run("Pandoc, denied", func(c *qt.C) { @@ -116,7 +114,7 @@ func TestSecurityPolicies(t *testing.T) { b.WithContent("page.pdc", "foo") } - testVariant(c, cb, `"(?s).*pandoc" is not whitelisted in policy "security\.exec\.allow".*`) + testVariant(c, cb, `(?s).*pandoc" is not whitelisted in policy "security\.exec\.allow".*`) }) c.Run("Dart SASS, OK", func(c *qt.C) { @@ -219,5 +217,4 @@ urls="none" `) }) }) - } diff --git a/hugolib/shortcode.go b/hugolib/shortcode.go index 77c5b4e7e..c5125f717 100644 --- a/hugolib/shortcode.go +++ b/hugolib/shortcode.go @@ -28,6 +28,7 @@ import ( "sync" "github.com/gohugoio/hugo/common/herrors" + "github.com/gohugoio/hugo/common/types" "github.com/gohugoio/hugo/parser/pageparser" "github.com/gohugoio/hugo/resources/page" @@ -43,7 +44,7 @@ import ( var ( _ urls.RefLinker = (*ShortcodeWithPage)(nil) - _ pageWrapper = (*ShortcodeWithPage)(nil) + _ types.Unwrapper = (*ShortcodeWithPage)(nil) _ text.Positioner = (*ShortcodeWithPage)(nil) ) @@ -175,15 +176,16 @@ func (scp *ShortcodeWithPage) Get(key any) any { return x.Interface() } -func (scp *ShortcodeWithPage) page() page.Page { +// For internal use only. +func (scp *ShortcodeWithPage) Unwrapv() any { return scp.Page } // Note - this value must not contain any markup syntax const shortcodePlaceholderPrefix = "HAHAHUGOSHORTCODE" -func createShortcodePlaceholder(sid string, id, ordinal int) string { - return shortcodePlaceholderPrefix + strconv.Itoa(id) + sid + strconv.Itoa(ordinal) + "HBHB" +func createShortcodePlaceholder(sid string, id uint64, ordinal int) string { + return shortcodePlaceholderPrefix + strconv.FormatUint(id, 10) + sid + strconv.Itoa(ordinal) + "HBHB" } type shortcode struct { @@ -193,7 +195,6 @@ type shortcode struct { inner []any // string or nested shortcode params any // map or array ordinal int - err error indentation string // indentation from source. 
@@ -271,9 +272,8 @@ func (sc shortcode) String() string { } type shortcodeHandler struct { - p *pageState - - s *Site + filename string + s *Site // Ordered list of shortcodes for a page. shortcodes []*shortcode @@ -286,9 +286,9 @@ type shortcodeHandler struct { enableInlineShortcodes bool } -func newShortcodeHandler(p *pageState, s *Site) *shortcodeHandler { +func newShortcodeHandler(filename string, s *Site) *shortcodeHandler { sh := &shortcodeHandler{ - p: p, + filename: filename, s: s, enableInlineShortcodes: s.ExecHelper.Sec().EnableInlineShortcodes, shortcodes: make([]*shortcode, 0, 4), @@ -312,14 +312,16 @@ func prepareShortcode( sc *shortcode, parent *ShortcodeWithPage, p *pageState, + isRenderString bool, ) (shortcodeRenderer, error) { toParseErr := func(err error) error { - return p.parseError(fmt.Errorf("failed to render shortcode %q: %w", sc.name, err), p.source.parsed.Input(), sc.pos) + source := p.content.mustSource() + return p.parseError(fmt.Errorf("failed to render shortcode %q: %w", sc.name, err), source, sc.pos) } // Allow the caller to delay the rendering of the shortcode if needed. 
var fn shortcodeRenderFunc = func(ctx context.Context) ([]byte, bool, error) { - r, err := doRenderShortcode(ctx, level, s, tplVariants, sc, parent, p) + r, err := doRenderShortcode(ctx, level, s, tplVariants, sc, parent, p, isRenderString) if err != nil { return nil, false, toParseErr(err) } @@ -341,6 +343,7 @@ func doRenderShortcode( sc *shortcode, parent *ShortcodeWithPage, p *pageState, + isRenderString bool, ) (shortcodeRenderer, error) { var tmpl tpl.Template @@ -354,13 +357,16 @@ func doRenderShortcode( if !p.s.ExecHelper.Sec().EnableInlineShortcodes { return zeroShortcode, nil } - templName := path.Join("_inline_shortcode", p.File().Path(), sc.name) + templName := path.Join("_inline_shortcode", p.Path(), sc.name) if sc.isClosing { templStr := sc.innerString() var err error tmpl, err = s.TextTmpl().Parse(templName, templStr) if err != nil { + if isRenderString { + return zeroShortcode, p.wrapError(err) + } fe := herrors.NewFileErrorFromName(err, p.File().Filename()) pos := fe.Position() pos.LineNumber += p.posOffset(sc.pos).LineNumber @@ -376,6 +382,7 @@ func doRenderShortcode( return zeroShortcode, fmt.Errorf("no earlier definition of shortcode %q found", sc.name) } } + tmpl = tpl.AddIdentity(tmpl) } else { var found, more bool tmpl, found, more = s.Tmpl().LookupVariant(sc.name, tplVariants) @@ -398,7 +405,7 @@ func doRenderShortcode( case string: inner += innerData case *shortcode: - s, err := prepareShortcode(ctx, level+1, s, tplVariants, innerData, data, p) + s, err := prepareShortcode(ctx, level+1, s, tplVariants, innerData, data, p, isRenderString) if err != nil { return zeroShortcode, err } @@ -505,7 +512,7 @@ func (s *shortcodeHandler) hasName(name string) bool { return ok } -func (s *shortcodeHandler) prepareShortcodesForPage(ctx context.Context, p *pageState, f output.Format) (map[string]shortcodeRenderer, error) { +func (s *shortcodeHandler) prepareShortcodesForPage(ctx context.Context, p *pageState, f output.Format, isRenderString bool) 
(map[string]shortcodeRenderer, error) { rendered := make(map[string]shortcodeRenderer) tplVariants := tpl.TemplateVariants{ @@ -514,7 +521,7 @@ func (s *shortcodeHandler) prepareShortcodesForPage(ctx context.Context, p *page } for _, v := range s.shortcodes { - s, err := prepareShortcode(ctx, 0, s.s, tplVariants, v, nil, p) + s, err := prepareShortcode(ctx, 0, s.s, tplVariants, v, nil, p, isRenderString) if err != nil { return nil, err } @@ -525,6 +532,25 @@ func (s *shortcodeHandler) prepareShortcodesForPage(ctx context.Context, p *page return rendered, nil } +func posFromInput(filename string, input []byte, offset int) text.Position { + if offset < 0 { + return text.Position{ + Filename: filename, + } + } + lf := []byte("\n") + input = input[:offset] + lineNumber := bytes.Count(input, lf) + 1 + endOfLastLine := bytes.LastIndex(input, lf) + + return text.Position{ + Filename: filename, + LineNumber: lineNumber, + ColumnNumber: offset - endOfLastLine, + Offset: offset, + } +} + // pageTokens state: // - before: positioned just before the shortcode start // - after: shortcode(s) consumed (plural when they are nested) diff --git a/hugolib/shortcode_page.go b/hugolib/shortcode_page.go index f351daae0..7c32f2ea1 100644 --- a/hugolib/shortcode_page.go +++ b/hugolib/shortcode_page.go @@ -17,6 +17,7 @@ import ( "context" "html/template" + "github.com/gohugoio/hugo/common/types" "github.com/gohugoio/hugo/resources/page" ) @@ -73,6 +74,8 @@ type pageForShortcode struct { p *pageState } +var _ types.Unwrapper = (*pageForShortcode)(nil) + func newPageForShortcode(p *pageState) page.Page { return &pageForShortcode{ PageWithoutContent: p, @@ -83,7 +86,8 @@ func newPageForShortcode(p *pageState) page.Page { } } -func (p *pageForShortcode) page() page.Page { +// For internal use. 
+func (p *pageForShortcode) Unwrapv() any { return p.PageWithoutContent.(page.Page) } @@ -92,15 +96,17 @@ func (p *pageForShortcode) String() string { } func (p *pageForShortcode) TableOfContents(context.Context) template.HTML { - p.p.enablePlaceholders() return p.toc } +var _ types.Unwrapper = (*pageForRenderHooks)(nil) + // This is what is sent into the content render hooks (link, image). type pageForRenderHooks struct { page.PageWithoutContent page.TableOfContentsProvider page.ContentProvider + p *pageState } func newPageForRenderHook(p *pageState) page.Page { @@ -108,9 +114,10 @@ func newPageForRenderHook(p *pageState) page.Page { PageWithoutContent: p, ContentProvider: page.NopPage, TableOfContentsProvider: p, + p: p, } } -func (p *pageForRenderHooks) page() page.Page { - return p.PageWithoutContent.(page.Page) +func (p *pageForRenderHooks) Unwrapv() any { + return p.p } diff --git a/hugolib/shortcode_test.go b/hugolib/shortcode_test.go index ff9ae35fc..656364021 100644 --- a/hugolib/shortcode_test.go +++ b/hugolib/shortcode_test.go @@ -110,7 +110,7 @@ title: "Shortcodes Galore!" p, err := pageparser.ParseMain(strings.NewReader(test.input), pageparser.Config{}) c.Assert(err, qt.IsNil) - handler := newShortcodeHandler(nil, s) + handler := newShortcodeHandler("", s) iter := p.Iterator() short, err := handler.extractShortcode(0, 0, p.Input(), iter) @@ -186,7 +186,7 @@ CSV: {{< myShort >}} b.Assert(len(h.Sites), qt.Equals, 1) s := h.Sites[0] - home := s.getPage(kinds.KindHome) + home := s.getPageOldVersion(kinds.KindHome) b.Assert(home, qt.Not(qt.IsNil)) b.Assert(len(home.OutputFormats()), qt.Equals, 3) @@ -829,7 +829,6 @@ title: "Hugo Rocks!" 
<h2 id="doc">Doc</h2> `, ) - } // https://github.com/gohugoio/hugo/issues/6857 @@ -927,7 +926,6 @@ title: "p1" b.AssertFileContent("public/p1/index.html", ` <x `) - } func TestShortcodePreserveIndentation(t *testing.T) { @@ -967,7 +965,6 @@ title: "p1" ).Build() b.AssertFileContent("public/p1/index.html", "<ol>\n<li>\n<p>List 1</p>\n<ol>\n<li>Item Mark1 1</li>\n<li>Item Mark1 2</li>\n<li>Item Mark2 1</li>\n<li>Item Mark2 2\n<ol>\n<li>Item Mark2 2-1</li>\n</ol>\n</li>\n<li>Item Mark2 3</li>\n</ol>\n</li>\n</ol>") - } func TestShortcodeCodeblockIndent(t *testing.T) { @@ -998,7 +995,6 @@ echo "foo"; ).Build() b.AssertFileContent("public/p1/index.html", "<pre><code>echo "foo";\n</code></pre>") - } func TestShortcodeHighlightDeindent(t *testing.T) { @@ -1041,7 +1037,6 @@ title: "p1" </code></pre> `) - } // Issue 10236. @@ -1073,7 +1068,6 @@ Title: {{ .Get "title" | safeHTML }} ).Build() b.AssertFileContent("public/p1/index.html", `Title: Steve "Francia".`) - } // Issue 10391. @@ -1166,7 +1160,6 @@ C'est un test ).Build() b.AssertFileContent("public/fr/p2/index.html", `plus-dinformations`) - } // Issue 10671. @@ -1281,5 +1274,4 @@ Hello. ).Build() b.AssertFileContent("public/p1/index.html", "<span style=\"color:#a6e22e\">Hello.</span>") - } diff --git a/hugolib/site.go b/hugolib/site.go index c682eebc9..312f6b97f 100644 --- a/hugolib/site.go +++ b/hugolib/site.go @@ -1,4 +1,4 @@ -// Copyright 2019 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -19,18 +19,18 @@ import ( "io" "mime" "net/url" - "path" "path/filepath" "runtime" "sort" "strings" + "sync" "time" "github.com/bep/logg" - "github.com/gohugoio/hugo/common/herrors" "github.com/gohugoio/hugo/common/htime" "github.com/gohugoio/hugo/common/hugio" "github.com/gohugoio/hugo/common/types" + "github.com/gohugoio/hugo/hugolib/doctree" "golang.org/x/text/unicode/norm" "github.com/gohugoio/hugo/common/paths" @@ -41,11 +41,6 @@ import ( "github.com/gohugoio/hugo/markup/converter" - "github.com/gohugoio/hugo/hugofs/files" - hglob "github.com/gohugoio/hugo/hugofs/glob" - - "github.com/gohugoio/hugo/common/maps" - "github.com/gohugoio/hugo/common/text" "github.com/gohugoio/hugo/publisher" @@ -55,19 +50,14 @@ import ( "github.com/gohugoio/hugo/resources/kinds" "github.com/gohugoio/hugo/resources/page" - "github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/lazy" "github.com/fsnotify/fsnotify" bp "github.com/gohugoio/hugo/bufferpool" - "github.com/gohugoio/hugo/deps" "github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/navigation" "github.com/gohugoio/hugo/output" - "github.com/gohugoio/hugo/source" "github.com/gohugoio/hugo/tpl" - - "github.com/spf13/afero" ) func (s *Site) Taxonomies() page.TaxonomyList { @@ -75,25 +65,32 @@ func (s *Site) Taxonomies() page.TaxonomyList { return s.taxonomies } -type taxonomiesConfig map[string]string +type ( + taxonomiesConfig map[string]string + taxonomiesConfigValues struct { + views []viewName + viewsByTreeKey map[string]viewName + } +) -func (t taxonomiesConfig) Values() []viewName { - var vals []viewName +func (t taxonomiesConfig) Values() taxonomiesConfigValues { + var views []viewName for k, v := range t { - vals = append(vals, viewName{singular: k, plural: v}) + views = append(views, viewName{singular: k, plural: v, pluralTreeKey: cleanTreeKey(v)}) } - sort.Slice(vals, func(i, j int) bool { - return vals[i].plural < vals[j].plural + sort.Slice(views, func(i, j int) bool { + return views[i].plural < 
views[j].plural }) - return vals -} + viewsByTreeKey := make(map[string]viewName) + for _, v := range views { + viewsByTreeKey[v.pluralTreeKey] = v + } -type siteConfigHolder struct { - sitemap config.SitemapConfig - taxonomiesConfig taxonomiesConfig - timeout time.Duration - hasCJKLanguage bool + return taxonomiesConfigValues{ + views: views, + viewsByTreeKey: viewsByTreeKey, + } } // Lazily loaded site dependencies. @@ -111,15 +108,6 @@ func (init *siteInit) Reset() { init.taxonomies.Reset() } -func (s *Site) initInit(ctx context.Context, init *lazy.Init, pctx pageContext) bool { - _, err := init.Do(ctx) - - if err != nil { - s.h.FatalError(pctx.wrapError(err)) - } - return err == nil -} - func (s *Site) prepareInits() { s.init = &siteInit{} @@ -153,11 +141,6 @@ func (s *Site) prepareInits() { }) s.init.prevNextInSection = init.Branch(func(context.Context) (any, error) { - var sections page.Pages - s.home.treeRef.m.collectSectionsRecursiveIncludingSelf(pageMapQuery{Prefix: s.home.treeRef.key}, func(n *contentNode) { - sections = append(sections, n.p) - }) - setNextPrev := func(pas page.Pages) { for i, p := range pas { np, ok := p.(nextPrevInSectionProvider) @@ -183,40 +166,35 @@ func (s *Site) prepareInits() { } } - for _, sect := range sections { - treeRef := sect.(treeRefProvider).getTreeRef() - - var pas page.Pages - treeRef.m.collectPages(pageMapQuery{Prefix: treeRef.key + cmBranchSeparator}, func(c *contentNode) { - pas = append(pas, c.p) - }) - page.SortByDefault(pas) + sections := s.pageMap.getPagesInSection( + pageMapQueryPagesInSection{ + pageMapQueryPagesBelowPath: pageMapQueryPagesBelowPath{ + Path: "", + KeyPart: "sectionorhome", + Include: pagePredicates.KindSection.Or(pagePredicates.KindHome), + }, + IncludeSelf: true, + Recursive: true, + }, + ) - setNextPrev(pas) + for _, section := range sections { + setNextPrev(section.RegularPages()) } - // The root section only goes one level down. 
- treeRef := s.home.getTreeRef() - - var pas page.Pages - treeRef.m.collectPages(pageMapQuery{Prefix: treeRef.key + cmBranchSeparator}, func(c *contentNode) { - pas = append(pas, c.p) - }) - page.SortByDefault(pas) - - setNextPrev(pas) - return nil, nil }) s.init.menus = init.Branch(func(context.Context) (any, error) { - s.assembleMenus() - return nil, nil + err := s.assembleMenus() + return nil, err }) - s.init.taxonomies = init.Branch(func(context.Context) (any, error) { - err := s.pageMap.assembleTaxonomies() - return nil, err + s.init.taxonomies = init.Branch(func(ctx context.Context) (any, error) { + if err := s.pageMap.CreateSiteTaxonomies(ctx); err != nil { + return nil, err + } + return s.taxonomies, nil }) } @@ -232,20 +210,25 @@ func (s *Site) Menus() navigation.Menus { func (s *Site) initRenderFormats() { formatSet := make(map[string]bool) formats := output.Formats{} - rssDisabled := !s.conf.IsKindEnabled("rss") - s.pageMap.pageTrees.WalkRenderable(func(s string, n *contentNode) bool { - for _, f := range n.p.m.configuredOutputFormats { - if rssDisabled && f.Name == "rss" { - // legacy - continue - } - if !formatSet[f.Name] { - formats = append(formats, f) - formatSet[f.Name] = true + + w := &doctree.NodeShiftTreeWalker[contentNodeI]{ + Tree: s.pageMap.treePages, + Handle: func(key string, n contentNodeI, match doctree.DimensionFlag) (bool, error) { + if p, ok := n.(*pageState); ok { + for _, f := range p.m.configuredOutputFormats { + if !formatSet[f.Name] { + formats = append(formats, f) + formatSet[f.Name] = true + } + } } - } - return false - }) + return false, nil + }, + } + + if err := w.Walk(context.TODO()); err != nil { + panic(err) + } // Add the per kind configured output formats for _, kind := range kinds.AllKindsInPages { @@ -275,10 +258,6 @@ func (s *Site) Languages() langs.Languages { return s.h.Configs.Languages } -func (s *Site) isEnabled(kind string) bool { - return s.conf.IsKindEnabled(kind) -} - type siteRefLinker struct { s *Site @@ 
-303,7 +282,7 @@ func (s siteRefLinker) logNotFound(ref, what string, p page.Page, position text. } else if p == nil { s.errorLogger.Logf("[%s] REF_NOT_FOUND: Ref %q: %s", s.s.Lang(), ref, what) } else { - s.errorLogger.Logf("[%s] REF_NOT_FOUND: Ref %q from page %q: %s", s.s.Lang(), ref, p.Pathc(), what) + s.errorLogger.Logf("[%s] REF_NOT_FOUND: Ref %q from page %q: %s", s.s.Lang(), ref, p.Path(), what) } } @@ -391,8 +370,26 @@ func (s *Site) watching() bool { } type whatChanged struct { - source bool - files map[string]bool + mu sync.Mutex + + contentChanged bool + identitySet identity.Identities +} + +func (w *whatChanged) Add(ids ...identity.Identity) { + w.mu.Lock() + defer w.mu.Unlock() + + for _, id := range ids { + w.identitySet[id] = true + } +} + +func (w *whatChanged) Changes() []identity.Identity { + if w == nil || w.identitySet == nil { + return nil + } + return w.identitySet.AsSlice() } // RegisterMediaTypes will register the Site's media types in the mime @@ -405,10 +402,10 @@ func (s *Site) RegisterMediaTypes() { } } -func (s *Site) filterFileEvents(events []fsnotify.Event) []fsnotify.Event { - var filtered []fsnotify.Event +func (h *HugoSites) fileEventsFilter(events []fsnotify.Event) []fsnotify.Event { seen := make(map[fsnotify.Event]bool) + n := 0 for _, ev := range events { // Avoid processing the same event twice. 
if seen[ev] { @@ -416,17 +413,7 @@ func (s *Site) filterFileEvents(events []fsnotify.Event) []fsnotify.Event { } seen[ev] = true - if s.SourceSpec.IgnoreFile(ev.Name) { - continue - } - - // Throw away any directories - isRegular, err := s.SourceSpec.IsRegularSourceFile(ev.Name) - if err != nil && herrors.IsNotExist(err) && (ev.Op&fsnotify.Remove == fsnotify.Remove || ev.Op&fsnotify.Rename == fsnotify.Rename) { - // Force keep of event - isRegular = true - } - if !isRegular { + if h.SourceSpec.IgnoreFile(ev.Name) { continue } @@ -434,23 +421,22 @@ func (s *Site) filterFileEvents(events []fsnotify.Event) []fsnotify.Event { ev.Name = norm.NFC.String(ev.Name) } - filtered = append(filtered, ev) + events[n] = ev + n++ } - - return filtered + return events[:n] } -func (s *Site) translateFileEvents(events []fsnotify.Event) []fsnotify.Event { - var filtered []fsnotify.Event - +func (h *HugoSites) fileEventsTranslate(events []fsnotify.Event) []fsnotify.Event { eventMap := make(map[string][]fsnotify.Event) // We often get a Remove etc. followed by a Create, a Create followed by a Write. - // Remove the superfluous events to mage the update logic simpler. + // Remove the superfluous events to make the update logic simpler. for _, ev := range events { eventMap[ev.Name] = append(eventMap[ev.Name], ev) } + n := 0 for _, ev := range events { mapped := eventMap[ev.Name] @@ -472,236 +458,77 @@ func (s *Site) translateFileEvents(events []fsnotify.Event) []fsnotify.Event { } } - filtered = append(filtered, kept) + events[n] = kept + n++ } - return filtered + return events } -// reBuild partially rebuilds a site given the filesystem events. -// It returns whatever the content source was changed. -// TODO(bep) clean up/rewrite this method. 
-func (s *Site) processPartial(config *BuildCfg, init func(config *BuildCfg) error, events []fsnotify.Event) error { - events = s.filterFileEvents(events) - events = s.translateFileEvents(events) - - changeIdentities := make(identity.Identities) - - s.Log.Debugf("Rebuild for events %q", events) +func (h *HugoSites) fileEventsContentPaths(p []pathChange) []pathChange { + var bundles []pathChange + var dirs []pathChange + var regular []pathChange - h := s.h - - // First we need to determine what changed - - var ( - sourceChanged = []fsnotify.Event{} - sourceReallyChanged = []fsnotify.Event{} - contentFilesChanged []string - - tmplChanged bool - tmplAdded bool - dataChanged bool - i18nChanged bool - - sourceFilesChanged = make(map[string]bool) - ) - - var cacheBusters []func(string) bool - bcfg := s.conf.Build - - for _, ev := range events { - component, relFilename := s.BaseFs.MakePathRelative(ev.Name) - if relFilename != "" { - p := hglob.NormalizePath(path.Join(component, relFilename)) - g, err := bcfg.MatchCacheBuster(s.Log, p) - if err == nil && g != nil { - cacheBusters = append(cacheBusters, g) - } - } - - id, found := s.eventToIdentity(ev) - if found { - changeIdentities[id] = id - - switch id.Type { - case files.ComponentFolderContent: - s.Log.Println("Source changed", ev) - sourceChanged = append(sourceChanged, ev) - case files.ComponentFolderLayouts: - tmplChanged = true - if !s.Tmpl().HasTemplate(id.Path) { - tmplAdded = true - } - if tmplAdded { - s.Log.Println("Template added", ev) - } else { - s.Log.Println("Template changed", ev) - } - - case files.ComponentFolderData: - s.Log.Println("Data changed", ev) - dataChanged = true - case files.ComponentFolderI18n: - s.Log.Println("i18n changed", ev) - i18nChanged = true - - } + var others []pathChange + for _, p := range p { + if p.isDir { + dirs = append(dirs, p) + } else { + others = append(others, p) } } - changed := &whatChanged{ - source: len(sourceChanged) > 0, - files: sourceFilesChanged, - } - - 
config.whatChanged = changed - - if err := init(config); err != nil { - return err - } - - var cacheBusterOr func(string) bool - if len(cacheBusters) > 0 { - cacheBusterOr = func(s string) bool { - for _, cb := range cacheBusters { - if cb(s) { - return true + // Remove all files below dir. + if len(dirs) > 0 { + n := 0 + for _, d := range dirs { + dir := d.p.Path() + "/" + for _, o := range others { + if !strings.HasPrefix(o.p.Path(), dir) { + others[n] = o + n++ + } } - return false - } - } - - // These in memory resource caches will be rebuilt on demand. - if len(cacheBusters) > 0 { - s.h.ResourceSpec.ResourceCache.DeleteMatches(cacheBusterOr) - } - - if tmplChanged || i18nChanged { - s.h.init.Reset() - var prototype *deps.Deps - for i, s := range s.h.Sites { - if err := s.Deps.Compile(prototype); err != nil { - return err - } - if i == 0 { - prototype = s.Deps - } - } - } - - if dataChanged { - s.h.init.data.Reset() - } - - for _, ev := range sourceChanged { - removed := false - - if ev.Op&fsnotify.Remove == fsnotify.Remove { - removed = true - } - - // Some editors (Vim) sometimes issue only a Rename operation when writing an existing file - // Sometimes a rename operation means that file has been renamed other times it means - // it's been updated - if ev.Op&fsnotify.Rename == fsnotify.Rename { - // If the file is still on disk, it's only been updated, if it's not, it's been moved - if ex, err := afero.Exists(s.Fs.Source, ev.Name); !ex || err != nil { - removed = true - } - } - - if removed && files.IsContentFile(ev.Name) { - h.removePageByFilename(ev.Name) - } - - sourceReallyChanged = append(sourceReallyChanged, ev) - sourceFilesChanged[ev.Name] = true - } - if config.ErrRecovery || tmplAdded || dataChanged { - h.resetPageState() - } else { - h.resetPageStateFromEvents(changeIdentities) - } - - if len(sourceReallyChanged) > 0 || len(contentFilesChanged) > 0 { - var filenamesChanged []string - for _, e := range sourceReallyChanged { - filenamesChanged =
append(filenamesChanged, e.Name) } - if len(contentFilesChanged) > 0 { - filenamesChanged = append(filenamesChanged, contentFilesChanged...) - } - - filenamesChanged = helpers.UniqueStringsReuse(filenamesChanged) - - if err := s.readAndProcessContent(*config, filenamesChanged...); err != nil { - return err - } - + others = others[:n] } - return nil -} - -func (s *Site) process(config BuildCfg) (err error) { - if err = s.readAndProcessContent(config); err != nil { - err = fmt.Errorf("readAndProcessContent: %w", err) - return - } - return err -} - -func (s *Site) render(ctx *siteRenderContext) (err error) { - if err := page.Clear(); err != nil { - return err - } - - if ctx.outIdx == 0 { - // Note that even if disableAliases is set, the aliases themselves are - // preserved on page. The motivation with this is to be able to generate - // 301 redirects in a .htacess file and similar using a custom output format. - if !s.conf.DisableAliases { - // Aliases must be rendered before pages. - // Some sites, Hugo docs included, have faulty alias definitions that point - // to itself or another real page. These will be overwritten in the next - // step. - if err = s.renderAliases(); err != nil { - return - } + for _, p := range others { + if p.p.IsBundle() { + bundles = append(bundles, p) + } else { + regular = append(regular, p) } } - if err = s.renderPages(ctx); err != nil { - return - } - - if ctx.outIdx == 0 { - if err = s.renderSitemap(); err != nil { - return - } + // Remove any files below leaf bundles. + // Remove any files in the same folder as branch bundles. 
+ var keepers []pathChange - if ctx.multihost { - if err = s.renderRobotsTXT(); err != nil { - return + for _, o := range regular { + keep := true + for _, b := range bundles { + prefix := b.p.Base() + "/" + if b.p.IsLeafBundle() && strings.HasPrefix(o.p.Path(), prefix) { + keep = false + break + } else if b.p.IsBranchBundle() && o.p.Dir() == b.p.Dir() { + keep = false + break } } - if err = s.render404(); err != nil { - return + if keep { + keepers = append(keepers, o) } } - if !ctx.renderSingletonPages() { - return - } + keepers = append(dirs, keepers...) + keepers = append(bundles, keepers...) - if err = s.renderMainLanguageRedirect(); err != nil { - return - } - - return + return keepers } // HomeAbsURL is a convenience method giving the absolute URL to the home page. @@ -723,47 +550,20 @@ func (s *Site) SitemapAbsURL() string { return p } -func (s *Site) eventToIdentity(e fsnotify.Event) (identity.PathIdentity, bool) { - for _, fs := range s.BaseFs.SourceFilesystems.FileSystems() { - if p := fs.Path(e.Name); p != "" { - return identity.NewPathIdentity(fs.Name, filepath.ToSlash(p)), true - } - } - return identity.PathIdentity{}, false -} - -func (s *Site) readAndProcessContent(buildConfig BuildCfg, filenames ...string) error { - if s.Deps == nil { - panic("nil deps on site") - } - - sourceSpec := source.NewSourceSpec(s.PathSpec, buildConfig.ContentInclusionFilter, s.BaseFs.Content.Fs) - - proc := newPagesProcessor(s.h, sourceSpec) - - c := newPagesCollector(sourceSpec, s.h.getContentMaps(), s.Log, s.h.ContentChanges, proc, filenames...) 
- - if err := c.Collect(); err != nil { - return err - } - - return nil -} - func (s *Site) createNodeMenuEntryURL(in string) string { if !strings.HasPrefix(in, "/") { return in } // make it match the nodes menuEntryURL := in - menuEntryURL = helpers.SanitizeURLKeepTrailingSlash(s.s.PathSpec.URLize(menuEntryURL)) + menuEntryURL = s.s.PathSpec.URLize(menuEntryURL) if !s.conf.CanonifyURLs { menuEntryURL = paths.AddContextRoot(s.s.PathSpec.Cfg.BaseURL().String(), menuEntryURL) } return menuEntryURL } -func (s *Site) assembleMenus() { +func (s *Site) assembleMenus() error { s.menus = make(navigation.Menus) type twoD struct { @@ -775,14 +575,9 @@ func (s *Site) assembleMenus() { // add menu entries from config to flat hash for name, menu := range s.conf.Menus.Config { for _, me := range menu { - if types.IsNil(me.Page) { - if me.PageRef != "" { - // Try to resolve the page. - p, _ := s.getPageNew(nil, me.PageRef) - if !types.IsNil(p) { - navigation.SetPageValues(me, p) - } - } + if types.IsNil(me.Page) && me.PageRef != "" { + // Try to resolve the page. + me.Page, _ = s.getPage(nil, me.PageRef) } // If page is still nill, we must make sure that we have a URL that considers baseURL etc. @@ -797,37 +592,32 @@ func (s *Site) assembleMenus() { sectionPagesMenu := s.conf.SectionPagesMenu if sectionPagesMenu != "" { - s.pageMap.sections.Walk(func(s string, v any) bool { - p := v.(*contentNode).p - if p.IsHome() { - return false + if err := s.pageMap.forEachPage(pagePredicates.ShouldListGlobal, func(p *pageState) (bool, error) { + if p.IsHome() || !p.m.shouldBeCheckedForMenuDefinitions() { + return false, nil } - // From Hugo 0.22 we have nested sections, but until we get a - // feel of how that would work in this setting, let us keep - // this menu for the top level only. + // The section pages menus are attached to the top level section. 
id := p.Section() if _, ok := flat[twoD{sectionPagesMenu, id}]; ok { - return false + return false, nil } - me := navigation.MenuEntry{ MenuConfig: navigation.MenuConfig{ Identifier: id, Name: p.LinkTitle(), Weight: p.Weight(), }, + Page: p, } navigation.SetPageValues(&me, p) flat[twoD{sectionPagesMenu, me.KeyName()}] = &me - - return false - }) + return false, nil + }); err != nil { + return err + } } - // Add menu entries provided by pages - s.pageMap.pageTrees.WalkRenderable(func(ss string, n *contentNode) bool { - p := n.p - + if err := s.pageMap.forEachPage(pagePredicates.ShouldListGlobal, func(p *pageState) (bool, error) { for name, me := range p.pageMenus.menus() { if _, ok := flat[twoD{name, me.KeyName()}]; ok { err := p.wrapError(fmt.Errorf("duplicate menu entry with identifier %q in menu %q", me.KeyName(), name)) @@ -836,9 +626,10 @@ func (s *Site) assembleMenus() { } flat[twoD{name, me.KeyName()}] = me } - - return false - }) + return false, nil + }); err != nil { + return err + } // Create Children Menus First for _, e := range flat { @@ -871,6 +662,8 @@ func (s *Site) assembleMenus() { s.menus[menu.MenuName] = s.menus[menu.MenuName].Add(e) } } + + return nil } // get any language code to prefix the target file path with. @@ -893,39 +686,12 @@ func (s *Site) getLanguagePermalinkLang(alwaysInSubDir bool) string { } return s.GetLanguagePrefix() - -} - -func (s *Site) getTaxonomyKey(key string) string { - if s.conf.DisablePathToLower { - return s.PathSpec.MakePath(key) - } - return strings.ToLower(s.PathSpec.MakePath(key)) } // Prepare site for a new full build. 
func (s *Site) resetBuildState(sourceChanged bool) { s.relatedDocsHandler = s.relatedDocsHandler.Clone() s.init.Reset() - - if sourceChanged { - s.pageMap.contentMap.pageReverseIndex.Reset() - s.PageCollections = newPageCollections(s.pageMap) - s.pageMap.withEveryBundlePage(func(p *pageState) bool { - p.pagePages = &pagePages{} - if p.bucket != nil { - p.bucket.pagesMapBucketPages = &pagesMapBucketPages{} - } - p.parent = nil - p.Scratcher = maps.NewScratcher() - return false - }) - } else { - s.pageMap.withEveryBundlePage(func(p *pageState) bool { - p.Scratcher = maps.NewScratcher() - return false - }) - } } func (s *Site) errorCollator(results <-chan error, errs chan<- error) { @@ -947,7 +713,7 @@ func (s *Site) errorCollator(results <-chan error, errs chan<- error) { // as possible for existing sites. Most sites will use {{ .Site.GetPage "section" "my/section" }}, // i.e. 2 arguments, so we test for that. func (s *Site) GetPage(ref ...string) (page.Page, error) { - p, err := s.s.getPageOldVersion(ref...) + p, err := s.s.getPageForRefs(ref...) if p == nil { // The nil struct has meaning in some situations, mostly to avoid breaking @@ -959,22 +725,6 @@ func (s *Site) GetPage(ref ...string) (page.Page, error) { return p, err } -func (s *Site) GetPageWithTemplateInfo(info tpl.Info, ref ...string) (page.Page, error) { - p, err := s.GetPage(ref...) - if p != nil { - // Track pages referenced by templates/shortcodes - // when in server mode. 
- if im, ok := info.(identity.Manager); ok { - im.Add(p) - } - } - return p, err -} - -func (s *Site) permalink(link string) string { - return s.PathSpec.PermalinkForBaseURL(link, s.PathSpec.Cfg.BaseURL().String()) -} - func (s *Site) absURLPath(targetPath string) string { var path string if s.conf.RelativeURLs { @@ -990,46 +740,23 @@ func (s *Site) absURLPath(targetPath string) string { return path } -func (s *Site) lookupLayouts(layouts ...string) tpl.Template { - for _, l := range layouts { - if templ, found := s.Tmpl().Lookup(l); found { - return templ - } - } - - return nil -} - -func (s *Site) renderAndWriteXML(ctx context.Context, statCounter *uint64, name string, targetPath string, d any, templ tpl.Template) error { - renderBuffer := bp.GetBuffer() - defer bp.PutBuffer(renderBuffer) - - if err := s.renderForTemplate(ctx, name, "", d, renderBuffer, templ); err != nil { - return err - } - - pd := publisher.Descriptor{ - Src: renderBuffer, - TargetPath: targetPath, - StatCounter: statCounter, - // For the minification part of XML, - // we currently only use the MIME type. 
- OutputFormat: output.RSSFormat, - AbsURLPath: s.absURLPath(targetPath), - } - - return s.publisher.Publish(pd) -} +const ( + pageDependencyScopeDefault int = iota + pageDependencyScopeGlobal +) -func (s *Site) renderAndWritePage(statCounter *uint64, name string, targetPath string, p *pageState, templ tpl.Template) error { - s.h.IncrPageRender() +func (s *Site) renderAndWritePage(statCounter *uint64, name string, targetPath string, p *pageState, d any, templ tpl.Template) error { + s.h.buildCounters.pageRenderCounter.Add(1) renderBuffer := bp.GetBuffer() defer bp.PutBuffer(renderBuffer) of := p.outputFormat() - ctx := tpl.SetPageInContext(context.Background(), p) + p.incrRenderState() + + ctx := tpl.Context.Page.Set(context.Background(), p) + ctx = tpl.Context.DependencyManagerScopedProvider.Set(ctx, p) - if err := s.renderForTemplate(ctx, p.Kind(), of.Name, p, renderBuffer, templ); err != nil { + if err := s.renderForTemplate(ctx, p.Kind(), of.Name, d, renderBuffer, templ); err != nil { return err } @@ -1078,7 +805,6 @@ var infoOnMissingLayout = map[string]bool{ // where ITEM is the thing being hooked. 
type hookRendererTemplate struct { templateHandler tpl.TemplateHandler - identity.SearchProvider templ tpl.Template resolvePosition func(ctx any) text.Position } @@ -1119,92 +845,17 @@ func (s *Site) renderForTemplate(ctx context.Context, name, outputFormat string, return } -func (s *Site) lookupTemplate(layouts ...string) (tpl.Template, bool) { - for _, l := range layouts { - if templ, found := s.Tmpl().Lookup(l); found { - return templ, true - } - } - - return nil, false -} - -func (s *Site) publish(statCounter *uint64, path string, r io.Reader, fs afero.Fs) (err error) { - s.PathSpec.ProcessingStats.Incr(statCounter) - - return helpers.WriteToDisk(filepath.Clean(path), r, fs) -} - -func (s *Site) kindFromFileInfoOrSections(fi *fileInfo, sections []string) string { - if fi.TranslationBaseName() == "_index" { - if fi.Dir() == "" { - return kinds.KindHome - } - - return s.kindFromSections(sections) - - } - - return kinds.KindPage -} - -func (s *Site) kindFromSections(sections []string) string { - if len(sections) == 0 { - return kinds.KindHome - } - - return s.kindFromSectionPath(path.Join(sections...)) -} - -func (s *Site) kindFromSectionPath(sectionPath string) string { - var taxonomiesConfig taxonomiesConfig = s.conf.Taxonomies - for _, plural := range taxonomiesConfig { - if plural == sectionPath { - return kinds.KindTaxonomy - } - - if strings.HasPrefix(sectionPath, plural) { - return kinds.KindTerm - } - - } - - return kinds.KindSection -} - -func (s *Site) newPage( - n *contentNode, - parentbBucket *pagesMapBucket, - kind, title string, - sections ...string) *pageState { - m := map[string]any{} - if title != "" { - m["title"] = title - } - - p, err := newPageFromMeta( - n, - parentbBucket, - m, - &pageMeta{ - s: s, - kind: kind, - sections: sections, - }) - if err != nil { - panic(err) - } - - return p -} - func (s *Site) shouldBuild(p page.Page) bool { + if !s.conf.IsKindEnabled(p.Kind()) { + return false + } return shouldBuild(s.Conf.BuildFuture(), 
s.Conf.BuildExpired(), s.Conf.BuildDrafts(), p.Draft(), p.PublishDate(), p.ExpiryDate()) } func shouldBuild(buildFuture bool, buildExpired bool, buildDrafts bool, Draft bool, - publishDate time.Time, expiryDate time.Time) bool { + publishDate time.Time, expiryDate time.Time, +) bool { if !(buildDrafts || !Draft) { return false } @@ -1217,3 +868,38 @@ func shouldBuild(buildFuture bool, buildExpired bool, buildDrafts bool, Draft bo } return true } + +func (s *Site) render(ctx *siteRenderContext) (err error) { + if err := page.Clear(); err != nil { + return err + } + + if ctx.outIdx == 0 { + // Note that even if disableAliases is set, the aliases themselves are + // preserved on page. The motivation with this is to be able to generate + // 301 redirects in a .htacess file and similar using a custom output format. + if !s.conf.DisableAliases { + // Aliases must be rendered before pages. + // Some sites, Hugo docs included, have faulty alias definitions that point + // to itself or another real page. These will be overwritten in the next + // step. 
+ if err = s.renderAliases(); err != nil { + return + } + } + } + + if err = s.renderPages(ctx); err != nil { + return + } + + if !ctx.shouldRenderStandalonePage("") { + return + } + + if err = s.renderMainLanguageRedirect(); err != nil { + return + } + + return +} diff --git a/hugolib/site_benchmark_new_test.go b/hugolib/site_benchmark_new_test.go index 5f4d3f117..023d8e4d5 100644 --- a/hugolib/site_benchmark_new_test.go +++ b/hugolib/site_benchmark_new_test.go @@ -420,11 +420,11 @@ baseURL = "https://example.com" createContent := func(dir, name string) { var content string if strings.Contains(name, "_index") { - content = pageContent(1) + // Empty } else { content = pageContentWithCategory(1, fmt.Sprintf("category%d", r.Intn(5)+1)) - sb.WithContent(filepath.Join("content", dir, name), content) } + sb.WithContent(filepath.Join("content", dir, name), content) } for level := 1; level <= r.Intn(5)+1; level++ { @@ -454,6 +454,9 @@ baseURL = "https://example.com" func TestBenchmarkSite(b *testing.T) { benchmarks := getBenchmarkSiteTestCases() for _, bm := range benchmarks { + if bm.name != "Deep content tree" { + continue + } b.Run(bm.name, func(b *testing.T) { s := bm.create(b) @@ -478,13 +481,13 @@ title: %s Edited!!`, p.Title())) - counters := &testCounters{} + counters := &buildCounters{} b.Build(BuildCfg{testCounters: counters}) // We currently rebuild all the language versions of the same content file. // We could probably optimize that case, but it's not trivial. 
- b.Assert(int(counters.contentRenderCounter), qt.Equals, 4) + b.Assert(int(counters.contentRenderCounter.Load()), qt.Equals, 4) b.AssertFileContent("public"+p.RelPermalink()+"index.html", "Edited!!") } @@ -534,7 +537,7 @@ func BenchmarkSiteNew(b *testing.B) { panic("infinite loop") } p = pages[rnd.Intn(len(pages))] - if !p.File().IsZero() { + if p.File() != nil { break } } diff --git a/hugolib/site_new.go b/hugolib/site_new.go index da9d19f21..ddf45c286 100644 --- a/hugolib/site_new.go +++ b/hugolib/site_new.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -22,8 +22,8 @@ import ( "sort" "time" - radix "github.com/armon/go-radix" "github.com/bep/logg" + "github.com/gohugoio/hugo/cache/dynacache" "github.com/gohugoio/hugo/common/hugo" "github.com/gohugoio/hugo/common/loggers" "github.com/gohugoio/hugo/common/maps" @@ -31,6 +31,7 @@ import ( "github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/config/allconfig" "github.com/gohugoio/hugo/deps" + "github.com/gohugoio/hugo/hugolib/doctree" "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/langs" "github.com/gohugoio/hugo/langs/i18n" @@ -39,9 +40,9 @@ import ( "github.com/gohugoio/hugo/navigation" "github.com/gohugoio/hugo/output" "github.com/gohugoio/hugo/publisher" - "github.com/gohugoio/hugo/resources/kinds" "github.com/gohugoio/hugo/resources/page" "github.com/gohugoio/hugo/resources/page/pagemeta" + "github.com/gohugoio/hugo/resources/page/siteidentities" "github.com/gohugoio/hugo/resources/resource" "github.com/gohugoio/hugo/tpl" "github.com/gohugoio/hugo/tpl/tplimpl" @@ -50,8 +51,10 @@ import ( var _ page.Site = (*Site)(nil) type Site struct { - conf *allconfig.Config - language *langs.Language + conf *allconfig.Config + language *langs.Language + languagei int + pageMap 
*pageMap // The owning container. h *HugoSites @@ -59,12 +62,10 @@ type Site struct { *deps.Deps // Page navigation. - *PageCollections + *pageFinder taxonomies page.TaxonomyList menus navigation.Menus - siteBucket *pagesMapBucket - // Shortcut to the home page. Note that this may be nil if // home page, for some odd reason, is disabled. home *pageState @@ -93,7 +94,7 @@ type Site struct { func (s *Site) Debug() { fmt.Println("Debugging site", s.Lang(), "=>") - fmt.Println(s.pageMap.testDump()) + // fmt.Println(s.pageMap.testDump()) } // NewHugoSites creates HugoSites from the given config. @@ -127,10 +128,13 @@ func NewHugoSites(cfg deps.DepsCfg) (*HugoSites, error) { logger = loggers.New(logOpts) } + memCache := dynacache.New(dynacache.Options{Running: conf.Running(), Log: logger}) + firstSiteDeps := &deps.Deps{ Fs: cfg.Fs, Log: logger, Conf: conf, + MemCache: memCache, TemplateProvider: tplimpl.DefaultTemplateProvider, TranslationProvider: i18n.NewTranslationProvider(), } @@ -142,14 +146,40 @@ func NewHugoSites(cfg deps.DepsCfg) (*HugoSites, error) { confm := cfg.Configs var sites []*Site + ns := &contentNodeShifter{ + numLanguages: len(confm.Languages), + } + + treeConfig := doctree.Config[contentNodeI]{ + Shifter: ns, + } + + pageTrees := &pageTrees{ + treePages: doctree.New( + treeConfig, + ), + treeResources: doctree.New( + treeConfig, + ), + treeTaxonomyEntries: doctree.NewTreeShiftTree[*weightedContentNode](doctree.DimensionLanguage.Index(), len(confm.Languages)), + } + + pageTrees.treePagesResources = doctree.WalkableTrees[contentNodeI]{ + pageTrees.treePages, + pageTrees.treeResources, + } + + pageTrees.resourceTrees = doctree.MutableTrees{ + pageTrees.treeResources, + } + for i, confp := range confm.ConfigLangs() { language := confp.Language() - if confp.IsLangDisabled(language.Lang) { + if language.Disabled { continue } k := language.Lang conf := confm.LanguageConfigMap[k] - frontmatterHandler, err := pagemeta.NewFrontmatterHandler(firstSiteDeps.Log, 
conf.Frontmatter) if err != nil { return nil, err @@ -158,11 +188,9 @@ func NewHugoSites(cfg deps.DepsCfg) (*HugoSites, error) { langs.SetParams(language, conf.Params) s := &Site{ - conf: conf, - language: language, - siteBucket: &pagesMapBucket{ - cascade: conf.Cascade.Config, - }, + conf: conf, + language: language, + languagei: i, frontmatterHandler: frontmatterHandler, } @@ -177,20 +205,9 @@ func NewHugoSites(cfg deps.DepsCfg) (*HugoSites, error) { s.Deps = d } - // Site deps start. - var taxonomiesConfig taxonomiesConfig = conf.Taxonomies - pm := &pageMap{ - contentMap: newContentMap(contentMapConfig{ - lang: k, - taxonomyConfig: taxonomiesConfig.Values(), - taxonomyDisabled: !conf.IsKindEnabled(kinds.KindTerm), - taxonomyTermDisabled: !conf.IsKindEnabled(kinds.KindTaxonomy), - pageDisabled: !conf.IsKindEnabled(kinds.KindPage), - }), - s: s, - } + s.pageMap = newPageMap(i, s, memCache, pageTrees) - s.PageCollections = newPageCollections(pm) + s.pageFinder = newPageFinder(s.pageMap) s.siteRefLinker, err = newSiteRefLinker(s) if err != nil { return nil, err @@ -217,17 +234,26 @@ func NewHugoSites(cfg deps.DepsCfg) (*HugoSites, error) { return nil, errors.New("no sites to build") } - // Sort the sites by language weight (if set) or lang. + // Pull the default content language to the top, then sort the sites by language weight (if set) or lang. 
+ defaultContentLanguage := confm.Base.DefaultContentLanguage sort.Slice(sites, func(i, j int) bool { li := sites[i].language lj := sites[j].language + if li.Lang == defaultContentLanguage { + return true + } + + if lj.Lang == defaultContentLanguage { + return false + } + if li.Weight != lj.Weight { return li.Weight < lj.Weight } return li.Lang < lj.Lang }) - h, err := newHugoSitesNew(cfg, firstSiteDeps, sites) + h, err := newHugoSites(cfg, firstSiteDeps, pageTrees, sites) if err == nil && h == nil { panic("hugo: newHugoSitesNew returned nil error and nil HugoSites") } @@ -235,29 +261,33 @@ func NewHugoSites(cfg deps.DepsCfg) (*HugoSites, error) { return h, err } -func newHugoSitesNew(cfg deps.DepsCfg, d *deps.Deps, sites []*Site) (*HugoSites, error) { +func newHugoSites(cfg deps.DepsCfg, d *deps.Deps, pageTrees *pageTrees, sites []*Site) (*HugoSites, error) { numWorkers := config.GetNumWorkerMultiplier() - if numWorkers > len(sites) { - numWorkers = len(sites) - } - var workers *para.Workers - if numWorkers > 1 { - workers = para.New(numWorkers) + numWorkersSite := numWorkers + if numWorkersSite > len(sites) { + numWorkersSite = len(sites) } + workersSite := para.New(numWorkersSite) h := &HugoSites{ - Sites: sites, - Deps: sites[0].Deps, - Configs: cfg.Configs, - workers: workers, - numWorkers: numWorkers, + Sites: sites, + Deps: sites[0].Deps, + Configs: cfg.Configs, + workersSite: workersSite, + numWorkersSites: numWorkers, + numWorkers: numWorkers, + pageTrees: pageTrees, + cachePages: dynacache.GetOrCreatePartition[string, + page.Pages](d.MemCache, "/pags/all", + dynacache.OptionsPartition{Weight: 10, ClearWhen: dynacache.ClearOnRebuild}, + ), + translationKeyPages: maps.NewSliceCache[page.Page](), currentSite: sites[0], skipRebuildForFilenames: make(map[string]bool), init: &hugoSitesInit{ - data: lazy.New(), - layouts: lazy.New(), - gitInfo: lazy.New(), - translations: lazy.New(), + data: lazy.New(), + layouts: lazy.New(), + gitInfo: lazy.New(), }, } @@ 
-304,18 +334,8 @@ func newHugoSitesNew(cfg deps.DepsCfg, d *deps.Deps, sites []*Site) (*HugoSites, donec: make(chan bool), } - // Only needed in server mode. - if cfg.Configs.Base.Internal.Watch { - h.ContentChanges = &contentChangeMap{ - pathSpec: h.PathSpec, - symContent: make(map[string]map[string]bool), - leafBundles: radix.New(), - branchBundles: make(map[string]bool), - } - } - h.init.data.Add(func(context.Context) (any, error) { - err := h.loadData(h.PathSpec.BaseFs.Data.Dirs) + err := h.loadData() if err != nil { return nil, fmt.Errorf("failed to load data: %w", err) } @@ -331,15 +351,6 @@ func newHugoSitesNew(cfg deps.DepsCfg, d *deps.Deps, sites []*Site) (*HugoSites, return nil, nil }) - h.init.translations.Add(func(context.Context) (any, error) { - if len(h.Sites) > 1 { - allTranslations := pagesToTranslationsMap(h.Sites) - assignTranslationsToPages(allTranslations, h.Sites) - } - - return nil, nil - }) - h.init.gitInfo.Add(func(context.Context) (any, error) { err := h.loadGitInfo() if err != nil { @@ -422,10 +433,16 @@ func (s *Site) BaseURL() string { } // Returns the last modification date of the content. +// Deprecated: Use .Lastmod instead. func (s *Site) LastChange() time.Time { return s.lastmod } +// Returns the last modification date of the content. +func (s *Site) Lastmod() time.Time { + return s.lastmod +} + // Returns the Params configured for this site. func (s *Site) Params() maps.Params { return s.conf.Params @@ -480,12 +497,55 @@ func (s *Site) LanguagePrefix() string { return "/" + prefix } -// Returns the identity of this site. -// This is for internal use only. 
-func (s *Site) GetIdentity() identity.Identity { - return identity.KeyValueIdentity{Key: "site", Value: s.Lang()} -} - func (s *Site) Site() page.Site { return page.WrapSite(s) } + +func (s *Site) ForEeachIdentityByName(name string, f func(identity.Identity) bool) { + if id, found := siteidentities.FromString(name); found { + if f(id) { + return + } + } +} + +// Pages returns all pages. +// This is for the current language only. +func (s *Site) Pages() page.Pages { + return s.pageMap.getPagesInSection( + pageMapQueryPagesInSection{ + pageMapQueryPagesBelowPath: pageMapQueryPagesBelowPath{ + Path: "", + KeyPart: "global", + Include: pagePredicates.ShouldListGlobal, + }, + Recursive: true, + IncludeSelf: true, + }, + ) +} + +// RegularPages returns all the regular pages. +// This is for the current language only. +func (s *Site) RegularPages() page.Pages { + return s.pageMap.getPagesInSection( + pageMapQueryPagesInSection{ + pageMapQueryPagesBelowPath: pageMapQueryPagesBelowPath{ + Path: "", + KeyPart: "global", + Include: pagePredicates.ShouldListGlobal.And(pagePredicates.KindPage), + }, + Recursive: true, + }, + ) +} + +// AllPages returns all pages for all sites. +func (s *Site) AllPages() page.Pages { + return s.h.Pages() +} + +// AllRegularPages returns all regular pages for all sites. +func (s *Site) AllRegularPages() page.Pages { + return s.h.RegularPages() +} diff --git a/hugolib/site_output.go b/hugolib/site_output.go index d6f55cbdd..2744c0133 100644 --- a/hugolib/site_output.go +++ b/hugolib/site_output.go @@ -42,7 +42,7 @@ func createDefaultOutputFormats(allFormats output.Formats) map[string]output.For // Below are for consistency. They are currently not used during rendering. 
kinds.KindSitemap: {sitemapOut}, kinds.KindRobotsTXT: {robotsOut}, - kinds.Kind404: {htmlOut}, + kinds.KindStatus404: {htmlOut}, } // May be disabled diff --git a/hugolib/site_output_test.go b/hugolib/site_output_test.go index c2a14c3eb..9bcb13ea4 100644 --- a/hugolib/site_output_test.go +++ b/hugolib/site_output_test.go @@ -142,7 +142,7 @@ Len Pages: {{ .Kind }} {{ len .Site.RegularPages }} Page Number: {{ .Paginator.P s := b.H.Sites[0] b.Assert(s.language.Lang, qt.Equals, "en") - home := s.getPage(kinds.KindHome) + home := s.getPageOldVersion(kinds.KindHome) b.Assert(home, qt.Not(qt.IsNil)) @@ -314,7 +314,7 @@ baseName = "customdelimbase" th.assertFileContent("public/nosuffixbase", "no suffix") th.assertFileContent("public/customdelimbase_del", "custom delim") - home := s.getPage(kinds.KindHome) + home := s.getPageOldVersion(kinds.KindHome) c.Assert(home, qt.Not(qt.IsNil)) outputs := home.OutputFormats() @@ -383,7 +383,7 @@ func TestCreateSiteOutputFormats(t *testing.T) { c.Assert(outputs[kinds.KindRSS], deepEqualsOutputFormats, output.Formats{output.RSSFormat}) c.Assert(outputs[kinds.KindSitemap], deepEqualsOutputFormats, output.Formats{output.SitemapFormat}) c.Assert(outputs[kinds.KindRobotsTXT], deepEqualsOutputFormats, output.Formats{output.RobotsTxtFormat}) - c.Assert(outputs[kinds.Kind404], deepEqualsOutputFormats, output.Formats{output.HTMLFormat}) + c.Assert(outputs[kinds.KindStatus404], deepEqualsOutputFormats, output.Formats{output.HTMLFormat}) }) // Issue #4528 diff --git a/hugolib/site_render.go b/hugolib/site_render.go index 43371b44d..379dd6e86 100644 --- a/hugolib/site_render.go +++ b/hugolib/site_render.go @@ -20,23 +20,21 @@ import ( "strings" "sync" - "github.com/gohugoio/hugo/output/layouts" + "github.com/gohugoio/hugo/hugolib/doctree" "github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/tpl" - "errors" - - "github.com/gohugoio/hugo/output" - "github.com/gohugoio/hugo/resources/kinds" "github.com/gohugoio/hugo/resources/page" - 
"github.com/gohugoio/hugo/resources/page/pagemeta" ) type siteRenderContext struct { cfg *BuildCfg + // languageIdx is the zero based index of the site. + languageIdx int + // Zero based index for all output formats combined. sitesOutIdx int @@ -47,20 +45,24 @@ type siteRenderContext struct { multihost bool } -// Whether to render 404.html, robotsTXT.txt which usually is rendered -// once only in the site root. -func (s siteRenderContext) renderSingletonPages() bool { - if s.multihost { +// Whether to render 404.html, robotsTXT.txt and similar. +// These are useually rendered once in the root of public. +func (s siteRenderContext) shouldRenderStandalonePage(kind string) bool { + if s.multihost || kind == kinds.KindSitemap { // 1 per site return s.outIdx == 0 } - // 1 for all sites - return s.sitesOutIdx == 0 + if kind == kinds.KindStatus404 { + // 1 for all output formats + return s.outIdx == 0 + } + + // 1 for all sites and output formats. + return s.languageIdx == 0 && s.outIdx == 0 } -// renderPages renders pages each corresponding to a markdown file. -// TODO(bep np doc +// renderPages renders pages concurrently. 
func (s *Site) renderPages(ctx *siteRenderContext) error { numWorkers := config.GetNumWorkerMultiplier() @@ -79,18 +81,26 @@ func (s *Site) renderPages(ctx *siteRenderContext) error { cfg := ctx.cfg - s.pageMap.pageTrees.Walk(func(ss string, n *contentNode) bool { - - if cfg.shouldRender(n.p) { - select { - case <-s.h.Done(): - return true - default: - pages <- n.p + w := &doctree.NodeShiftTreeWalker[contentNodeI]{ + Tree: s.pageMap.treePages, + Handle: func(key string, n contentNodeI, match doctree.DimensionFlag) (bool, error) { + if p, ok := n.(*pageState); ok { + if cfg.shouldRender(p) { + select { + case <-s.h.Done(): + return true, nil + default: + pages <- p + } + } } - } - return false - }) + return false, nil + }, + } + + if err := w.Walk(context.Background()); err != nil { + return err + } close(pages) @@ -110,10 +120,15 @@ func pageRenderer( s *Site, pages <-chan *pageState, results chan<- error, - wg *sync.WaitGroup) { + wg *sync.WaitGroup, +) { defer wg.Done() for p := range pages { + if p.m.isStandalone() && !ctx.shouldRenderStandalonePage(p.Kind()) { + continue + } + if p.m.buildConfig.PublishResources { if err := p.renderResources(); err != nil { s.SendError(p.errorf(err, "failed to render page resources")) @@ -133,13 +148,33 @@ func pageRenderer( } if !found { - s.logMissingLayout("", p.Layout(), p.Kind(), p.f.Name) + s.Log.Trace( + func() string { + return fmt.Sprintf("no layout for kind %q found", p.Kind()) + }, + ) + // Don't emit warning for missing 404 etc. pages. 
+ if !p.m.isStandalone() { + s.logMissingLayout("", p.Layout(), p.Kind(), p.f.Name) + } continue } targetPath := p.targetPaths().TargetFilename - if err := s.renderAndWritePage(&s.PathSpec.ProcessingStats.Pages, "page "+p.Title(), targetPath, p, templ); err != nil { + s.Log.Trace( + func() string { + return fmt.Sprintf("rendering outputFormat %q kind %q using layout %q to %q", p.pageOutput.f.Name, p.Kind(), templ.Name(), targetPath) + }, + ) + + var d any = p + switch p.Kind() { + case kinds.KindSitemapIndex: + d = s.h.Sites + } + + if err := s.renderAndWritePage(&s.PathSpec.ProcessingStats.Pages, "page "+p.Title(), targetPath, p, d, templ); err != nil { results <- err } @@ -205,6 +240,7 @@ func (s *Site) renderPaginator(p *pageState, templ tpl.Template) error { if err := s.writeDestAlias(targetPaths.TargetFilename, p.Permalink(), f, p); err != nil { return err } + } // Render pages for the rest @@ -217,7 +253,7 @@ func (s *Site) renderPaginator(p *pageState, templ tpl.Template) error { if err := s.renderAndWritePage( &s.PathSpec.ProcessingStats.PaginatorPages, p.Title(), - targetPaths.TargetFilename, p, templ); err != nil { + targetPaths.TargetFilename, p, p, templ); err != nil { return err } @@ -226,160 +262,72 @@ func (s *Site) renderPaginator(p *pageState, templ tpl.Template) error { return nil } -func (s *Site) render404() error { - p, err := newPageStandalone(&pageMeta{ - s: s, - kind: kinds.Kind404, - urlPaths: pagemeta.URLPath{ - URL: "404.html", - }, - }, - output.HTMLFormat, - ) - if err != nil { - return err - } - - if !p.render { - return nil - } - - var d layouts.LayoutDescriptor - d.Kind = kinds.Kind404 - - templ, found, err := s.Tmpl().LookupLayout(d, output.HTMLFormat) - if err != nil { - return err - } - if !found { - return nil - } - - targetPath := p.targetPaths().TargetFilename - - if targetPath == "" { - return errors.New("failed to create targetPath for 404 page") - } - - return s.renderAndWritePage(&s.PathSpec.ProcessingStats.Pages, "404 
page", targetPath, p, templ) -} - -func (s *Site) renderSitemap() error { - p, err := newPageStandalone(&pageMeta{ - s: s, - kind: kinds.KindSitemap, - urlPaths: pagemeta.URLPath{ - URL: s.conf.Sitemap.Filename, - }, - }, - output.HTMLFormat, - ) - if err != nil { - return err - } - - if !p.render { - return nil - } - - targetPath := p.targetPaths().TargetFilename - ctx := tpl.SetPageInContext(context.Background(), p) - - if targetPath == "" { - return errors.New("failed to create targetPath for sitemap") - } - - templ := s.lookupLayouts("sitemap.xml", "_default/sitemap.xml", "_internal/_default/sitemap.xml") - - return s.renderAndWriteXML(ctx, &s.PathSpec.ProcessingStats.Sitemaps, "sitemap", targetPath, p, templ) -} - -func (s *Site) renderRobotsTXT() error { - if !s.conf.EnableRobotsTXT && s.isEnabled(kinds.KindRobotsTXT) { - return nil - } - - p, err := newPageStandalone(&pageMeta{ - s: s, - kind: kinds.KindRobotsTXT, - urlPaths: pagemeta.URLPath{ - URL: "robots.txt", - }, - }, - output.RobotsTxtFormat) - if err != nil { - return err - } - - if !p.render { - return nil - } - - templ := s.lookupLayouts("robots.txt", "_default/robots.txt", "_internal/_default/robots.txt") - - return s.renderAndWritePage(&s.PathSpec.ProcessingStats.Pages, "Robots Txt", p.targetPaths().TargetFilename, p, templ) -} - // renderAliases renders shell pages that simply have a redirect in the header. func (s *Site) renderAliases() error { - var err error - s.pageMap.pageTrees.WalkLinkable(func(ss string, n *contentNode) bool { - p := n.p - if len(p.Aliases()) == 0 { - return false - } - - pathSeen := make(map[string]bool) + w := &doctree.NodeShiftTreeWalker[contentNodeI]{ + Tree: s.pageMap.treePages, + Handle: func(key string, n contentNodeI, match doctree.DimensionFlag) (bool, error) { + p := n.(*pageState) + + // We cannot alias a page that's not rendered. 
+ if p.m.noLink() { + return false, nil + } - for _, of := range p.OutputFormats() { - if !of.Format.IsHTML { - continue + if len(p.Aliases()) == 0 { + return false, nil } - f := of.Format + pathSeen := make(map[string]bool) + for _, of := range p.OutputFormats() { + if !of.Format.IsHTML { + continue + } - if pathSeen[f.Path] { - continue - } - pathSeen[f.Path] = true + f := of.Format - plink := of.Permalink() + if pathSeen[f.Path] { + continue + } + pathSeen[f.Path] = true - for _, a := range p.Aliases() { - isRelative := !strings.HasPrefix(a, "/") + plink := of.Permalink() - if isRelative { - // Make alias relative, where "." will be on the - // same directory level as the current page. - basePath := path.Join(p.targetPaths().SubResourceBaseLink, "..") - a = path.Join(basePath, a) + for _, a := range p.Aliases() { + isRelative := !strings.HasPrefix(a, "/") - } else { - // Make sure AMP and similar doesn't clash with regular aliases. - a = path.Join(f.Path, a) - } + if isRelative { + // Make alias relative, where "." will be on the + // same directory level as the current page. + basePath := path.Join(p.targetPaths().SubResourceBaseLink, "..") + a = path.Join(basePath, a) - if s.conf.C.IsUglyURLSection(p.Section()) && !strings.HasSuffix(a, ".html") { - a += ".html" - } + } else { + // Make sure AMP and similar doesn't clash with regular aliases. + a = path.Join(f.Path, a) + } - lang := p.Language().Lang + if s.conf.C.IsUglyURLSection(p.Section()) && !strings.HasSuffix(a, ".html") { + a += ".html" + } - if s.h.Configs.IsMultihost && !strings.HasPrefix(a, "/"+lang) { - // These need to be in its language root. - a = path.Join(lang, a) - } + lang := p.Language().Lang - err = s.writeDestAlias(a, plink, f, p) - if err != nil { - return true + if s.h.Configs.IsMultihost && !strings.HasPrefix(a, "/"+lang) { + // These need to be in its language root. 
+ a = path.Join(lang, a) + } + + err := s.writeDestAlias(a, plink, f, p) + if err != nil { + return true, err + } } } - } - return false - }) - - return err + return false, nil + }, + } + return w.Walk(context.TODO()) } // renderMainLanguageRedirect creates a redirect to the main language home, diff --git a/hugolib/site_sections_test.go b/hugolib/site_sections_test.go index f5cb41d28..4d4ff965b 100644 --- a/hugolib/site_sections_test.go +++ b/hugolib/site_sections_test.go @@ -21,6 +21,7 @@ import ( qt "github.com/frankban/quicktest" "github.com/gohugoio/hugo/deps" + "github.com/gohugoio/hugo/htesting" "github.com/gohugoio/hugo/resources/kinds" "github.com/gohugoio/hugo/resources/page" ) @@ -31,8 +32,10 @@ func TestNestedSections(t *testing.T) { cfg, fs = newTestCfg() ) + tt := htesting.NewPinnedRunner(c, "") + cfg.Set("permalinks", map[string]string{ - "perm a": ":sections/:title", + "perm-a": ":sections/:title", }) pageTemplate := `--- @@ -127,7 +130,7 @@ PAG|{{ .Title }}|{{ $sect.InSection . }} {"elsewhere", func(c *qt.C, p page.Page) { c.Assert(len(p.Pages()), qt.Equals, 1) for _, p := range p.Pages() { - c.Assert(p.SectionsPath(), qt.Equals, "elsewhere") + c.Assert(p.SectionsPath(), qt.Equals, "/elsewhere") } }}, {"post", func(c *qt.C, p page.Page) { @@ -179,8 +182,7 @@ PAG|{{ .Title }}|{{ $sect.InSection . }} c.Assert(home.IsHome(), qt.Equals, true) c.Assert(len(p.Sections()), qt.Equals, 0) c.Assert(home.CurrentSection(), qt.Equals, home) - active, err := home.InSection(home) - c.Assert(err, qt.IsNil) + active := home.InSection(home) c.Assert(active, qt.Equals, true) c.Assert(p.FirstSection(), qt.Equals, p) c.Assert(len(p.Ancestors()), qt.Equals, 1) @@ -208,29 +210,22 @@ PAG|{{ .Title }}|{{ $sect.InSection . 
}} } c.Assert(child.CurrentSection(), qt.Equals, p) - active, err := child.InSection(p) - c.Assert(err, qt.IsNil) + active := child.InSection(p) c.Assert(active, qt.Equals, true) - active, err = p.InSection(child) - c.Assert(err, qt.IsNil) + active = p.InSection(child) c.Assert(active, qt.Equals, true) - active, err = p.InSection(getPage(p, "/")) - c.Assert(err, qt.IsNil) + active = p.InSection(getPage(p, "/")) c.Assert(active, qt.Equals, false) - isAncestor, err := p.IsAncestor(child) - c.Assert(err, qt.IsNil) + isAncestor := p.IsAncestor(child) c.Assert(isAncestor, qt.Equals, true) - isAncestor, err = child.IsAncestor(p) - c.Assert(err, qt.IsNil) + isAncestor = child.IsAncestor(p) c.Assert(isAncestor, qt.Equals, false) - isDescendant, err := p.IsDescendant(child) - c.Assert(err, qt.IsNil) + isDescendant := p.IsDescendant(child) c.Assert(isDescendant, qt.Equals, false) - isDescendant, err = child.IsDescendant(p) - c.Assert(err, qt.IsNil) + isDescendant = child.IsDescendant(p) c.Assert(isDescendant, qt.Equals, true) } @@ -254,32 +249,26 @@ PAG|{{ .Title }}|{{ $sect.InSection . 
}} c.Assert(len(p.Ancestors()), qt.Equals, 3) l1 := getPage(p, "/l1") - isDescendant, err := l1.IsDescendant(p) - c.Assert(err, qt.IsNil) + isDescendant := l1.IsDescendant(p) c.Assert(isDescendant, qt.Equals, false) - isDescendant, err = l1.IsDescendant(nil) - c.Assert(err, qt.IsNil) + isDescendant = l1.IsDescendant(nil) c.Assert(isDescendant, qt.Equals, false) - isDescendant, err = nilp.IsDescendant(p) - c.Assert(err, qt.IsNil) + isDescendant = nilp.IsDescendant(p) c.Assert(isDescendant, qt.Equals, false) - isDescendant, err = p.IsDescendant(l1) - c.Assert(err, qt.IsNil) + isDescendant = p.IsDescendant(l1) c.Assert(isDescendant, qt.Equals, true) - isAncestor, err := l1.IsAncestor(p) - c.Assert(err, qt.IsNil) + isAncestor := l1.IsAncestor(p) c.Assert(isAncestor, qt.Equals, true) - isAncestor, err = p.IsAncestor(l1) - c.Assert(err, qt.IsNil) + isAncestor = p.IsAncestor(l1) c.Assert(isAncestor, qt.Equals, false) c.Assert(p.FirstSection(), qt.Equals, l1) - isAncestor, err = p.IsAncestor(nil) - c.Assert(err, qt.IsNil) + isAncestor = p.IsAncestor(nil) c.Assert(isAncestor, qt.Equals, false) - isAncestor, err = nilp.IsAncestor(l1) - c.Assert(err, qt.IsNil) c.Assert(isAncestor, qt.Equals, false) + + l3 := getPage(p, "/l1/l2/l3") + c.Assert(l3.FirstSection(), qt.Equals, l1) }}, {"perm a,link", func(c *qt.C, p page.Page) { c.Assert(p.Title(), qt.Equals, "T9_-1") @@ -294,15 +283,14 @@ PAG|{{ .Title }}|{{ $sect.InSection . }} }}, } - home := s.getPage(kinds.KindHome) + home := s.getPageOldVersion(kinds.KindHome) for _, test := range tests { test := test - t.Run(fmt.Sprintf("sections %s", test.sections), func(t *testing.T) { - t.Parallel() - c := qt.New(t) + tt.Run(fmt.Sprintf("sections %s", test.sections), func(c *qt.C) { + c.Parallel() sections := strings.Split(test.sections, ",") - p := s.getPage(kinds.KindSection, sections...) + p := s.getPageOldVersion(kinds.KindSection, sections...) 
c.Assert(p, qt.Not(qt.IsNil), qt.Commentf(fmt.Sprint(sections))) if p.Pages() != nil { @@ -319,19 +307,14 @@ PAG|{{ .Title }}|{{ $sect.InSection . }} c.Assert(len(home.Sections()), qt.Equals, 9) c.Assert(s.Sections(), deepEqualsPages, home.Sections()) - rootPage := s.getPage(kinds.KindPage, "mypage.md") + rootPage := s.getPageOldVersion(kinds.KindPage, "mypage.md") c.Assert(rootPage, qt.Not(qt.IsNil)) c.Assert(rootPage.Parent().IsHome(), qt.Equals, true) // https://github.com/gohugoio/hugo/issues/6365 c.Assert(rootPage.Sections(), qt.HasLen, 0) - // Add a odd test for this as this looks a little bit off, but I'm not in the mood - // to think too hard a out this right now. It works, but people will have to spell - // out the directory name as is. - // If we later decide to do something about this, we will have to do some normalization in - // getPage. - // TODO(bep) - sectionWithSpace := s.getPage(kinds.KindSection, "Spaces in Section") + sectionWithSpace := s.getPageOldVersion(kinds.KindSection, "Spaces in Section") + // s.h.pageTrees.debugPrint() c.Assert(sectionWithSpace, qt.Not(qt.IsNil)) c.Assert(sectionWithSpace.RelPermalink(), qt.Equals, "/spaces-in-section/") @@ -381,3 +364,37 @@ Next: {{ with .NextInSection }}{{ .RelPermalink }}{{ end }}| b.AssertFileContent("public/blog/cool/cool2/index.html", "Prev: |", "Next: /blog/cool/cool1/|") } + +func TestSectionEntries(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +baseURL = "https://example.com/" +-- content/myfirstsection/p1.md -- +--- +title: "P1" +--- +P1 +-- content/a/b/c/_index.md -- +--- +title: "C" +--- +C +-- content/a/b/c/mybundle/index.md -- +--- +title: "My Bundle" +--- +-- layouts/_default/list.html -- +Kind: {{ .Kind }}|RelPermalink: {{ .RelPermalink }}|SectionsPath: {{ .SectionsPath }}|SectionsEntries: {{ .SectionsEntries }}|Len: {{ len .SectionsEntries }}| +-- layouts/_default/single.html -- +Kind: {{ .Kind }}|RelPermalink: {{ .RelPermalink }}|SectionsPath: {{ .SectionsPath 
}}|SectionsEntries: {{ .SectionsEntries }}|Len: {{ len .SectionsEntries }}| +` + + b := Test(t, files) + + b.AssertFileContent("public/myfirstsection/p1/index.html", "RelPermalink: /myfirstsection/p1/|SectionsPath: /myfirstsection|SectionsEntries: [myfirstsection]|Len: 1") + b.AssertFileContent("public/a/b/c/index.html", "RelPermalink: /a/b/c/|SectionsPath: /a/b/c|SectionsEntries: [a b c]|Len: 3") + b.AssertFileContent("public/a/b/c/mybundle/index.html", "Kind: page|RelPermalink: /a/b/c/mybundle/|SectionsPath: /a/b/c|SectionsEntries: [a b c]|Len: 3") + b.AssertFileContent("public/index.html", "Kind: home|RelPermalink: /|SectionsPath: /|SectionsEntries: []|Len: 0") +} diff --git a/hugolib/site_stats_test.go b/hugolib/site_stats_test.go index 4ed6411d2..167194ef5 100644 --- a/hugolib/site_stats_test.go +++ b/hugolib/site_stats_test.go @@ -94,5 +94,38 @@ aliases: [/Ali%d] helpers.ProcessingStatsTable(&buff, stats...) - c.Assert(buff.String(), qt.Contains, "Pages | 19 | 6") + c.Assert(buff.String(), qt.Contains, "Pages | 21 | 7") +} + +func TestSiteLastmod(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +baseURL = "https://example.com/" +-- content/_index.md -- +--- +date: 2023-01-01 +--- +-- content/posts/_index.md -- +--- +date: 2023-02-01 +--- +-- content/posts/post-1.md -- +--- +date: 2023-03-01 +--- +-- content/posts/post-2.md -- +--- +date: 2023-04-01 +--- +-- layouts/index.html -- +site.Lastmod: {{ .Site.Lastmod.Format "2006-01-02" }} +site.LastChange: {{ .Site.LastChange.Format "2006-01-02" }} +home.Lastmod: {{ site.Home.Lastmod.Format "2006-01-02" }} + +` + b := Test(t, files) + + b.AssertFileContent("public/index.html", "site.Lastmod: 2023-04-01\nsite.LastChange: 2023-04-01\nhome.Lastmod: 2023-01-01") } diff --git a/hugolib/site_test.go b/hugolib/site_test.go index 57c6bbabe..cf0d4a032 100644 --- a/hugolib/site_test.go +++ b/hugolib/site_test.go @@ -419,7 +419,6 @@ Main section page: {{ .RelPermalink }} } func TestMainSectionsMoveToSite(t 
*testing.T) { - t.Run("defined in params", func(t *testing.T) { t.Parallel() @@ -510,7 +509,6 @@ MainSections Params: [mysect]| MainSections Site method: [mysect]| `) }) - } // Issue #1176 @@ -718,7 +716,7 @@ func TestOrderedPages(t *testing.T) { s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Configs: configs}, BuildCfg{SkipRender: true}) - if s.getPage(kinds.KindSection, "sect").Pages()[1].Title() != "Three" || s.getPage(kinds.KindSection, "sect").Pages()[2].Title() != "Four" { + if s.getPageOldVersion(kinds.KindSection, "sect").Pages()[1].Title() != "Three" || s.getPageOldVersion(kinds.KindSection, "sect").Pages()[2].Title() != "Four" { t.Error("Pages in unexpected order.") } @@ -1011,7 +1009,7 @@ func TestRefLinking(t *testing.T) { t.Parallel() site := setupLinkingMockSite(t) - currentPage := site.getPage(kinds.KindPage, "level2/level3/start.md") + currentPage := site.getPageOldVersion(kinds.KindPage, "level2/level3/start.md") if currentPage == nil { t.Fatalf("failed to find current page in site") } @@ -1071,7 +1069,7 @@ func TestRefLinking(t *testing.T) { func checkLinkCase(site *Site, link string, currentPage page.Page, relative bool, outputFormat string, expected string, t *testing.T, i int) { t.Helper() if out, err := site.refLink(link, currentPage, relative, outputFormat); err != nil || out != expected { - t.Fatalf("[%d] Expected %q from %q to resolve to %q, got %q - error: %s", i, link, currentPage.Pathc(), expected, out, err) + t.Fatalf("[%d] Expected %q from %q to resolve to %q, got %q - error: %s", i, link, currentPage.Path(), expected, out, err) } } @@ -1199,7 +1197,7 @@ writeStats = true writeStats = false `) - b.AssertDestinationExists("hugo_stats.json", false) + b.AssertFileExists("public/hugo_stats.json", false) b = r(` [build.buildStats] @@ -1245,8 +1243,7 @@ disableclasses = true [build.buildStats] enable = false `) - b.AssertDestinationExists("hugo_stats.json", false) - + b.AssertFileExists("public/hugo_stats.json", false) } func 
TestClassCollectorStress(t *testing.T) { diff --git a/hugolib/site_url_test.go b/hugolib/site_url_test.go index fd15eb5d3..2cc532854 100644 --- a/hugolib/site_url_test.go +++ b/hugolib/site_url_test.go @@ -23,20 +23,6 @@ import ( "github.com/gohugoio/hugo/resources/kinds" ) -const slugDoc1 = "---\ntitle: slug doc 1\nslug: slug-doc-1\naliases:\n - /sd1/foo/\n - /sd2\n - /sd3/\n - /sd4.html\n---\nslug doc 1 content\n" - -const slugDoc2 = `--- -title: slug doc 2 -slug: slug-doc-2 ---- -slug doc 2 content -` - -var urlFakeSource = [][2]string{ - {filepath.FromSlash("content/blue/doc1.md"), slugDoc1}, - {filepath.FromSlash("content/blue/doc2.md"), slugDoc2}, -} - func TestUglyURLsPerSection(t *testing.T) { t.Parallel() @@ -67,12 +53,12 @@ Do not go gentle into that good night. c.Assert(len(s.RegularPages()), qt.Equals, 2) - notUgly := s.getPage(kinds.KindPage, "sect1/p1.md") + notUgly := s.getPageOldVersion(kinds.KindPage, "sect1/p1.md") c.Assert(notUgly, qt.Not(qt.IsNil)) c.Assert(notUgly.Section(), qt.Equals, "sect1") c.Assert(notUgly.RelPermalink(), qt.Equals, "/sect1/p1/") - ugly := s.getPage(kinds.KindPage, "sect2/p2.md") + ugly := s.getPageOldVersion(kinds.KindPage, "sect2/p2.md") c.Assert(ugly, qt.Not(qt.IsNil)) c.Assert(ugly.Section(), qt.Equals, "sect2") c.Assert(ugly.RelPermalink(), qt.Equals, "/sect2/p2.html") @@ -124,7 +110,7 @@ Do not go gentle into that good night. 
c.Assert(len(s.RegularPages()), qt.Equals, 10) - sect1 := s.getPage(kinds.KindSection, "sect1") + sect1 := s.getPageOldVersion(kinds.KindSection, "sect1") c.Assert(sect1, qt.Not(qt.IsNil)) c.Assert(sect1.RelPermalink(), qt.Equals, "/ss1/") th.assertFileContent(filepath.Join("public", "ss1", "index.html"), "P1|URL: /ss1/|Next: /ss1/page/2/") diff --git a/hugolib/sitemap_test.go b/hugolib/sitemap_test.go index aae874d50..be13ba1f4 100644 --- a/hugolib/sitemap_test.go +++ b/hugolib/sitemap_test.go @@ -17,67 +17,107 @@ import ( "reflect" "testing" - qt "github.com/frankban/quicktest" "github.com/gohugoio/hugo/config" - "github.com/gohugoio/hugo/deps" ) -const sitemapTemplate = `<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"> - {{ range .Data.Pages }} - {{- if .Permalink -}} - <url> - <loc>{{ .Permalink }}</loc>{{ if not .Lastmod.IsZero }} - <lastmod>{{ safeHTML ( .Lastmod.Format "2006-01-02T15:04:05-07:00" ) }}</lastmod>{{ end }}{{ with .Sitemap.ChangeFreq }} - <changefreq>{{ . 
}}</changefreq>{{ end }}{{ if ge .Sitemap.Priority 0.0 }} - <priority>{{ .Sitemap.Priority }}</priority>{{ end }} - </url> - {{- end -}} - {{ end }} -</urlset>` - -func TestSitemapOutput(t *testing.T) { +func TestSitemapBasic(t *testing.T) { t.Parallel() - for _, internal := range []bool{false, true} { - doTestSitemapOutput(t, internal) - } + + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["term", "taxonomy"] +-- content/sect/doc1.md -- +--- +title: doc1 +--- +Doc1 +-- content/sect/doc2.md -- +--- +title: doc2 +--- +Doc2 +` + + b := NewIntegrationTestBuilder( + IntegrationTestConfig{ + T: t, + TxtarString: files, + }, + ).Build() + + b.AssertFileContent("public/sitemap.xml", " <loc>https://example.com/sect/doc1/</loc>", "doc2") } -func doTestSitemapOutput(t *testing.T, internal bool) { - c := qt.New(t) - cfg, fs := newTestCfg() - cfg.Set("baseURL", "http://auth/bub/") - cfg.Set("defaultContentLanguageInSubdir", false) - configs, err := loadTestConfigFromProvider(cfg) - c.Assert(err, qt.IsNil) - writeSource(t, fs, "layouts/sitemap.xml", sitemapTemplate) - // We want to check that the 404 page is not included in the sitemap - // output. This template should have no effect either way, but include - // it for the clarity. - writeSource(t, fs, "layouts/404.html", "Not found") - - depsCfg := deps.DepsCfg{Fs: fs, Configs: configs} - - writeSourcesToSource(t, "content", fs, weightedSources...) 
- s := buildSingleSite(t, depsCfg, BuildCfg{}) - th := newTestHelper(s.conf, s.Fs, t) - outputSitemap := "public/sitemap.xml" - - th.assertFileContent(outputSitemap, - // Regular page - " <loc>http://auth/bub/sect/doc1/</loc>", - // Home page - "<loc>http://auth/bub/</loc>", - // Section - "<loc>http://auth/bub/sect/</loc>", - // Tax terms - "<loc>http://auth/bub/categories/</loc>", - // Tax list - "<loc>http://auth/bub/categories/hugo/</loc>", - ) - - content := readWorkingDir(th, th.Fs, outputSitemap) - c.Assert(content, qt.Not(qt.Contains), "404") - c.Assert(content, qt.Not(qt.Contains), "<loc></loc>") +func TestSitemapMultilingual(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["term", "taxonomy"] +defaultContentLanguage = "en" +[languages] +[languages.en] +weight = 1 +languageName = "English" +[languages.nn] +weight = 2 +languageName = "Nynorsk" +-- content/sect/doc1.md -- +--- +title: doc1 +--- +Doc1 +-- content/sect/doc2.md -- +--- +title: doc2 +--- +Doc2 +-- content/sect/doc2.nn.md -- +--- +title: doc2 +--- +Doc2 +` + + b := NewIntegrationTestBuilder( + IntegrationTestConfig{ + T: t, + TxtarString: files, + }, + ).Build() + + b.AssertFileContent("public/sitemap.xml", "<loc>https://example.com/en/sitemap.xml</loc>", "<loc>https://example.com/nn/sitemap.xml</loc>") + b.AssertFileContent("public/en/sitemap.xml", " <loc>https://example.com/sect/doc1/</loc>", "doc2") + b.AssertFileContent("public/nn/sitemap.xml", " <loc>https://example.com/nn/sect/doc2/</loc>") +} + +// https://github.com/gohugoio/hugo/issues/5910 +func TestSitemapOutputFormats(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["term", "taxonomy"] +-- content/blog/html-amp.md -- +--- +Title: AMP and HTML +outputs: [ "html", "amp" ] +--- + +` + + b := NewIntegrationTestBuilder( + IntegrationTestConfig{ + T: t, + TxtarString: files, + }, + ).Build() + + // Should link to 
the HTML version. + b.AssertFileContent("public/sitemap.xml", " <loc>https://example.com/blog/html-amp/</loc>") } func TestParseSitemap(t *testing.T) { @@ -98,21 +138,3 @@ func TestParseSitemap(t *testing.T) { t.Errorf("Got \n%v expected \n%v", result, expected) } } - -// https://github.com/gohugoio/hugo/issues/5910 -func TestSitemapOutputFormats(t *testing.T) { - b := newTestSitesBuilder(t).WithSimpleConfigFile() - - b.WithContent("blog/html-amp.md", ` ---- -Title: AMP and HTML -outputs: [ "html", "amp" ] ---- - -`) - - b.Build(BuildCfg{}) - - // Should link to the HTML version. - b.AssertFileContent("public/sitemap.xml", " <loc>http://example.com/blog/html-amp/</loc>") -} diff --git a/hugolib/taxonomy_test.go b/hugolib/taxonomy_test.go index d4ded2058..3132cc485 100644 --- a/hugolib/taxonomy_test.go +++ b/hugolib/taxonomy_test.go @@ -38,6 +38,7 @@ func TestTaxonomiesCountOrder(t *testing.T) { cfg, fs := newTestCfg() + cfg.Set("titleCaseStyle", "none") cfg.Set("taxonomies", taxonomies) configs, err := loadTestConfigFromProvider(cfg) c.Assert(err, qt.IsNil) @@ -77,6 +78,7 @@ func TestTaxonomiesWithAndWithoutContentFile(t *testing.T) { func doTestTaxonomiesWithAndWithoutContentFile(t *testing.T, uglyURLs bool) { siteConfig := ` baseURL = "http://example.com/blog" +titleCaseStyle = "firstupper" uglyURLs = %t paginate = 1 defaultContentLanguage = "en" @@ -135,7 +137,7 @@ permalinkeds: } // 1. - b.AssertFileContent(pathFunc("public/categories/cat1/index.html"), "List", "cAt1") + b.AssertFileContent(pathFunc("public/categories/cat1/index.html"), "List", "CAt1") b.AssertFileContent(pathFunc("public/categories/index.html"), "Taxonomy Term Page", "Category Terms") // 2. 
@@ -168,7 +170,7 @@ permalinkeds: for taxonomy, count := range taxonomyTermPageCounts { msg := qt.Commentf(taxonomy) - term := s.getPage(kinds.KindTaxonomy, taxonomy) + term := s.getPageOldVersion(kinds.KindTaxonomy, taxonomy) b.Assert(term, qt.Not(qt.IsNil), msg) b.Assert(len(term.Pages()), qt.Equals, count, msg) @@ -177,7 +179,7 @@ permalinkeds: } } - cat1 := s.getPage(kinds.KindTerm, "categories", "cat1") + cat1 := s.getPageOldVersion(kinds.KindTerm, "categories", "cat1") b.Assert(cat1, qt.Not(qt.IsNil)) if uglyURLs { b.Assert(cat1.RelPermalink(), qt.Equals, "/blog/categories/cat1.html") @@ -185,8 +187,8 @@ permalinkeds: b.Assert(cat1.RelPermalink(), qt.Equals, "/blog/categories/cat1/") } - pl1 := s.getPage(kinds.KindTerm, "permalinkeds", "pl1") - permalinkeds := s.getPage(kinds.KindTaxonomy, "permalinkeds") + pl1 := s.getPageOldVersion(kinds.KindTerm, "permalinkeds", "pl1") + permalinkeds := s.getPageOldVersion(kinds.KindTaxonomy, "permalinkeds") b.Assert(pl1, qt.Not(qt.IsNil)) b.Assert(permalinkeds, qt.Not(qt.IsNil)) if uglyURLs { @@ -197,7 +199,7 @@ permalinkeds: b.Assert(permalinkeds.RelPermalink(), qt.Equals, "/blog/permalinkeds/") } - helloWorld := s.getPage(kinds.KindTerm, "others", "hello-hugo-world") + helloWorld := s.getPageOldVersion(kinds.KindTerm, "others", "hello-hugo-world") b.Assert(helloWorld, qt.Not(qt.IsNil)) b.Assert(helloWorld.Title(), qt.Equals, "Hello Hugo world") @@ -212,6 +214,7 @@ func TestTaxonomiesPathSeparation(t *testing.T) { config := ` baseURL = "https://example.com" +titleCaseStyle = "none" [taxonomies] "news/tag" = "news/tags" "news/category" = "news/categories" @@ -282,8 +285,8 @@ title: "This is S3s" b.AssertFileContent("public/t1/t2/t3s/t4/t5/index.html", "Taxonomy List Page 1|This is T5|Hello|https://example.com/t1/t2/t3s/t4/t5/|") b.AssertFileContent("public/t1/t2/t3s/t4/t5/t6/index.html", "Taxonomy List Page 1|t4/t5/t6|Hello|https://example.com/t1/t2/t3s/t4/t5/t6/|") - 
b.AssertFileContent("public/news/categories/index.html", "Taxonomy Term Page 1|News/Categories|Hello|https://example.com/news/categories/|") - b.AssertFileContent("public/t1/t2/t3s/index.html", "Taxonomy Term Page 1|T1/T2/T3s|Hello|https://example.com/t1/t2/t3s/|") + b.AssertFileContent("public/news/categories/index.html", "Taxonomy Term Page 1|categories|Hello|https://example.com/news/categories/|") + b.AssertFileContent("public/t1/t2/t3s/index.html", "Taxonomy Term Page 1|t3s|Hello|https://example.com/t1/t2/t3s/|") b.AssertFileContent("public/s1/s2/s3s/index.html", "Taxonomy Term Page 1|This is S3s|Hello|https://example.com/s1/s2/s3s/|") } @@ -326,8 +329,8 @@ Content. b.CreateSites().Build(BuildCfg{}) b.AssertFileContent("public/index.html", `<li><a href="http://example.com/tags/hugo-rocks/">Hugo Rocks!</a> 10</li>`) - b.AssertFileContent("public/categories/index.html", `<li><a href="http://example.com/categories/this-is-cool/">This is Cool</a> 10</li>`) - b.AssertFileContent("public/tags/index.html", `<li><a href="http://example.com/tags/rocks-i-say/">Rocks I say!</a> 10</li>`) + b.AssertFileContent("public/categories/index.html", `<li><a href="http://example.com/categories/this-is-cool/">This Is Cool</a> 10</li>`) + b.AssertFileContent("public/tags/index.html", `<li><a href="http://example.com/tags/rocks-i-say/">Rocks I Say!</a> 10</li>`) } // Issue 6213 @@ -355,15 +358,14 @@ categories: ["regular"] b.Assert(b.CheckExists("public/categories/regular/index.html"), qt.Equals, true) b.Assert(b.CheckExists("public/categories/drafts/index.html"), qt.Equals, false) - reg, _ := s.getPageNew(nil, "categories/regular") - dra, _ := s.getPageNew(nil, "categories/draft") + reg, _ := s.getPage(nil, "categories/regular") + dra, _ := s.getPage(nil, "categories/draft") b.Assert(reg, qt.Not(qt.IsNil)) b.Assert(dra, qt.IsNil) } func TestTaxonomiesIndexDraft(t *testing.T) { t.Parallel() - b := newTestSitesBuilder(t) b.WithContent( "categories/_index.md", `--- @@ -393,7 +395,7 @@ 
Content. b.Build(BuildCfg{}) b.AssertFileContentFn("public/index.html", func(s string) bool { - return !strings.Contains(s, "categories") + return !strings.Contains(s, "/categories/|") }) } @@ -468,36 +470,37 @@ categories: ["funny"] } func TestTaxonomiesRemoveOne(t *testing.T) { - b := newTestSitesBuilder(t).Running() - b.WithTemplates("index.html", ` - {{ $cats := .Site.Taxonomies.categories.cats }} - {{ if $cats }} - Len cats: {{ len $cats }} - {{ range $cats }} - Cats:|{{ .Page.RelPermalink }}| - {{ end }} - {{ end }} - {{ $funny := .Site.Taxonomies.categories.funny }} - {{ if $funny }} - Len funny: {{ len $funny }} - {{ range $funny }} - Funny:|{{ .Page.RelPermalink }}| - {{ end }} - {{ end }} - `) - - b.WithContent("p1.md", `--- + files := ` +-- hugo.toml -- +disableLiveReload = true +-- layouts/index.html -- +{{ $cats := .Site.Taxonomies.categories.cats }} +{{ if $cats }} +Len cats: {{ len $cats }} +{{ range $cats }} + Cats:|{{ .Page.RelPermalink }}| +{{ end }} +{{ end }} +{{ $funny := .Site.Taxonomies.categories.funny }} +{{ if $funny }} +Len funny: {{ len $funny }} +{{ range $funny }} + Funny:|{{ .Page.RelPermalink }}| +{{ end }} +{{ end }} +-- content/p1.md -- +--- title: Page categories: ["funny", "cats"] --- - `, "p2.md", `--- +-- content/p2.md -- +--- title: Page2 categories: ["funny", "cats"] --- - `, - ) - b.Build(BuildCfg{}) +` + b := TestRunning(t, files) b.AssertFileContent("public/index.html", ` Len cats: 2 @@ -514,7 +517,7 @@ categories: ["funny"] --- `) - b.Build(BuildCfg{}) + b.Build() b.AssertFileContent("public/index.html", ` Len cats: 1 @@ -660,6 +663,7 @@ func TestTaxonomiesDirectoryOverlaps(t *testing.T) { b.WithConfigFile("toml", ` baseURL = "https://example.org" +titleCaseStyle = "none" [taxonomies] abcdef = "abcdefs" @@ -688,18 +692,17 @@ abcdefgs: {{ template "print-page" $abcdefgs }}|IsAncestor: {{ $abcdefgs.IsAnces Page: /abcdefgh/|abcdefgh|section|Parent: /|CurrentSection: /abcdefgh/| Page: /abcdefgh/p1/|abcdefgh-p|page|Parent: 
/abcdefgh/|CurrentSection: /abcdefgh/| Page: /abcdefghijk/|abcdefghijk|page|Parent: /|CurrentSection: /| - Page: /abcdefghis/|Abcdefghis|taxonomy|Parent: /|CurrentSection: /| - Page: /abcdefgs/|Abcdefgs|taxonomy|Parent: /|CurrentSection: /| - Page: /abcdefs/|Abcdefs|taxonomy|Parent: /|CurrentSection: /| - abc: /abcdefgs/abc/|abc|term|Parent: /abcdefgs/|CurrentSection: /abcdefgs/| - abcdefgs: /abcdefgs/|Abcdefgs|taxonomy|Parent: /|CurrentSection: /| - abc: /abcdefgs/abc/|abc|term|Parent: /abcdefgs/|CurrentSection: /abcdefgs/|FirstSection: /|IsAncestor: false|IsDescendant: true - abcdefgs: /abcdefgs/|Abcdefgs|taxonomy|Parent: /|CurrentSection: /|FirstSection: /|IsAncestor: true|IsDescendant: false + Page: /abcdefghis/|abcdefghis|taxonomy|Parent: /|CurrentSection: /abcdefghis/| + Page: /abcdefgs/|abcdefgs|taxonomy|Parent: /|CurrentSection: /abcdefgs/| + Page: /abcdefs/|abcdefs|taxonomy|Parent: /|CurrentSection: /abcdefs/| + abc: /abcdefgs/abc/|abc|term|Parent: /abcdefgs/|CurrentSection: /abcdefgs/abc/| + abcdefgs: /abcdefgs/|abcdefgs|taxonomy|Parent: /|CurrentSection: /abcdefgs/| + abc: /abcdefgs/abc/|abc|term|Parent: /abcdefgs/|CurrentSection: /abcdefgs/abc/|FirstSection: /abcdefgs/|IsAncestor: false|IsDescendant: true + abcdefgs: /abcdefgs/|abcdefgs|taxonomy|Parent: /|CurrentSection: /abcdefgs/|FirstSection: /abcdefgs/|IsAncestor: true|IsDescendant: false `) } func TestTaxonomiesWeightSort(t *testing.T) { - files := ` -- layouts/index.html -- {{ $a := site.GetPage "tags/a"}} @@ -736,3 +739,22 @@ tags_weight: 40 b.AssertFileContent("public/index.html", `:/p1/|/p3/|/p2/|:`) } + +func TestTaxonomiesEmptyTagsString(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +[taxonomies] +tag = 'tags' +-- content/p1.md -- ++++ +title = "P1" +tags = '' ++++ +-- layouts/_default/single.html -- +Single. 
+ +` + Test(t, files) +} diff --git a/hugolib/template_test.go b/hugolib/template_test.go index d18b4c6f0..1c60a88b3 100644 --- a/hugolib/template_test.go +++ b/hugolib/template_test.go @@ -20,12 +20,10 @@ import ( "github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/config/allconfig" - "github.com/gohugoio/hugo/identity" qt "github.com/frankban/quicktest" "github.com/gohugoio/hugo/deps" "github.com/gohugoio/hugo/hugofs" - "github.com/gohugoio/hugo/tpl" ) func TestTemplateLookupOrder(t *testing.T) { @@ -536,56 +534,6 @@ with: Zero OK `) } -func TestTemplateDependencies(t *testing.T) { - b := newTestSitesBuilder(t).Running() - - b.WithTemplates("index.html", ` -{{ $p := site.GetPage "p1" }} -{{ partial "p1.html" $p }} -{{ partialCached "p2.html" "foo" }} -{{ partials.Include "p3.html" "data" }} -{{ partials.IncludeCached "p4.html" "foo" }} -{{ $p := partial "p5" }} -{{ partial "sub/p6.html" }} -{{ partial "P7.html" }} -{{ template "_default/foo.html" }} -Partial nested: {{ partial "p10" }} - -`, - "partials/p1.html", `ps: {{ .Render "li" }}`, - "partials/p2.html", `p2`, - "partials/p3.html", `p3`, - "partials/p4.html", `p4`, - "partials/p5.html", `p5`, - "partials/sub/p6.html", `p6`, - "partials/P7.html", `p7`, - "partials/p8.html", `p8 {{ partial "p9.html" }}`, - "partials/p9.html", `p9`, - "partials/p10.html", `p10 {{ partial "p11.html" }}`, - "partials/p11.html", `p11`, - "_default/foo.html", `foo`, - "_default/li.html", `li {{ partial "p8.html" }}`, - ) - - b.WithContent("p1.md", `--- -title: P1 ---- - - -`) - - b.Build(BuildCfg{}) - - s := b.H.Sites[0] - - templ, found := s.lookupTemplate("index.html") - b.Assert(found, qt.Equals, true) - - idset := make(map[identity.Identity]bool) - collectIdentities(idset, templ.(tpl.Info)) - b.Assert(idset, qt.HasLen, 11) -} - func TestTemplateGoIssues(t *testing.T) { b := newTestSitesBuilder(t) @@ -625,16 +573,6 @@ Population in Norway is 5 MILLIONS `) } -func collectIdentities(set map[identity.Identity]bool, 
provider identity.Provider) { - if ids, ok := provider.(identity.IdentitiesProvider); ok { - for _, id := range ids.GetIdentities() { - collectIdentities(set, id) - } - } else { - set[provider.GetIdentity()] = true - } -} - func TestPartialInline(t *testing.T) { b := newTestSitesBuilder(t) diff --git a/hugolib/testhelpers_test.go b/hugolib/testhelpers_test.go index cf054897d..e2bd57f3c 100644 --- a/hugolib/testhelpers_test.go +++ b/hugolib/testhelpers_test.go @@ -11,14 +11,10 @@ import ( "os" "path/filepath" "regexp" - "runtime" - "sort" - "strconv" "strings" "testing" "text/template" "time" - "unicode/utf8" "github.com/gohugoio/hugo/config/allconfig" "github.com/gohugoio/hugo/config/security" @@ -117,6 +113,7 @@ type filenameContent struct { func newTestSitesBuilder(t testing.TB) *sitesBuilder { v := config.New() v.Set("publishDir", "public") + v.Set("disableLiveReload", true) fs := hugofs.NewFromOld(afero.NewMemMapFs(), v) litterOptions := litter.Options{ @@ -718,6 +715,9 @@ func (s *sitesBuilder) DumpTxtar() string { skipRe := regexp.MustCompile(`^(public|resources|package-lock.json|go.sum)`) afero.Walk(s.Fs.Source, s.workingDir, func(path string, info fs.FileInfo, err error) error { + if err != nil { + return err + } rel := strings.TrimPrefix(path, s.workingDir+"/") if skipRe.MatchString(rel) { if info.IsDir() { @@ -754,7 +754,7 @@ func (s *sitesBuilder) AssertFileContent(filename string, matches ...string) { continue } if !strings.Contains(content, match) { - s.Fatalf("No match for \n%q in content\n%q\nin file %s\n", match, content, filename) + s.Assert(content, qt.Contains, match, qt.Commentf(match+" not in: \n"+content)) } } } @@ -819,13 +819,13 @@ func (s *sitesBuilder) CheckExists(filename string) bool { } func (s *sitesBuilder) GetPage(ref string) page.Page { - p, err := s.H.Sites[0].getPageNew(nil, ref) + p, err := s.H.Sites[0].getPage(nil, ref) s.Assert(err, qt.IsNil) return p } func (s *sitesBuilder) GetPageRel(p page.Page, ref string) page.Page { - p, 
err := s.H.Sites[0].getPageNew(p, ref) + p, err := s.H.Sites[0].getPage(p, ref) s.Assert(err, qt.IsNil) return p } @@ -902,17 +902,6 @@ func loadTestConfigFromProvider(cfg config.Provider) (*allconfig.Configs, error) return res, err } -func newTestCfgBasic() (config.Provider, *hugofs.Fs) { - mm := afero.NewMemMapFs() - v := config.New() - v.Set("publishDir", "public") - v.Set("defaultContentLanguageInSubdir", true) - - fs := hugofs.NewFromOld(hugofs.NewBaseFileDecorator(mm), v) - - return v, fs -} - func newTestCfg(withConfig ...func(cfg config.Provider) error) (config.Provider, *hugofs.Fs) { mm := afero.NewMemMapFs() cfg := config.New() @@ -1011,106 +1000,3 @@ func content(c resource.ContentProvider) string { } return ccs } - -func pagesToString(pages ...page.Page) string { - var paths []string - for _, p := range pages { - paths = append(paths, p.Pathc()) - } - sort.Strings(paths) - return strings.Join(paths, "|") -} - -func dumpPagesLinks(pages ...page.Page) { - var links []string - for _, p := range pages { - links = append(links, p.RelPermalink()) - } - sort.Strings(links) - - for _, link := range links { - fmt.Println(link) - } -} - -func dumpPages(pages ...page.Page) { - fmt.Println("---------") - for _, p := range pages { - fmt.Printf("Kind: %s Title: %-10s RelPermalink: %-10s Path: %-10s sections: %s Lang: %s\n", - p.Kind(), p.Title(), p.RelPermalink(), p.Pathc(), p.SectionsPath(), p.Lang()) - } -} - -func dumpSPages(pages ...*pageState) { - for i, p := range pages { - fmt.Printf("%d: Kind: %s Title: %-10s RelPermalink: %-10s Path: %-10s sections: %s\n", - i+1, - p.Kind(), p.Title(), p.RelPermalink(), p.Pathc(), p.SectionsPath()) - } -} - -func printStringIndexes(s string) { - lines := strings.Split(s, "\n") - i := 0 - - for _, line := range lines { - - for _, r := range line { - fmt.Printf("%-3s", strconv.Itoa(i)) - i += utf8.RuneLen(r) - } - i++ - fmt.Println() - for _, r := range line { - fmt.Printf("%-3s", string(r)) - } - fmt.Println() - - } -} - -// 
See https://github.com/golang/go/issues/19280 -// Not in use. -var parallelEnabled = true - -func parallel(t *testing.T) { - if parallelEnabled { - t.Parallel() - } -} - -func skipSymlink(t *testing.T) { - if runtime.GOOS == "windows" && os.Getenv("CI") == "" { - t.Skip("skip symlink test on local Windows (needs admin)") - } -} - -func captureStderr(f func() error) (string, error) { - old := os.Stderr - r, w, _ := os.Pipe() - os.Stderr = w - - err := f() - - w.Close() - os.Stderr = old - - var buf bytes.Buffer - io.Copy(&buf, r) - return buf.String(), err -} - -func captureStdout(f func() error) (string, error) { - old := os.Stdout - r, w, _ := os.Pipe() - os.Stdout = w - - err := f() - - w.Close() - os.Stdout = old - - var buf bytes.Buffer - io.Copy(&buf, r) - return buf.String(), err -} diff --git a/hugolib/translations.go b/hugolib/translations.go deleted file mode 100644 index 5fcbc9218..000000000 --- a/hugolib/translations.go +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright 2019 The Hugo Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package hugolib - -import ( - "github.com/gohugoio/hugo/resources/page" -) - -func pagesToTranslationsMap(sites []*Site) map[string]page.Pages { - out := make(map[string]page.Pages) - - for _, s := range sites { - s.pageMap.pageTrees.WalkLinkable(func(ss string, n *contentNode) bool { - p := n.p - // TranslationKey is implemented for all page types. 
- base := p.TranslationKey() - - pageTranslations, found := out[base] - if !found { - pageTranslations = make(page.Pages, 0) - } - - pageTranslations = append(pageTranslations, p) - out[base] = pageTranslations - - return false - }) - } - - return out -} - -func assignTranslationsToPages(allTranslations map[string]page.Pages, sites []*Site) { - for _, s := range sites { - s.pageMap.pageTrees.Walk(func(ss string, n *contentNode) bool { - p := n.p - base := p.TranslationKey() - translations, found := allTranslations[base] - if !found { - return false - } - p.setTranslations(translations) - return false - }) - } -} diff --git a/identity/finder.go b/identity/finder.go new file mode 100644 index 000000000..bd23d698e --- /dev/null +++ b/identity/finder.go @@ -0,0 +1,336 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package identity + +import ( + "fmt" + "sync" + + "github.com/gohugoio/hugo/compare" +) + +// NewFinder creates a new Finder. +// This is a thread safe implementation with a cache. 
+func NewFinder(cfg FinderConfig) *Finder { + return &Finder{cfg: cfg, answers: make(map[ManagerIdentity]FinderResult), seenFindOnce: make(map[Identity]bool)} +} + +var searchIDPool = sync.Pool{ + New: func() interface{} { + return &searchID{seen: make(map[Manager]bool)} + }, +} + +func getSearchID() *searchID { + return searchIDPool.Get().(*searchID) +} + +func putSearchID(sid *searchID) { + sid.id = nil + sid.isDp = false + sid.isPeq = false + sid.hasEqer = false + sid.maxDepth = 0 + sid.dp = nil + sid.peq = nil + sid.eqer = nil + for k := range sid.seen { + delete(sid.seen, k) + } + searchIDPool.Put(sid) +} + +// GetSearchID returns a searchID from the pool. + +// Finder finds identities inside another. +type Finder struct { + cfg FinderConfig + + answers map[ManagerIdentity]FinderResult + muAnswers sync.RWMutex + + seenFindOnce map[Identity]bool + muSeenFindOnce sync.RWMutex +} + +type FinderResult int + +const ( + FinderNotFound FinderResult = iota + FinderFoundOneOfManyRepetition + FinderFoundOneOfMany + FinderFound +) + +// Contains returns whether in contains id. 
+func (f *Finder) Contains(id, in Identity, maxDepth int) FinderResult { + if id == Anonymous || in == Anonymous { + return FinderNotFound + } + + if id == GenghisKhan && in == GenghisKhan { + return FinderNotFound + } + + if id == GenghisKhan { + return FinderFound + } + + if id == in { + return FinderFound + } + + if id == nil || in == nil { + return FinderNotFound + } + + var ( + isDp bool + isPeq bool + + dp IsProbablyDependentProvider + peq compare.ProbablyEqer + ) + + if !f.cfg.Exact { + dp, isDp = id.(IsProbablyDependentProvider) + peq, isPeq = id.(compare.ProbablyEqer) + } + + eqer, hasEqer := id.(compare.Eqer) + + sid := getSearchID() + sid.id = id + sid.isDp = isDp + sid.isPeq = isPeq + sid.hasEqer = hasEqer + sid.dp = dp + sid.peq = peq + sid.eqer = eqer + sid.maxDepth = maxDepth + + defer putSearchID(sid) + + if r := f.checkOne(sid, in, 0); r > 0 { + return r + } + + m := GetDependencyManager(in) + if m != nil { + if r := f.checkManager(sid, m, 0); r > 0 { + return r + } + } + return FinderNotFound +} + +func (f *Finder) checkMaxDepth(sid *searchID, level int) FinderResult { + if sid.maxDepth >= 0 && level > sid.maxDepth { + return FinderNotFound + } + if level > 100 { + // This should never happen, but some false positives are probably better than a panic. + if !f.cfg.Exact { + return FinderFound + } + panic("too many levels") + } + return -1 +} + +func (f *Finder) checkManager(sid *searchID, m Manager, level int) FinderResult { + if r := f.checkMaxDepth(sid, level); r >= 0 { + return r + } + + if m == nil { + return FinderNotFound + } + if sid.seen[m] { + return FinderNotFound + } + sid.seen[m] = true + + f.muAnswers.RLock() + r, ok := f.answers[ManagerIdentity{Manager: m, Identity: sid.id}] + f.muAnswers.RUnlock() + if ok { + return r + } + + ids := m.getIdentities() + if len(ids) == 0 { + r = FinderNotFound + } else { + r = f.search(sid, ids, level) + } + + if r == FinderFoundOneOfMany { + // Don't cache this one. 
+ return r + } + + f.muAnswers.Lock() + f.answers[ManagerIdentity{Manager: m, Identity: sid.id}] = r + f.muAnswers.Unlock() + + return r +} + +func (f *Finder) checkOne(sid *searchID, v Identity, depth int) (r FinderResult) { + if ff, ok := v.(FindFirstManagerIdentityProvider); ok { + f.muSeenFindOnce.RLock() + mi := ff.FindFirstManagerIdentity() + seen := f.seenFindOnce[mi.Identity] + f.muSeenFindOnce.RUnlock() + if seen { + return FinderFoundOneOfManyRepetition + } + + r = f.doCheckOne(sid, mi.Identity, depth) + if r == 0 { + r = f.checkManager(sid, mi.Manager, depth) + } + + if r > FinderFoundOneOfManyRepetition { + f.muSeenFindOnce.Lock() + // Double check. + if f.seenFindOnce[mi.Identity] { + f.muSeenFindOnce.Unlock() + return FinderFoundOneOfManyRepetition + } + f.seenFindOnce[mi.Identity] = true + f.muSeenFindOnce.Unlock() + r = FinderFoundOneOfMany + } + return r + } else { + return f.doCheckOne(sid, v, depth) + } +} + +func (f *Finder) doCheckOne(sid *searchID, v Identity, depth int) FinderResult { + id2 := Unwrap(v) + if id2 == Anonymous { + return FinderNotFound + } + id := sid.id + if sid.hasEqer { + if sid.eqer.Eq(id2) { + return FinderFound + } + } else if id == id2 { + return FinderFound + } + + if f.cfg.Exact { + return FinderNotFound + } + + if id2 == nil { + return FinderNotFound + } + + if id2 == GenghisKhan { + return FinderFound + } + + if id.IdentifierBase() == id2.IdentifierBase() { + return FinderFound + } + + if sid.isDp && sid.dp.IsProbablyDependent(id2) { + return FinderFound + } + + if sid.isPeq && sid.peq.ProbablyEq(id2) { + return FinderFound + } + + if pdep, ok := id2.(IsProbablyDependencyProvider); ok && pdep.IsProbablyDependency(id) { + return FinderFound + } + + if peq, ok := id2.(compare.ProbablyEqer); ok && peq.ProbablyEq(id) { + return FinderFound + } + + return FinderNotFound +} + +// search searches for id in ids. 
+func (f *Finder) search(sid *searchID, ids Identities, depth int) FinderResult { + if len(ids) == 0 { + return FinderNotFound + } + + id := sid.id + + if id == Anonymous { + return FinderNotFound + } + + if !f.cfg.Exact && id == GenghisKhan { + return FinderNotFound + } + + for v := range ids { + r := f.checkOne(sid, v, depth) + if r > 0 { + return r + } + + m := GetDependencyManager(v) + if r := f.checkManager(sid, m, depth+1); r > 0 { + return r + } + } + + return FinderNotFound +} + +// FinderConfig provides configuration for the Finder. +// Note that we by default will use a strategy where probable matches are +// good enough. The primary use case for this is to identity the change set +// for a given changed identity (e.g. a template), and we don't want to +// have any false negatives there, but some false positives are OK. Also, speed is important. +type FinderConfig struct { + // Match exact matches only. + Exact bool +} + +// ManagerIdentity wraps a pair of Identity and Manager. +type ManagerIdentity struct { + Identity + Manager +} + +func (p ManagerIdentity) String() string { + return fmt.Sprintf("%s:%s", p.Identity.IdentifierBase(), p.Manager.IdentifierBase()) +} + +type searchID struct { + id Identity + isDp bool + isPeq bool + hasEqer bool + + maxDepth int + + seen map[Manager]bool + + dp IsProbablyDependentProvider + peq compare.ProbablyEqer + eqer compare.Eqer +} diff --git a/identity/finder_test.go b/identity/finder_test.go new file mode 100644 index 000000000..abfab9d75 --- /dev/null +++ b/identity/finder_test.go @@ -0,0 +1,58 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package provides ways to identify values in Hugo. Used for dependency tracking etc. +package identity_test + +import ( + "testing" + + "github.com/gohugoio/hugo/identity" +) + +func BenchmarkFinder(b *testing.B) { + m1 := identity.NewManager("") + m2 := identity.NewManager("") + m3 := identity.NewManager("") + m1.AddIdentity( + testIdentity{"base", "id1", "", "pe1"}, + testIdentity{"base2", "id2", "eq1", ""}, + m2, + m3, + ) + + b4 := testIdentity{"base4", "id4", "", ""} + b5 := testIdentity{"base5", "id5", "", ""} + + m2.AddIdentity(b4) + + f := identity.NewFinder(identity.FinderConfig{}) + + b.Run("Find one", func(b *testing.B) { + for i := 0; i < b.N; i++ { + r := f.Contains(b4, m1, -1) + if r == 0 { + b.Fatal("not found") + } + } + }) + + b.Run("Find none", func(b *testing.B) { + for i := 0; i < b.N; i++ { + r := f.Contains(b5, m1, -1) + if r > 0 { + b.Fatal("found") + } + } + }) +} diff --git a/identity/identity.go b/identity/identity.go index e73951caf..ccb2f6e79 100644 --- a/identity/identity.go +++ b/identity/identity.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,167 +11,481 @@ // See the License for the specific language governing permissions and // limitations under the License. +// Package provides ways to identify values in Hugo. Used for dependency tracking etc. 
package identity import ( + "fmt" + "path" "path/filepath" + "sort" "strings" "sync" "sync/atomic" + + "github.com/gohugoio/hugo/common/types" + "github.com/gohugoio/hugo/compare" ) -// NewManager creates a new Manager starting at id. -func NewManager(id Provider) Manager { - return &identityManager{ - Provider: id, - ids: Identities{id.GetIdentity(): id}, +const ( + // Anonymous is an Identity that can be used when identity doesn't matter. + Anonymous = StringIdentity("__anonymous") + + // GenghisKhan is an Identity everyone relates to. + GenghisKhan = StringIdentity("__genghiskhan") +) + +var NopManager = new(nopManager) + +// NewIdentityManager creates a new Manager. +func NewManager(name string, opts ...ManagerOption) Manager { + idm := &identityManager{ + Identity: Anonymous, + name: name, + ids: Identities{}, } + + for _, o := range opts { + o(idm) + } + + return idm } -// NewPathIdentity creates a new Identity with the two identifiers -// type and path. -func NewPathIdentity(typ, pat string) PathIdentity { - pat = strings.ToLower(strings.TrimPrefix(filepath.ToSlash(pat), "/")) - return PathIdentity{Type: typ, Path: pat} +// CleanString cleans s to be suitable as an identifier. +func CleanString(s string) string { + s = strings.ToLower(s) + s = strings.TrimPrefix(filepath.ToSlash(s), "/") + return path.Clean(s) } -// Identities stores identity providers. -type Identities map[Identity]Provider +// CleanStringIdentity cleans s to be suitable as an identifier and wraps it in a StringIdentity. +func CleanStringIdentity(s string) StringIdentity { + return StringIdentity(CleanString(s)) +} -func (ids Identities) search(depth int, id Identity) Provider { - if v, found := ids[id.GetIdentity()]; found { - return v +// GetDependencyManager returns the DependencyManager from v or nil if none found. 
+func GetDependencyManager(v any) Manager { + switch vv := v.(type) { + case Manager: + return vv + case types.Unwrapper: + return GetDependencyManager(vv.Unwrapv()) + case DependencyManagerProvider: + return vv.GetDependencyManager() } + return nil +} - depth++ +// GetDependencyManagerForScope returns the DependencyManager for the given scope from v or nil if none found. +// Note that it will fall back to an unscoped manager if none found for the given scope. +func GetDependencyManagerForScope(v any, scope int) Manager { + switch vv := v.(type) { + case DependencyManagerScopedProvider: + return vv.GetDependencyManagerForScope(scope) + case types.Unwrapper: + return GetDependencyManagerForScope(vv.Unwrapv(), scope) + case Manager: + return vv + case DependencyManagerProvider: + return vv.GetDependencyManager() - // There may be infinite recursion in templates. - if depth > 100 { - // Bail out. - return nil } + return nil +} - for _, v := range ids { - switch t := v.(type) { - case IdentitiesProvider: - if nested := t.GetIdentities().search(depth, id); nested != nil { - return nested - } +// FirstIdentity returns the first Identity in v, Anonymous if none found +func FirstIdentity(v any) Identity { + var result Identity = Anonymous + WalkIdentitiesShallow(v, func(level int, id Identity) bool { + result = id + return true + }) + + return result +} + +// PrintIdentityInfo is used for debugging/tests only. +func PrintIdentityInfo(v any) { + WalkIdentitiesDeep(v, func(level int, id Identity) bool { + var s string + if idm, ok := id.(*identityManager); ok { + s = " " + idm.name } + fmt.Printf("%s%s (%T)%s\n", strings.Repeat(" ", level), id.IdentifierBase(), id, s) + return false + }) +} + +func Unwrap(id Identity) Identity { + switch t := id.(type) { + case IdentityProvider: + return t.GetIdentity() + default: + return id } - return nil } -// IdentitiesProvider provides all Identities. 
-type IdentitiesProvider interface { - GetIdentities() Identities +// WalkIdentitiesDeep walks identities in v and applies cb to every identity found. +// Return true from cb to terminate. +// If deep is true, it will also walk nested Identities in any Manager found. +func WalkIdentitiesDeep(v any, cb func(level int, id Identity) bool) { + seen := make(map[Identity]bool) + walkIdentities(v, 0, true, seen, cb) } -// Identity represents an thing that can provide an identify. This can be -// any Go type, but the Identity returned by GetIdentify must be hashable. -type Identity interface { - Provider - Name() string +// WalkIdentitiesShallow will not walk into a Manager's Identities. +// See WalkIdentitiesDeep. +// cb is called for every Identity found and returns whether to terminate the walk. +func WalkIdentitiesShallow(v any, cb func(level int, id Identity) bool) { + walkIdentitiesShallow(v, 0, cb) } -// Manager manages identities, and is itself a Provider of Identity. -type Manager interface { - SearchProvider - Add(ids ...Provider) - Reset() +// WithOnAddIdentity sets a callback that will be invoked when an identity is added to the manager. +func WithOnAddIdentity(f func(id Identity)) ManagerOption { + return func(m *identityManager) { + m.onAddIdentity = f + } +} + +// DependencyManagerProvider provides a manager for dependencies. +type DependencyManagerProvider interface { + GetDependencyManager() Manager +} + +// DependencyManagerProviderFunc is a function that implements the DependencyManagerProvider interface. +type DependencyManagerProviderFunc func() Manager + +func (d DependencyManagerProviderFunc) GetDependencyManager() Manager { + return d() +} + +// DependencyManagerScopedProvider provides a manager for dependencies with a given scope. +type DependencyManagerScopedProvider interface { + GetDependencyManagerForScope(scope int) Manager +} + +// ForEeachIdentityProvider provides a way iterate over identities. 
+type ForEeachIdentityProvider interface { + // ForEeachIdentityProvider calls cb for each Identity. + // If cb returns true, the iteration is terminated. + ForEeachIdentity(cb func(id Identity) bool) +} + +// ForEeachIdentityByNameProvider provides a way to look up identities by name. +type ForEeachIdentityByNameProvider interface { + // ForEeachIdentityByName calls cb for each Identity that relates to name. + // If cb returns true, the iteration is terminated. + ForEeachIdentityByName(name string, cb func(id Identity) bool) +} + +type FindFirstManagerIdentityProvider interface { + Identity + FindFirstManagerIdentity() ManagerIdentity } -// SearchProvider provides access to the chained set of identities. -type SearchProvider interface { - Provider - IdentitiesProvider - Search(id Identity) Provider +func NewFindFirstManagerIdentityProvider(m Manager, id Identity) FindFirstManagerIdentityProvider { + return findFirstManagerIdentity{ + Identity: Anonymous, + ManagerIdentity: ManagerIdentity{ + Manager: m, Identity: id, + }, + } +} + +type findFirstManagerIdentity struct { + Identity + ManagerIdentity +} + +func (f findFirstManagerIdentity) FindFirstManagerIdentity() ManagerIdentity { + return f.ManagerIdentity +} + +// Identities stores identity providers. +type Identities map[Identity]bool + +func (ids Identities) AsSlice() []Identity { + s := make([]Identity, len(ids)) + i := 0 + for v := range ids { + s[i] = v + i++ + } + sort.Slice(s, func(i, j int) bool { + return s[i].IdentifierBase() < s[j].IdentifierBase() + }) + + return s +} + +func (ids Identities) String() string { + var sb strings.Builder + i := 0 + for id := range ids { + sb.WriteString(fmt.Sprintf("[%s]", id.IdentifierBase())) + if i < len(ids)-1 { + sb.WriteString(", ") + } + i++ + } + return sb.String() +} + +// Identity represents a thing in Hugo (a Page, a template etc.) +// Any implementation must be comparable/hashable. 
+type Identity interface { + IdentifierBase() string +} + +// IdentityGroupProvider can be implemented by tightly connected types. +// Current use case is Resource transformation via Hugo Pipes. +type IdentityGroupProvider interface { + GetIdentityGroup() Identity } -// A PathIdentity is a common identity identified by a type and a path, e.g. "layouts" and "_default/single.html". -type PathIdentity struct { - Type string - Path string +// IdentityProvider can be implemented by types that isn't itself and Identity, +// usually because they're not comparable/hashable. +type IdentityProvider interface { + GetIdentity() Identity } -// GetIdentity returns itself. -func (id PathIdentity) GetIdentity() Identity { - return id +// IncrementByOne implements Incrementer adding 1 every time Incr is called. +type IncrementByOne struct { + counter uint64 } -// Name returns the Path. -func (id PathIdentity) Name() string { - return id.Path +func (c *IncrementByOne) Incr() int { + return int(atomic.AddUint64(&c.counter, uint64(1))) } -// A KeyValueIdentity a general purpose identity. -type KeyValueIdentity struct { - Key string - Value string +// Incrementer increments and returns the value. +// Typically used for IDs. +type Incrementer interface { + Incr() int } -// GetIdentity returns itself. -func (id KeyValueIdentity) GetIdentity() Identity { - return id +// IsProbablyDependentProvider is an optional interface for Identity. +type IsProbablyDependentProvider interface { + IsProbablyDependent(other Identity) bool } -// Name returns the Key. -func (id KeyValueIdentity) Name() string { - return id.Key +// IsProbablyDependencyProvider is an optional interface for Identity. +type IsProbablyDependencyProvider interface { + IsProbablyDependency(other Identity) bool } -// Provider provides the comparable Identity. -type Provider interface { - // GetIdentity is for internal use. +// Manager is an Identity that also manages identities, typically dependencies. 
+type Manager interface { + Identity + AddIdentity(ids ...Identity) GetIdentity() Identity + Reset() + getIdentities() Identities +} + +type ManagerOption func(m *identityManager) + +// StringIdentity is an Identity that wraps a string. +type StringIdentity string + +func (s StringIdentity) IdentifierBase() string { + return string(s) } type identityManager struct { - sync.Mutex - Provider + Identity + + // Only used for debugging. + name string + + // mu protects _changes_ to this manager, + // reads currently assumes no concurrent writes. + mu sync.RWMutex ids Identities + + // Hooks used in debugging. + onAddIdentity func(id Identity) } -func (im *identityManager) Add(ids ...Provider) { - im.Lock() +func (im *identityManager) AddIdentity(ids ...Identity) { + im.mu.Lock() + for _, id := range ids { - im.ids[id.GetIdentity()] = id + if id == Anonymous { + continue + } + if _, found := im.ids[id]; !found { + if im.onAddIdentity != nil { + im.onAddIdentity(id) + } + im.ids[id] = true + } } - im.Unlock() + im.mu.Unlock() +} + +func (im *identityManager) ContainsIdentity(id Identity) FinderResult { + if im.Identity != Anonymous && id == im.Identity { + return FinderFound + } + + f := NewFinder(FinderConfig{Exact: true}) + r := f.Contains(id, im, -1) + + return r +} + +// Managers are always anonymous. +func (im *identityManager) GetIdentity() Identity { + return im.Identity } func (im *identityManager) Reset() { - im.Lock() - id := im.GetIdentity() - im.ids = Identities{id.GetIdentity(): id} - im.Unlock() + im.mu.Lock() + im.ids = Identities{} + im.mu.Unlock() +} + +func (im *identityManager) GetDependencyManagerForScope(int) Manager { + return im +} + +func (im *identityManager) String() string { + return fmt.Sprintf("IdentityManager(%s)", im.name) } // TODO(bep) these identities are currently only read on server reloads // so there should be no concurrency issues, but that may change. 
-func (im *identityManager) GetIdentities() Identities { - im.Lock() - defer im.Unlock() +func (im *identityManager) getIdentities() Identities { return im.ids } -func (im *identityManager) Search(id Identity) Provider { - im.Lock() - defer im.Unlock() - return im.ids.search(0, id.GetIdentity()) +type nopManager int + +func (m *nopManager) AddIdentity(ids ...Identity) { } -// Incrementer increments and returns the value. -// Typically used for IDs. -type Incrementer interface { - Incr() int +func (m *nopManager) IdentifierBase() string { + return "" } -// IncrementByOne implements Incrementer adding 1 every time Incr is called. -type IncrementByOne struct { - counter uint64 +func (m *nopManager) GetIdentity() Identity { + return Anonymous } -func (c *IncrementByOne) Incr() int { - return int(atomic.AddUint64(&c.counter, uint64(1))) +func (m *nopManager) Reset() { +} + +func (m *nopManager) getIdentities() Identities { + return nil +} + +// returns whether further walking should be terminated. +func walkIdentities(v any, level int, deep bool, seen map[Identity]bool, cb func(level int, id Identity) bool) { + if level > 20 { + panic("too deep") + } + var cbRecursive func(level int, id Identity) bool + cbRecursive = func(level int, id Identity) bool { + if id == nil { + return false + } + if deep && seen[id] { + return false + } + seen[id] = true + if cb(level, id) { + return true + } + + if deep { + if m := GetDependencyManager(id); m != nil { + for id2 := range m.getIdentities() { + if walkIdentitiesShallow(id2, level+1, cbRecursive) { + return true + } + } + } + } + return false + } + walkIdentitiesShallow(v, level, cbRecursive) +} + +// returns whether further walking should be terminated. +// Anonymous identities are skipped. 
+func walkIdentitiesShallow(v any, level int, cb func(level int, id Identity) bool) bool { + cb2 := func(level int, id Identity) bool { + if id == Anonymous { + return false + } + return cb(level, id) + } + + if id, ok := v.(Identity); ok { + if cb2(level, id) { + return true + } + } + + if ipd, ok := v.(IdentityProvider); ok { + if cb2(level, ipd.GetIdentity()) { + return true + } + } + + if ipdgp, ok := v.(IdentityGroupProvider); ok { + if cb2(level, ipdgp.GetIdentityGroup()) { + return true + } + } + + return false +} + +var ( + _ Identity = (*orIdentity)(nil) + _ compare.ProbablyEqer = (*orIdentity)(nil) +) + +func Or(a, b Identity) Identity { + return orIdentity{a: a, b: b} +} + +type orIdentity struct { + a, b Identity +} + +func (o orIdentity) IdentifierBase() string { + return o.a.IdentifierBase() +} + +func (o orIdentity) ProbablyEq(other any) bool { + otherID, ok := other.(Identity) + if !ok { + return false + } + + return probablyEq(o.a, otherID) || probablyEq(o.b, otherID) +} + +func probablyEq(a, b Identity) bool { + if a == b { + return true + } + + if a == Anonymous || b == Anonymous { + return false + } + + if a.IdentifierBase() == b.IdentifierBase() { + return true + } + + if a2, ok := a.(IsProbablyDependentProvider); ok { + return a2.IsProbablyDependent(b) + } + + return false } diff --git a/identity/identity_test.go b/identity/identity_test.go index baf2628bb..d003caaf0 100644 --- a/identity/identity_test.go +++ b/identity/identity_test.go @@ -1,4 +1,4 @@ -// Copyright 2019 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,79 +11,201 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package identity +package identity_test import ( "fmt" - "math/rand" - "strconv" "testing" qt "github.com/frankban/quicktest" + "github.com/gohugoio/hugo/identity" + "github.com/gohugoio/hugo/identity/identitytesting" ) -func TestIdentityManager(t *testing.T) { - c := qt.New(t) - - id1 := testIdentity{name: "id1"} - im := NewManager(id1) - - c.Assert(im.Search(id1).GetIdentity(), qt.Equals, id1) - c.Assert(im.Search(testIdentity{name: "notfound"}), qt.Equals, nil) -} - func BenchmarkIdentityManager(b *testing.B) { - createIds := func(num int) []Identity { - ids := make([]Identity, num) + createIds := func(num int) []identity.Identity { + ids := make([]identity.Identity, num) for i := 0; i < num; i++ { - ids[i] = testIdentity{name: fmt.Sprintf("id%d", i)} + name := fmt.Sprintf("id%d", i) + ids[i] = &testIdentity{base: name, name: name} } return ids } - b.Run("Add", func(b *testing.B) { - c := qt.New(b) - b.StopTimer() + b.Run("identity.NewManager", func(b *testing.B) { + for i := 0; i < b.N; i++ { + m := identity.NewManager("") + if m == nil { + b.Fatal("manager is nil") + } + } + }) + + b.Run("Add unique", func(b *testing.B) { ids := createIds(b.N) - im := NewManager(testIdentity{"first"}) - b.StartTimer() + im := identity.NewManager("") + b.ResetTimer() for i := 0; i < b.N; i++ { - im.Add(ids[i]) + im.AddIdentity(ids[i]) } b.StopTimer() - c.Assert(im.GetIdentities(), qt.HasLen, b.N+1) }) - b.Run("Search", func(b *testing.B) { - c := qt.New(b) + b.Run("Add duplicates", func(b *testing.B) { + id := &testIdentity{base: "a", name: "b"} + im := identity.NewManager("") + + b.ResetTimer() + for i := 0; i < b.N; i++ { + im.AddIdentity(id) + } + b.StopTimer() - ids := createIds(b.N) - im := NewManager(testIdentity{"first"}) + }) + + b.Run("Nop StringIdentity const", func(b *testing.B) { + const id = identity.StringIdentity("test") + for i := 0; i < b.N; i++ { + identity.NopManager.AddIdentity(id) + } + }) + + b.Run("Nop StringIdentity const other package", func(b 
*testing.B) { + for i := 0; i < b.N; i++ { + identity.NopManager.AddIdentity(identitytesting.TestIdentity) + } + }) + + b.Run("Nop StringIdentity var", func(b *testing.B) { + id := identity.StringIdentity("test") + for i := 0; i < b.N; i++ { + identity.NopManager.AddIdentity(id) + } + }) + b.Run("Nop pointer identity", func(b *testing.B) { + id := &testIdentity{base: "a", name: "b"} for i := 0; i < b.N; i++ { - im.Add(ids[i]) + identity.NopManager.AddIdentity(id) } + }) - b.StartTimer() + b.Run("Nop Anonymous", func(b *testing.B) { + for i := 0; i < b.N; i++ { + identity.NopManager.AddIdentity(identity.Anonymous) + } + }) +} +func BenchmarkIsNotDependent(b *testing.B) { + runBench := func(b *testing.B, id1, id2 identity.Identity) { for i := 0; i < b.N; i++ { - name := "id" + strconv.Itoa(rand.Intn(b.N)) - id := im.Search(testIdentity{name: name}) - c.Assert(id.GetIdentity().Name(), qt.Equals, name) + isNotDependent(id1, id2) + } + } + + newNestedManager := func(depth, count int) identity.Manager { + m1 := identity.NewManager("") + for i := 0; i < depth; i++ { + m2 := identity.NewManager("") + m1.AddIdentity(m2) + for j := 0; j < count; j++ { + id := fmt.Sprintf("id%d", j) + m2.AddIdentity(&testIdentity{id, id, "", ""}) + } + m1 = m2 } + return m1 + } + + type depthCount struct { + depth int + count int + } + + for _, dc := range []depthCount{{10, 5}} { + b.Run(fmt.Sprintf("Nested not found %d %d", dc.depth, dc.count), func(b *testing.B) { + im := newNestedManager(dc.depth, dc.count) + id1 := identity.StringIdentity("idnotfound") + b.ResetTimer() + runBench(b, im, id1) + }) + } +} + +func TestIdentityManager(t *testing.T) { + c := qt.New(t) + + newNestedManager := func() identity.Manager { + m1 := identity.NewManager("") + m2 := identity.NewManager("") + m3 := identity.NewManager("") + m1.AddIdentity( + testIdentity{"base", "id1", "", "pe1"}, + testIdentity{"base2", "id2", "eq1", ""}, + m2, + m3, + ) + + m2.AddIdentity(testIdentity{"base4", "id4", "", ""}) + + 
return m1 + } + + c.Run("Anonymous", func(c *qt.C) { + im := newNestedManager() + c.Assert(im.GetIdentity(), qt.Equals, identity.Anonymous) + im.AddIdentity(identity.Anonymous) + c.Assert(isNotDependent(identity.Anonymous, identity.Anonymous), qt.IsTrue) + }) + + c.Run("GenghisKhan", func(c *qt.C) { + c.Assert(isNotDependent(identity.GenghisKhan, identity.GenghisKhan), qt.IsTrue) }) } type testIdentity struct { + base string name string + + idEq string + idProbablyEq string +} + +func (id testIdentity) Eq(other any) bool { + ot, ok := other.(testIdentity) + if !ok { + return false + } + if ot.idEq == "" || id.idEq == "" { + return false + } + return ot.idEq == id.idEq } -func (id testIdentity) GetIdentity() Identity { - return id +func (id testIdentity) IdentifierBase() string { + return id.base } func (id testIdentity) Name() string { return id.name } + +func (id testIdentity) ProbablyEq(other any) bool { + ot, ok := other.(testIdentity) + if !ok { + return false + } + if ot.idProbablyEq == "" || id.idProbablyEq == "" { + return false + } + return ot.idProbablyEq == id.idProbablyEq +} + +func isNotDependent(a, b identity.Identity) bool { + f := identity.NewFinder(identity.FinderConfig{}) + r := f.Contains(b, a, -1) + return r == 0 +} diff --git a/identity/identityhash.go b/identity/identityhash.go index ef7b5afa7..8760ff64d 100644 --- a/identity/identityhash.go +++ b/identity/identityhash.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -59,10 +59,10 @@ type keyer interface { // so rewrite the input slice for known identity types. 
func toHashable(v any) any { switch t := v.(type) { - case Provider: - return t.GetIdentity() case keyer: return t.Key() + case IdentityProvider: + return t.GetIdentity() default: return v } diff --git a/identity/identityhash_test.go b/identity/identityhash_test.go index 378c0160d..1ecaf7612 100644 --- a/identity/identityhash_test.go +++ b/identity/identityhash_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -29,7 +29,6 @@ func TestHashString(t *testing.T) { c.Assert(HashString(vals...), qt.Equals, "12599484872364427450") c.Assert(vals[2], qt.Equals, tstKeyer{"c"}) - } type tstKeyer struct { diff --git a/identity/identitytesting/identitytesting.go b/identity/identitytesting/identitytesting.go new file mode 100644 index 000000000..74f3ec540 --- /dev/null +++ b/identity/identitytesting/identitytesting.go @@ -0,0 +1,5 @@ +package identitytesting + +import "github.com/gohugoio/hugo/identity" + +const TestIdentity = identity.StringIdentity("__testIdentity") diff --git a/identity/predicate_identity.go b/identity/predicate_identity.go new file mode 100644 index 000000000..bad247867 --- /dev/null +++ b/identity/predicate_identity.go @@ -0,0 +1,78 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package identity provides ways to identify values in Hugo. Used for dependency tracking etc.
+package identity
+
+import (
+	"fmt"
+	"sync/atomic"
+
+	hglob "github.com/gohugoio/hugo/hugofs/glob"
+)
+
+// NewGlobIdentity creates a new Identity that
+// is probably dependent on any other Identity
+// that matches the given pattern.
+func NewGlobIdentity(pattern string) Identity {
+	glob, err := hglob.GetGlob(pattern)
+	if err != nil {
+		panic(err)
+	}
+
+	predicate := func(other Identity) bool {
+		return glob.Match(other.IdentifierBase())
+	}
+
+	return NewPredicateIdentity(predicate, nil)
+}
+
+var predicateIdentityCounter = &atomic.Uint32{}
+
+type predicateIdentity struct {
+	id                 string
+	probablyDependent  func(Identity) bool
+	probablyDependency func(Identity) bool
+}
+
+var (
+	_ IsProbablyDependencyProvider = &predicateIdentity{}
+	_ IsProbablyDependentProvider  = &predicateIdentity{}
+)
+
+// NewPredicateIdentity creates a new Identity that implements both IsProbablyDependencyProvider and IsProbablyDependentProvider
+// using the provided functions, both of which are optional.
+func NewPredicateIdentity( + probablyDependent func(Identity) bool, + probablyDependency func(Identity) bool, +) *predicateIdentity { + if probablyDependent == nil { + probablyDependent = func(Identity) bool { return false } + } + if probablyDependency == nil { + probablyDependency = func(Identity) bool { return false } + } + return &predicateIdentity{probablyDependent: probablyDependent, probablyDependency: probablyDependency, id: fmt.Sprintf("predicate%d", predicateIdentityCounter.Add(1))} +} + +func (id *predicateIdentity) IdentifierBase() string { + return id.id +} + +func (id *predicateIdentity) IsProbablyDependent(other Identity) bool { + return id.probablyDependent(other) +} + +func (id *predicateIdentity) IsProbablyDependency(other Identity) bool { + return id.probablyDependency(other) +} diff --git a/identity/predicate_identity_test.go b/identity/predicate_identity_test.go new file mode 100644 index 000000000..3a54dee75 --- /dev/null +++ b/identity/predicate_identity_test.go @@ -0,0 +1,58 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package provides ways to identify values in Hugo. Used for dependency tracking etc. 
+package identity + +import ( + "testing" + + qt "github.com/frankban/quicktest" +) + +func TestGlobIdentity(t *testing.T) { + c := qt.New(t) + + gid := NewGlobIdentity("/a/b/*") + + c.Assert(isNotDependent(gid, StringIdentity("/a/b/c")), qt.IsFalse) + c.Assert(isNotDependent(gid, StringIdentity("/a/c/d")), qt.IsTrue) + c.Assert(isNotDependent(StringIdentity("/a/b/c"), gid), qt.IsTrue) + c.Assert(isNotDependent(StringIdentity("/a/c/d"), gid), qt.IsTrue) +} + +func isNotDependent(a, b Identity) bool { + f := NewFinder(FinderConfig{}) + r := f.Contains(a, b, -1) + return r == 0 +} + +func TestPredicateIdentity(t *testing.T) { + c := qt.New(t) + + isDependent := func(id Identity) bool { + return id.IdentifierBase() == "foo" + } + isDependency := func(id Identity) bool { + return id.IdentifierBase() == "baz" + } + + id := NewPredicateIdentity(isDependent, isDependency) + + c.Assert(id.IsProbablyDependent(StringIdentity("foo")), qt.IsTrue) + c.Assert(id.IsProbablyDependent(StringIdentity("bar")), qt.IsFalse) + c.Assert(id.IsProbablyDependent(id), qt.IsFalse) + c.Assert(id.IsProbablyDependent(NewPredicateIdentity(isDependent, nil)), qt.IsFalse) + c.Assert(id.IsProbablyDependency(StringIdentity("baz")), qt.IsTrue) + c.Assert(id.IsProbablyDependency(StringIdentity("foo")), qt.IsFalse) +} diff --git a/identity/question.go b/identity/question.go new file mode 100644 index 000000000..78fcb8234 --- /dev/null +++ b/identity/question.go @@ -0,0 +1,57 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package identity
+
+import "sync"
+
+// NewQuestion creates a new question with the given identity.
+func NewQuestion[T any](id Identity) *Question[T] {
+	return &Question[T]{
+		Identity: id,
+	}
+}
+
+// Answer takes a func that knows the answer.
+// Note that this is a one-time operation,
+// fn will not be invoked again if the question is already answered.
+// Use Result to check if the question is answered.
+func (q *Question[T]) Answer(fn func() T) {
+	q.mu.Lock()
+	defer q.mu.Unlock()
+
+	if q.answered {
+		return
+	}
+
+	q.fasit = fn()
+	q.answered = true
+}
+
+// Result returns the fasit of the question (if answered),
+// and a bool indicating if the question has been answered.
+func (q *Question[T]) Result() (any, bool) {
+	q.mu.RLock()
+	defer q.mu.RUnlock()
+
+	return q.fasit, q.answered
+}
+
+// A Question is defined by its Identity and can be answered once.
+type Question[T any] struct {
+	Identity
+	fasit T
+
+	mu       sync.RWMutex
+	answered bool
+}
diff --git a/identity/question_test.go b/identity/question_test.go
new file mode 100644
index 000000000..bf1e1d06d
--- /dev/null
+++ b/identity/question_test.go
@@ -0,0 +1,38 @@
+// Copyright 2024 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+ +package identity + +import ( + "testing" + + qt "github.com/frankban/quicktest" +) + +func TestQuestion(t *testing.T) { + c := qt.New(t) + + q := NewQuestion[int](StringIdentity("2+2?")) + + v, ok := q.Result() + c.Assert(ok, qt.Equals, false) + c.Assert(v, qt.Equals, 0) + + q.Answer(func() int { + return 4 + }) + + v, ok = q.Result() + c.Assert(ok, qt.Equals, true) + c.Assert(v, qt.Equals, 4) +} diff --git a/langs/i18n/i18n_test.go b/langs/i18n/i18n_test.go index 8629c35fc..8d34e069d 100644 --- a/langs/i18n/i18n_test.go +++ b/langs/i18n/i18n_test.go @@ -20,7 +20,6 @@ import ( "testing" "github.com/bep/logg" - "github.com/gohugoio/hugo/common/loggers" "github.com/gohugoio/hugo/common/types" "github.com/gohugoio/hugo/config/testconfig" @@ -35,8 +34,6 @@ import ( "github.com/gohugoio/hugo/config" ) -var logger = loggers.NewDefault() - type i18nTest struct { name string data map[string][]byte @@ -390,14 +387,13 @@ other = "{{ . }} miesiąca" }, }, } { - c.Run(test.name, func(c *qt.C) { cfg := config.New() cfg.Set("enableMissingTranslationPlaceholders", true) cfg.Set("publishDir", "public") afs := afero.NewMemMapFs() - err := afero.WriteFile(afs, filepath.Join("i18n", test.lang+".toml"), []byte(test.templ), 0755) + err := afero.WriteFile(afs, filepath.Join("i18n", test.lang+".toml"), []byte(test.templ), 0o755) c.Assert(err, qt.IsNil) d, tp := prepareDeps(afs, cfg) @@ -409,9 +405,7 @@ other = "{{ . 
}} miesiąca" c.Assert(f(ctx, test.id, variant.Key), qt.Equals, variant.Value, qt.Commentf("input: %v", variant.Key)) c.Assert(d.Log.LoggCount(logg.LevelWarn), qt.Equals, 0) } - }) - } } @@ -429,8 +423,7 @@ type noCountField struct { Counts int } -type countMethod struct { -} +type countMethod struct{} func (c countMethod) Count() any { return 32.5 @@ -468,7 +461,7 @@ func prepareTranslationProvider(t testing.TB, test i18nTest, cfg config.Provider afs := afero.NewMemMapFs() for file, content := range test.data { - err := afero.WriteFile(afs, filepath.Join("i18n", file), []byte(content), 0755) + err := afero.WriteFile(afs, filepath.Join("i18n", file), []byte(content), 0o755) c.Assert(err, qt.IsNil) } diff --git a/langs/i18n/integration_test.go b/langs/i18n/integration_test.go index c010ac111..7deae6451 100644 --- a/langs/i18n/integration_test.go +++ b/langs/i18n/integration_test.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/langs/i18n/translationProvider.go b/langs/i18n/translationProvider.go index 2c3c15710..ab5247413 100644 --- a/langs/i18n/translationProvider.go +++ b/langs/i18n/translationProvider.go @@ -46,9 +46,7 @@ func NewTranslationProvider() *TranslationProvider { // Update updates the i18n func in the provided Deps. 
func (tp *TranslationProvider) NewResource(dst *deps.Deps) error { - spec := source.NewSourceSpec(dst.PathSpec, nil, nil) - - var defaultLangTag, err = language.Parse(dst.Conf.DefaultContentLanguage()) + defaultLangTag, err := language.Parse(dst.Conf.DefaultContentLanguage()) if err != nil { defaultLangTag = language.English } @@ -59,21 +57,19 @@ func (tp *TranslationProvider) NewResource(dst *deps.Deps) error { bundle.RegisterUnmarshalFunc("yml", yaml.Unmarshal) bundle.RegisterUnmarshalFunc("json", json.Unmarshal) - // The source dirs are ordered so the most important comes first. Since this is a - // last key win situation, we have to reverse the iteration order. - dirs := dst.BaseFs.I18n.Dirs - for i := len(dirs) - 1; i >= 0; i-- { - dir := dirs[i] - src := spec.NewFilesystemFromFileMetaInfo(dir) - files, err := src.Files() - if err != nil { - return err - } - for _, file := range files { - if err := addTranslationFile(bundle, file); err != nil { - return err - } - } + w := hugofs.NewWalkway( + hugofs.WalkwayConfig{ + Fs: dst.BaseFs.I18n.Fs, + WalkFn: func(path string, info hugofs.FileMetaInfo) error { + if info.IsDir() { + return nil + } + return addTranslationFile(bundle, source.NewFileInfo(info)) + }, + }) + + if err := w.Walk(); err != nil { + return err } tp.t = NewTranslator(bundle, dst.Conf, dst.Log) @@ -81,12 +77,11 @@ func (tp *TranslationProvider) NewResource(dst *deps.Deps) error { dst.Translate = tp.t.Func(dst.Conf.Language().Lang) return nil - } const artificialLangTagPrefix = "art-x-" -func addTranslationFile(bundle *i18n.Bundle, r source.File) error { +func addTranslationFile(bundle *i18n.Bundle, r *source.File) error { f, err := r.FileInfo().Meta().Open() if err != nil { return fmt.Errorf("failed to open translations file %q:: %w", r.LogicalName(), err) @@ -129,13 +124,8 @@ func (tp *TranslationProvider) CloneResource(dst, src *deps.Deps) error { return nil } -func errWithFileContext(inerr error, r source.File) error { - fim, ok := 
r.FileInfo().(hugofs.FileMetaInfo) - if !ok { - return inerr - } - - meta := fim.Meta() +func errWithFileContext(inerr error, r *source.File) error { + meta := r.FileInfo().Meta() realFilename := meta.Filename f, err := meta.Open() if err != nil { @@ -144,5 +134,4 @@ func errWithFileContext(inerr error, r source.File) error { defer f.Close() return herrors.NewFileErrorFromName(inerr, realFilename).UpdateContent(f, nil) - } diff --git a/langs/language.go b/langs/language.go index 2cd608675..d34ea1cc7 100644 --- a/langs/language.go +++ b/langs/language.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -95,22 +95,13 @@ func NewLanguage(lang, defaultContentLanguage, timeZone string, languageConfig L // This is injected from hugolib to avoid circular dependencies. var DeprecationFunc = func(item, alternative string, err bool) {} -const paramsDeprecationWarning = `.Language.Params is deprecated and will be removed in a future release. Use site.Params instead. - -- For all but custom parameters, you need to use the built in Hugo variables, e.g. site.Title, site.LanguageCode; site.Language.Params.Title will not work. -- All custom parameters needs to be placed below params, e.g. [languages.en.params] in TOML. - -See https://gohugo.io/content-management/multilingual/#changes-in-hugo-01120 - -` - // Params returns the language params. // Note that this is the same as the Site.Params, but we keep it here for legacy reasons. // Deprecated: Use the site.Params instead. func (l *Language) Params() maps.Params { // TODO(bep) Remove this for now as it created a little too much noise. Need to think about this. 
// See https://github.com/gohugoio/hugo/issues/11025 - //DeprecationFunc(".Language.Params", paramsDeprecationWarning, false) + // DeprecationFunc(".Language.Params", paramsDeprecationWarning, false) return l.params } @@ -147,7 +138,8 @@ func (l Languages) AsSet() map[string]bool { return m } -func (l Languages) AsOrdinalSet() map[string]int { +// AsIndexSet returns a map with the language code as key and index in l as value. +func (l Languages) AsIndexSet() map[string]int { m := make(map[string]int) for i, lang := range l { m[lang.Lang] = i diff --git a/lazy/init.go b/lazy/init.go index 9a25e1e05..7b88a5351 100644 --- a/lazy/init.go +++ b/lazy/init.go @@ -15,11 +15,10 @@ package lazy import ( "context" + "errors" "sync" "sync/atomic" "time" - - "errors" ) // New creates a new empty Init. @@ -197,6 +196,7 @@ func (ini *Init) withTimeout(ctx context.Context, timeout time.Duration, f func( select { case <-waitCtx.Done(): + //lint:ignore ST1005 end user message. return nil, errors.New("timed out initializing value. You may have a circular loop in a shortcode, or your site may have resources that take longer to build than the `timeout` limit in your Hugo config file.") case ve := <-c: return ve.v, ve.err @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/main_test.go b/main_test.go index 4b6ad4caf..75f5ed949 100644 --- a/main_test.go +++ b/main_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -50,15 +50,12 @@ func TestUnfinished(t *testing.T) { p := commonTestScriptsParam p.Dir = "testscripts/unfinished" - //p.UpdateScripts = true + // p.UpdateScripts = true testscript.Run(t, p) } func TestMain(m *testing.M) { - type testInfo struct { - BaseURLs []string - } os.Exit( testscript.RunMain(m, map[string]func() int{ // The main program. @@ -91,7 +88,7 @@ var commonTestScriptsParam = testscript.Params{ ts.Fatalf("%v", err) } b = bytes.Replace(b, []byte("\r\n"), []byte{'\n'}, -1) - if err := os.WriteFile(filename, b, 0666); err != nil { + if err := os.WriteFile(filename, b, 0o666); err != nil { ts.Fatalf("%v", err) } }, @@ -115,15 +112,10 @@ var commonTestScriptsParam = testscript.Params{ } } time.Sleep(time.Duration(i) * time.Second) - }, // ls lists a directory to stdout. "ls": func(ts *testscript.TestScript, neg bool, args []string) { - var dirname string - if len(args) > 0 { - dirname = args[0] - } - dirname = ts.MkAbs(args[0]) + dirname := ts.MkAbs(args[0]) dir, err := os.Open(dirname) if err != nil { @@ -223,7 +215,6 @@ var commonTestScriptsParam = testscript.Params{ } } return nil - } // The timing on server rebuilds can be a little tricky to get right, @@ -350,7 +341,6 @@ var commonTestScriptsParam = testscript.Params{ return } - }, "stopServer": func(ts *testscript.TestScript, neg bool, args []string) { baseURL := ts.Getenv("HUGOTEST_BASEURL_0") @@ -367,7 +357,6 @@ var commonTestScriptsParam = testscript.Params{ resp.Body.Close() // Allow some time for the server to shut down. 
time.Sleep(2 * time.Second) - }, }, } @@ -384,13 +373,13 @@ func testSetupFunc() func(env *testscript.Env) error { keyVals = append(keyVals, "HOME", home) if runtime.GOOS == "darwin" { - if err := os.MkdirAll(filepath.Join(home, "Library", "Caches"), 0777); err != nil { + if err := os.MkdirAll(filepath.Join(home, "Library", "Caches"), 0o777); err != nil { return err } } if runtime.GOOS == "linux" { - if err := os.MkdirAll(xdghome, 0777); err != nil { + if err := os.MkdirAll(xdghome, 0o777); err != nil { return err } } diff --git a/markup/asciidocext/convert_test.go b/markup/asciidocext/convert_test.go index 459686139..9ccc807f1 100644 --- a/markup/asciidocext/convert_test.go +++ b/markup/asciidocext/convert_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/markup/blackfriday/anchors.go b/markup/blackfriday/anchors.go index 90f65a64c..987f46fc6 100644 --- a/markup/blackfriday/anchors.go +++ b/markup/blackfriday/anchors.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/markup/converter/converter.go b/markup/converter/converter.go index 7c4898592..b66cb8730 100644 --- a/markup/converter/converter.go +++ b/markup/converter/converter.go @@ -89,7 +89,6 @@ func (nopConverter) Supports(feature identity.Identity) bool { // another format, e.g. Markdown to HTML. type Converter interface { Convert(ctx RenderContext) (ResultRender, error) - Supports(feature identity.Identity) bool } // ParseRenderer is an optional interface. 
@@ -156,5 +155,3 @@ type RenderContext struct { // GerRenderer provides hook renderers on demand. GetRenderer hooks.GetRendererFunc } - -var FeatureRenderHooks = identity.NewPathIdentity("markup", "renderingHooks") diff --git a/markup/converter/hooks/hooks.go b/markup/converter/hooks/hooks.go index c5be4d1f0..bdc38f119 100644 --- a/markup/converter/hooks/hooks.go +++ b/markup/converter/hooks/hooks.go @@ -20,7 +20,6 @@ import ( "github.com/gohugoio/hugo/common/hugio" "github.com/gohugoio/hugo/common/text" "github.com/gohugoio/hugo/common/types/hstring" - "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/markup/internal/attributes" ) @@ -89,12 +88,10 @@ type AttributesOptionsSliceProvider interface { type LinkRenderer interface { RenderLink(cctx context.Context, w io.Writer, ctx LinkContext) error - identity.Provider } type CodeBlockRenderer interface { RenderCodeblock(cctx context.Context, w hugio.FlexiWriter, ctx CodeblockContext) error - identity.Provider } type IsDefaultCodeBlockRendererProvider interface { @@ -123,7 +120,6 @@ type HeadingContext interface { type HeadingRenderer interface { // RenderHeading writes the rendered content to w using the data in w. RenderHeading(cctx context.Context, w io.Writer, ctx HeadingContext) error - identity.Provider } // ElementPositionResolver provides a way to resolve the start Position diff --git a/markup/goldmark/codeblocks/integration_test.go b/markup/goldmark/codeblocks/integration_test.go index 7f0201878..5597fc507 100644 --- a/markup/goldmark/codeblocks/integration_test.go +++ b/markup/goldmark/codeblocks/integration_test.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -339,7 +339,6 @@ Attributes: {{ .Attributes }}|Options: {{ .Options }}| } func TestPanics(t *testing.T) { - files := ` -- config.toml -- [markup] @@ -384,7 +383,6 @@ Common b.AssertFileContent("public/p1/index.html", "Common") }) } - } // Issue 10835 @@ -421,5 +419,4 @@ Attributes: {{ .Attributes }}|Type: {{ .Type }}| b.Assert(err, qt.Not(qt.IsNil)) b.Assert(err.Error(), qt.Contains, "p1.md:7:9\": failed to parse Markdown attributes; you may need to quote the values") - } diff --git a/markup/goldmark/codeblocks/render.go b/markup/goldmark/codeblocks/render.go index 5f053d278..5f479bf23 100644 --- a/markup/goldmark/codeblocks/render.go +++ b/markup/goldmark/codeblocks/render.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -133,8 +133,6 @@ func (r *htmlRenderer) renderCodeBlock(w util.BufWriter, src []byte, node ast.No cbctx, ) - ctx.AddIdentity(cr) - if err != nil { return ast.WalkContinue, herrors.NewFileErrorFromPos(err, cbctx.createPos()) } diff --git a/markup/goldmark/convert.go b/markup/goldmark/convert.go index 56cc56fcd..de06bedff 100644 --- a/markup/goldmark/convert.go +++ b/markup/goldmark/convert.go @@ -17,8 +17,6 @@ package goldmark import ( "bytes" - "github.com/gohugoio/hugo/identity" - "github.com/gohugoio/hugo-goldmark-extensions/passthrough" "github.com/gohugoio/hugo/markup/goldmark/codeblocks" "github.com/gohugoio/hugo/markup/goldmark/goldmark_config" @@ -213,8 +211,6 @@ func newMarkdown(pcfg converter.ProviderConfig) goldmark.Markdown { return md } -var _ identity.IdentitiesProvider = (*converterResult)(nil) - type parserResult struct { doc any toc *tableofcontents.Fragments @@ -230,25 +226,17 @@ func (p parserResult) TableOfContents() *tableofcontents.Fragments { type renderResult struct { converter.ResultRender - ids 
identity.Identities -} - -func (r renderResult) GetIdentities() identity.Identities { - return r.ids } type converterResult struct { converter.ResultRender tableOfContentsProvider - identity.IdentitiesProvider } type tableOfContentsProvider interface { TableOfContents() *tableofcontents.Fragments } -var converterIdentity = identity.KeyValueIdentity{Key: "goldmark", Value: "converter"} - func (c *goldmarkConverter) Parse(ctx converter.RenderContext) (converter.ResultParse, error) { pctx := c.newParserContext(ctx) reader := text.NewReader(ctx.Src) @@ -262,8 +250,8 @@ func (c *goldmarkConverter) Parse(ctx converter.RenderContext) (converter.Result doc: doc, toc: pctx.TableOfContents(), }, nil - } + func (c *goldmarkConverter) Render(ctx converter.RenderContext, doc any) (converter.ResultRender, error) { n := doc.(ast.Node) buf := &render.BufWriter{Buffer: &bytes.Buffer{}} @@ -271,7 +259,6 @@ func (c *goldmarkConverter) Render(ctx converter.RenderContext, doc any) (conver rcx := &render.RenderContextDataHolder{ Rctx: ctx, Dctx: c.ctx, - IDs: identity.NewManager(converterIdentity), } w := &render.Context{ @@ -285,9 +272,7 @@ func (c *goldmarkConverter) Render(ctx converter.RenderContext, doc any) (conver return renderResult{ ResultRender: buf, - ids: rcx.IDs.GetIdentities(), }, nil - } func (c *goldmarkConverter) Convert(ctx converter.RenderContext) (converter.ResultRender, error) { @@ -302,17 +287,7 @@ func (c *goldmarkConverter) Convert(ctx converter.RenderContext) (converter.Resu return converterResult{ ResultRender: renderResult, tableOfContentsProvider: parseResult, - IdentitiesProvider: renderResult.(identity.IdentitiesProvider), }, nil - -} - -var featureSet = map[identity.Identity]bool{ - converter.FeatureRenderHooks: true, -} - -func (c *goldmarkConverter) Supports(feature identity.Identity) bool { - return featureSet[feature.GetIdentity()] } func (c *goldmarkConverter) newParserContext(rctx converter.RenderContext) *parserContext { @@ -349,5 +324,4 @@ func 
toTypographicPunctuationMap(t goldmark_config.Typographer) map[extension.Ty extension.RightAngleQuote: []byte(t.RightAngleQuote), extension.Apostrophe: []byte(t.Apostrophe), } - } diff --git a/markup/goldmark/convert_test.go b/markup/goldmark/convert_test.go index c97156f7a..266f0f9ab 100644 --- a/markup/goldmark/convert_test.go +++ b/markup/goldmark/convert_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -483,7 +483,6 @@ noclasses=false }) c.Run("Highlight lines, default config", func(c *qt.C) { - result := convertForConfig(c, cfgStrHighlichgtNoClasses, lines, `bash {linenos=table,hl_lines=[2 "4-5"],linenostart=3}`) c.Assert(result, qt.Contains, "<div class=\"highlight\"><div class=\"chroma\">\n<table class=\"lntable\"><tr><td class=\"lntd\">\n<pre tabindex=\"0\" class=\"chroma\"><code><span class") c.Assert(result, qt.Contains, "<span class=\"hl\"><span class=\"lnt\">4") @@ -614,7 +613,6 @@ func unsafeConf() config.AllProvider { unsafe = true `) return testconfig.GetTestConfig(nil, cfg) - } func safeConf() config.AllProvider { @@ -624,7 +622,6 @@ func safeConf() config.AllProvider { unsafe = false `) return testconfig.GetTestConfig(nil, cfg) - } func TestConvertCJK(t *testing.T) { diff --git a/markup/goldmark/goldmark_config/config.go b/markup/goldmark/goldmark_config/config.go index ba1874a18..1c393e3f4 100644 --- a/markup/goldmark/goldmark_config/config.go +++ b/markup/goldmark/goldmark_config/config.go @@ -73,9 +73,10 @@ var Default = Config{ // Config configures Goldmark. 
type Config struct { - Renderer Renderer - Parser Parser - Extensions Extensions + DuplicateResourceFiles bool + Renderer Renderer + Parser Parser + Extensions Extensions } type Extensions struct { diff --git a/markup/goldmark/images/integration_test.go b/markup/goldmark/images/integration_test.go index e8d1b880e..8b0ba99c1 100644 --- a/markup/goldmark/images/integration_test.go +++ b/markup/goldmark/images/integration_test.go @@ -39,10 +39,10 @@ This is an inline image: ![Inline Image](/inline.jpg). Some more text. files = files + `-- layouts/_default/_markup/render-image.html -- {{ if .IsBlock }} <figure class="{{ .Attributes.class }}"> - <img src="{{ .Destination | safeURL }}" alt="{{ .Text }}" /> + <img src="{{ .Destination | safeURL }}" alt="{{ .Text }}|{{ .Ordinal }}" /> </figure> {{ else }} - <img src="{{ .Destination | safeURL }}" alt="{{ .Text }}" /> + <img src="{{ .Destination | safeURL }}" alt="{{ .Text }}|{{ .Ordinal }}" /> {{ end }} ` b := hugolib.NewIntegrationTestBuilder( @@ -54,8 +54,8 @@ This is an inline image: ![Inline Image](/inline.jpg). Some more text. ).Build() b.AssertFileContent("public/p1/index.html", - "This is an inline image: \n\t<img src=\"/inline.jpg\" alt=\"Inline Image\" />\n. Some more text.</p>", - "<figure class=\"b\">\n\t<img src=\"/block.jpg\" alt=\"Block Image\" />", + "This is an inline image: \n\t<img src=\"/inline.jpg\" alt=\"Inline Image|0\" />\n. Some more text.</p>", + "<figure class=\"b\">\n\t<img src=\"/block.jpg\" alt=\"Block Image|1\" />", ) }) @@ -109,5 +109,4 @@ This is an inline image: ![Inline Image](/inline.jpg). Some more text. 
b.AssertFileContent("public/p1/index.html", "<p class=\"b\"><img src=\"/block.jpg\" alt=\"Block Image\"></p>") }) - } diff --git a/markup/goldmark/internal/render/context.go b/markup/goldmark/internal/render/context.go index b18983ef3..578714339 100644 --- a/markup/goldmark/internal/render/context.go +++ b/markup/goldmark/internal/render/context.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -17,7 +17,6 @@ import ( "bytes" "math/bits" - "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/markup/converter" ) @@ -59,13 +58,11 @@ func (ctx *Context) PopPos() int { type ContextData interface { RenderContext() converter.RenderContext DocumentContext() converter.DocumentContext - AddIdentity(id identity.Provider) } type RenderContextDataHolder struct { Rctx converter.RenderContext Dctx converter.DocumentContext - IDs identity.Manager } func (ctx *RenderContextDataHolder) RenderContext() converter.RenderContext { @@ -75,7 +72,3 @@ func (ctx *RenderContextDataHolder) RenderContext() converter.RenderContext { func (ctx *RenderContextDataHolder) DocumentContext() converter.DocumentContext { return ctx.Dctx } - -func (ctx *RenderContextDataHolder) AddIdentity(id identity.Provider) { - ctx.IDs.Add(id) -} diff --git a/markup/goldmark/links/integration_test.go b/markup/goldmark/links/integration_test.go deleted file mode 100644 index 20d4d74b1..000000000 --- a/markup/goldmark/links/integration_test.go +++ /dev/null @@ -1,113 +0,0 @@ -package images_test - -import ( - "strings" - "testing" - - "github.com/gohugoio/hugo/hugolib" -) - -func TestDisableWrapStandAloneImageWithinParagraph(t *testing.T) { - t.Parallel() - - filesTemplate := ` --- config.toml -- -[markup.goldmark.renderer] - unsafe = false -[markup.goldmark.parser] 
-wrapStandAloneImageWithinParagraph = CONFIG_VALUE -[markup.goldmark.parser.attribute] - block = true - title = true --- content/p1.md -- ---- -title: "p1" ---- - -This is an inline image: ![Inline Image](/inline.jpg). Some more text. - -![Block Image](/block.jpg) -{.b} - - --- layouts/_default/single.html -- -{{ .Content }} -` - - t.Run("With Hook, no wrap", func(t *testing.T) { - files := strings.ReplaceAll(filesTemplate, "CONFIG_VALUE", "false") - files = files + `-- layouts/_default/_markup/render-image.html -- -{{ if .IsBlock }} -<figure class="{{ .Attributes.class }}"> - <img src="{{ .Destination | safeURL }}" alt="{{ .Text }}|{{ .Ordinal }}" /> -</figure> -{{ else }} - <img src="{{ .Destination | safeURL }}" alt="{{ .Text }}|{{ .Ordinal }}" /> -{{ end }} -` - b := hugolib.NewIntegrationTestBuilder( - hugolib.IntegrationTestConfig{ - T: t, - TxtarString: files, - NeedsOsFS: false, - }, - ).Build() - - b.AssertFileContent("public/p1/index.html", - "This is an inline image: \n\t<img src=\"/inline.jpg\" alt=\"Inline Image|0\" />\n. Some more text.</p>", - "<figure class=\"b\">\n\t<img src=\"/block.jpg\" alt=\"Block Image|1\" />", - ) - }) - - t.Run("With Hook, wrap", func(t *testing.T) { - files := strings.ReplaceAll(filesTemplate, "CONFIG_VALUE", "true") - files = files + `-- layouts/_default/_markup/render-image.html -- -{{ if .IsBlock }} -<figure class="{{ .Attributes.class }}"> - <img src="{{ .Destination | safeURL }}" alt="{{ .Text }}" /> -</figure> -{{ else }} - <img src="{{ .Destination | safeURL }}" alt="{{ .Text }}" /> -{{ end }} -` - b := hugolib.NewIntegrationTestBuilder( - hugolib.IntegrationTestConfig{ - T: t, - TxtarString: files, - NeedsOsFS: false, - }, - ).Build() - - b.AssertFileContent("public/p1/index.html", - "This is an inline image: \n\t<img src=\"/inline.jpg\" alt=\"Inline Image\" />\n. 
Some more text.</p>", - "<p class=\"b\">\n\t<img src=\"/block.jpg\" alt=\"Block Image\" />\n</p>", - ) - }) - - t.Run("No Hook, no wrap", func(t *testing.T) { - files := strings.ReplaceAll(filesTemplate, "CONFIG_VALUE", "false") - b := hugolib.NewIntegrationTestBuilder( - hugolib.IntegrationTestConfig{ - T: t, - TxtarString: files, - NeedsOsFS: false, - }, - ).Build() - - b.AssertFileContent("public/p1/index.html", "<p>This is an inline image: <img src=\"/inline.jpg\" alt=\"Inline Image\">. Some more text.</p>\n<img src=\"/block.jpg\" alt=\"Block Image\" class=\"b\">") - }) - - t.Run("No Hook, wrap", func(t *testing.T) { - files := strings.ReplaceAll(filesTemplate, "CONFIG_VALUE", "true") - b := hugolib.NewIntegrationTestBuilder( - hugolib.IntegrationTestConfig{ - T: t, - TxtarString: files, - NeedsOsFS: false, - }, - ).Build() - - b.AssertFileContent("public/p1/index.html", "<p class=\"b\"><img src=\"/block.jpg\" alt=\"Block Image\"></p>") - }) - -} diff --git a/markup/goldmark/links/transform.go b/markup/goldmark/links/transform.go deleted file mode 100644 index 2a7815b70..000000000 --- a/markup/goldmark/links/transform.go +++ /dev/null @@ -1,57 +0,0 @@ -package images - -import ( - "github.com/yuin/goldmark/ast" - "github.com/yuin/goldmark/parser" - "github.com/yuin/goldmark/text" -) - -type ( - linksExtension struct { - wrapStandAloneImageWithinParagraph bool - } -) - -const ( - // Used to signal to the rendering step that an image is used in a block context. - // Dont's change this; the prefix must match the internalAttrPrefix in the root goldmark package. - AttrIsBlock = "_h__isBlock" -) - -type Transformer struct { - wrapStandAloneImageWithinParagraph bool -} - -// Transform transforms the provided Markdown AST. 
-func (t *Transformer) Transform(doc *ast.Document, reader text.Reader, pctx parser.Context) { - ast.Walk(doc, func(node ast.Node, enter bool) (ast.WalkStatus, error) { - if !enter { - return ast.WalkContinue, nil - } - - if n, ok := node.(*ast.Image); ok { - parent := n.Parent() - - if !t.wrapStandAloneImageWithinParagraph { - isBlock := parent.ChildCount() == 1 - if isBlock { - n.SetAttributeString(AttrIsBlock, true) - } - - if isBlock && parent.Kind() == ast.KindParagraph { - for _, attr := range parent.Attributes() { - // Transfer any attribute set down to the image. - // Image elements does not support attributes on its own, - // so it's safe to just set without checking first. - n.SetAttribute(attr.Name, attr.Value) - } - grandParent := parent.Parent() - grandParent.ReplaceChild(grandParent, parent, n) - } - } - - } - - return ast.WalkContinue, nil - }) -} diff --git a/markup/goldmark/render_hooks.go b/markup/goldmark/render_hooks.go index ecdd7f91e..8dcdc39c3 100644 --- a/markup/goldmark/render_hooks.go +++ b/markup/goldmark/render_hooks.go @@ -197,8 +197,6 @@ func (r *hookedRenderer) renderImage(w util.BufWriter, source []byte, node ast.N }, ) - ctx.AddIdentity(lr) - return ast.WalkContinue, err } @@ -284,11 +282,6 @@ func (r *hookedRenderer) renderLink(w util.BufWriter, source []byte, node ast.No }, ) - // TODO(bep) I have a working branch that fixes these rather confusing identity types, - // but for now it's important that it's not .GetIdentity() that's added here, - // to make sure we search the entire chain on changes. - ctx.AddIdentity(lr) - return ast.WalkContinue, err } @@ -353,11 +346,6 @@ func (r *hookedRenderer) renderAutoLink(w util.BufWriter, source []byte, node as }, ) - // TODO(bep) I have a working branch that fixes these rather confusing identity types, - // but for now it's important that it's not .GetIdentity() that's added here, - // to make sure we search the entire chain on changes. 
- ctx.AddIdentity(lr) - return ast.WalkContinue, err } @@ -443,8 +431,6 @@ func (r *hookedRenderer) renderHeading(w util.BufWriter, source []byte, node ast }, ) - ctx.AddIdentity(hr) - return ast.WalkContinue, err } diff --git a/markup/goldmark/toc_test.go b/markup/goldmark/toc_test.go index 1b846877b..96983dfa6 100644 --- a/markup/goldmark/toc_test.go +++ b/markup/goldmark/toc_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/markup/highlight/chromalexers/chromalexers.go b/markup/highlight/chromalexers/chromalexers.go index 41fd76261..6ab4a7bbe 100644 --- a/markup/highlight/chromalexers/chromalexers.go +++ b/markup/highlight/chromalexers/chromalexers.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/markup/highlight/highlight.go b/markup/highlight/highlight.go index 85ea74124..a284b5981 100644 --- a/markup/highlight/highlight.go +++ b/markup/highlight/highlight.go @@ -27,7 +27,6 @@ import ( "github.com/alecthomas/chroma/v2/styles" "github.com/gohugoio/hugo/common/hugio" "github.com/gohugoio/hugo/common/text" - "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/markup/converter/hooks" "github.com/gohugoio/hugo/markup/highlight/chromalexers" "github.com/gohugoio/hugo/markup/internal/attributes" @@ -146,13 +145,6 @@ func (h chromaHighlighter) IsDefaultCodeBlockRenderer() bool { return true } -var id = identity.NewPathIdentity("chroma", "highlight") - -// GetIdentity is for internal use. 
-func (h chromaHighlighter) GetIdentity() identity.Identity { - return id -} - // HighlightResult holds the result of an highlighting operation. type HighlightResult struct { innerLow int @@ -188,7 +180,7 @@ func highlight(fw hugio.FlexiWriter, code, lang string, attributes []attributes. if lexer == nil { if cfg.Hl_inline { - fmt.Fprint(w, fmt.Sprintf("<code%s>%s</code>", inlineCodeAttrs(lang), gohtml.EscapeString(code))) + fmt.Fprintf(w, "<code%s>%s</code>", inlineCodeAttrs(lang), gohtml.EscapeString(code)) } else { preWrapper := getPreWrapper(lang, w) fmt.Fprint(w, preWrapper.Start(true, "")) @@ -278,8 +270,6 @@ func (p *preWrapper) Start(code bool, styleAttr string) string { } func inlineCodeAttrs(lang string) string { - if lang == "" { - } return fmt.Sprintf(` class="code-inline language-%s"`, lang) } diff --git a/markup/highlight/highlight_test.go b/markup/highlight/highlight_test.go index 662f07c93..732dbfa64 100644 --- a/markup/highlight/highlight_test.go +++ b/markup/highlight/highlight_test.go @@ -87,7 +87,6 @@ User-Agent: foo result, _ := h.Highlight(lines, "bash", "") c.Assert(result, qt.Contains, "<span class=\"lnt\" id=\"2\"><a class=\"lnlinks\" href=\"#2\">2</a>\n</span>") - result, _ = h.Highlight(lines, "bash", "lineanchors=test") result, _ = h.Highlight(lines, "bash", "anchorlinenos=false,hl_lines=2") c.Assert(result, qt.Not(qt.Contains), "id=\"2\"") }) diff --git a/markup/highlight/integration_test.go b/markup/highlight/integration_test.go index ce6705f02..b53b585c0 100644 --- a/markup/highlight/integration_test.go +++ b/markup/highlight/integration_test.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/markup/internal/attributes/attributes.go b/markup/internal/attributes/attributes.go index 91181c78c..4e81afe04 100644 --- a/markup/internal/attributes/attributes.go +++ b/markup/internal/attributes/attributes.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/markup/markup.go b/markup/markup.go index ebd86f38f..835c7bbec 100644 --- a/markup/markup.go +++ b/markup/markup.go @@ -95,6 +95,7 @@ func NewConverterProvider(cfg converter.ProviderConfig) (ConverterProvider, erro type ConverterProvider interface { Get(name string) converter.Provider + IsGoldmark(name string) bool // Default() converter.Provider GetMarkupConfig() markup_config.Config GetHighlighter() highlight.Highlighter @@ -110,6 +111,11 @@ type converterRegistry struct { config converter.ProviderConfig } +func (r *converterRegistry) IsGoldmark(name string) bool { + cp := r.Get(name) + return cp != nil && cp.Name() == "goldmark" +} + func (r *converterRegistry) Get(name string) converter.Provider { return r.converters[strings.ToLower(name)] } diff --git a/markup/markup_test.go b/markup/markup_test.go index 5cf08758d..172099d5c 100644 --- a/markup/markup_test.go +++ b/markup/markup_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/markup/org/convert_test.go b/markup/org/convert_test.go index 1422585af..16c4306ff 100644 --- a/markup/org/convert_test.go +++ b/markup/org/convert_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. 
// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/markup/tableofcontents/integration_test.go b/markup/tableofcontents/integration_test.go index a51ad3d45..87a7c0108 100644 --- a/markup/tableofcontents/integration_test.go +++ b/markup/tableofcontents/integration_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/media/config.go b/media/config.go index b356132be..cdec2e438 100644 --- a/media/config.go +++ b/media/config.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/media/config_test.go b/media/config_test.go index 75ede75bd..4803eb42a 100644 --- a/media/config_test.go +++ b/media/config_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/media/mediaType.go b/media/mediaType.go index 8204fc435..367c8ecc9 100644 --- a/media/mediaType.go +++ b/media/mediaType.go @@ -129,15 +129,6 @@ func FromStringAndExt(t, ext string) (Type, error) { return tp, nil } -// MustFromString is like FromString but panics on error. -func MustFromString(t string) Type { - tp, err := FromString(t) - if err != nil { - panic(err) - } - return tp -} - // FromString creates a new Type given a type string on the form MainType/SubType and // an optional suffix, e.g. 
"text/html" or "text/html+html". func FromString(t string) (Type, error) { @@ -209,14 +200,6 @@ func (m *Type) init() { } } -// WithDelimiterAndSuffixes is used in tests. -func WithDelimiterAndSuffixes(t Type, delimiter, suffixesCSV string) Type { - t.Delimiter = delimiter - t.SuffixesCSV = suffixesCSV - t.init() - return t -} - func newMediaType(main, sub string, suffixes []string) Type { t := Type{MainType: main, SubType: sub, SuffixesCSV: strings.Join(suffixes, ","), Delimiter: DefaultDelimiter} t.init() @@ -315,7 +298,6 @@ func (t Types) IsTextSuffix(suffix string) bool { } } return false - } func (m Type) hasSuffix(suffix string) bool { diff --git a/minifiers/config_test.go b/minifiers/config_test.go index 9dc20c655..7edd8734e 100644 --- a/minifiers/config_test.go +++ b/minifiers/config_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/modules/client.go b/modules/client.go index b9a2a48d4..fae6a4c0a 100644 --- a/modules/client.go +++ b/modules/client.go @@ -42,7 +42,7 @@ import ( "github.com/gohugoio/hugo/config" - "github.com/rogpeppe/go-internal/module" + "golang.org/x/mod/module" "github.com/gohugoio/hugo/common/hugio" diff --git a/modules/client_test.go b/modules/client_test.go index 75e3c2b08..ea910580f 100644 --- a/modules/client_test.go +++ b/modules/client_test.go @@ -49,7 +49,7 @@ github.com/gohugoio/hugoTestModules1_darwin/[email protected] github.com/gohugoio/h workingDir, clean, err := htesting.CreateTempDir(hugofs.Os, fmt.Sprintf("%s-%d", modName, clientID)) c.Assert(err, qt.IsNil) themesDir := filepath.Join(workingDir, "themes") - err = os.Mkdir(themesDir, 0777) + err = os.Mkdir(themesDir, 0o777) c.Assert(err, qt.IsNil) ccfg := ClientConfig{ @@ -184,7 +184,7 @@ project github.com/gohugoio/hugoTestModules1_darwin/[email protected]+vendor c.Assert(err, qt.IsNil) c.Assert(dirname, qt.Equals, filepath.Join(client.ccfg.ThemesDir, "../../foo")) - dirname, err = client.createThemeDirname("../../foo", false) + _, err = client.createThemeDirname("../../foo", false) c.Assert(err, qt.Not(qt.IsNil)) absDir := filepath.Join(client.ccfg.WorkingDir, "..", "..") diff --git a/modules/collect.go b/modules/collect.go index 6c47bde5c..a4066a46c 100644 --- a/modules/collect.go +++ b/modules/collect.go @@ -17,6 +17,7 @@ import ( "bufio" "errors" "fmt" + "io/fs" "os" "path/filepath" "regexp" @@ -36,7 +37,7 @@ import ( "github.com/gohugoio/hugo/hugofs/files" - "github.com/rogpeppe/go-internal/module" + "golang.org/x/mod/module" "github.com/gohugoio/hugo/config" "github.com/spf13/afero" @@ -282,6 +283,7 @@ func (c *collector) add(owner *moduleAdapter, moduleImport Import) (*moduleAdapt return nil, nil } if found, _ := afero.Exists(c.fs, moduleDir); !found { + //lint:ignore ST1005 end user message. 
c.err = c.wrapModuleNotFound(fmt.Errorf(`module %q not found in %q; either add it as a Hugo Module or store it in %q.`, modulePath, moduleDir, c.ccfg.ThemesDir)) return nil, nil } @@ -599,7 +601,12 @@ func (c *collector) mountCommonJSConfig(owner *moduleAdapter, mounts []Mount) ([ } // Mount the common JS config files. - fis, err := afero.ReadDir(c.fs, owner.Dir()) + d, err := c.fs.Open(owner.Dir()) + if err != nil { + return mounts, fmt.Errorf("failed to open dir %q: %q", owner.Dir(), err) + } + defer d.Close() + fis, err := d.(fs.ReadDirFile).ReadDir(-1) if err != nil { return mounts, fmt.Errorf("failed to read dir %q: %q", owner.Dir(), err) } diff --git a/modules/config.go b/modules/config.go index f8faf7969..62671613c 100644 --- a/modules/config.go +++ b/modules/config.go @@ -29,7 +29,6 @@ import ( const WorkspaceDisabled = "off" var DefaultModuleConfig = Config{ - // Default to direct, which means "git clone" and similar. We // will investigate proxy settings in more depth later. // See https://github.com/golang/go/issues/26334 @@ -58,7 +57,6 @@ var DefaultModuleConfig = Config{ // ApplyProjectConfigDefaults applies default/missing module configuration for // the main project. func ApplyProjectConfigDefaults(mod Module, cfgs ...config.AllProvider) error { - moda := mod.(*moduleAdapter) // To bridge between old and new configuration format we need @@ -99,14 +97,19 @@ func ApplyProjectConfigDefaults(mod Module, cfgs ...config.AllProvider) error { dir = dirs.ContentDir dropLang = dir == dirsBase.ContentDir case files.ComponentFolderData: + //lint:ignore SA1019 Keep as adapter for now. dir = dirs.DataDir case files.ComponentFolderLayouts: + //lint:ignore SA1019 Keep as adapter for now. dir = dirs.LayoutDir case files.ComponentFolderI18n: + //lint:ignore SA1019 Keep as adapter for now. dir = dirs.I18nDir case files.ComponentFolderArchetypes: + //lint:ignore SA1019 Keep as adapter for now. 
dir = dirs.ArcheTypeDir case files.ComponentFolderAssets: + //lint:ignore SA1019 Keep as adapter for now. dir = dirs.AssetDir case files.ComponentFolderStatic: // For static dirs, we only care about the language in multihost setups. @@ -230,6 +233,7 @@ func decodeConfig(cfg config.Provider, pathReplacements map[string]string) (Conf c.Workspace = filepath.Join(workingDir, c.Workspace) } if _, err := os.Stat(c.Workspace); err != nil { + //lint:ignore ST1005 end user message. return c, fmt.Errorf("module workspace %q does not exist. Check your module.workspace setting (or HUGO_MODULE_WORKSPACE env var).", c.Workspace) } } diff --git a/modules/npm/package_builder.go b/modules/npm/package_builder.go index 9bdc7eb78..0deed2f42 100644 --- a/modules/npm/package_builder.go +++ b/modules/npm/package_builder.go @@ -18,6 +18,7 @@ import ( "encoding/json" "fmt" "io" + "io/fs" "strings" "github.com/gohugoio/hugo/common/hugio" @@ -44,17 +45,17 @@ const ( }` ) -func Pack(fs afero.Fs, fis []hugofs.FileMetaInfo) error { +func Pack(sourceFs, assetsWithDuplicatesPreservedFs afero.Fs) error { var b *packageBuilder // Have a package.hugo.json? - fi, err := fs.Stat(files.FilenamePackageHugoJSON) + fi, err := sourceFs.Stat(files.FilenamePackageHugoJSON) if err != nil { // Have a package.json? - fi, err = fs.Stat(packageJSONName) + fi, err = sourceFs.Stat(packageJSONName) if err == nil { // Preserve the original in package.hugo.json. - if err = hugio.CopyFile(fs, packageJSONName, files.FilenamePackageHugoJSON); err != nil { + if err = hugio.CopyFile(sourceFs, packageJSONName, files.FilenamePackageHugoJSON); err != nil { return fmt.Errorf("npm pack: failed to copy package file: %w", err) } } else { @@ -62,15 +63,15 @@ func Pack(fs afero.Fs, fis []hugofs.FileMetaInfo) error { name := "project" // Use the Hugo site's folder name as the default name. // The owner can change it later. 
- rfi, err := fs.Stat("") + rfi, err := sourceFs.Stat("") if err == nil { name = rfi.Name() } packageJSONContent := fmt.Sprintf(packageJSONTemplate, name, "0.1.0") - if err = afero.WriteFile(fs, files.FilenamePackageHugoJSON, []byte(packageJSONContent), 0666); err != nil { + if err = afero.WriteFile(sourceFs, files.FilenamePackageHugoJSON, []byte(packageJSONContent), 0o666); err != nil { return err } - fi, err = fs.Stat(files.FilenamePackageHugoJSON) + fi, err = sourceFs.Stat(files.FilenamePackageHugoJSON) if err != nil { return err } @@ -86,9 +87,18 @@ func Pack(fs afero.Fs, fis []hugofs.FileMetaInfo) error { b = newPackageBuilder(meta.Module, f) f.Close() + d, err := assetsWithDuplicatesPreservedFs.Open(files.FolderJSConfig) + if err != nil { + return nil + } + + fis, err := d.(fs.ReadDirFile).ReadDir(-1) + if err != nil { + return fmt.Errorf("npm pack: failed to read assets: %w", err) + } + for _, fi := range fis { if fi.IsDir() { - // We only care about the files in the root. continue } @@ -137,7 +147,7 @@ func Pack(fs afero.Fs, fis []hugofs.FileMetaInfo) error { return fmt.Errorf("npm pack: failed to marshal JSON: %w", err) } - if err := afero.WriteFile(fs, packageJSONName, packageJSONData.Bytes(), 0666); err != nil { + if err := afero.WriteFile(sourceFs, packageJSONName, packageJSONData.Bytes(), 0o666); err != nil { return fmt.Errorf("npm pack: failed to write package.json: %w", err) } diff --git a/navigation/menu.go b/navigation/menu.go index 50e51bcbe..3802014b1 100644 --- a/navigation/menu.go +++ b/navigation/menu.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -49,7 +49,6 @@ type MenuEntry struct { } func (m *MenuEntry) URL() string { - // Check page first. 
// In Hugo 0.86.0 we added `pageRef`, // a way to connect menu items in site config to pages. @@ -88,7 +87,7 @@ type Page interface { Weight() int IsPage() bool IsSection() bool - IsAncestor(other any) (bool, error) + IsAncestor(other any) bool Params() maps.Params } @@ -290,7 +289,6 @@ func DecodeConfig(in any) (*config.ConfigNamespace[map[string]MenuConfig, Menus] if err != nil { return ret, nil, err } else { - for _, entry := range m { var menuConfig MenuConfig if err := mapstructure.WeakDecode(entry, &menuConfig); err != nil { @@ -312,7 +310,6 @@ func DecodeConfig(in any) (*config.ConfigNamespace[map[string]MenuConfig, Menus] } return ret, menus, nil - } return config.DecodeNamespace[map[string]MenuConfig](in, buildConfig) diff --git a/navigation/menu_cache.go b/navigation/menu_cache.go index 4287ed875..b6350cd01 100644 --- a/navigation/menu_cache.go +++ b/navigation/menu_cache.go @@ -39,12 +39,6 @@ func newMenuCache() *menuCache { return &menuCache{m: make(map[string][]menuCacheEntry)} } -func (c *menuCache) clear() { - c.Lock() - defer c.Unlock() - c.m = make(map[string][]menuCacheEntry) -} - type menuCache struct { sync.RWMutex m map[string][]menuCacheEntry diff --git a/navigation/pagemenus.go b/navigation/pagemenus.go index 6321a8a63..ab57231c3 100644 --- a/navigation/pagemenus.go +++ b/navigation/pagemenus.go @@ -125,7 +125,7 @@ type pageMenus struct { func (pm *pageMenus) HasMenuCurrent(menuID string, me *MenuEntry) bool { if !types.IsNil(me.Page) && me.Page.IsSection() { - if ok, _ := me.Page.IsAncestor(pm.p); ok { + if ok := me.Page.IsAncestor(pm.p); ok { return true } } diff --git a/output/config.go b/output/config.go index 86e5bcfaa..a7ebf5107 100644 --- a/output/config.go +++ b/output/config.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. 
// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -140,5 +140,4 @@ func decode(mediaTypes media.Types, input any, output *Format) error { } return nil - } diff --git a/output/config_test.go b/output/config_test.go index 52381c5d2..c2f0af980 100644 --- a/output/config_test.go +++ b/output/config_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/output/layouts/layout.go b/output/layouts/layout.go index 9c5ef17a1..c05841ae3 100644 --- a/output/layouts/layout.go +++ b/output/layouts/layout.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -90,7 +90,7 @@ type layoutBuilder struct { layoutVariations []string typeVariations []string d LayoutDescriptor - //f Format + // f Format } func (l *layoutBuilder) addLayoutVariations(vars ...string) { @@ -184,9 +184,18 @@ func resolvePageTemplate(d LayoutDescriptor) []string { case "404": b.addLayoutVariations("404") b.addTypeVariations("") + case "robotstxt": + b.addLayoutVariations("robots") + b.addTypeVariations("") + case "sitemap": + b.addLayoutVariations("sitemap") + b.addTypeVariations("") + case "sitemapindex": + b.addLayoutVariations("sitemapindex") + b.addTypeVariations("") } - isRSS := strings.EqualFold(d.OutputFormatName, "rss") + isRSS := d.OutputFormatName == "rss" if !d.RenderingHook && !d.Baseof && isRSS { // The historic and common rss.xml case b.addLayoutVariations("") @@ -212,6 +221,15 @@ func resolvePageTemplate(d LayoutDescriptor) []string { layouts = append(layouts, "_internal/_default/rss.xml") } + switch d.Kind { + case "robotsTXT": + layouts = append(layouts, "_internal/_default/robots.txt") + case "sitemap": + layouts = append(layouts, "_internal/_default/sitemap.xml") + case "sitemapindex": + layouts = append(layouts, "_internal/_default/sitemapindex.xml") + } + return layouts } diff --git a/output/outputFormat.go b/output/outputFormat.go index f602c03f3..54e7fe98d 100644 --- a/output/outputFormat.go +++ b/output/outputFormat.go @@ -56,12 +56,19 @@ type Format struct { // Enable to ignore the global uglyURLs setting. NoUgly bool `json:"noUgly"` + // Enable to override the global uglyURLs setting. + Ugly bool `json:"ugly"` + // Enable if it doesn't make sense to include this format in an alternative // format listing, CSS being one good example. // Note that we use the term "alternative" and not "alternate" here, as it // does not necessarily replace the other format, it is an alternative representation. NotAlternative bool `json:"notAlternative"` + // Eneable if this is a resource which path always starts at the root, + // e.g. 
/robots.txt. + Root bool `json:"root"` + // Setting this will make this output format control the value of // .Permalink and .RelPermalink for a rendered Page. // If not set, these values will point to the main (first) output format @@ -75,7 +82,7 @@ type Format struct { Weight int `json:"weight"` } -// An ordered list of built-in output formats. +// Built-in output formats. var ( AMPFormat = Format{ Name: "amp", @@ -156,6 +163,7 @@ var ( MediaType: media.Builtin.TextType, BaseName: "robots", IsPlainText: true, + Root: true, Rel: "alternate", } @@ -171,9 +179,27 @@ var ( Name: "sitemap", MediaType: media.Builtin.XMLType, BaseName: "sitemap", - NoUgly: true, + Ugly: true, Rel: "sitemap", } + + SitemapIndexFormat = Format{ + Name: "sitemapindex", + MediaType: media.Builtin.XMLType, + BaseName: "sitemap", + Ugly: true, + Root: true, + Rel: "sitemap", + } + + HTTPStatusHTMLFormat = Format{ + Name: "httpstatus", + MediaType: media.Builtin.HTMLType, + NotAlternative: true, + Ugly: true, + IsHTML: true, + Permalinkable: true, + } ) // DefaultFormats contains the default output formats supported by Hugo. @@ -297,6 +323,11 @@ func (f Format) BaseFilename() string { return f.BaseName + f.MediaType.FirstSuffix.FullSuffix } +// IsZero returns true if f represents a zero value. +func (f Format) IsZero() bool { + return f.Name == "" +} + // MarshalJSON returns the JSON encoding of f. // For internal use only. 
func (f Format) MarshalJSON() ([]byte, error) { diff --git a/parser/lowercase_camel_json.go b/parser/lowercase_camel_json.go index d48aa40c4..3dd4c24b0 100644 --- a/parser/lowercase_camel_json.go +++ b/parser/lowercase_camel_json.go @@ -25,8 +25,7 @@ import ( // Regexp definitions var ( - keyMatchRegex = regexp.MustCompile(`\"(\w+)\":`) - wordBarrierRegex = regexp.MustCompile(`(\w)([A-Z])`) + keyMatchRegex = regexp.MustCompile(`\"(\w+)\":`) ) // Code adapted from https://gist.github.com/piersy/b9934790a8892db1a603820c0c23e4a7 @@ -92,19 +91,17 @@ func (c ReplacingJSONMarshaller) MarshalJSON() ([]byte, error) { if !hreflect.IsTruthful(v) { delete(m, k) } else { - switch v.(type) { + switch vv := v.(type) { case map[string]interface{}: - removeZeroVAlues(v.(map[string]any)) + removeZeroVAlues(vv) case []interface{}: - for _, vv := range v.([]interface{}) { - if m, ok := vv.(map[string]any); ok { + for _, vvv := range vv { + if m, ok := vvv.(map[string]any); ok { removeZeroVAlues(m) } } } - } - } } removeZeroVAlues(m) diff --git a/parser/metadecoders/decoder.go b/parser/metadecoders/decoder.go index 8d93d86a0..5dac23f03 100644 --- a/parser/metadecoders/decoder.go +++ b/parser/metadecoders/decoder.go @@ -174,22 +174,22 @@ func (d Decoder) UnmarshalTo(data []byte, f Format, v any) error { // and change all maps to map[string]interface{} like we would've // gotten from `json`. var ptr any - switch v.(type) { + switch vv := v.(type) { case *map[string]any: - ptr = *v.(*map[string]any) + ptr = *vv case *any: - ptr = *v.(*any) + ptr = *vv default: // Not a map. 
} if ptr != nil { if mm, changed := stringifyMapKeys(ptr); changed { - switch v.(type) { + switch vv := v.(type) { case *map[string]any: - *v.(*map[string]any) = mm.(map[string]any) + *vv = mm.(map[string]any) case *any: - *v.(*any) = mm + *vv = mm } } } @@ -218,9 +218,9 @@ func (d Decoder) unmarshalCSV(data []byte, v any) error { return err } - switch v.(type) { + switch vv := v.(type) { case *any: - *v.(*any) = records + *vv = records default: return fmt.Errorf("CSV cannot be unmarshaled into %T", v) @@ -257,11 +257,11 @@ func (d Decoder) unmarshalORG(data []byte, v any) error { frontMatter[k] = v } } - switch v.(type) { + switch vv := v.(type) { case *map[string]any: - *v.(*map[string]any) = frontMatter - default: - *v.(*any) = frontMatter + *vv = frontMatter + case *any: + *vv = frontMatter } return nil } diff --git a/parser/pageparser/pagelexer.go b/parser/pageparser/pagelexer.go index 64cd4bfc1..bd903b771 100644 --- a/parser/pageparser/pagelexer.go +++ b/parser/pageparser/pagelexer.go @@ -50,6 +50,9 @@ type pageLexer struct { // items delivered to client items Items + + // error delivered to the client + err error } // Implement the Result interface @@ -164,7 +167,6 @@ func (l *pageLexer) emit(t ItemType) { } l.append(Item{Type: t, low: l.start, high: l.pos}) - } // sends a string item back to the client. @@ -210,7 +212,6 @@ func (l *pageLexer) ignoreEscapesAndEmit(t ItemType, isString bool) { } l.start = l.pos - } // gets the current value (for debugging and error handling) @@ -227,7 +228,14 @@ var lf = []byte("\n") // nil terminates the parser func (l *pageLexer) errorf(format string, args ...any) stateFunc { - l.append(Item{Type: tError, Err: fmt.Errorf(format, args...)}) + l.append(Item{Type: tError, Err: fmt.Errorf(format, args...), low: l.start, high: l.pos}) + return nil +} + +// documentError can be used to signal a fatal error in the lexing process. 
+// nil terminates the parser +func (l *pageLexer) documentError(err error) stateFunc { + l.err = err return nil } @@ -465,6 +473,7 @@ func lexDone(l *pageLexer) stateFunc { return nil } +//lint:ignore U1000 useful for debugging func (l *pageLexer) printCurrentInput() { fmt.Printf("input[%d:]: %q", l.pos, string(l.input[l.pos:])) } @@ -475,10 +484,6 @@ func (l *pageLexer) index(sep []byte) int { return bytes.Index(l.input[l.pos:], sep) } -func (l *pageLexer) indexByte(sep byte) int { - return bytes.IndexByte(l.input[l.pos:], sep) -} - func (l *pageLexer) hasPrefix(prefix []byte) bool { return bytes.HasPrefix(l.input[l.pos:], prefix) } diff --git a/parser/pageparser/pagelexer_intro.go b/parser/pageparser/pagelexer_intro.go index 6e4617998..25af4170b 100644 --- a/parser/pageparser/pagelexer_intro.go +++ b/parser/pageparser/pagelexer_intro.go @@ -13,6 +13,10 @@ package pageparser +import "errors" + +var ErrPlainHTMLDocumentsNotSupported = errors.New("plain HTML documents not supported") + func lexIntroSection(l *pageLexer) stateFunc { l.summaryDivider = summaryDivider @@ -45,7 +49,7 @@ LOOP: l.emit(TypeIgnore) continue LOOP } else { - return l.errorf("plain HTML documents not supported") + return l.documentError(ErrPlainHTMLDocumentsNotSupported) } } break LOOP diff --git a/parser/pageparser/pageparser.go b/parser/pageparser/pageparser.go index 8d4c757af..9e8b6d803 100644 --- a/parser/pageparser/pageparser.go +++ b/parser/pageparser/pageparser.go @@ -34,9 +34,22 @@ type Result interface { var _ Result = (*pageLexer)(nil) -// Parse parses the page in the given reader according to the given Config. -func Parse(r io.Reader, cfg Config) (Result, error) { - return parseSection(r, cfg, lexIntroSection) +// ParseBytes parses the page in b according to the given Config. 
+func ParseBytes(b []byte, cfg Config) (Items, error) { + l, err := parseBytes(b, cfg, lexIntroSection) + if err != nil { + return nil, err + } + return l.items, l.err +} + +// ParseBytesMain parses b starting with the main section. +func ParseBytesMain(b []byte, cfg Config) (Items, error) { + l, err := parseBytes(b, cfg, lexMainSection) + if err != nil { + return nil, err + } + return l.items, l.err } type ContentFrontMatter struct { @@ -50,24 +63,29 @@ type ContentFrontMatter struct { func ParseFrontMatterAndContent(r io.Reader) (ContentFrontMatter, error) { var cf ContentFrontMatter - psr, err := Parse(r, Config{}) + input, err := io.ReadAll(r) + if err != nil { + return cf, fmt.Errorf("failed to read page content: %w", err) + } + + psr, err := ParseBytes(input, Config{}) if err != nil { return cf, err } var frontMatterSource []byte - iter := psr.Iterator() + iter := NewIterator(psr) walkFn := func(item Item) bool { if frontMatterSource != nil { // The rest is content. - cf.Content = psr.Input()[item.low:] + cf.Content = input[item.low:] // Done return false } else if item.IsFrontMatter() { cf.FrontMatterFormat = FormatFromFrontMatterType(item.Type) - frontMatterSource = item.Val(psr.Input()) + frontMatterSource = item.Val(input) } return true } @@ -106,7 +124,7 @@ func parseSection(r io.Reader, cfg Config, start stateFunc) (Result, error) { return parseBytes(b, cfg, start) } -func parseBytes(b []byte, cfg Config, start stateFunc) (Result, error) { +func parseBytes(b []byte, cfg Config, start stateFunc) (*pageLexer, error) { lexer := newPageLexer(b, start, cfg) lexer.run() return lexer, nil diff --git a/parser/pageparser/pageparser_intro_test.go b/parser/pageparser/pageparser_intro_test.go index 1b2d59ccc..df2f2579b 100644 --- a/parser/pageparser/pageparser_intro_test.go +++ b/parser/pageparser/pageparser_intro_test.go @@ -25,6 +25,7 @@ type lexerTest struct { name string input string items []typeText + err error } type typeText struct { @@ -58,34 +59,40 @@ var 
crLfReplacer = strings.NewReplacer("\r", "#", "\n", "$") // TODO(bep) a way to toggle ORG mode vs the rest. var frontMatterTests = []lexerTest{ - {"empty", "", []typeText{tstEOF}}, - {"Byte order mark", "\ufeff\nSome text.\n", []typeText{nti(TypeIgnore, "\ufeff"), tstSomeText, tstEOF}}, - {"HTML Document", ` <html> `, []typeText{nti(tError, "plain HTML documents not supported")}}, - {"HTML Document with shortcode", `<html>{{< sc1 >}}</html>`, []typeText{nti(tError, "plain HTML documents not supported")}}, - {"No front matter", "\nSome text.\n", []typeText{tstSomeText, tstEOF}}, - {"YAML front matter", "---\nfoo: \"bar\"\n---\n\nSome text.\n", []typeText{tstFrontMatterYAML, tstSomeText, tstEOF}}, - {"YAML empty front matter", "---\n---\n\nSome text.\n", []typeText{nti(TypeFrontMatterYAML, ""), tstSomeText, tstEOF}}, - {"YAML commented out front matter", "<!--\n---\nfoo: \"bar\"\n---\n-->\nSome text.\n", []typeText{nti(TypeIgnore, "<!--\n"), tstFrontMatterYAML, nti(TypeIgnore, "-->"), tstSomeText, tstEOF}}, - {"YAML commented out front matter, no end", "<!--\n---\nfoo: \"bar\"\n---\nSome text.\n", []typeText{nti(TypeIgnore, "<!--\n"), tstFrontMatterYAML, nti(tError, "starting HTML comment with no end")}}, + {"empty", "", []typeText{tstEOF}, nil}, + {"Byte order mark", "\ufeff\nSome text.\n", []typeText{nti(TypeIgnore, "\ufeff"), tstSomeText, tstEOF}, nil}, + {"HTML Document", ` <html> `, nil, ErrPlainHTMLDocumentsNotSupported}, + {"HTML Document with shortcode", `<html>{{< sc1 >}}</html>`, nil, ErrPlainHTMLDocumentsNotSupported}, + {"No front matter", "\nSome text.\n", []typeText{tstSomeText, tstEOF}, nil}, + {"YAML front matter", "---\nfoo: \"bar\"\n---\n\nSome text.\n", []typeText{tstFrontMatterYAML, tstSomeText, tstEOF}, nil}, + {"YAML empty front matter", "---\n---\n\nSome text.\n", []typeText{nti(TypeFrontMatterYAML, ""), tstSomeText, tstEOF}, nil}, + {"YAML commented out front matter", "<!--\n---\nfoo: \"bar\"\n---\n-->\nSome text.\n", 
[]typeText{nti(TypeIgnore, "<!--\n"), tstFrontMatterYAML, nti(TypeIgnore, "-->"), tstSomeText, tstEOF}, nil}, + {"YAML commented out front matter, no end", "<!--\n---\nfoo: \"bar\"\n---\nSome text.\n", []typeText{nti(TypeIgnore, "<!--\n"), tstFrontMatterYAML, nti(tError, "starting HTML comment with no end")}, nil}, // Note that we keep all bytes as they are, but we need to handle CRLF - {"YAML front matter CRLF", "---\r\nfoo: \"bar\"\r\n---\n\nSome text.\n", []typeText{tstFrontMatterYAMLCRLF, tstSomeText, tstEOF}}, - {"TOML front matter", "+++\nfoo = \"bar\"\n+++\n\nSome text.\n", []typeText{tstFrontMatterTOML, tstSomeText, tstEOF}}, - {"JSON front matter", tstJSON + "\r\n\nSome text.\n", []typeText{tstFrontMatterJSON, tstSomeText, tstEOF}}, - {"ORG front matter", tstORG + "\nSome text.\n", []typeText{tstFrontMatterORG, tstSomeText, tstEOF}}, - {"Summary divider ORG", tstORG + "\nSome text.\n# more\nSome text.\n", []typeText{tstFrontMatterORG, tstSomeText, nti(TypeLeadSummaryDivider, "# more\n"), nti(tText, "Some text.\n"), tstEOF}}, - {"Summary divider", "+++\nfoo = \"bar\"\n+++\n\nSome text.\n<!--more-->\nSome text.\n", []typeText{tstFrontMatterTOML, tstSomeText, tstSummaryDivider, nti(tText, "Some text.\n"), tstEOF}}, - {"Summary divider same line", "+++\nfoo = \"bar\"\n+++\n\nSome text.<!--more-->Some text.\n", []typeText{tstFrontMatterTOML, nti(tText, "\nSome text."), nti(TypeLeadSummaryDivider, "<!--more-->"), nti(tText, "Some text.\n"), tstEOF}}, + {"YAML front matter CRLF", "---\r\nfoo: \"bar\"\r\n---\n\nSome text.\n", []typeText{tstFrontMatterYAMLCRLF, tstSomeText, tstEOF}, nil}, + {"TOML front matter", "+++\nfoo = \"bar\"\n+++\n\nSome text.\n", []typeText{tstFrontMatterTOML, tstSomeText, tstEOF}, nil}, + {"JSON front matter", tstJSON + "\r\n\nSome text.\n", []typeText{tstFrontMatterJSON, tstSomeText, tstEOF}, nil}, + {"ORG front matter", tstORG + "\nSome text.\n", []typeText{tstFrontMatterORG, tstSomeText, tstEOF}, nil}, + {"Summary divider ORG", tstORG + 
"\nSome text.\n# more\nSome text.\n", []typeText{tstFrontMatterORG, tstSomeText, nti(TypeLeadSummaryDivider, "# more\n"), nti(tText, "Some text.\n"), tstEOF}, nil}, + {"Summary divider", "+++\nfoo = \"bar\"\n+++\n\nSome text.\n<!--more-->\nSome text.\n", []typeText{tstFrontMatterTOML, tstSomeText, tstSummaryDivider, nti(tText, "Some text.\n"), tstEOF}, nil}, + {"Summary divider same line", "+++\nfoo = \"bar\"\n+++\n\nSome text.<!--more-->Some text.\n", []typeText{tstFrontMatterTOML, nti(tText, "\nSome text."), nti(TypeLeadSummaryDivider, "<!--more-->"), nti(tText, "Some text.\n"), tstEOF}, nil}, // https://github.com/gohugoio/hugo/issues/5402 - {"Summary and shortcode, no space", "+++\nfoo = \"bar\"\n+++\n\nSome text.\n<!--more-->{{< sc1 >}}\nSome text.\n", []typeText{tstFrontMatterTOML, tstSomeText, nti(TypeLeadSummaryDivider, "<!--more-->"), tstLeftNoMD, tstSC1, tstRightNoMD, tstSomeText, tstEOF}}, + {"Summary and shortcode, no space", "+++\nfoo = \"bar\"\n+++\n\nSome text.\n<!--more-->{{< sc1 >}}\nSome text.\n", []typeText{tstFrontMatterTOML, tstSomeText, nti(TypeLeadSummaryDivider, "<!--more-->"), tstLeftNoMD, tstSC1, tstRightNoMD, tstSomeText, tstEOF}, nil}, // https://github.com/gohugoio/hugo/issues/5464 - {"Summary and shortcode only", "+++\nfoo = \"bar\"\n+++\n{{< sc1 >}}\n<!--more-->\n{{< sc2 >}}", []typeText{tstFrontMatterTOML, tstLeftNoMD, tstSC1, tstRightNoMD, tstNewline, tstSummaryDivider, tstLeftNoMD, tstSC2, tstRightNoMD, tstEOF}}, + {"Summary and shortcode only", "+++\nfoo = \"bar\"\n+++\n{{< sc1 >}}\n<!--more-->\n{{< sc2 >}}", []typeText{tstFrontMatterTOML, tstLeftNoMD, tstSC1, tstRightNoMD, tstNewline, tstSummaryDivider, tstLeftNoMD, tstSC2, tstRightNoMD, tstEOF}, nil}, } func TestFrontMatter(t *testing.T) { t.Parallel() c := qt.New(t) for i, test := range frontMatterTests { - items := collect([]byte(test.input), false, lexIntroSection) + items, err := collect([]byte(test.input), false, lexIntroSection) + if err != nil { + c.Assert(err, qt.Equals, 
test.err) + continue + } else { + c.Assert(test.err, qt.IsNil) + } if !equal(test.input, items, test.items) { got := itemsToString(items, []byte(test.input)) expected := testItemsToString(test.items) @@ -124,12 +131,15 @@ func testItemsToString(items []typeText) string { return crLfReplacer.Replace(sb.String()) } -func collectWithConfig(input []byte, skipFrontMatter bool, stateStart stateFunc, cfg Config) (items []Item) { +func collectWithConfig(input []byte, skipFrontMatter bool, stateStart stateFunc, cfg Config) (items []Item, err error) { l := newPageLexer(input, stateStart, cfg) l.run() iter := NewIterator(l.items) for { + if l.err != nil { + return nil, l.err + } item := iter.Next() items = append(items, item) if item.Type == tEOF || item.Type == tError { @@ -139,13 +149,13 @@ func collectWithConfig(input []byte, skipFrontMatter bool, stateStart stateFunc, return } -func collect(input []byte, skipFrontMatter bool, stateStart stateFunc) (items []Item) { +func collect(input []byte, skipFrontMatter bool, stateStart stateFunc) (items []Item, err error) { var cfg Config return collectWithConfig(input, skipFrontMatter, stateStart, cfg) } -func collectStringMain(input string) []Item { +func collectStringMain(input string) ([]Item, error) { return collect([]byte(input), true, lexMainSection) } diff --git a/parser/pageparser/pageparser_shortcode_test.go b/parser/pageparser/pageparser_shortcode_test.go index 26d836e32..327da30ee 100644 --- a/parser/pageparser/pageparser_shortcode_test.go +++ b/parser/pageparser/pageparser_shortcode_test.go @@ -20,46 +20,42 @@ import ( ) var ( - tstEOF = nti(tEOF, "") - tstLeftNoMD = nti(tLeftDelimScNoMarkup, "{{<") - tstRightNoMD = nti(tRightDelimScNoMarkup, ">}}") - tstLeftMD = nti(tLeftDelimScWithMarkup, "{{%") - tstRightMD = nti(tRightDelimScWithMarkup, "%}}") - tstSCClose = nti(tScClose, "/") - tstSC1 = nti(tScName, "sc1") - tstSC1Inline = nti(tScNameInline, "sc1.inline") - tstSC2Inline = nti(tScNameInline, "sc2.inline") - tstSC2 = 
nti(tScName, "sc2") - tstSC3 = nti(tScName, "sc3") - tstSCSlash = nti(tScName, "sc/sub") - tstParam1 = nti(tScParam, "param1") - tstParam2 = nti(tScParam, "param2") - tstParamBoolTrue = nti(tScParam, "true") - tstParamBoolFalse = nti(tScParam, "false") - tstParamInt = nti(tScParam, "32") - tstParamFloat = nti(tScParam, "3.14") - tstVal = nti(tScParamVal, "Hello World") - tstText = nti(tText, "Hello World") + tstEOF = nti(tEOF, "") + tstLeftNoMD = nti(tLeftDelimScNoMarkup, "{{<") + tstRightNoMD = nti(tRightDelimScNoMarkup, ">}}") + tstLeftMD = nti(tLeftDelimScWithMarkup, "{{%") + tstRightMD = nti(tRightDelimScWithMarkup, "%}}") + tstSCClose = nti(tScClose, "/") + tstSC1 = nti(tScName, "sc1") + tstSC1Inline = nti(tScNameInline, "sc1.inline") + tstSC2Inline = nti(tScNameInline, "sc2.inline") + tstSC2 = nti(tScName, "sc2") + tstSC3 = nti(tScName, "sc3") + tstSCSlash = nti(tScName, "sc/sub") + tstParam1 = nti(tScParam, "param1") + tstParam2 = nti(tScParam, "param2") + tstVal = nti(tScParamVal, "Hello World") + tstText = nti(tText, "Hello World") ) var shortCodeLexerTests = []lexerTest{ - {"empty", "", []typeText{tstEOF}}, - {"spaces", " \t\n", []typeText{nti(tText, " \t\n"), tstEOF}}, - {"text", `to be or not`, []typeText{nti(tText, "to be or not"), tstEOF}}, - {"no markup", `{{< sc1 >}}`, []typeText{tstLeftNoMD, tstSC1, tstRightNoMD, tstEOF}}, - {"with EOL", "{{< sc1 \n >}}", []typeText{tstLeftNoMD, tstSC1, tstRightNoMD, tstEOF}}, + {"empty", "", []typeText{tstEOF}, nil}, + {"spaces", " \t\n", []typeText{nti(tText, " \t\n"), tstEOF}, nil}, + {"text", `to be or not`, []typeText{nti(tText, "to be or not"), tstEOF}, nil}, + {"no markup", `{{< sc1 >}}`, []typeText{tstLeftNoMD, tstSC1, tstRightNoMD, tstEOF}, nil}, + {"with EOL", "{{< sc1 \n >}}", []typeText{tstLeftNoMD, tstSC1, tstRightNoMD, tstEOF}, nil}, - {"forward slash inside name", `{{< sc/sub >}}`, []typeText{tstLeftNoMD, tstSCSlash, tstRightNoMD, tstEOF}}, + {"forward slash inside name", `{{< sc/sub >}}`, 
[]typeText{tstLeftNoMD, tstSCSlash, tstRightNoMD, tstEOF}, nil}, - {"simple with markup", `{{% sc1 %}}`, []typeText{tstLeftMD, tstSC1, tstRightMD, tstEOF}}, - {"with spaces", `{{< sc1 >}}`, []typeText{tstLeftNoMD, tstSC1, tstRightNoMD, tstEOF}}, - {"indented on new line", "Hello\n {{% sc1 %}}", []typeText{nti(tText, "Hello\n"), nti(tIndentation, " "), tstLeftMD, tstSC1, tstRightMD, tstEOF}}, - {"indented on new line tab", "Hello\n\t{{% sc1 %}}", []typeText{nti(tText, "Hello\n"), nti(tIndentation, "\t"), tstLeftMD, tstSC1, tstRightMD, tstEOF}}, - {"indented on first line", " {{% sc1 %}}", []typeText{nti(tIndentation, " "), tstLeftMD, tstSC1, tstRightMD, tstEOF}}, + {"simple with markup", `{{% sc1 %}}`, []typeText{tstLeftMD, tstSC1, tstRightMD, tstEOF}, nil}, + {"with spaces", `{{< sc1 >}}`, []typeText{tstLeftNoMD, tstSC1, tstRightNoMD, tstEOF}, nil}, + {"indented on new line", "Hello\n {{% sc1 %}}", []typeText{nti(tText, "Hello\n"), nti(tIndentation, " "), tstLeftMD, tstSC1, tstRightMD, tstEOF}, nil}, + {"indented on new line tab", "Hello\n\t{{% sc1 %}}", []typeText{nti(tText, "Hello\n"), nti(tIndentation, "\t"), tstLeftMD, tstSC1, tstRightMD, tstEOF}, nil}, + {"indented on first line", " {{% sc1 %}}", []typeText{nti(tIndentation, " "), tstLeftMD, tstSC1, tstRightMD, tstEOF}, nil}, {"mismatched rightDelim", `{{< sc1 %}}`, []typeText{ tstLeftNoMD, tstSC1, nti(tError, "unrecognized character in shortcode action: U+0025 '%'. 
Note: Parameters with non-alphanumeric args must be quoted"), - }}, + }, nil}, {"inner, markup", `{{% sc1 %}} inner {{% /sc1 %}}`, []typeText{ tstLeftMD, tstSC1, @@ -70,79 +66,79 @@ var shortCodeLexerTests = []lexerTest{ tstSC1, tstRightMD, tstEOF, - }}, + }, nil}, {"close, but no open", `{{< /sc1 >}}`, []typeText{ tstLeftNoMD, nti(tError, "got closing shortcode, but none is open"), - }}, + }, nil}, {"close wrong", `{{< sc1 >}}{{< /another >}}`, []typeText{ tstLeftNoMD, tstSC1, tstRightNoMD, tstLeftNoMD, tstSCClose, nti(tError, "closing tag for shortcode 'another' does not match start tag"), - }}, + }, nil}, {"close, but no open, more", `{{< sc1 >}}{{< /sc1 >}}{{< /another >}}`, []typeText{ tstLeftNoMD, tstSC1, tstRightNoMD, tstLeftNoMD, tstSCClose, tstSC1, tstRightNoMD, tstLeftNoMD, tstSCClose, nti(tError, "closing tag for shortcode 'another' does not match start tag"), - }}, + }, nil}, {"close with extra keyword", `{{< sc1 >}}{{< /sc1 keyword>}}`, []typeText{ tstLeftNoMD, tstSC1, tstRightNoMD, tstLeftNoMD, tstSCClose, tstSC1, nti(tError, "unclosed shortcode"), - }}, + }, nil}, {"float param, positional", `{{< sc1 3.14 >}}`, []typeText{ tstLeftNoMD, tstSC1, nti(tScParam, "3.14"), tstRightNoMD, tstEOF, - }}, + }, nil}, {"float param, named", `{{< sc1 param1=3.14 >}}`, []typeText{ tstLeftNoMD, tstSC1, tstParam1, nti(tScParamVal, "3.14"), tstRightNoMD, tstEOF, - }}, + }, nil}, {"named param, raw string", `{{< sc1 param1=` + "`" + "Hello World" + "`" + " >}}", []typeText{ tstLeftNoMD, tstSC1, tstParam1, nti(tScParamVal, "Hello World"), tstRightNoMD, tstEOF, - }}, + }, nil}, {"float param, named, space before", `{{< sc1 param1= 3.14 >}}`, []typeText{ tstLeftNoMD, tstSC1, tstParam1, nti(tScParamVal, "3.14"), tstRightNoMD, tstEOF, - }}, + }, nil}, {"Youtube id", `{{< sc1 -ziL-Q_456igdO-4 >}}`, []typeText{ tstLeftNoMD, tstSC1, nti(tScParam, "-ziL-Q_456igdO-4"), tstRightNoMD, tstEOF, - }}, + }, nil}, {"non-alphanumerics param quoted", `{{< sc1 "-ziL-.%QigdO-4" >}}`, 
[]typeText{ tstLeftNoMD, tstSC1, nti(tScParam, "-ziL-.%QigdO-4"), tstRightNoMD, tstEOF, - }}, + }, nil}, {"raw string", `{{< sc1` + "`" + "Hello World" + "`" + ` >}}`, []typeText{ tstLeftNoMD, tstSC1, nti(tScParam, "Hello World"), tstRightNoMD, tstEOF, - }}, + }, nil}, {"raw string with newline", `{{< sc1` + "`" + `Hello World` + "`" + ` >}}`, []typeText{ tstLeftNoMD, tstSC1, nti(tScParam, `Hello World`), tstRightNoMD, tstEOF, - }}, + }, nil}, {"raw string with escape character", `{{< sc1` + "`" + `Hello \b World` + "`" + ` >}}`, []typeText{ tstLeftNoMD, tstSC1, nti(tScParam, `Hello \b World`), tstRightNoMD, tstEOF, - }}, + }, nil}, {"two params", `{{< sc1 param1 param2 >}}`, []typeText{ tstLeftNoMD, tstSC1, tstParam1, tstParam2, tstRightNoMD, tstEOF, - }}, + }, nil}, // issue #934 {"self-closing", `{{< sc1 />}}`, []typeText{ tstLeftNoMD, tstSC1, tstSCClose, tstRightNoMD, tstEOF, - }}, + }, nil}, // Issue 2498 {"multiple self-closing", `{{< sc1 />}}{{< sc1 />}}`, []typeText{ tstLeftNoMD, tstSC1, tstSCClose, tstRightNoMD, tstLeftNoMD, tstSC1, tstSCClose, tstRightNoMD, tstEOF, - }}, + }, nil}, {"self-closing with param", `{{< sc1 param1 />}}`, []typeText{ tstLeftNoMD, tstSC1, tstParam1, tstSCClose, tstRightNoMD, tstEOF, - }}, + }, nil}, {"multiple self-closing with param", `{{< sc1 param1 />}}{{< sc1 param1 />}}`, []typeText{ tstLeftNoMD, tstSC1, tstParam1, tstSCClose, tstRightNoMD, tstLeftNoMD, tstSC1, tstParam1, tstSCClose, tstRightNoMD, tstEOF, - }}, + }, nil}, {"multiple different self-closing with param", `{{< sc1 param1 />}}{{< sc2 param1 />}}`, []typeText{ tstLeftNoMD, tstSC1, tstParam1, tstSCClose, tstRightNoMD, tstLeftNoMD, tstSC2, tstParam1, tstSCClose, tstRightNoMD, tstEOF, - }}, + }, nil}, {"nested simple", `{{< sc1 >}}{{< sc2 >}}{{< /sc1 >}}`, []typeText{ tstLeftNoMD, tstSC1, tstRightNoMD, tstLeftNoMD, tstSC2, tstRightNoMD, tstLeftNoMD, tstSCClose, tstSC1, tstRightNoMD, tstEOF, - }}, + }, nil}, {"nested complex", `{{< sc1 >}}ab{{% sc2 param1 %}}cd{{< sc3 
>}}ef{{< /sc3 >}}gh{{% /sc2 %}}ij{{< /sc1 >}}kl`, []typeText{ tstLeftNoMD, tstSC1, tstRightNoMD, nti(tText, "ab"), @@ -156,30 +152,31 @@ var shortCodeLexerTests = []lexerTest{ nti(tText, "ij"), tstLeftNoMD, tstSCClose, tstSC1, tstRightNoMD, nti(tText, "kl"), tstEOF, - }}, + }, nil}, {"two quoted params", `{{< sc1 "param nr. 1" "param nr. 2" >}}`, []typeText{ tstLeftNoMD, tstSC1, nti(tScParam, "param nr. 1"), nti(tScParam, "param nr. 2"), tstRightNoMD, tstEOF, - }}, + }, nil}, {"two named params", `{{< sc1 param1="Hello World" param2="p2Val">}}`, []typeText{ tstLeftNoMD, tstSC1, tstParam1, tstVal, tstParam2, nti(tScParamVal, "p2Val"), tstRightNoMD, tstEOF, - }}, + }, nil}, {"escaped quotes", `{{< sc1 param1=\"Hello World\" >}}`, []typeText{ tstLeftNoMD, tstSC1, tstParam1, tstVal, tstRightNoMD, tstEOF, - }}, + }, nil}, {"escaped quotes, positional param", `{{< sc1 \"param1\" >}}`, []typeText{ tstLeftNoMD, tstSC1, tstParam1, tstRightNoMD, tstEOF, - }}, + }, nil}, {"escaped quotes inside escaped quotes", `{{< sc1 param1=\"Hello \"escaped\" World\" >}}`, []typeText{ tstLeftNoMD, tstSC1, tstParam1, nti(tScParamVal, `Hello `), nti(tError, `got positional parameter 'escaped'. 
Cannot mix named and positional parameters`), - }}, + }, nil}, { "escaped quotes inside nonescaped quotes", `{{< sc1 param1="Hello \"escaped\" World" >}}`, []typeText{ tstLeftNoMD, tstSC1, tstParam1, nti(tScParamVal, `Hello "escaped" World`), tstRightNoMD, tstEOF, }, + nil, }, { "escaped quotes inside nonescaped quotes in positional param", @@ -187,68 +184,69 @@ var shortCodeLexerTests = []lexerTest{ []typeText{ tstLeftNoMD, tstSC1, nti(tScParam, `Hello "escaped" World`), tstRightNoMD, tstEOF, }, + nil, }, {"escaped raw string, named param", `{{< sc1 param1=` + `\` + "`" + "Hello World" + `\` + "`" + ` >}}`, []typeText{ tstLeftNoMD, tstSC1, tstParam1, nti(tError, "unrecognized escape character"), - }}, + }, nil}, {"escaped raw string, positional param", `{{< sc1 param1 ` + `\` + "`" + "Hello World" + `\` + "`" + ` >}}`, []typeText{ tstLeftNoMD, tstSC1, tstParam1, nti(tError, "unrecognized escape character"), - }}, + }, nil}, {"two raw string params", `{{< sc1` + "`" + "Hello World" + "`" + "`" + "Second Param" + "`" + ` >}}`, []typeText{ tstLeftNoMD, tstSC1, nti(tScParam, "Hello World"), nti(tScParam, "Second Param"), tstRightNoMD, tstEOF, - }}, + }, nil}, {"unterminated quote", `{{< sc1 param2="Hello World>}}`, []typeText{ tstLeftNoMD, tstSC1, tstParam2, nti(tError, "unterminated quoted string in shortcode parameter-argument: 'Hello World>}}'"), - }}, + }, nil}, {"unterminated raw string", `{{< sc1` + "`" + "Hello World" + ` >}}`, []typeText{ tstLeftNoMD, tstSC1, nti(tError, "unterminated raw string in shortcode parameter-argument: 'Hello World >}}'"), - }}, + }, nil}, {"unterminated raw string in second argument", `{{< sc1` + "`" + "Hello World" + "`" + "`" + "Second Param" + ` >}}`, []typeText{ tstLeftNoMD, tstSC1, nti(tScParam, "Hello World"), nti(tError, "unterminated raw string in shortcode parameter-argument: 'Second Param >}}'"), - }}, + }, nil}, {"one named param, one not", `{{< sc1 param1="Hello World" p2 >}}`, []typeText{ tstLeftNoMD, tstSC1, tstParam1, 
tstVal, nti(tError, "got positional parameter 'p2'. Cannot mix named and positional parameters"), - }}, + }, nil}, {"one named param, one quoted positional param, both raw strings", `{{< sc1 param1=` + "`" + "Hello World" + "`" + "`" + "Second Param" + "`" + ` >}}`, []typeText{ tstLeftNoMD, tstSC1, tstParam1, tstVal, nti(tError, "got quoted positional parameter. Cannot mix named and positional parameters"), - }}, + }, nil}, {"one named param, one quoted positional param", `{{< sc1 param1="Hello World" "And Universe" >}}`, []typeText{ tstLeftNoMD, tstSC1, tstParam1, tstVal, nti(tError, "got quoted positional parameter. Cannot mix named and positional parameters"), - }}, + }, nil}, {"one quoted positional param, one named param", `{{< sc1 "param1" param2="And Universe" >}}`, []typeText{ tstLeftNoMD, tstSC1, tstParam1, nti(tError, "got named parameter 'param2'. Cannot mix named and positional parameters"), - }}, + }, nil}, {"ono positional param, one not", `{{< sc1 param1 param2="Hello World">}}`, []typeText{ tstLeftNoMD, tstSC1, tstParam1, nti(tError, "got named parameter 'param2'. 
Cannot mix named and positional parameters"), - }}, + }, nil}, {"commented out", `{{</* sc1 */>}}`, []typeText{ nti(tText, "{{<"), nti(tText, " sc1 "), nti(tText, ">}}"), tstEOF, - }}, + }, nil}, {"commented out, with asterisk inside", `{{</* sc1 "**/*.pdf" */>}}`, []typeText{ nti(tText, "{{<"), nti(tText, " sc1 \"**/*.pdf\" "), nti(tText, ">}}"), tstEOF, - }}, + }, nil}, {"commented out, missing close", `{{</* sc1 >}}`, []typeText{ nti(tError, "comment must be closed"), - }}, + }, nil}, {"commented out, misplaced close", `{{</* sc1 >}}*/`, []typeText{ nti(tError, "comment must be closed"), - }}, + }, nil}, // Inline shortcodes - {"basic inline", `{{< sc1.inline >}}Hello World{{< /sc1.inline >}}`, []typeText{tstLeftNoMD, tstSC1Inline, tstRightNoMD, tstText, tstLeftNoMD, tstSCClose, tstSC1Inline, tstRightNoMD, tstEOF}}, - {"basic inline with space", `{{< sc1.inline >}}Hello World{{< / sc1.inline >}}`, []typeText{tstLeftNoMD, tstSC1Inline, tstRightNoMD, tstText, tstLeftNoMD, tstSCClose, tstSC1Inline, tstRightNoMD, tstEOF}}, - {"inline self closing", `{{< sc1.inline >}}Hello World{{< /sc1.inline >}}Hello World{{< sc1.inline />}}`, []typeText{tstLeftNoMD, tstSC1Inline, tstRightNoMD, tstText, tstLeftNoMD, tstSCClose, tstSC1Inline, tstRightNoMD, tstText, tstLeftNoMD, tstSC1Inline, tstSCClose, tstRightNoMD, tstEOF}}, + {"basic inline", `{{< sc1.inline >}}Hello World{{< /sc1.inline >}}`, []typeText{tstLeftNoMD, tstSC1Inline, tstRightNoMD, tstText, tstLeftNoMD, tstSCClose, tstSC1Inline, tstRightNoMD, tstEOF}, nil}, + {"basic inline with space", `{{< sc1.inline >}}Hello World{{< / sc1.inline >}}`, []typeText{tstLeftNoMD, tstSC1Inline, tstRightNoMD, tstText, tstLeftNoMD, tstSCClose, tstSC1Inline, tstRightNoMD, tstEOF}, nil}, + {"inline self closing", `{{< sc1.inline >}}Hello World{{< /sc1.inline >}}Hello World{{< sc1.inline />}}`, []typeText{tstLeftNoMD, tstSC1Inline, tstRightNoMD, tstText, tstLeftNoMD, tstSCClose, tstSC1Inline, tstRightNoMD, tstText, tstLeftNoMD, 
tstSC1Inline, tstSCClose, tstRightNoMD, tstEOF}, nil}, {"inline self closing, then a new inline", `{{< sc1.inline >}}Hello World{{< /sc1.inline >}}Hello World{{< sc1.inline />}}{{< sc2.inline >}}Hello World{{< /sc2.inline >}}`, []typeText{ tstLeftNoMD, tstSC1Inline, tstRightNoMD, tstText, tstLeftNoMD, tstSCClose, tstSC1Inline, tstRightNoMD, tstText, tstLeftNoMD, tstSC1Inline, tstSCClose, tstRightNoMD, tstLeftNoMD, tstSC2Inline, tstRightNoMD, tstText, tstLeftNoMD, tstSCClose, tstSC2Inline, tstRightNoMD, tstEOF, - }}, - {"inline with template syntax", `{{< sc1.inline >}}{{ .Get 0 }}{{ .Get 1 }}{{< /sc1.inline >}}`, []typeText{tstLeftNoMD, tstSC1Inline, tstRightNoMD, nti(tText, "{{ .Get 0 }}"), nti(tText, "{{ .Get 1 }}"), tstLeftNoMD, tstSCClose, tstSC1Inline, tstRightNoMD, tstEOF}}, - {"inline with nested shortcode (not supported)", `{{< sc1.inline >}}Hello World{{< sc1 >}}{{< /sc1.inline >}}`, []typeText{tstLeftNoMD, tstSC1Inline, tstRightNoMD, tstText, nti(tError, "inline shortcodes do not support nesting")}}, - {"inline case mismatch", `{{< sc1.Inline >}}Hello World{{< /sc1.Inline >}}`, []typeText{tstLeftNoMD, nti(tError, "period in shortcode name only allowed for inline identifiers")}}, + }, nil}, + {"inline with template syntax", `{{< sc1.inline >}}{{ .Get 0 }}{{ .Get 1 }}{{< /sc1.inline >}}`, []typeText{tstLeftNoMD, tstSC1Inline, tstRightNoMD, nti(tText, "{{ .Get 0 }}"), nti(tText, "{{ .Get 1 }}"), tstLeftNoMD, tstSCClose, tstSC1Inline, tstRightNoMD, tstEOF}, nil}, + {"inline with nested shortcode (not supported)", `{{< sc1.inline >}}Hello World{{< sc1 >}}{{< /sc1.inline >}}`, []typeText{tstLeftNoMD, tstSC1Inline, tstRightNoMD, tstText, nti(tError, "inline shortcodes do not support nesting")}, nil}, + {"inline case mismatch", `{{< sc1.Inline >}}Hello World{{< /sc1.Inline >}}`, []typeText{tstLeftNoMD, nti(tError, "period in shortcode name only allowed for inline identifiers")}, nil}, } func TestShortcodeLexer(t *testing.T) { @@ -256,7 +254,8 @@ func 
TestShortcodeLexer(t *testing.T) { c := qt.New(t) for i, test := range shortCodeLexerTests { t.Run(test.name, func(t *testing.T) { - items := collect([]byte(test.input), true, lexMainSection) + items, err := collect([]byte(test.input), true, lexMainSection) + c.Assert(err, qt.IsNil) if !equal(test.input, items, test.items) { got := itemsToString(items, []byte(test.input)) expected := testItemsToString(test.items) @@ -275,8 +274,9 @@ func BenchmarkShortcodeLexer(b *testing.B) { b.ResetTimer() for i := 0; i < b.N; i++ { for _, input := range testInputs { - items := collectWithConfig(input, true, lexMainSection, cfg) - if len(items) == 0 { + _, err := collectWithConfig(input, true, lexMainSection, cfg) + if err != nil { + b.Fatal(err) } } diff --git a/parser/pageparser/pageparser_test.go b/parser/pageparser/pageparser_test.go index c58018f0e..a50ab46e9 100644 --- a/parser/pageparser/pageparser_test.go +++ b/parser/pageparser/pageparser_test.go @@ -68,7 +68,8 @@ func TestIsProbablyItemsSource(t *testing.T) { c := qt.New(t) input := ` {{< foo >}} ` - items := collectStringMain(input) + items, err := collectStringMain(input) + c.Assert(err, qt.IsNil) c.Assert(IsProbablySourceOfItems([]byte(input), items), qt.IsTrue) c.Assert(IsProbablySourceOfItems(bytes.Repeat([]byte(" "), len(input)), items), qt.IsFalse) @@ -83,7 +84,6 @@ func TestHasShortcode(t *testing.T) { c.Assert(HasShortcode("aSDasd SDasd aSD\n\nasdfadf{{% foo %}}\nasdf"), qt.IsTrue) c.Assert(HasShortcode("{{</* foo */>}}"), qt.IsFalse) c.Assert(HasShortcode("{{%/* foo */%}}"), qt.IsFalse) - } func BenchmarkHasShortcode(b *testing.B) { @@ -100,5 +100,4 @@ func BenchmarkHasShortcode(b *testing.B) { HasShortcode(withoutShortcode) } }) - } diff --git a/publisher/publisher.go b/publisher/publisher.go index 39274b2a9..bbe65ff8a 100644 --- a/publisher/publisher.go +++ b/publisher/publisher.go @@ -169,7 +169,7 @@ func (p DestinationPublisher) createTransformerChain(f Descriptor) transform.Cha if isHTML { if 
f.LiveReloadBaseURL != nil { - transformers = append(transformers, livereloadinject.New(*f.LiveReloadBaseURL)) + transformers = append(transformers, livereloadinject.New(f.LiveReloadBaseURL)) } // This is only injected on the home page. diff --git a/related/integration_test.go b/related/integration_test.go index 4cb537f1f..2c71c1d1a 100644 --- a/related/integration_test.go +++ b/related/integration_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -135,7 +135,6 @@ Related 2: 2 "Related 1: 0: p2: h0: First title|ref1|::END", "Related 2: 0: p5: h0: Common p3, p4, p5|common-p3-p4-p5|::END 1: p4: h0: Common p3, p4, p5|common-p3-p4-p5|::END", ) - } func BenchmarkRelatedSite(b *testing.B) { @@ -170,7 +169,6 @@ keywords: ['k%d'] } return fmt.Sprintf(base, n, rand.Intn(32)) - } for i := 1; i < 100; i++ { diff --git a/related/inverted_index.go b/related/inverted_index.go index fcebdc716..7e171cf53 100644 --- a/related/inverted_index.go +++ b/related/inverted_index.go @@ -265,7 +265,6 @@ func (idx *InvertedIndex) Finalize(ctx context.Context) error { idx.finalized = true return nil - } // queryElement holds the index name and keywords that can be used to compose a @@ -346,7 +345,6 @@ type SearchOpts struct { // threshold (normalize to 0..100) will be removed. // If an index name is provided, only that index will be queried. 
func (idx *InvertedIndex) Search(ctx context.Context, opts SearchOpts) ([]Document, error) { - var ( queryElements []queryElement configs IndicesConfig @@ -379,7 +377,6 @@ func (idx *InvertedIndex) Search(ctx context.Context, opts SearchOpts) ([]Docume keywords = append(keywords, FragmentKeyword(fragment)) } if opts.Document != nil { - if fp, ok := opts.Document.(FragmentProvider); ok { for _, fragment := range fp.Fragments(ctx).Identifiers { keywords = append(keywords, FragmentKeyword(fragment)) diff --git a/resources/docs.go b/resources/docs.go index f992893da..16fe34027 100644 --- a/resources/docs.go +++ b/resources/docs.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/resources/image.go b/resources/image.go index 6c34795f8..2e351bd28 100644 --- a/resources/image.go +++ b/resources/image.go @@ -20,25 +20,23 @@ import ( "image/color" "image/draw" "image/gif" - _ "image/gif" _ "image/png" "io" "os" - "path" - "path/filepath" "strings" "sync" color_extractor "github.com/marekm4/color-extractor" + "github.com/gohugoio/hugo/cache/filecache" "github.com/gohugoio/hugo/common/hstrings" "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/identity" "github.com/disintegration/gift" - "github.com/gohugoio/hugo/cache/filecache" "github.com/gohugoio/hugo/resources/images/exif" + "github.com/gohugoio/hugo/resources/internal" "github.com/gohugoio/hugo/resources/resource" @@ -50,9 +48,10 @@ import ( ) var ( - _ images.ImageResource = (*imageResource)(nil) - _ resource.Source = (*imageResource)(nil) - _ resource.Cloner = (*imageResource)(nil) + _ images.ImageResource = (*imageResource)(nil) + _ resource.Source = (*imageResource)(nil) + _ resource.Cloner = (*imageResource)(nil) + _ resource.NameOriginalProvider = (*imageResource)(nil) 
) // imageResource represents an image resource. @@ -107,6 +106,7 @@ func (i *imageResource) getExif() *exif.ExifInfo { } create := func(info filecache.ItemInfo, w io.WriteCloser) (err error) { + defer w.Close() f, err := i.root.ReadSeekCloser() if err != nil { i.metaInitErr = err @@ -127,7 +127,7 @@ func (i *imageResource) getExif() *exif.ExifInfo { return enc.Encode(i.meta) } - _, i.metaInitErr = i.getSpec().ImageCache.fileCache.ReadOrCreate(key, read, create) + _, i.metaInitErr = i.getSpec().ImageCache.fcache.ReadOrCreate(key, read, create) }) if i.metaInitErr != nil { @@ -369,17 +369,14 @@ func (i *imageResource) doWithImageConfig(conf images.ImageConfig, f func(src im <-imageProcSem }() - errOp := conf.Action - errPath := i.getSourceFilename() - src, err := i.DecodeImage() if err != nil { - return nil, nil, &os.PathError{Op: errOp, Path: errPath, Err: err} + return nil, nil, &os.PathError{Op: conf.Action, Path: i.TargetPath(), Err: err} } converted, err := f(src) if err != nil { - return nil, nil, &os.PathError{Op: errOp, Path: errPath, Err: err} + return nil, nil, &os.PathError{Op: conf.Action, Path: i.TargetPath(), Err: err} } hasAlpha := !images.IsOpaque(converted) @@ -414,16 +411,15 @@ func (i *imageResource) doWithImageConfig(conf images.ImageConfig, f func(src im } ci := i.clone(converted) - ci.setBasePath(conf) + targetPath := i.relTargetPathFromConfig(conf) + ci.setTargetPath(targetPath) ci.Format = conf.TargetFormat ci.setMediaType(conf.TargetFormat.MediaType()) return ci, converted, nil }) if err != nil { - if i.root != nil && i.root.getFileInfo() != nil { - return nil, fmt.Errorf("image %q: %w", i.root.getFileInfo().Meta().Filename, err) - } + return nil, err } return img, nil } @@ -474,32 +470,25 @@ func (i *imageResource) clone(img image.Image) *imageResource { } } -func (i *imageResource) setBasePath(conf images.ImageConfig) { - i.getResourcePaths().relTargetDirFile = i.relTargetPathFromConfig(conf) -} - func (i *imageResource) 
getImageMetaCacheTargetPath() string { const imageMetaVersionNumber = 1 // Increment to invalidate the meta cache cfgHash := i.getSpec().imaging.Cfg.SourceHash - df := i.getResourcePaths().relTargetDirFile - if fi := i.getFileInfo(); fi != nil { - df.dir = filepath.Dir(fi.Meta().Path) - } - p1, _ := paths.FileAndExt(df.file) - h, _ := i.hash() + df := i.getResourcePaths() + p1, _ := paths.FileAndExt(df.File) + h := i.hash() idStr := identity.HashString(h, i.size(), imageMetaVersionNumber, cfgHash) - p := path.Join(df.dir, fmt.Sprintf("%s_%s.json", p1, idStr)) - return p + df.File = fmt.Sprintf("%s_%s.json", p1, idStr) + return df.TargetPath() } -func (i *imageResource) relTargetPathFromConfig(conf images.ImageConfig) dirFile { - p1, p2 := paths.FileAndExt(i.getResourcePaths().relTargetDirFile.file) +func (i *imageResource) relTargetPathFromConfig(conf images.ImageConfig) internal.ResourcePaths { + p1, p2 := paths.FileAndExt(i.getResourcePaths().File) if conf.TargetFormat != i.Format { p2 = conf.TargetFormat.DefaultExtension() } - h, _ := i.hash() + h := i.hash() idStr := fmt.Sprintf("_hu%s_%d", h, i.size()) // Do not change for no good reason. 
@@ -526,8 +515,8 @@ func (i *imageResource) relTargetPathFromConfig(conf images.ImageConfig) dirFile idStr = "" } - return dirFile{ - dir: i.getResourcePaths().relTargetDirFile.dir, - file: fmt.Sprintf("%s%s_%s%s", p1, idStr, key, p2), - } + rp := i.getResourcePaths() + rp.File = fmt.Sprintf("%s%s_%s%s", p1, idStr, key, p2) + + return rp } diff --git a/resources/image_cache.go b/resources/image_cache.go index f416f0230..f9770ffc1 100644 --- a/resources/image_cache.go +++ b/resources/image_cache.go @@ -16,12 +16,11 @@ package resources import ( "image" "io" - "path/filepath" - "strings" - "sync" + "github.com/gohugoio/hugo/common/hugio" "github.com/gohugoio/hugo/resources/images" + "github.com/gohugoio/hugo/cache/dynacache" "github.com/gohugoio/hugo/cache/filecache" "github.com/gohugoio/hugo/helpers" ) @@ -30,132 +29,88 @@ import ( type ImageCache struct { pathSpec *helpers.PathSpec - fileCache *filecache.Cache - - *imageCacheStore -} - -type imageCacheStore struct { - mu sync.RWMutex - store map[string]*resourceAdapter -} - -// WithPathSpec returns a copy of the ImageCache with the given PathSpec set. -func (c ImageCache) WithPathSpec(ps *helpers.PathSpec) *ImageCache { - c.pathSpec = ps - return &c -} - -func (c *ImageCache) deleteIfContains(s string) { - c.mu.Lock() - defer c.mu.Unlock() - s = c.normalizeKeyBase(s) - for k := range c.store { - if strings.Contains(k, s) { - delete(c.store, k) - } - } -} - -// The cache key is a lowercase path with Unix style slashes and it always starts with -// a leading slash. 
-func (c *ImageCache) normalizeKey(key string) string { - return "/" + c.normalizeKeyBase(key) -} - -func (c *ImageCache) normalizeKeyBase(key string) string { - return strings.Trim(strings.ToLower(filepath.ToSlash(key)), "/") -} - -func (c *ImageCache) clear() { - c.mu.Lock() - defer c.mu.Unlock() - c.store = make(map[string]*resourceAdapter) + fcache *filecache.Cache + mcache *dynacache.Partition[string, *resourceAdapter] } func (c *ImageCache) getOrCreate( parent *imageResource, conf images.ImageConfig, - createImage func() (*imageResource, image.Image, error)) (*resourceAdapter, error) { + createImage func() (*imageResource, image.Image, error), +) (*resourceAdapter, error) { relTarget := parent.relTargetPathFromConfig(conf) - memKey := parent.relTargetPathForRel(relTarget.path(), false, false, false) - memKey = c.normalizeKey(memKey) - - // For the file cache we want to generate and store it once if possible. - fileKeyPath := relTarget - if fi := parent.root.getFileInfo(); fi != nil { - fileKeyPath.dir = filepath.ToSlash(filepath.Dir(fi.Meta().Path)) - } - fileKey := fileKeyPath.path() - - // First check the in-memory store, then the disk. - c.mu.RLock() - cachedImage, found := c.store[memKey] - c.mu.RUnlock() - - if found { - return cachedImage, nil - } - - var img *imageResource + relTargetPath := relTarget.TargetPath() + memKey := dynacache.CleanKey(relTargetPath) + + v, err := c.mcache.GetOrCreate(memKey, func(key string) (*resourceAdapter, error) { + var img *imageResource + + // These funcs are protected by a named lock. + // read clones the parent to its new name and copies + // the content to the destinations. 
+ read := func(info filecache.ItemInfo, r io.ReadSeeker) error { + img = parent.clone(nil) + targetPath := img.getResourcePaths() + targetPath.File = relTarget.File + img.setTargetPath(targetPath) + img.setOpenSource(func() (hugio.ReadSeekCloser, error) { + return c.fcache.Fs.Open(info.Name) + }) + img.setSourceFilenameIsHash(true) + img.setMediaType(conf.TargetFormat.MediaType()) + + if err := img.InitConfig(r); err != nil { + return err + } + + return nil + } - // These funcs are protected by a named lock. - // read clones the parent to its new name and copies - // the content to the destinations. - read := func(info filecache.ItemInfo, r io.ReadSeeker) error { - img = parent.clone(nil) - rp := img.getResourcePaths() - rp.relTargetDirFile.file = relTarget.file - img.setSourceFilename(info.Name) - img.setSourceFilenameIsHash(true) - img.setMediaType(conf.TargetFormat.MediaType()) + // create creates the image and encodes it to the cache (w). + create := func(info filecache.ItemInfo, w io.WriteCloser) (err error) { + defer w.Close() + + var conv image.Image + img, conv, err = createImage() + if err != nil { + return + } + targetPath := img.getResourcePaths() + targetPath.File = relTarget.File + img.setTargetPath(targetPath) + img.setOpenSource(func() (hugio.ReadSeekCloser, error) { + return c.fcache.Fs.Open(info.Name) + }) + return img.EncodeTo(conf, conv, w) + } - return img.InitConfig(r) - } + // Now look in the file cache. - // create creates the image and encodes it to the cache (w). - create := func(info filecache.ItemInfo, w io.WriteCloser) (err error) { - defer w.Close() + // The definition of this counter is not that we have processed that amount + // (e.g. resized etc.), it can be fetched from file cache, + // but the count of processed image variations for this site. 
+ c.pathSpec.ProcessingStats.Incr(&c.pathSpec.ProcessingStats.ProcessedImages) - var conv image.Image - img, conv, err = createImage() + _, err := c.fcache.ReadOrCreate(relTargetPath, read, create) if err != nil { - return + return nil, err } - rp := img.getResourcePaths() - rp.relTargetDirFile.file = relTarget.file - img.setSourceFilename(info.Name) - - return img.EncodeTo(conf, conv, w) - } - - // Now look in the file cache. - - // The definition of this counter is not that we have processed that amount - // (e.g. resized etc.), it can be fetched from file cache, - // but the count of processed image variations for this site. - c.pathSpec.ProcessingStats.Incr(&c.pathSpec.ProcessingStats.ProcessedImages) - - _, err := c.fileCache.ReadOrCreate(fileKey, read, create) - if err != nil { - return nil, err - } - - // The file is now stored in this cache. - img.setSourceFs(c.fileCache.Fs) - c.mu.Lock() - if cachedImage, found = c.store[memKey]; found { - c.mu.Unlock() - return cachedImage, nil - } + imgAdapter := newResourceAdapter(parent.getSpec(), true, img) - imgAdapter := newResourceAdapter(parent.getSpec(), true, img) - c.store[memKey] = imgAdapter - c.mu.Unlock() + return imgAdapter, nil + }) - return imgAdapter, nil + return v, err } -func newImageCache(fileCache *filecache.Cache, ps *helpers.PathSpec) *ImageCache { - return &ImageCache{fileCache: fileCache, pathSpec: ps, imageCacheStore: &imageCacheStore{store: make(map[string]*resourceAdapter)}} +func newImageCache(fileCache *filecache.Cache, memCache *dynacache.Cache, ps *helpers.PathSpec) *ImageCache { + return &ImageCache{ + fcache: fileCache, + mcache: dynacache.GetOrCreatePartition[string, *resourceAdapter]( + memCache, + "/imgs", + dynacache.OptionsPartition{ClearWhen: dynacache.ClearOnChange, Weight: 70}, + ), + pathSpec: ps, + } } diff --git a/resources/image_extended_test.go b/resources/image_extended_test.go index 4da603fc4..429e51fb6 100644 --- a/resources/image_extended_test.go +++ 
b/resources/image_extended_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/resources/image_test.go b/resources/image_test.go index 96cc07b3b..44861d629 100644 --- a/resources/image_test.go +++ b/resources/image_test.go @@ -22,7 +22,6 @@ import ( "math/big" "math/rand" "os" - "path" "path/filepath" "runtime" "strconv" @@ -31,7 +30,6 @@ import ( "testing" "time" - "github.com/gohugoio/hugo/resources" "github.com/gohugoio/hugo/resources/images/webp" "github.com/gohugoio/hugo/common/paths" @@ -80,8 +78,7 @@ var eq = qt.CmpEquals( func TestImageTransformBasic(t *testing.T) { c := qt.New(t) - spec, image := fetchSunset(c) - fileCache := spec.FileCaches.ImageCache().Fs + _, image := fetchSunset(c) assertWidthHeight := func(img images.ImageResource, w, h int) { assertWidthHeight(c, img, w, h) @@ -104,12 +101,10 @@ func TestImageTransformBasic(t *testing.T) { resized0x, err := image.Resize("x200") c.Assert(err, qt.IsNil) assertWidthHeight(resized0x, 320, 200) - assertFileCache(c, fileCache, path.Base(resized0x.RelPermalink()), 320, 200) resizedx0, err := image.Resize("200x") c.Assert(err, qt.IsNil) assertWidthHeight(resizedx0, 200, 125) - assertFileCache(c, fileCache, path.Base(resizedx0.RelPermalink()), 200, 125) resizedAndRotated, err := image.Resize("x200 r90") c.Assert(err, qt.IsNil) @@ -203,8 +198,7 @@ func TestImageProcess(t *testing.T) { func TestImageTransformFormat(t *testing.T) { c := qt.New(t) - spec, image := fetchSunset(c) - fileCache := spec.FileCaches.ImageCache().Fs + _, image := fetchSunset(c) assertExtWidthHeight := func(img images.ImageResource, ext string, w, h int) { c.Helper() @@ -226,8 +220,6 @@ func TestImageTransformFormat(t *testing.T) { c.Assert(imagePng.Name(), qt.Equals, "sunset.jpg") 
c.Assert(imagePng.MediaType().String(), qt.Equals, "image/png") - assertFileCache(c, fileCache, path.Base(imagePng.RelPermalink()), 450, 281) - imageGif, err := image.Resize("225x gif") c.Assert(err, qt.IsNil) c.Assert(imageGif.RelPermalink(), qt.Equals, "/a/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_225x0_resize_linear.gif") @@ -235,8 +227,6 @@ func TestImageTransformFormat(t *testing.T) { assertExtWidthHeight(imageGif, ".gif", 225, 141) c.Assert(imageGif.Name(), qt.Equals, "sunset.jpg") c.Assert(imageGif.MediaType().String(), qt.Equals, "image/gif") - - assertFileCache(c, fileCache, path.Base(imageGif.RelPermalink()), 225, 141) } // https://github.com/gohugoio/hugo/issues/5730 @@ -275,7 +265,7 @@ func TestImagePermalinkPublishOrder(t *testing.T) { resized, err := original.Resize("100x50") c.Assert(err, qt.IsNil) - check1(resized.(images.ImageResource)) + check1(resized) if !checkOriginalFirst { check2(original) @@ -386,27 +376,6 @@ func TestImageTransformConcurrent(t *testing.T) { wg.Wait() } -func TestImageWithMetadata(t *testing.T) { - c := qt.New(t) - - _, image := fetchSunset(c) - - meta := []map[string]any{ - { - "title": "My Sunset", - "name": "Sunset #:counter", - "src": "*.jpg", - }, - } - - c.Assert(resources.AssignMetadata(meta, image), qt.IsNil) - c.Assert(image.Name(), qt.Equals, "Sunset #1") - - resized, err := image.Resize("200x") - c.Assert(err, qt.IsNil) - c.Assert(resized.Name(), qt.Equals, "Sunset #1") -} - func TestImageResize8BitPNG(t *testing.T) { c := qt.New(t) @@ -424,38 +393,6 @@ func TestImageResize8BitPNG(t *testing.T) { c.Assert(resized.Width(), qt.Equals, 800) } -func TestImageResizeInSubPath(t *testing.T) { - c := qt.New(t) - - spec, image := fetchImage(c, "sub/gohugoio2.png") - - c.Assert(image.MediaType(), eq, media.Builtin.PNGType) - c.Assert(image.RelPermalink(), qt.Equals, "/a/sub/gohugoio2.png") - c.Assert(image.ResourceType(), qt.Equals, "image") - c.Assert(image.Exif(), qt.IsNil) - - resized, err := 
image.Resize("101x101") - c.Assert(err, qt.IsNil) - c.Assert(resized.MediaType().Type, qt.Equals, "image/png") - c.Assert(resized.RelPermalink(), qt.Equals, "/a/sub/gohugoio2_hu0e1b9e4a4be4d6f86c7b37b9ccce3fbc_73886_101x101_resize_linear_3.png") - c.Assert(resized.Width(), qt.Equals, 101) - c.Assert(resized.Exif(), qt.IsNil) - - publishedImageFilename := filepath.Clean(resized.RelPermalink()) - - assertImageFile(c, spec.BaseFs.PublishFs, publishedImageFilename, 101, 101) - c.Assert(spec.BaseFs.PublishFs.Remove(publishedImageFilename), qt.IsNil) - - // Clear mem cache to simulate reading from the file cache. - spec.ClearCaches() - - resizedAgain, err := image.Resize("101x101") - c.Assert(err, qt.IsNil) - c.Assert(resizedAgain.RelPermalink(), qt.Equals, "/a/sub/gohugoio2_hu0e1b9e4a4be4d6f86c7b37b9ccce3fbc_73886_101x101_resize_linear_3.png") - c.Assert(resizedAgain.Width(), qt.Equals, 101) - assertImageFile(c, spec.BaseFs.PublishFs, publishedImageFilename, 101, 101) -} - func TestSVGImage(t *testing.T) { c := qt.New(t) spec := newTestResourceSpec(specDescriptor{c: c}) @@ -640,7 +577,7 @@ func TestImageOperationsGoldenWebp(t *testing.T) { return } - dir1 := filepath.Join(workDir, "resources/_gen/images") + dir1 := filepath.Join(workDir, "resources/_gen/images/a") dir2 := filepath.FromSlash("testdata/golden_webp") assetGoldenDirs(c, dir1, dir2) @@ -694,8 +631,10 @@ func TestImageOperationsGolden(t *testing.T) { opacity30, err := orig.Filter(f.Opacity(30)) c.Assert(err, qt.IsNil) overlay, err := sunset.Filter(f.Overlay(opacity30.(images.ImageSource), 20, 20)) + c.Assert(err, qt.IsNil) rel := overlay.RelPermalink() c.Assert(rel, qt.Not(qt.Equals), "") + } // A simple Gif file (no animation). 
@@ -782,7 +721,7 @@ func TestImageOperationsGolden(t *testing.T) { return } - dir1 := filepath.Join(workDir, "resources/_gen/images") + dir1 := filepath.Join(workDir, "resources/_gen/images/a/") dir2 := filepath.FromSlash("testdata/golden") assetGoldenDirs(c, dir1, dir2) @@ -798,7 +737,7 @@ func assetGoldenDirs(c *qt.C, dir1, dir2 string) { for i, fi1 := range dirinfos1 { fi2 := dirinfos2[i] - c.Assert(fi1.Name(), qt.Equals, fi2.Name()) + c.Assert(fi1.Name(), qt.Equals, fi2.Name(), qt.Commentf("i=%d", i)) f1, err := os.Open(filepath.Join(dir1, fi1.Name())) c.Assert(err, qt.IsNil) diff --git a/resources/images/auto_orient.go b/resources/images/auto_orient.go index 194efefb5..ed86979e1 100644 --- a/resources/images/auto_orient.go +++ b/resources/images/auto_orient.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/resources/images/exif/exif.go b/resources/images/exif/exif.go index 90198eeed..af92366ca 100644 --- a/resources/images/exif/exif.go +++ b/resources/images/exif/exif.go @@ -117,7 +117,7 @@ func NewDecoder(options ...func(*Decoder) error) (*Decoder, error) { func (d *Decoder) Decode(r io.Reader) (ex *ExifInfo, err error) { defer func() { if r := recover(); r != nil { - err = fmt.Errorf("Exif failed: %v", r) + err = fmt.Errorf("exif failed: %v", r) } }() diff --git a/resources/images/exif/exif_test.go b/resources/images/exif/exif_test.go index 821367550..64c5a39e3 100644 --- a/resources/images/exif/exif_test.go +++ b/resources/images/exif/exif_test.go @@ -58,6 +58,7 @@ func TestExif(t *testing.T) { c.Assert(err, qt.IsNil) x2 := &ExifInfo{} err = json.Unmarshal(data, x2) + c.Assert(err, qt.IsNil) c.Assert(x2, eq, x) } @@ -135,7 +136,6 @@ var eq = qt.CmpEquals( ) func TestIssue10738(t *testing.T) { - c := qt.New(t) testFunc := func(path, include string) any { @@ -153,6 +153,7 @@ func TestIssue10738(t *testing.T) { c.Assert(err, qt.IsNil) x2 := &ExifInfo{} err = json.Unmarshal(data, x2) + c.Assert(err, qt.IsNil) c.Assert(x2, eq, x) @@ -300,15 +301,13 @@ func TestIssue10738(t *testing.T) { for _, tt := range tests { c.Run(tt.name, func(c *qt.C) { got := testFunc(tt.args.path, tt.args.include) - switch got.(type) { + switch v := got.(type) { case float64: - eTime, ok := got.(float64) - c.Assert(ok, qt.Equals, true) - c.Assert(eTime, qt.Equals, float64(tt.want.vN)) + c.Assert(v, qt.Equals, float64(tt.want.vN)) case *big.Rat: - eTime, ok := got.(*big.Rat) - c.Assert(ok, qt.Equals, true) - c.Assert(eTime, eq, big.NewRat(tt.want.vN, tt.want.vD)) + c.Assert(v, eq, big.NewRat(tt.want.vN, tt.want.vD)) + default: + c.Fatalf("unexpected type: %T", got) } }) } diff --git a/resources/images/image_resource.go b/resources/images/image_resource.go index be40418b1..e6be757c2 100644 --- a/resources/images/image_resource.go +++ b/resources/images/image_resource.go @@ -1,4 +1,4 
@@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/resources/images/opacity.go b/resources/images/opacity.go index 4b60e30a4..482476c5b 100644 --- a/resources/images/opacity.go +++ b/resources/images/opacity.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/resources/images/padding.go b/resources/images/padding.go index 153d0bd82..4399312f8 100644 --- a/resources/images/padding.go +++ b/resources/images/padding.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/resources/images/process.go b/resources/images/process.go index 984ac3c8f..fb2e995ce 100644 --- a/resources/images/process.go +++ b/resources/images/process.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/resources/integration_test.go b/resources/integration_test.go index 51a003625..9540b0976 100644 --- a/resources/integration_test.go +++ b/resources/integration_test.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. 
// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -69,7 +69,7 @@ anigif: {{ $anigif.RelPermalink }}|{{ $anigif.Width }}|{{ $anigif.Height }}|{{ $ assertImages() - b.EditFileReplace("content/mybundle/index.md", func(s string) string { return strings.ReplaceAll(s, "Bundle", "BUNDLE") }) + b.EditFileReplaceFunc("content/mybundle/index.md", func(s string) string { return strings.ReplaceAll(s, "Bundle", "BUNDLE") }) b.Build() assertImages() diff --git a/resources/internal/resourcepaths.go b/resources/internal/resourcepaths.go new file mode 100644 index 000000000..21c65e2ca --- /dev/null +++ b/resources/internal/resourcepaths.go @@ -0,0 +1,107 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package internal + +import ( + "path" + "path/filepath" + "strings" + + "github.com/gohugoio/hugo/common/paths" +) + +// ResourcePaths holds path information for a resource. +// All directories in here have Unix-style slashes, with leading slash, but no trailing slash. +// Empty directories are represented with an empty string. +type ResourcePaths struct { + // This is the directory component for the target file or link. + Dir string + + // Any base directory for the target file. Will be prepended to Dir. + BaseDirTarget string + + // This is the directory component for the link. Will be prepended to Dir.
+ BaseDirLink string + + // Set when publishing in a multihost setup. + TargetBasePaths []string + + // This is the File component, e.g. "data.json". + File string +} + +func (d ResourcePaths) join(p ...string) string { + var s string + for i, pp := range p { + if pp == "" { + continue + } + if i > 0 && !strings.HasPrefix(pp, "/") { + pp = "/" + pp + } + s += pp + + } + if !strings.HasPrefix(s, "/") { + s = "/" + s + } + return s +} + +func (d ResourcePaths) TargetLink() string { + return d.join(d.BaseDirLink, d.Dir, d.File) +} + +func (d ResourcePaths) TargetPath() string { + return d.join(d.BaseDirTarget, d.Dir, d.File) +} + +func (d ResourcePaths) Path() string { + return d.join(d.Dir, d.File) +} + +func (d ResourcePaths) TargetPaths() []string { + if len(d.TargetBasePaths) == 0 { + return []string{d.TargetPath()} + } + + var paths []string + for _, p := range d.TargetBasePaths { + paths = append(paths, p+d.TargetPath()) + } + return paths +} + +func (d ResourcePaths) TargetFilenames() []string { + filenames := d.TargetPaths() + for i, p := range filenames { + filenames[i] = filepath.FromSlash(p) + } + return filenames +} + +func (d ResourcePaths) FromTargetPath(targetPath string) ResourcePaths { + targetPath = filepath.ToSlash(targetPath) + dir, file := path.Split(targetPath) + dir = paths.ToSlashPreserveLeading(dir) + if dir == "/" { + dir = "" + } + d.Dir = dir + d.File = file + d.BaseDirLink = "" + d.BaseDirTarget = "" + + return d +} diff --git a/resources/kinds/kinds.go b/resources/kinds/kinds.go index b035cdd29..2660ec719 100644 --- a/resources/kinds/kinds.go +++ b/resources/kinds/kinds.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -34,10 +34,11 @@ const ( // The following are (currently) temporary nodes, // i.e. 
nodes we create just to render in isolation. - KindRSS = "rss" - KindSitemap = "sitemap" - KindRobotsTXT = "robotstxt" - Kind404 = "404" + KindRSS = "rss" + KindSitemap = "sitemap" + KindSitemapIndex = "sitemapindex" + KindRobotsTXT = "robotstxt" + KindStatus404 = "404" ) var ( @@ -77,7 +78,7 @@ var kindMapTemporary = map[string]string{ KindRSS: KindRSS, KindSitemap: KindSitemap, KindRobotsTXT: KindRobotsTXT, - Kind404: Kind404, + KindStatus404: KindStatus404, } // GetKindMain gets the page kind given a string, empty if not found. @@ -94,6 +95,16 @@ func GetKindAny(s string) string { return kindMapTemporary[strings.ToLower(s)] } +// IsBranch returns whether the given kind is a branch node. +func IsBranch(kind string) bool { + switch kind { + case KindHome, KindSection, KindTaxonomy, KindTerm: + return true + default: + return false + } +} + // IsDeprecatedAndReplacedWith returns the new kind if the given kind is deprecated. func IsDeprecatedAndReplacedWith(s string) string { s = strings.ToLower(s) diff --git a/resources/kinds/kinds_test.go b/resources/kinds/kinds_test.go index c2868d617..a0fe42ff8 100644 --- a/resources/kinds/kinds_test.go +++ b/resources/kinds/kinds_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/resources/page/page.go b/resources/page/page.go index b5af489f1..56ba04d74 100644 --- a/resources/page/page.go +++ b/resources/page/page.go @@ -19,16 +19,14 @@ import ( "context" "html/template" - "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/markup/converter" "github.com/gohugoio/hugo/markup/tableofcontents" "github.com/gohugoio/hugo/config" - "github.com/gohugoio/hugo/tpl" "github.com/gohugoio/hugo/common/maps" + "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/compare" - "github.com/gohugoio/hugo/hugofs/files" "github.com/gohugoio/hugo/navigation" "github.com/gohugoio/hugo/related" @@ -122,7 +120,7 @@ type ContentRenderer interface { type FileProvider interface { // File returns the source file for this Page, // or a zero File if this Page is not backed by a file. - File() source.File + File() *source.File } // GetPageProvider provides the GetPage method. @@ -133,9 +131,6 @@ type GetPageProvider interface { // This will return nil when no page could be found, and will return // an error if the ref is ambiguous. GetPage(ref string) (Page, error) - - // GetPageWithTemplateInfo is for internal use only. - GetPageWithTemplateInfo(info tpl.Info, ref string) (Page, error) } // GitInfoProvider provides Git info. @@ -166,6 +161,12 @@ type OutputFormatsProvider interface { OutputFormats() OutputFormats } +// PageProvider provides access to a Page. +// Implemented by shortcodes and others. +type PageProvider interface { + Page() Page +} + // Page is the core interface in Hugo and what you get as the top level data context in your templates. type Page interface { ContentProvider @@ -175,7 +176,7 @@ type Page interface { type PageFragment interface { resource.ResourceLinksProvider - resource.ResourceMetaProvider + resource.ResourceNameTitleProvider } // PageMetaProvider provides page metadata, typically provided via front matter. 
@@ -187,7 +188,7 @@ type PageMetaProvider interface { Aliases() []string // BundleType returns the bundle type: `leaf`, `branch` or an empty string. - BundleType() files.ContentClass + BundleType() string // A configured description. Description() string @@ -224,9 +225,8 @@ type PageMetaProvider interface { // to the source of this Page. It will be relative to any content root. Path() string - // This is just a temporary bridge method. Use Path in templates. - // Pathc is for internal usage only. - Pathc() string + // This is for internal use only. + PathInfo() *paths.Path // The slug, typically defined in front matter. Slug() string @@ -240,13 +240,6 @@ type PageMetaProvider interface { // Section returns the first path element below the content root. Section() string - // Returns a slice of sections (directories if it's a file) to this - // Page. - SectionsEntries() []string - - // SectionsPath is SectionsEntries joined with a /. - SectionsPath() string - // Sitemap returns the sitemap configuration for this page. // This is for internal use only. Sitemap() config.SitemapConfig @@ -332,9 +325,6 @@ type PageWithoutContent interface { // e.g. GetTerms("categories") GetTerms(taxonomy string) Pages - // Used in change/dependency tracking. - identity.Provider - // HeadingsFiltered returns the headings for this page when a filter is set. // This is currently only triggered with the Related content feature // and the "fragments" type of index. @@ -430,7 +420,7 @@ type TranslationsProvider interface { type TreeProvider interface { // IsAncestor returns whether the current page is an ancestor of other. // Note that this method is not relevant for taxonomy lists and taxonomy terms pages. - IsAncestor(other any) (bool, error) + IsAncestor(other any) bool // CurrentSection returns the page's current section or the page itself if home or a section. // Note that this will return nil for pages that is not regular, home or section pages. 
@@ -438,7 +428,7 @@ type TreeProvider interface { // IsDescendant returns whether the current page is a descendant of other. // Note that this method is not relevant for taxonomy lists and taxonomy terms pages. - IsDescendant(other any) (bool, error) + IsDescendant(other any) bool // FirstSection returns the section on level 1 below home, e.g. "/docs". // For the home page, this will return itself. @@ -447,7 +437,7 @@ type TreeProvider interface { // InSection returns whether other is in the current section. // Note that this will always return false for pages that are // not either regular, home or section pages. - InSection(other any) (bool, error) + InSection(other any) bool // Parent returns a section's parent section or a page's section. // To get a section's subsections, see Page's Sections method. @@ -463,6 +453,13 @@ type TreeProvider interface { // Page returns a reference to the Page itself, kept here mostly // for legacy reasons. Page() Page + + // Returns a slice of sections (directories if it's a file) to this + // Page. + SectionsEntries() []string + + // SectionsPath is SectionsEntries joined with a /. + SectionsPath() string } // PageWithContext is a Page with a context.Context. diff --git a/resources/page/page_generate/generate_page_wrappers.go b/resources/page/page_generate/generate_page_wrappers.go index 2449cf28d..d720b8a42 100644 --- a/resources/page/page_generate/generate_page_wrappers.go +++ b/resources/page/page_generate/generate_page_wrappers.go @@ -14,19 +14,14 @@ package page_generate import ( - "bytes" "errors" "fmt" "os" "path/filepath" "reflect" - "github.com/gohugoio/hugo/common/maps" - "github.com/gohugoio/hugo/codegen" "github.com/gohugoio/hugo/resources/page" - "github.com/gohugoio/hugo/resources/resource" - "github.com/gohugoio/hugo/source" ) const header = `// Copyright 2019 The Hugo Authors. All rights reserved. @@ -46,7 +41,7 @@ const header = `// Copyright 2019 The Hugo Authors. All rights reserved. 
` var ( - pageInterface = reflect.TypeOf((*page.Page)(nil)).Elem() + pageInterface = reflect.TypeOf((*page.PageMetaProvider)(nil)).Elem() packageDir = filepath.FromSlash("resources/page") ) @@ -56,10 +51,6 @@ func Generate(c *codegen.Inspector) error { return fmt.Errorf("failed to generate JSON marshaler: %w", err) } - if err := generateFileIsZeroWrappers(c); err != nil { - return fmt.Errorf("failed to generate file wrappers: %w", err) - } - return nil } @@ -73,25 +64,7 @@ func generateMarshalJSON(c *codegen.Inspector) error { includes := []reflect.Type{pageInterface} - // Exclude these methods - excludes := []reflect.Type{ - // Leave this out for now. We need to revisit the author issue. - reflect.TypeOf((*page.AuthorProvider)(nil)).Elem(), - - reflect.TypeOf((*resource.ErrProvider)(nil)).Elem(), - - // navigation.PageMenus - - // Prevent loops. - reflect.TypeOf((*page.SitesProvider)(nil)).Elem(), - reflect.TypeOf((*page.Positioner)(nil)).Elem(), - - reflect.TypeOf((*page.ChildCareProvider)(nil)).Elem(), - reflect.TypeOf((*page.TreeProvider)(nil)).Elem(), - reflect.TypeOf((*page.InSectionPositioner)(nil)).Elem(), - reflect.TypeOf((*page.PaginatorProvider)(nil)).Elem(), - reflect.TypeOf((*maps.Scratcher)(nil)).Elem(), - } + excludes := []reflect.Type{} methods := c.MethodsFromTypes( includes, @@ -123,71 +96,6 @@ package page return nil } -func generateFileIsZeroWrappers(c *codegen.Inspector) error { - filename := filepath.Join(c.ProjectRootDir, packageDir, "zero_file.autogen.go") - f, err := os.Create(filename) - if err != nil { - return err - } - defer f.Close() - - // Generate warnings for zero file access - - warning := func(name string, tp reflect.Type) string { - msg := fmt.Sprintf(".File.%s on zero object. Wrap it in if or with: {{ with .File }}{{ .%s }}{{ end }}", name, name) - - // We made this a Warning in 0.92.0. - // When we remove this construct in 0.93.0, people will get a nil pointer. 
- return fmt.Sprintf("z.log.Warnln(%q)", msg) - } - - var buff bytes.Buffer - - methods := c.MethodsFromTypes([]reflect.Type{reflect.TypeOf((*source.File)(nil)).Elem()}, nil) - - for _, m := range methods { - if m.Name == "IsZero" || m.Name == "Classifier" { - continue - } - fmt.Fprint(&buff, m.DeclarationNamed("zeroFile")) - fmt.Fprintln(&buff, " {") - fmt.Fprintf(&buff, "\t%s\n", warning(m.Name, m.Owner)) - if len(m.Out) > 0 { - fmt.Fprintln(&buff, "\treturn") - } - fmt.Fprintln(&buff, "}") - - } - - pkgImports := append(methods.Imports(), "github.com/gohugoio/hugo/common/loggers", "github.com/gohugoio/hugo/source") - - fmt.Fprintf(f, `%s - -package page - -%s - -// ZeroFile represents a zero value of source.File with warnings if invoked. -type zeroFile struct { - log loggers.Logger -} - -func NewZeroFile(log loggers.Logger) source.File { - return zeroFile{log: log} -} - -func (zeroFile) IsZero() bool { - return true -} - - -%s - -`, header, importsString(pkgImports), buff.String()) - - return nil -} - func importsString(imps []string) string { if len(imps) == 0 { return "" diff --git a/resources/page/page_lazy_contentprovider.go b/resources/page/page_lazy_contentprovider.go index 2d647e90c..665b2d003 100644 --- a/resources/page/page_lazy_contentprovider.go +++ b/resources/page/page_lazy_contentprovider.go @@ -77,7 +77,6 @@ func (lcp *LazyContentProvider) Reset() { func (lcp *LazyContentProvider) TableOfContents(ctx context.Context) template.HTML { lcp.init.Do(ctx) return lcp.cp.TableOfContents(ctx) - } func (lcp *LazyContentProvider) Fragments(ctx context.Context) *tableofcontents.Fragments { @@ -131,7 +130,7 @@ func (lcp *LazyContentProvider) Len(ctx context.Context) int { } func (lcp *LazyContentProvider) Render(ctx context.Context, layout ...string) (template.HTML, error) { - lcp.init.Do(context.TODO()) + lcp.init.Do(ctx) return lcp.cp.Render(ctx, layout...) 
} @@ -149,6 +148,7 @@ func (lcp *LazyContentProvider) ParseContent(ctx context.Context, content []byte lcp.init.Do(ctx) return lcp.cp.ParseContent(ctx, content) } + func (lcp *LazyContentProvider) RenderContent(ctx context.Context, content []byte, doc any) (converter.ResultRender, bool, error) { lcp.init.Do(ctx) return lcp.cp.RenderContent(ctx, content, doc) diff --git a/resources/page/page_marshaljson.autogen.go b/resources/page/page_marshaljson.autogen.go index bc9b5cc0f..18ed2a75d 100644 --- a/resources/page/page_marshaljson.autogen.go +++ b/resources/page/page_marshaljson.autogen.go @@ -17,27 +17,12 @@ package page import ( "encoding/json" - "github.com/gohugoio/hugo/common/maps" - "github.com/gohugoio/hugo/config" - "github.com/gohugoio/hugo/hugofs/files" - "github.com/gohugoio/hugo/identity" - "github.com/gohugoio/hugo/langs" - "github.com/gohugoio/hugo/media" - "github.com/gohugoio/hugo/navigation" - "github.com/gohugoio/hugo/source" "time" + + "github.com/gohugoio/hugo/config" ) func MarshalPageToJSON(p Page) ([]byte, error) { - rawContent := p.RawContent() - resourceType := p.ResourceType() - mediaType := p.MediaType() - permalink := p.Permalink() - relPermalink := p.RelPermalink() - name := p.Name() - title := p.Title() - params := p.Params() - data := p.Data() date := p.Date() lastmod := p.Lastmod() publishDate := p.PublishDate() @@ -54,128 +39,65 @@ func MarshalPageToJSON(p Page) ([]byte, error) { isNode := p.IsNode() isPage := p.IsPage() path := p.Path() - pathc := p.Pathc() + pathc := p.Path() slug := p.Slug() lang := p.Lang() isSection := p.IsSection() section := p.Section() - sectionsEntries := p.SectionsEntries() - sectionsPath := p.SectionsPath() sitemap := p.Sitemap() typ := p.Type() weight := p.Weight() - language := p.Language() - file := p.File() - gitInfo := p.GitInfo() - codeOwners := p.CodeOwners() - outputFormats := p.OutputFormats() - alternativeOutputFormats := p.AlternativeOutputFormats() - menus := p.Menus() - translationKey := 
p.TranslationKey() - isTranslated := p.IsTranslated() - allTranslations := p.AllTranslations() - translations := p.Translations() - store := p.Store() - getIdentity := p.GetIdentity() s := struct { - RawContent string - ResourceType string - MediaType media.Type - Permalink string - RelPermalink string - Name string - Title string - Params maps.Params - Data interface{} - Date time.Time - Lastmod time.Time - PublishDate time.Time - ExpiryDate time.Time - Aliases []string - BundleType files.ContentClass - Description string - Draft bool - IsHome bool - Keywords []string - Kind string - Layout string - LinkTitle string - IsNode bool - IsPage bool - Path string - Pathc string - Slug string - Lang string - IsSection bool - Section string - SectionsEntries []string - SectionsPath string - Sitemap config.SitemapConfig - Type string - Weight int - Language *langs.Language - File source.File - GitInfo source.GitInfo - CodeOwners []string - OutputFormats OutputFormats - AlternativeOutputFormats OutputFormats - Menus navigation.PageMenus - TranslationKey string - IsTranslated bool - AllTranslations Pages - Translations Pages - Store *maps.Scratch - GetIdentity identity.Identity + Date time.Time + Lastmod time.Time + PublishDate time.Time + ExpiryDate time.Time + Aliases []string + BundleType string + Description string + Draft bool + IsHome bool + Keywords []string + Kind string + Layout string + LinkTitle string + IsNode bool + IsPage bool + Path string + Pathc string + Slug string + Lang string + IsSection bool + Section string + Sitemap config.SitemapConfig + Type string + Weight int }{ - RawContent: rawContent, - ResourceType: resourceType, - MediaType: mediaType, - Permalink: permalink, - RelPermalink: relPermalink, - Name: name, - Title: title, - Params: params, - Data: data, - Date: date, - Lastmod: lastmod, - PublishDate: publishDate, - ExpiryDate: expiryDate, - Aliases: aliases, - BundleType: bundleType, - Description: description, - Draft: draft, - IsHome: isHome, 
- Keywords: keywords, - Kind: kind, - Layout: layout, - LinkTitle: linkTitle, - IsNode: isNode, - IsPage: isPage, - Path: path, - Pathc: pathc, - Slug: slug, - Lang: lang, - IsSection: isSection, - Section: section, - SectionsEntries: sectionsEntries, - SectionsPath: sectionsPath, - Sitemap: sitemap, - Type: typ, - Weight: weight, - Language: language, - File: file, - GitInfo: gitInfo, - CodeOwners: codeOwners, - OutputFormats: outputFormats, - AlternativeOutputFormats: alternativeOutputFormats, - Menus: menus, - TranslationKey: translationKey, - IsTranslated: isTranslated, - AllTranslations: allTranslations, - Translations: translations, - Store: store, - GetIdentity: getIdentity, + Date: date, + Lastmod: lastmod, + PublishDate: publishDate, + ExpiryDate: expiryDate, + Aliases: aliases, + BundleType: bundleType, + Description: description, + Draft: draft, + IsHome: isHome, + Keywords: keywords, + Kind: kind, + Layout: layout, + LinkTitle: linkTitle, + IsNode: isNode, + IsPage: isPage, + Path: path, + Pathc: pathc, + Slug: slug, + Lang: lang, + IsSection: isSection, + Section: section, + Sitemap: sitemap, + Type: typ, + Weight: weight, } return json.Marshal(&s) diff --git a/resources/page/page_matcher.go b/resources/page/page_matcher.go index 4c861cbd7..f5e8e2697 100644 --- a/resources/page/page_matcher.go +++ b/resources/page/page_matcher.go @@ -63,7 +63,7 @@ func (m PageMatcher) Matches(p Page) bool { if m.Path != "" { g, err := glob.GetGlob(m.Path) // TODO(bep) Path() vs filepath vs leading slash. - p := strings.ToLower(filepath.ToSlash(p.Pathc())) + p := strings.ToLower(filepath.ToSlash(p.Path())) if !(strings.HasPrefix(p, "/")) { p = "/" + p } @@ -123,7 +123,6 @@ func DecodeCascadeConfig(in any) (*config.ConfigNamespace[[]PageMatcherParamsCon } return config.DecodeNamespace[[]PageMatcherParamsConfig](in, buildConfig) - } // DecodeCascade decodes in which could be either a map or a slice of maps. 
@@ -161,7 +160,6 @@ func mapToPageMatcherParamsConfig(m map[string]any) (PageMatcherParamsConfig, er } } return pcfg, pcfg.init() - } // decodePageMatcher decodes m into v. diff --git a/resources/page/page_nop.go b/resources/page/page_nop.go index 735d6eea8..a8f42e4d3 100644 --- a/resources/page/page_nop.go +++ b/resources/page/page_nop.go @@ -21,19 +21,17 @@ import ( "html/template" "time" - "github.com/gohugoio/hugo/identity" + "github.com/gohugoio/hugo/hugofs/files" "github.com/gohugoio/hugo/markup/converter" "github.com/gohugoio/hugo/markup/tableofcontents" - "github.com/gohugoio/hugo/hugofs/files" - "github.com/gohugoio/hugo/tpl" - "github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/navigation" "github.com/gohugoio/hugo/common/hugo" "github.com/gohugoio/hugo/common/maps" + "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/source" "github.com/gohugoio/hugo/config" @@ -59,6 +57,8 @@ var ( // PageNop implements Page, but does nothing. type nopPage int +var noOpPathInfo = paths.Parse(files.ComponentFolderContent, "no-op.md") + func (p *nopPage) Err() resource.ResourceError { return nil } @@ -103,7 +103,7 @@ func (p *nopPage) BaseFileName() string { return "" } -func (p *nopPage) BundleType() files.ContentClass { +func (p *nopPage) BundleType() string { return "" } @@ -163,10 +163,8 @@ func (p *nopPage) Extension() string { return "" } -var nilFile *source.FileInfo - -func (p *nopPage) File() source.File { - return nilFile +func (p *nopPage) File() *source.File { + return nil } func (p *nopPage) FileInfo() hugofs.FileMetaInfo { @@ -189,10 +187,6 @@ func (p *nopPage) GetPage(ref string) (Page, error) { return nil, nil } -func (p *nopPage) GetPageWithTemplateInfo(info tpl.Info, ref string) (Page, error) { - return nil, nil -} - func (p *nopPage) GetParam(key string) any { return nil } @@ -221,16 +215,16 @@ func (p *nopPage) Hugo() (h hugo.HugoInfo) { return } -func (p *nopPage) InSection(other any) (bool, error) { - return false, nil +func (p 
*nopPage) InSection(other any) bool { + return false } -func (p *nopPage) IsAncestor(other any) (bool, error) { - return false, nil +func (p *nopPage) IsAncestor(other any) bool { + return false } -func (p *nopPage) IsDescendant(other any) (bool, error) { - return false, nil +func (p *nopPage) IsDescendant(other any) bool { + return false } func (p *nopPage) IsDraft() bool { @@ -357,8 +351,8 @@ func (p *nopPage) Path() string { return "" } -func (p *nopPage) Pathc() string { - return "" +func (p *nopPage) PathInfo() *paths.Path { + return noOpPathInfo } func (p *nopPage) Permalink() string { @@ -529,13 +523,10 @@ func (p *nopPage) WordCount(context.Context) int { return 0 } -func (p *nopPage) GetIdentity() identity.Identity { - return identity.NewPathIdentity("content", "foo/bar.md") -} - func (p *nopPage) Fragments(context.Context) *tableofcontents.Fragments { return nil } + func (p *nopPage) HeadingsFiltered(context.Context) tableofcontents.Headings { return nil } @@ -550,6 +541,7 @@ func (r *nopContentRenderer) ParseAndRenderContent(ctx context.Context, content func (r *nopContentRenderer) ParseContent(ctx context.Context, content []byte) (converter.ResultParse, bool, error) { return nil, false, nil } + func (r *nopContentRenderer) RenderContent(ctx context.Context, content []byte, doc any) (converter.ResultRender, bool, error) { return nil, false, nil } diff --git a/resources/page/page_paths.go b/resources/page/page_paths.go index 1bc16fe35..8052287c6 100644 --- a/resources/page/page_paths.go +++ b/resources/page/page_paths.go @@ -17,7 +17,9 @@ import ( "path" "path/filepath" "strings" + "sync" + "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/common/urls" "github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/output" @@ -39,16 +41,14 @@ type TargetPathDescriptor struct { Type output.Format Kind string - Sections []string + Path *paths.Path + Section *paths.Path // For regular content pages this is either // 1) the Slug, if set, // 2) 
the file base name (TranslationBaseName). BaseName string - // Source directory. - Dir string - // Typically a language prefix added to file paths. PrefixFilePath string @@ -74,7 +74,6 @@ type TargetPathDescriptor struct { // TODO(bep) move this type. type TargetPaths struct { - // Where to store the file on disk relative to the publish dir. OS slashes. TargetFilename string @@ -107,237 +106,347 @@ func (p TargetPaths) PermalinkForOutputFormat(s *helpers.PathSpec, f output.Form return s.PermalinkForBaseURL(p.Link, baseURLstr) } -func isHtmlIndex(s string) bool { - return strings.HasSuffix(s, "/index.html") -} - func CreateTargetPaths(d TargetPathDescriptor) (tp TargetPaths) { - if d.Type.Name == "" { - panic("CreateTargetPath: missing type") - } - // Normalize all file Windows paths to simplify what's next. - if helpers.FilePathSeparator != slash { - d.Dir = filepath.ToSlash(d.Dir) + if helpers.FilePathSeparator != "/" { d.PrefixFilePath = filepath.ToSlash(d.PrefixFilePath) - } - if d.URL != "" && !strings.HasPrefix(d.URL, "/") { + if !d.Type.Root && d.URL != "" && !strings.HasPrefix(d.URL, "/") { // Treat this as a context relative URL d.ForcePrefix = true } - pagePath := slash - fullSuffix := d.Type.MediaType.FirstSuffix.FullSuffix + if d.URL != "" { + d.URL = filepath.ToSlash(d.URL) + if strings.Contains(d.URL, "..") { + d.URL = path.Join("/", d.URL) + } + } + + if d.Type.Root && !d.ForcePrefix { + d.PrefixFilePath = "" + d.PrefixLink = "" + } + + pb := getPagePathBuilder(d) + defer putPagePathBuilder(pb) - var ( - pagePathDir string - link string - linkDir string - ) + pb.fullSuffix = d.Type.MediaType.FirstSuffix.FullSuffix // The top level index files, i.e. the home page etc., needs // the index base even when uglyURLs is enabled. 
needsBase := true - isUgly := d.UglyURLs && !d.Type.NoUgly - baseNameSameAsType := d.BaseName != "" && d.BaseName == d.Type.BaseName + pb.isUgly = (d.UglyURLs || d.Type.Ugly) && !d.Type.NoUgly + pb.baseNameSameAsType = !d.Path.IsBundle() && d.BaseName != "" && d.BaseName == d.Type.BaseName - if d.ExpandedPermalink == "" && baseNameSameAsType { - isUgly = true + if d.ExpandedPermalink == "" && pb.baseNameSameAsType { + pb.isUgly = true } - if d.Kind != kinds.KindPage && d.URL == "" && len(d.Sections) > 0 { + if d.Type == output.HTTPStatusHTMLFormat || d.Type == output.SitemapFormat || d.Type == output.RobotsTxtFormat { + pb.noSubResources = true + } else if d.Kind != kinds.KindPage && d.URL == "" && d.Section.Base() != "/" { if d.ExpandedPermalink != "" { - pagePath = pjoin(pagePath, d.ExpandedPermalink) + pb.Add(d.ExpandedPermalink) } else { - pagePath = pjoin(d.Sections...) + pb.Add(d.Section.Base()) } needsBase = false } if d.Type.Path != "" { - pagePath = pjoin(pagePath, d.Type.Path) + pb.Add(d.Type.Path) } if d.Kind != kinds.KindHome && d.URL != "" { - pagePath = pjoin(pagePath, d.URL) + pb.Add(paths.FieldsSlash(d.URL)...) 
if d.Addends != "" { - pagePath = pjoin(pagePath, d.Addends) + pb.Add(d.Addends) } - pagePathDir = pagePath - link = pagePath hasDot := strings.Contains(d.URL, ".") - hasSlash := strings.HasSuffix(d.URL, slash) + hasSlash := strings.HasSuffix(d.URL, "/") if hasSlash || !hasDot { - pagePath = pjoin(pagePath, d.Type.BaseName+fullSuffix) + pb.Add(d.Type.BaseName + pb.fullSuffix) } else if hasDot { - pagePathDir = path.Dir(pagePathDir) + pb.fullSuffix = paths.Ext(d.URL) } - if !isHtmlIndex(pagePath) { - link = pagePath - } else if !hasSlash { - link += slash + if pb.IsHtmlIndex() { + pb.linkUpperOffset = 1 } - linkDir = pagePathDir - if d.ForcePrefix { // Prepend language prefix if not already set in URL - if d.PrefixFilePath != "" && !strings.HasPrefix(d.URL, slash+d.PrefixFilePath) { - pagePath = pjoin(d.PrefixFilePath, pagePath) - pagePathDir = pjoin(d.PrefixFilePath, pagePathDir) + if d.PrefixFilePath != "" && !strings.HasPrefix(d.URL, "/"+d.PrefixFilePath) { + pb.prefixPath = d.PrefixFilePath } - if d.PrefixLink != "" && !strings.HasPrefix(d.URL, slash+d.PrefixLink) { - link = pjoin(d.PrefixLink, link) - linkDir = pjoin(d.PrefixLink, linkDir) + if d.PrefixLink != "" && !strings.HasPrefix(d.URL, "/"+d.PrefixLink) { + pb.prefixLink = d.PrefixLink } } - - } else if d.Kind == kinds.KindPage { - + } else if !kinds.IsBranch(d.Kind) { if d.ExpandedPermalink != "" { - pagePath = pjoin(pagePath, d.ExpandedPermalink) + pb.Add(d.ExpandedPermalink) } else { - if d.Dir != "" { - pagePath = pjoin(pagePath, d.Dir) + if dir := d.Path.ContainerDir(); dir != "" { + pb.Add(dir) } if d.BaseName != "" { - pagePath = pjoin(pagePath, d.BaseName) + pb.Add(d.BaseName) + } else { + pb.Add(d.Path.BaseNameNoIdentifier()) } } if d.Addends != "" { - pagePath = pjoin(pagePath, d.Addends) - } - - link = pagePath - - // TODO(bep) this should not happen after the fix in https://github.com/gohugoio/hugo/issues/4870 - // but we may need some more testing before we can remove it. 
- if baseNameSameAsType { - link = strings.TrimSuffix(link, d.BaseName) + pb.Add(d.Addends) } - pagePathDir = link - link = link + slash - linkDir = pagePathDir - - if isUgly { - pagePath = addSuffix(pagePath, fullSuffix) + if pb.isUgly { + pb.ConcatLast(pb.fullSuffix) } else { - pagePath = pjoin(pagePath, d.Type.BaseName+fullSuffix) + pb.Add(d.Type.BaseName + pb.fullSuffix) } - if !isHtmlIndex(pagePath) { - link = pagePath + if pb.IsHtmlIndex() { + pb.linkUpperOffset = 1 } if d.PrefixFilePath != "" { - pagePath = pjoin(d.PrefixFilePath, pagePath) - pagePathDir = pjoin(d.PrefixFilePath, pagePathDir) + pb.prefixPath = d.PrefixFilePath } if d.PrefixLink != "" { - link = pjoin(d.PrefixLink, link) - linkDir = pjoin(d.PrefixLink, linkDir) + pb.prefixLink = d.PrefixLink } - } else { if d.Addends != "" { - pagePath = pjoin(pagePath, d.Addends) + pb.Add(d.Addends) } needsBase = needsBase && d.Addends == "" - // No permalink expansion etc. for node type pages (for now) - base := "" - - if needsBase || !isUgly { - base = d.Type.BaseName - } - - pagePathDir = pagePath - link = pagePath - linkDir = pagePathDir - - if base != "" { - pagePath = path.Join(pagePath, addSuffix(base, fullSuffix)) + if needsBase || !pb.isUgly { + pb.Add(d.Type.BaseName + pb.fullSuffix) } else { - pagePath = addSuffix(pagePath, fullSuffix) + pb.ConcatLast(pb.fullSuffix) } - if !isHtmlIndex(pagePath) { - link = pagePath - } else { - link += slash + if pb.IsHtmlIndex() { + pb.linkUpperOffset = 1 } if d.PrefixFilePath != "" { - pagePath = pjoin(d.PrefixFilePath, pagePath) - pagePathDir = pjoin(d.PrefixFilePath, pagePathDir) + pb.prefixPath = d.PrefixFilePath } if d.PrefixLink != "" { - link = pjoin(d.PrefixLink, link) - linkDir = pjoin(d.PrefixLink, linkDir) + pb.prefixLink = d.PrefixLink } } - pagePath = pjoin(slash, pagePath) - pagePathDir = strings.TrimSuffix(path.Join(slash, pagePathDir), slash) - - hadSlash := strings.HasSuffix(link, slash) - link = strings.Trim(link, slash) - if hadSlash { - link 
+= slash - } - - if !strings.HasPrefix(link, slash) { - link = slash + link - } - - linkDir = strings.TrimSuffix(path.Join(slash, linkDir), slash) - // if page URL is explicitly set in frontmatter, // preserve its value without sanitization if d.Kind != kinds.KindPage || d.URL == "" { // Note: MakePathSanitized will lower case the path if // disablePathToLower isn't set. - pagePath = d.PathSpec.MakePathSanitized(pagePath) - pagePathDir = d.PathSpec.MakePathSanitized(pagePathDir) - link = d.PathSpec.MakePathSanitized(link) - linkDir = d.PathSpec.MakePathSanitized(linkDir) + pb.Sanitize() } + link := pb.Link() + pagePath := pb.PathFile() + tp.TargetFilename = filepath.FromSlash(pagePath) - tp.SubResourceBaseTarget = filepath.FromSlash(pagePathDir) - tp.SubResourceBaseLink = linkDir - tp.Link = d.PathSpec.URLizeFilename(link) + if !pb.noSubResources { + tp.SubResourceBaseTarget = pb.PathDir() + tp.SubResourceBaseLink = pb.LinkDir() + } + if d.URL != "" { + tp.Link = paths.URLEscape(link) + } else { + // This is slightly faster for when we know we don't have any + // query or scheme etc. + tp.Link = paths.PathEscape(link) + } if tp.Link == "" { - tp.Link = slash + tp.Link = "/" } return } -func addSuffix(s, suffix string) string { - return strings.Trim(s, slash) + suffix +// When adding state here, remember to update putPagePathBuilder. +type pagePathBuilder struct { + els []string + + d TargetPathDescriptor + + // Builder state. + isUgly bool + baseNameSameAsType bool + noSubResources bool + fullSuffix string // File suffix including any ".". + prefixLink string + prefixPath string + linkUpperOffset int +} + +func (p *pagePathBuilder) Add(el ...string) { + // Filter empty and slashes. + n := 0 + for _, e := range el { + if e != "" && e != slash { + el[n] = e + n++ + } + } + el = el[:n] + + p.els = append(p.els, el...) } -// Like path.Join, but preserves one trailing slash if present. 
-func pjoin(elem ...string) string { - hadSlash := strings.HasSuffix(elem[len(elem)-1], slash) - joined := path.Join(elem...) - if hadSlash && !strings.HasSuffix(joined, slash) { - return joined + slash +func (p *pagePathBuilder) ConcatLast(s string) { + if len(p.els) == 0 { + p.Add(s) + return } - return joined + old := p.els[len(p.els)-1] + if old == "" { + p.els[len(p.els)-1] = s + return + } + if old[len(old)-1] == '/' { + old = old[:len(old)-1] + } + p.els[len(p.els)-1] = old + s +} + +func (p *pagePathBuilder) IsHtmlIndex() bool { + return p.Last() == "index.html" +} + +func (p *pagePathBuilder) Last() string { + if p.els == nil { + return "" + } + return p.els[len(p.els)-1] +} + +func (p *pagePathBuilder) Link() string { + link := p.Path(p.linkUpperOffset) + + if p.baseNameSameAsType { + link = strings.TrimSuffix(link, p.d.BaseName) + } + + if p.prefixLink != "" { + link = "/" + p.prefixLink + link + } + + if p.linkUpperOffset > 0 && !strings.HasSuffix(link, "/") { + link += "/" + } + + return link +} + +func (p *pagePathBuilder) LinkDir() string { + if p.noSubResources { + return "" + } + + pathDir := p.PathDirBase() + + if p.prefixLink != "" { + pathDir = "/" + p.prefixLink + pathDir + } + + return pathDir +} + +func (p *pagePathBuilder) Path(upperOffset int) string { + upper := len(p.els) + if upperOffset > 0 { + upper -= upperOffset + } + pth := path.Join(p.els[:upper]...) 
+ return paths.AddLeadingSlash(pth) +} + +func (p *pagePathBuilder) PathDir() string { + dir := p.PathDirBase() + if p.prefixPath != "" { + dir = "/" + p.prefixPath + dir + } + return dir +} + +func (p *pagePathBuilder) PathDirBase() string { + if p.noSubResources { + return "" + } + + dir := p.Path(0) + isIndex := strings.HasPrefix(p.Last(), p.d.Type.BaseName+".") + + if isIndex { + dir = paths.Dir(dir) + } else { + dir = strings.TrimSuffix(dir, p.fullSuffix) + } + + if dir == "/" { + dir = "" + } + + return dir +} + +func (p *pagePathBuilder) PathFile() string { + dir := p.Path(0) + if p.prefixPath != "" { + dir = "/" + p.prefixPath + dir + } + return dir +} + +func (p *pagePathBuilder) Prepend(el ...string) { + p.els = append(p.els[:0], append(el, p.els[0:]...)...) +} + +func (p *pagePathBuilder) Sanitize() { + for i, el := range p.els { + p.els[i] = p.d.PathSpec.MakePathSanitized(el) + } +} + +var pagePathBuilderPool = &sync.Pool{ + New: func() any { + return &pagePathBuilder{} + }, +} + +func getPagePathBuilder(d TargetPathDescriptor) *pagePathBuilder { + b := pagePathBuilderPool.Get().(*pagePathBuilder) + b.d = d + return b +} + +func putPagePathBuilder(b *pagePathBuilder) { + b.els = b.els[:0] + b.fullSuffix = "" + b.baseNameSameAsType = false + b.isUgly = false + b.noSubResources = false + b.prefixLink = "" + b.prefixPath = "" + b.linkUpperOffset = 0 + pagePathBuilderPool.Put(b) } diff --git a/resources/page/page_paths_test.go b/resources/page/page_paths_test.go deleted file mode 100644 index dd6457f77..000000000 --- a/resources/page/page_paths_test.go +++ /dev/null @@ -1,295 +0,0 @@ -// Copyright 2019 The Hugo Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package page_test - -import ( - "fmt" - "path/filepath" - "strings" - "testing" - - "github.com/gohugoio/hugo/media" - "github.com/gohugoio/hugo/resources/kinds" - "github.com/gohugoio/hugo/resources/page" - - "github.com/gohugoio/hugo/output" -) - -func TestPageTargetPath(t *testing.T) { - pathSpec := newTestPathSpec() - - noExtNoDelimMediaType := media.WithDelimiterAndSuffixes(media.Builtin.TextType, "", "") - noExtNoDelimMediaType.Delimiter = "" - - // Netlify style _redirects - noExtDelimFormat := output.Format{ - Name: "NER", - MediaType: noExtNoDelimMediaType, - BaseName: "_redirects", - } - - for _, langPrefixPath := range []string{"", "no"} { - for _, langPrefixLink := range []string{"", "no"} { - for _, uglyURLs := range []bool{false, true} { - - tests := []struct { - name string - d page.TargetPathDescriptor - expected page.TargetPaths - }{ - {"JSON home", page.TargetPathDescriptor{Kind: kinds.KindHome, Type: output.JSONFormat}, page.TargetPaths{TargetFilename: "/index.json", SubResourceBaseTarget: "", Link: "/index.json"}}, - {"AMP home", page.TargetPathDescriptor{Kind: kinds.KindHome, Type: output.AMPFormat}, page.TargetPaths{TargetFilename: "/amp/index.html", SubResourceBaseTarget: "/amp", Link: "/amp/"}}, - {"HTML home", page.TargetPathDescriptor{Kind: kinds.KindHome, BaseName: "_index", Type: output.HTMLFormat}, page.TargetPaths{TargetFilename: "/index.html", SubResourceBaseTarget: "", Link: "/"}}, - {"Netlify redirects", page.TargetPathDescriptor{Kind: kinds.KindHome, BaseName: "_index", Type: noExtDelimFormat}, 
page.TargetPaths{TargetFilename: "/_redirects", SubResourceBaseTarget: "", Link: "/_redirects"}}, - {"HTML section list", page.TargetPathDescriptor{ - Kind: kinds.KindSection, - Sections: []string{"sect1"}, - BaseName: "_index", - Type: output.HTMLFormat, - }, page.TargetPaths{TargetFilename: "/sect1/index.html", SubResourceBaseTarget: "/sect1", Link: "/sect1/"}}, - {"HTML taxonomy term", page.TargetPathDescriptor{ - Kind: kinds.KindTerm, - Sections: []string{"tags", "hugo"}, - BaseName: "_index", - Type: output.HTMLFormat, - }, page.TargetPaths{TargetFilename: "/tags/hugo/index.html", SubResourceBaseTarget: "/tags/hugo", Link: "/tags/hugo/"}}, - {"HTML taxonomy", page.TargetPathDescriptor{ - Kind: kinds.KindTaxonomy, - Sections: []string{"tags"}, - BaseName: "_index", - Type: output.HTMLFormat, - }, page.TargetPaths{TargetFilename: "/tags/index.html", SubResourceBaseTarget: "/tags", Link: "/tags/"}}, - { - "HTML page", page.TargetPathDescriptor{ - Kind: kinds.KindPage, - Dir: "/a/b", - BaseName: "mypage", - Sections: []string{"a"}, - Type: output.HTMLFormat, - }, page.TargetPaths{TargetFilename: "/a/b/mypage/index.html", SubResourceBaseTarget: "/a/b/mypage", Link: "/a/b/mypage/"}, - }, - - { - "HTML page with index as base", page.TargetPathDescriptor{ - Kind: kinds.KindPage, - Dir: "/a/b", - BaseName: "index", - Sections: []string{"a"}, - Type: output.HTMLFormat, - }, page.TargetPaths{TargetFilename: "/a/b/index.html", SubResourceBaseTarget: "/a/b", Link: "/a/b/"}, - }, - - { - "HTML page with special chars", page.TargetPathDescriptor{ - Kind: kinds.KindPage, - Dir: "/a/b", - BaseName: "My Page!", - Type: output.HTMLFormat, - }, page.TargetPaths{TargetFilename: "/a/b/my-page/index.html", SubResourceBaseTarget: "/a/b/my-page", Link: "/a/b/my-page/"}, - }, - {"RSS home", page.TargetPathDescriptor{Kind: "rss", Type: output.RSSFormat}, page.TargetPaths{TargetFilename: "/index.xml", SubResourceBaseTarget: "", Link: "/index.xml"}}, - {"RSS section list", 
page.TargetPathDescriptor{ - Kind: "rss", - Sections: []string{"sect1"}, - Type: output.RSSFormat, - }, page.TargetPaths{TargetFilename: "/sect1/index.xml", SubResourceBaseTarget: "/sect1", Link: "/sect1/index.xml"}}, - { - "AMP page", page.TargetPathDescriptor{ - Kind: kinds.KindPage, - Dir: "/a/b/c", - BaseName: "myamp", - Type: output.AMPFormat, - }, page.TargetPaths{TargetFilename: "/amp/a/b/c/myamp/index.html", SubResourceBaseTarget: "/amp/a/b/c/myamp", Link: "/amp/a/b/c/myamp/"}, - }, - { - "AMP page with URL with suffix", page.TargetPathDescriptor{ - Kind: kinds.KindPage, - Dir: "/sect/", - BaseName: "mypage", - URL: "/some/other/url.xhtml", - Type: output.HTMLFormat, - }, page.TargetPaths{TargetFilename: "/some/other/url.xhtml", SubResourceBaseTarget: "/some/other", Link: "/some/other/url.xhtml"}, - }, - { - "JSON page with URL without suffix", page.TargetPathDescriptor{ - Kind: kinds.KindPage, - Dir: "/sect/", - BaseName: "mypage", - URL: "/some/other/path/", - Type: output.JSONFormat, - }, page.TargetPaths{TargetFilename: "/some/other/path/index.json", SubResourceBaseTarget: "/some/other/path", Link: "/some/other/path/index.json"}, - }, - { - "JSON page with URL without suffix and no trailing slash", page.TargetPathDescriptor{ - Kind: kinds.KindPage, - Dir: "/sect/", - BaseName: "mypage", - URL: "/some/other/path", - Type: output.JSONFormat, - }, page.TargetPaths{TargetFilename: "/some/other/path/index.json", SubResourceBaseTarget: "/some/other/path", Link: "/some/other/path/index.json"}, - }, - { - "HTML page with URL without suffix and no trailing slash", page.TargetPathDescriptor{ - Kind: kinds.KindPage, - Dir: "/sect/", - BaseName: "mypage", - URL: "/some/other/path", - Type: output.HTMLFormat, - }, page.TargetPaths{TargetFilename: "/some/other/path/index.html", SubResourceBaseTarget: "/some/other/path", Link: "/some/other/path/"}, - }, - { - "HTML page with URL containing double hyphen", page.TargetPathDescriptor{ - Kind: kinds.KindPage, - Dir: 
"/sect/", - BaseName: "mypage", - URL: "/some/other--url/", - Type: output.HTMLFormat, - }, page.TargetPaths{TargetFilename: "/some/other--url/index.html", SubResourceBaseTarget: "/some/other--url", Link: "/some/other--url/"}, - }, - { - "HTML page with expanded permalink", page.TargetPathDescriptor{ - Kind: kinds.KindPage, - Dir: "/a/b", - BaseName: "mypage", - ExpandedPermalink: "/2017/10/my-title/", - Type: output.HTMLFormat, - }, page.TargetPaths{TargetFilename: "/2017/10/my-title/index.html", SubResourceBaseTarget: "/2017/10/my-title", Link: "/2017/10/my-title/"}, - }, - { - "Paginated HTML home", page.TargetPathDescriptor{ - Kind: kinds.KindHome, - BaseName: "_index", - Type: output.HTMLFormat, - Addends: "page/3", - }, page.TargetPaths{TargetFilename: "/page/3/index.html", SubResourceBaseTarget: "/page/3", Link: "/page/3/"}, - }, - { - "Paginated Taxonomy terms list", page.TargetPathDescriptor{ - Kind: kinds.KindTerm, - BaseName: "_index", - Sections: []string{"tags", "hugo"}, - Type: output.HTMLFormat, - Addends: "page/3", - }, page.TargetPaths{TargetFilename: "/tags/hugo/page/3/index.html", SubResourceBaseTarget: "/tags/hugo/page/3", Link: "/tags/hugo/page/3/"}, - }, - { - "Regular page with addend", page.TargetPathDescriptor{ - Kind: kinds.KindPage, - Dir: "/a/b", - BaseName: "mypage", - Addends: "c/d/e", - Type: output.HTMLFormat, - }, page.TargetPaths{TargetFilename: "/a/b/mypage/c/d/e/index.html", SubResourceBaseTarget: "/a/b/mypage/c/d/e", Link: "/a/b/mypage/c/d/e/"}, - }, - } - - for i, test := range tests { - t.Run(fmt.Sprintf("langPrefixPath=%s,langPrefixLink=%s,uglyURLs=%t,name=%s", langPrefixPath, langPrefixLink, uglyURLs, test.name), - func(t *testing.T) { - test.d.ForcePrefix = true - test.d.PathSpec = pathSpec - test.d.UglyURLs = uglyURLs - test.d.PrefixFilePath = langPrefixPath - test.d.PrefixLink = langPrefixLink - test.d.Dir = filepath.FromSlash(test.d.Dir) - isUgly := uglyURLs && !test.d.Type.NoUgly - - expected := test.expected - - // 
TODO(bep) simplify - if test.d.Kind == kinds.KindPage && test.d.BaseName == test.d.Type.BaseName { - } else if test.d.Kind == kinds.KindHome && test.d.Type.Path != "" { - } else if test.d.Type.MediaType.FirstSuffix.Suffix != "" && (!strings.HasPrefix(expected.TargetFilename, "/index") || test.d.Addends != "") && test.d.URL == "" && isUgly { - expected.TargetFilename = strings.Replace(expected.TargetFilename, - "/"+test.d.Type.BaseName+"."+test.d.Type.MediaType.FirstSuffix.Suffix, - "."+test.d.Type.MediaType.FirstSuffix.Suffix, 1) - expected.Link = strings.TrimSuffix(expected.Link, "/") + "." + test.d.Type.MediaType.FirstSuffix.Suffix - - } - - if test.d.PrefixFilePath != "" && !strings.HasPrefix(test.d.URL, "/"+test.d.PrefixFilePath) { - expected.TargetFilename = "/" + test.d.PrefixFilePath + expected.TargetFilename - expected.SubResourceBaseTarget = "/" + test.d.PrefixFilePath + expected.SubResourceBaseTarget - } - - if test.d.PrefixLink != "" && !strings.HasPrefix(test.d.URL, "/"+test.d.PrefixLink) { - expected.Link = "/" + test.d.PrefixLink + expected.Link - } - - expected.TargetFilename = filepath.FromSlash(expected.TargetFilename) - expected.SubResourceBaseTarget = filepath.FromSlash(expected.SubResourceBaseTarget) - - pagePath := page.CreateTargetPaths(test.d) - - if !eqTargetPaths(pagePath, expected) { - t.Fatalf("[%d] [%s] targetPath expected\n%#v, got:\n%#v", i, test.name, expected, pagePath) - } - }) - } - } - } - } -} - -func TestPageTargetPathPrefix(t *testing.T) { - pathSpec := newTestPathSpec() - tests := []struct { - name string - d page.TargetPathDescriptor - expected page.TargetPaths - }{ - { - "URL set, prefix both, no force", - page.TargetPathDescriptor{Kind: kinds.KindPage, Type: output.JSONFormat, URL: "/mydir/my.json", ForcePrefix: false, PrefixFilePath: "pf", PrefixLink: "pl"}, - page.TargetPaths{TargetFilename: "/mydir/my.json", SubResourceBaseTarget: "/mydir", SubResourceBaseLink: "/mydir", Link: "/mydir/my.json"}, - }, - { - "URL set, 
prefix both, force", - page.TargetPathDescriptor{Kind: kinds.KindPage, Type: output.JSONFormat, URL: "/mydir/my.json", ForcePrefix: true, PrefixFilePath: "pf", PrefixLink: "pl"}, - page.TargetPaths{TargetFilename: "/pf/mydir/my.json", SubResourceBaseTarget: "/pf/mydir", SubResourceBaseLink: "/pl/mydir", Link: "/pl/mydir/my.json"}, - }, - } - - for i, test := range tests { - t.Run(fmt.Sprintf(test.name), - func(t *testing.T) { - test.d.PathSpec = pathSpec - expected := test.expected - expected.TargetFilename = filepath.FromSlash(expected.TargetFilename) - expected.SubResourceBaseTarget = filepath.FromSlash(expected.SubResourceBaseTarget) - - pagePath := page.CreateTargetPaths(test.d) - - if pagePath != expected { - t.Fatalf("[%d] [%s] targetPath expected\n%#v, got:\n%#v", i, test.name, expected, pagePath) - } - }) - } -} - -func eqTargetPaths(p1, p2 page.TargetPaths) bool { - if p1.Link != p2.Link { - return false - } - - if p1.SubResourceBaseTarget != p2.SubResourceBaseTarget { - return false - } - - if p1.TargetFilename != p2.TargetFilename { - return false - } - - return true -} diff --git a/resources/page/pagegroup.go b/resources/page/pagegroup.go index d091c6bef..e691a112e 100644 --- a/resources/page/pagegroup.go +++ b/resources/page/pagegroup.go @@ -244,7 +244,7 @@ func (p Pages) groupByDateField(format string, sorter func(p Pages) Pages, getDa return nil, nil } - firstPage := sp[0].(Page) + firstPage := sp[0] date := getDate(firstPage) // Pages may be a mix of multiple languages, so we need to use the language @@ -258,7 +258,7 @@ func (p Pages) groupByDateField(format string, sorter func(p Pages) Pages, getDa i := 0 for _, e := range sp[1:] { - date = getDate(e.(Page)) + date = getDate(e) formatted := formatter.Format(date, format) if r[i].Key.(string) != formatted { r = append(r, PageGroup{Key: formatted}) diff --git a/resources/page/pagemeta/page_frontmatter.go b/resources/page/pagemeta/page_frontmatter.go index 98ab6b222..d804f27a7 100644 --- 
a/resources/page/pagemeta/page_frontmatter.go +++ b/resources/page/pagemeta/page_frontmatter.go @@ -47,9 +47,8 @@ type FrontMatterHandler struct { // FrontMatterDescriptor describes how to handle front matter for a given Page. // It has pointers to values in the receiving page which gets updated. type FrontMatterDescriptor struct { - - // This the Page's front matter. - Frontmatter map[string]any + // This is the Page's params. + Params map[string]any // This is the Page's base filename (BaseFilename), e.g. page.md., or // if page is a leaf bundle, the bundle folder name (ContentBaseName). @@ -63,9 +62,6 @@ type FrontMatterDescriptor struct { // The below are pointers to values on Page and will be modified. - // This is the Page's params. - Params map[string]any - // This is the Page's dates. Dates *resource.Dates @@ -365,7 +361,7 @@ type frontmatterFieldHandlers int func (f *frontmatterFieldHandlers) newDateFieldHandler(key string, setter func(d *FrontMatterDescriptor, t time.Time)) frontMatterFieldHandler { return func(d *FrontMatterDescriptor) (bool, error) { - v, found := d.Frontmatter[key] + v, found := d.Params[key] if !found { return false, nil @@ -396,7 +392,7 @@ func (f *frontmatterFieldHandlers) newDateFilenameHandler(setter func(d *FrontMa setter(d, date) - if _, found := d.Frontmatter["slug"]; !found { + if _, found := d.Params["slug"]; !found { // Use slug from filename d.PageURLs.Slug = slug } diff --git a/resources/page/pagemeta/page_frontmatter_test.go b/resources/page/pagemeta/page_frontmatter_test.go index f040af163..1aff8b511 100644 --- a/resources/page/pagemeta/page_frontmatter_test.go +++ b/resources/page/pagemeta/page_frontmatter_test.go @@ -29,11 +29,10 @@ import ( func newTestFd() *pagemeta.FrontMatterDescriptor { return &pagemeta.FrontMatterDescriptor{ - Frontmatter: make(map[string]any), - Params: make(map[string]any), - Dates: &resource.Dates{}, - PageURLs: &pagemeta.URLPath{}, - Location: time.UTC, + Params: make(map[string]any), + 
Dates: &resource.Dates{}, + PageURLs: &pagemeta.URLPath{}, + Location: time.UTC, } } @@ -106,13 +105,13 @@ func TestFrontMatterDatesHandlers(t *testing.T) { case ":git": d.GitAuthorDate = d1 } - d.Frontmatter["date"] = d2 + d.Params["date"] = d2 c.Assert(handler.HandleDates(d), qt.IsNil) c.Assert(d.Dates.FDate, qt.Equals, d1) c.Assert(d.Params["date"], qt.Equals, d2) d = newTestFd() - d.Frontmatter["date"] = d2 + d.Params["date"] = d2 c.Assert(handler.HandleDates(d), qt.IsNil) c.Assert(d.Dates.FDate, qt.Equals, d2) c.Assert(d.Params["date"], qt.Equals, d2) @@ -120,54 +119,6 @@ func TestFrontMatterDatesHandlers(t *testing.T) { } } -func TestFrontMatterDatesCustomConfig(t *testing.T) { - t.Parallel() - - c := qt.New(t) - - cfg := config.New() - cfg.Set("frontmatter", map[string]any{ - "date": []string{"mydate"}, - "lastmod": []string{"publishdate"}, - "publishdate": []string{"publishdate"}, - }) - - conf := testconfig.GetTestConfig(nil, cfg) - handler, err := pagemeta.NewFrontmatterHandler(nil, conf.GetConfigSection("frontmatter").(pagemeta.FrontmatterConfig)) - c.Assert(err, qt.IsNil) - - testDate, err := time.Parse("2006-01-02", "2018-02-01") - c.Assert(err, qt.IsNil) - - d := newTestFd() - d.Frontmatter["mydate"] = testDate - testDate = testDate.Add(24 * time.Hour) - d.Frontmatter["date"] = testDate - testDate = testDate.Add(24 * time.Hour) - d.Frontmatter["lastmod"] = testDate - testDate = testDate.Add(24 * time.Hour) - d.Frontmatter["publishdate"] = testDate - testDate = testDate.Add(24 * time.Hour) - d.Frontmatter["expirydate"] = testDate - - c.Assert(handler.HandleDates(d), qt.IsNil) - - c.Assert(d.Dates.FDate.Day(), qt.Equals, 1) - c.Assert(d.Dates.FLastmod.Day(), qt.Equals, 4) - c.Assert(d.Dates.FPublishDate.Day(), qt.Equals, 4) - c.Assert(d.Dates.FExpiryDate.Day(), qt.Equals, 5) - - c.Assert(d.Params["date"], qt.Equals, d.Dates.FDate) - c.Assert(d.Params["mydate"], qt.Equals, d.Dates.FDate) - c.Assert(d.Params["publishdate"], qt.Equals, 
d.Dates.FPublishDate) - c.Assert(d.Params["expirydate"], qt.Equals, d.Dates.FExpiryDate) - - c.Assert(handler.IsDateKey("date"), qt.Equals, false) // This looks odd, but is configured like this. - c.Assert(handler.IsDateKey("mydate"), qt.Equals, true) - c.Assert(handler.IsDateKey("publishdate"), qt.Equals, true) - c.Assert(handler.IsDateKey("pubdate"), qt.Equals, true) -} - func TestFrontMatterDatesDefaultKeyword(t *testing.T) { t.Parallel() @@ -186,10 +137,10 @@ func TestFrontMatterDatesDefaultKeyword(t *testing.T) { testDate, _ := time.Parse("2006-01-02", "2018-02-01") d := newTestFd() - d.Frontmatter["mydate"] = testDate - d.Frontmatter["date"] = testDate.Add(1 * 24 * time.Hour) - d.Frontmatter["mypubdate"] = testDate.Add(2 * 24 * time.Hour) - d.Frontmatter["publishdate"] = testDate.Add(3 * 24 * time.Hour) + d.Params["mydate"] = testDate + d.Params["date"] = testDate.Add(1 * 24 * time.Hour) + d.Params["mypubdate"] = testDate.Add(2 * 24 * time.Hour) + d.Params["publishdate"] = testDate.Add(3 * 24 * time.Hour) c.Assert(handler.HandleDates(d), qt.IsNil) diff --git a/resources/page/pages.go b/resources/page/pages.go index 77e56a062..088abb9ac 100644 --- a/resources/page/pages.go +++ b/resources/page/pages.go @@ -66,9 +66,7 @@ func ToPages(seq any) (Pages, error) { return v.Pages, nil case []Page: pages := make(Pages, len(v)) - for i, vv := range v { - pages[i] = vv - } + copy(pages, v) return pages, nil case []any: pages := make(Pages, len(v)) diff --git a/resources/page/pages_related.go b/resources/page/pages_related.go index 217aced47..3322a4fbf 100644 --- a/resources/page/pages_related.go +++ b/resources/page/pages_related.go @@ -35,7 +35,6 @@ var ( // A PageGenealogist finds related pages in a page collection. This interface is implemented // by Pages and PageGroup, which makes it available as `{{ .RegularRelated . }}` etc. type PageGenealogist interface { - // Template example: // {{ $related := .RegularPages.Related . 
}} Related(ctx context.Context, opts any) (Pages, error) @@ -76,7 +75,6 @@ func (p Pages) Related(ctx context.Context, optsv any) (Pages, error) { } return result, nil - } // RelatedIndices searches the given indices with the search keywords from the @@ -186,6 +184,7 @@ func (s *RelatedDocsHandler) getIndex(p Pages) *related.InvertedIndex { } return nil } + func (s *RelatedDocsHandler) getOrCreateIndex(ctx context.Context, p Pages) (*related.InvertedIndex, error) { s.mu.RLock() cachedIndex := s.getIndex(p) diff --git a/resources/page/pages_sort.go b/resources/page/pages_sort.go index 32b1b3895..3f4875702 100644 --- a/resources/page/pages_sort.go +++ b/resources/page/pages_sort.go @@ -54,6 +54,19 @@ func getOrdinals(p1, p2 Page) (int, int) { return p1o.Ordinal(), p2o.Ordinal() } +func getWeight0s(p1, p2 Page) (int, int) { + p1w, ok1 := p1.(resource.Weight0Provider) + if !ok1 { + return -1, -1 + } + p2w, ok2 := p2.(resource.Weight0Provider) + if !ok2 { + return -1, -1 + } + + return p1w.Weight0(), p2w.Weight0() +} + // Sort stable sorts the pages given the receiver's sort order. func (by pageBy) Sort(pages Pages) { ps := &pageSorter{ @@ -72,12 +85,17 @@ var ( if o1 != o2 && o1 != -1 && o2 != -1 { return o1 < o2 } + // Weight0, as by the weight of the taxonomy entrie in the front matter. 
+ w01, w02 := getWeight0s(p1, p2) + if w01 != w02 && w01 != -1 && w02 != -1 { + return w01 < w02 + } if p1.Weight() == p2.Weight() { if p1.Date().Unix() == p2.Date().Unix() { c := collatorStringCompare(func(p Page) string { return p.LinkTitle() }, p1, p2) if c == 0 { - if p1.File().IsZero() || p2.File().IsZero() { - return p1.File().IsZero() + if p1.File() == nil || p2.File() == nil { + return p1.File() == nil } return compare.LessStrings(p1.File().Filename(), p2.File().Filename()) } @@ -102,7 +120,7 @@ var ( if p1.Date().Unix() == p2.Date().Unix() { c := compare.Strings(p1.LinkTitle(), p2.LinkTitle()) if c == 0 { - if !p1.File().IsZero() && !p2.File().IsZero() { + if p1.File() != nil && p2.File() != nil { return compare.LessStrings(p1.File().Filename(), p2.File().Filename()) } } @@ -192,7 +210,6 @@ var collatorStringLess = func(p Page) (less func(s1, s2 string) bool, close func func() { coll.Unlock() } - } // ByWeight sorts the Pages by weight and returns a copy. @@ -406,7 +423,6 @@ func (p Pages) ByParam(paramsKey any) Pages { s2 := cast.ToString(v2) return stringLess(s1, s2) - } pages, _ := spc.get(key, pageBy(paramsKeyComparator).Sort, p) diff --git a/resources/page/pages_sort_test.go b/resources/page/pages_sort_test.go index 728237230..12fa4a1e1 100644 --- a/resources/page/pages_sort_test.go +++ b/resources/page/pages_sort_test.go @@ -109,7 +109,6 @@ func TestSortByN(t *testing.T) { byLen := func(p Pages) Pages { return p.ByLength(ctx) - } for i, this := range []struct { @@ -273,7 +272,7 @@ func createSortTestPages(num int) Pages { for i := 0; i < num; i++ { p := newTestPage() p.path = fmt.Sprintf("/x/y/p%d.md", i) - p.title = fmt.Sprintf("Title %d", i%(num+1/2)) + p.title = fmt.Sprintf("Title %d", i%((num+1)/2)) p.params = map[string]any{ "arbitrarily": map[string]any{ "nested": ("xyz" + fmt.Sprintf("%v", 100-i)), diff --git a/resources/page/permalinks.go b/resources/page/permalinks.go index 4577f5240..1677d3a90 100644 --- a/resources/page/permalinks.go +++ 
b/resources/page/permalinks.go @@ -120,12 +120,18 @@ func (l PermalinkExpander) Expand(key string, p Page) (string, error) { return expand(p) } +// Allow " " and / to represent the root section. +var sectionCutSet = " /" + +func init() { + if string(os.PathSeparator) != "/" { + sectionCutSet += string(os.PathSeparator) + } +} + func (l PermalinkExpander) parse(patterns map[string]string) (map[string]func(Page) (string, error), error) { expanders := make(map[string]func(Page) (string, error)) - // Allow " " and / to represent the root section. - const sectionCutSet = " /" + string(os.PathSeparator) - for k, pattern := range patterns { k = strings.Trim(k, sectionCutSet) @@ -295,7 +301,7 @@ func (l PermalinkExpander) pageToPermalinkSections(p Page, _ string) (string, er } func (l PermalinkExpander) translationBaseName(p Page) string { - if p.File().IsZero() { + if p.File() == nil { return "" } return p.File().TranslationBaseName() diff --git a/resources/page/permalinks_integration_test.go b/resources/page/permalinks_integration_test.go index 6c2411ad7..9a76ac602 100644 --- a/resources/page/permalinks_integration_test.go +++ b/resources/page/permalinks_integration_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -102,7 +102,6 @@ slug: "mytagslug" "taxonomy": {"tags": "/tagsslug/:slug/"}, "term": {"tags": "/tagsslug/tag/:slug/"}, }) - } func TestPermalinksOldSetup(t *testing.T) { @@ -145,7 +144,6 @@ slug: "p1slugvalue" "taxonomy": {}, "term": {"withpageslug": "/pageslug/:slug/"}, }) - } func TestPermalinksNestedSections(t *testing.T) { @@ -194,5 +192,4 @@ List. 
b.AssertFileContent("public/libros/index.html", "List.") b.AssertFileContent("public/libros/fiction/index.html", "List.") b.AssertFileContent("public/libros/fiction/2023/book1/index.html", "Single.") - } diff --git a/resources/page/permalinks_test.go b/resources/page/permalinks_test.go index 194387d5c..a3a45bb88 100644 --- a/resources/page/permalinks_test.go +++ b/resources/page/permalinks_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -202,7 +202,6 @@ func TestPermalinkExpansionSliceSyntax(t *testing.T) { c.Assert(fn1("[:last]"), qt.DeepEquals, []string{}) c.Assert(fn1("[1:last]"), qt.DeepEquals, []string{}) c.Assert(fn1("[1]"), qt.DeepEquals, []string{}) - }) c.Run("Out of bounds", func(c *qt.C) { @@ -218,9 +217,7 @@ func TestPermalinkExpansionSliceSyntax(t *testing.T) { c.Assert(fn4("[]"), qt.IsNil) c.Assert(fn4("[1:}"), qt.IsNil) c.Assert(fn4("foo"), qt.IsNil) - }) - } func BenchmarkPermalinkExpand(b *testing.B) { diff --git a/resources/page/site.go b/resources/page/site.go index 0480ce674..9ef76505d 100644 --- a/resources/page/site.go +++ b/resources/page/site.go @@ -21,7 +21,6 @@ import ( "github.com/gohugoio/hugo/config/privacy" "github.com/gohugoio/hugo/config/services" "github.com/gohugoio/hugo/identity" - "github.com/gohugoio/hugo/tpl" "github.com/gohugoio/hugo/config" @@ -88,8 +87,12 @@ type Site interface { Taxonomies() TaxonomyList // Returns the last modification date of the content. + // Deprecated: Use .Lastmod instead. LastChange() time.Time + // Returns the last modification date of the content. + Lastmod() time.Time + // Returns the Menus for this site. Menus() navigation.Menus @@ -108,10 +111,6 @@ type Site interface { // Returns the site config. Config() SiteConfig - // Returns the identity of this site. 
- // This is for internal use only. - GetIdentity() identity.Identity - // Author is deprecated and will be removed in a future release. Author() map[string]interface{} @@ -127,9 +126,6 @@ type Site interface { // Deprecated: Use Config().Privacy.Disqus instead. DisqusShortname() string - // For internal use only. - GetPageWithTemplateInfo(info tpl.Info, ref ...string) (Page, error) - // BuildDrafts is deprecated and will be removed in a future release. BuildDrafts() bool @@ -154,6 +150,9 @@ func (s Sites) First() Site { return s[0] } +// Some additional interfaces implemented by siteWrapper that's not on Site. +var _ identity.ForEeachIdentityByNameProvider = (*siteWrapper)(nil) + type siteWrapper struct { s Site } @@ -165,6 +164,10 @@ func WrapSite(s Site) Site { return &siteWrapper{s: s} } +func (s *siteWrapper) Key() string { + return s.s.Language().Lang +} + func (s *siteWrapper) Social() map[string]string { return s.s.Social() } @@ -260,7 +263,11 @@ func (s *siteWrapper) Taxonomies() TaxonomyList { } func (s *siteWrapper) LastChange() time.Time { - return s.s.LastChange() + return s.s.Lastmod() +} + +func (s *siteWrapper) Lastmod() time.Time { + return s.s.Lastmod() } func (s *siteWrapper) Menus() navigation.Menus { @@ -283,14 +290,6 @@ func (s *siteWrapper) Data() map[string]any { return s.s.Data() } -func (s *siteWrapper) GetIdentity() identity.Identity { - return s.s.GetIdentity() -} - -func (s *siteWrapper) GetPageWithTemplateInfo(info tpl.Info, ref ...string) (Page, error) { - return s.s.GetPageWithTemplateInfo(info, ref...) -} - func (s *siteWrapper) BuildDrafts() bool { return s.s.BuildDrafts() } @@ -312,6 +311,11 @@ func (s *siteWrapper) RSSLink() template.URL { return s.s.RSSLink() } +// For internal use only. 
+func (s *siteWrapper) ForEeachIdentityByName(name string, f func(identity.Identity) bool) { + s.s.(identity.ForEeachIdentityByNameProvider).ForEeachIdentityByName(name, f) +} + type testSite struct { h hugo.HugoInfo l *langs.Language @@ -341,6 +345,10 @@ func (testSite) LastChange() (t time.Time) { return } +func (testSite) Lastmod() (t time.Time) { + return +} + func (t testSite) Title() string { return "foo" } @@ -386,10 +394,6 @@ func (t testSite) MainSections() []string { return nil } -func (t testSite) GetIdentity() identity.Identity { - return identity.KeyValueIdentity{Key: "site", Value: t.l.Lang} -} - // Deprecated: use hugo.IsServer instead func (t testSite) IsServer() bool { return false @@ -439,10 +443,6 @@ func (s testSite) Config() SiteConfig { return SiteConfig{} } -func (testSite) GetPageWithTemplateInfo(info tpl.Info, ref ...string) (Page, error) { - return nil, nil -} - // Deprecated: Use .Site.Config.Services.Disqus.Shortname instead func (testSite) DisqusShortname() string { return "" diff --git a/resources/page/siteidentities/identities.go b/resources/page/siteidentities/identities.go new file mode 100644 index 000000000..8481999cf --- /dev/null +++ b/resources/page/siteidentities/identities.go @@ -0,0 +1,34 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package siteidentities + +import ( + "github.com/gohugoio/hugo/identity" +) + +const ( + // Identifies site.Data. 
+ // The change detection in /data is currently very coarse grained. + Data = identity.StringIdentity("site.Data") +) + +// FromString returns the identity from the given string, +// or identity.Anonymous if not found. +func FromString(name string) (identity.Identity, bool) { + switch name { + case "Data": + return Data, true + } + return identity.Anonymous, false +} diff --git a/resources/page/taxonomy.go b/resources/page/taxonomy.go index 3aa0c7a7b..66c9e6fae 100644 --- a/resources/page/taxonomy.go +++ b/resources/page/taxonomy.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/resources/page/testhelpers_page_test.go b/resources/page/testhelpers_page_test.go deleted file mode 100644 index 95124cb58..000000000 --- a/resources/page/testhelpers_page_test.go +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package page_test - -import ( - "github.com/gohugoio/hugo/common/loggers" - "github.com/gohugoio/hugo/config" - "github.com/gohugoio/hugo/config/testconfig" - "github.com/gohugoio/hugo/helpers" - "github.com/gohugoio/hugo/hugofs" - "github.com/spf13/afero" -) - -func newTestPathSpec() *helpers.PathSpec { - return newTestPathSpecFor(config.New()) -} - -func newTestPathSpecFor(cfg config.Provider) *helpers.PathSpec { - mfs := afero.NewMemMapFs() - conf := testconfig.GetTestConfig(mfs, cfg) - fs := hugofs.NewFrom(mfs, conf.BaseConfig()) - ps, err := helpers.NewPathSpec(fs, conf, loggers.NewDefault()) - if err != nil { - panic(err) - } - return ps -} diff --git a/resources/page/testhelpers_test.go b/resources/page/testhelpers_test.go index ca2c4ff53..e80ed422d 100644 --- a/resources/page/testhelpers_test.go +++ b/resources/page/testhelpers_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -21,10 +21,7 @@ import ( "path/filepath" "time" - "github.com/gohugoio/hugo/hugofs/files" - "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/markup/tableofcontents" - "github.com/gohugoio/hugo/tpl" "github.com/gohugoio/hugo/resources/resource" @@ -32,6 +29,7 @@ import ( "github.com/gohugoio/hugo/common/hugo" "github.com/gohugoio/hugo/common/maps" + "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/langs" @@ -54,7 +52,7 @@ func newTestPage() *testPage { func newTestPageWithFile(filename string) *testPage { filename = filepath.FromSlash(filename) - file := source.NewTestFile(filename) + file := source.NewFileInfoFrom(filename, filename) l, err := langs.NewLanguage( "en", @@ -107,7 +105,7 @@ type testPage struct { params map[string]any data map[string]any - file source.File + file *source.File currentSection *testPage sectionEntries []string @@ -141,7 +139,7 @@ func (p *testPage) BaseFileName() string { panic("testpage: not implemented") } -func (p *testPage) BundleType() files.ContentClass { +func (p *testPage) BundleType() string { panic("testpage: not implemented") } @@ -201,7 +199,7 @@ func (p *testPage) Extension() string { panic("testpage: not implemented") } -func (p *testPage) File() source.File { +func (p *testPage) File() *source.File { return p.file } @@ -225,10 +223,6 @@ func (p *testPage) GetPage(ref string) (Page, error) { panic("testpage: not implemented") } -func (p *testPage) GetPageWithTemplateInfo(info tpl.Info, ref string) (Page, error) { - panic("testpage: not implemented") -} - func (p *testPage) GetParam(key string) any { panic("testpage: not implemented") } @@ -261,15 +255,15 @@ func (p *testPage) Hugo() hugo.HugoInfo { panic("testpage: not implemented") } -func (p *testPage) InSection(other any) (bool, error) { +func (p *testPage) InSection(other any) bool { panic("testpage: not implemented") } -func (p *testPage) IsAncestor(other any) (bool, 
error) { +func (p *testPage) IsAncestor(other any) bool { panic("testpage: not implemented") } -func (p *testPage) IsDescendant(other any) (bool, error) { +func (p *testPage) IsDescendant(other any) bool { panic("testpage: not implemented") } @@ -301,6 +295,10 @@ func (p *testPage) IsTranslated() bool { panic("testpage: not implemented") } +func (p *testPage) Ancestors() Pages { + panic("testpage: not implemented") +} + func (p *testPage) Keywords() []string { return nil } @@ -415,16 +413,12 @@ func (p *testPage) Parent() Page { panic("testpage: not implemented") } -func (p *testPage) Ancestors() Pages { - panic("testpage: not implemented") -} - func (p *testPage) Path() string { return p.path } -func (p *testPage) Pathc() string { - return p.path +func (p *testPage) PathInfo() *paths.Path { + panic("testpage: not implemented") } func (p *testPage) Permalink() string { @@ -604,10 +598,6 @@ func (p *testPage) WordCount(context.Context) int { panic("testpage: not implemented") } -func (p *testPage) GetIdentity() identity.Identity { - panic("testpage: not implemented") -} - func createTestPages(num int) Pages { pages := make(Pages, num) diff --git a/resources/page/zero_file.autogen.go b/resources/page/zero_file.autogen.go index 72d98998e..4b7c034a1 100644 --- a/resources/page/zero_file.autogen.go +++ b/resources/page/zero_file.autogen.go @@ -14,75 +14,3 @@ // This file is autogenerated. package page - -import ( - "github.com/gohugoio/hugo/common/loggers" - "github.com/gohugoio/hugo/hugofs" - "github.com/gohugoio/hugo/source" -) - -// ZeroFile represents a zero value of source.File with warnings if invoked. -type zeroFile struct { - log loggers.Logger -} - -func NewZeroFile(log loggers.Logger) source.File { - return zeroFile{log: log} -} - -func (zeroFile) IsZero() bool { - return true -} - -func (z zeroFile) Path() (o0 string) { - z.log.Warnln(".File.Path on zero object. 
Wrap it in if or with: {{ with .File }}{{ .Path }}{{ end }}") - return -} -func (z zeroFile) Section() (o0 string) { - z.log.Warnln(".File.Section on zero object. Wrap it in if or with: {{ with .File }}{{ .Section }}{{ end }}") - return -} -func (z zeroFile) Lang() (o0 string) { - z.log.Warnln(".File.Lang on zero object. Wrap it in if or with: {{ with .File }}{{ .Lang }}{{ end }}") - return -} -func (z zeroFile) Filename() (o0 string) { - z.log.Warnln(".File.Filename on zero object. Wrap it in if or with: {{ with .File }}{{ .Filename }}{{ end }}") - return -} -func (z zeroFile) Dir() (o0 string) { - z.log.Warnln(".File.Dir on zero object. Wrap it in if or with: {{ with .File }}{{ .Dir }}{{ end }}") - return -} -func (z zeroFile) Extension() (o0 string) { - z.log.Warnln(".File.Extension on zero object. Wrap it in if or with: {{ with .File }}{{ .Extension }}{{ end }}") - return -} -func (z zeroFile) Ext() (o0 string) { - z.log.Warnln(".File.Ext on zero object. Wrap it in if or with: {{ with .File }}{{ .Ext }}{{ end }}") - return -} -func (z zeroFile) LogicalName() (o0 string) { - z.log.Warnln(".File.LogicalName on zero object. Wrap it in if or with: {{ with .File }}{{ .LogicalName }}{{ end }}") - return -} -func (z zeroFile) BaseFileName() (o0 string) { - z.log.Warnln(".File.BaseFileName on zero object. Wrap it in if or with: {{ with .File }}{{ .BaseFileName }}{{ end }}") - return -} -func (z zeroFile) TranslationBaseName() (o0 string) { - z.log.Warnln(".File.TranslationBaseName on zero object. Wrap it in if or with: {{ with .File }}{{ .TranslationBaseName }}{{ end }}") - return -} -func (z zeroFile) ContentBaseName() (o0 string) { - z.log.Warnln(".File.ContentBaseName on zero object. Wrap it in if or with: {{ with .File }}{{ .ContentBaseName }}{{ end }}") - return -} -func (z zeroFile) UniqueID() (o0 string) { - z.log.Warnln(".File.UniqueID on zero object. 
Wrap it in if or with: {{ with .File }}{{ .UniqueID }}{{ end }}") - return -} -func (z zeroFile) FileInfo() (o0 hugofs.FileMetaInfo) { - z.log.Warnln(".File.FileInfo on zero object. Wrap it in if or with: {{ with .File }}{{ .FileInfo }}{{ end }}") - return -} diff --git a/resources/postpub/postpub.go b/resources/postpub/postpub.go index 93b5c2638..65e32145c 100644 --- a/resources/postpub/postpub.go +++ b/resources/postpub/postpub.go @@ -31,7 +31,7 @@ import ( type PostPublishedResource interface { resource.ResourceTypeProvider resource.ResourceLinksProvider - resource.ResourceMetaProvider + resource.ResourceNameTitleProvider resource.ResourceParamsProvider resource.ResourceDataProvider resource.OriginProvider diff --git a/resources/resource.go b/resources/resource.go index b7e6b65a8..e78dd12cb 100644 --- a/resources/resource.go +++ b/resources/resource.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -15,68 +15,55 @@ package resources import ( "context" + "errors" "fmt" "io" - "os" - "path" - "path/filepath" + "mime" "strings" "sync" + "sync/atomic" + "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/resources/internal" "github.com/gohugoio/hugo/common/herrors" - - "github.com/gohugoio/hugo/hugofs" + "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/media" - "github.com/gohugoio/hugo/source" - - "errors" "github.com/gohugoio/hugo/common/hugio" "github.com/gohugoio/hugo/common/maps" - "github.com/gohugoio/hugo/resources/page" "github.com/gohugoio/hugo/resources/resource" - "github.com/spf13/afero" "github.com/gohugoio/hugo/helpers" ) var ( - _ resource.ContentResource = (*genericResource)(nil) - _ resource.ReadSeekCloserResource = (*genericResource)(nil) - _ resource.Resource = (*genericResource)(nil) - _ resource.Source = (*genericResource)(nil) - _ resource.Cloner = (*genericResource)(nil) - _ resource.ResourcesLanguageMerger = (*resource.Resources)(nil) - _ permalinker = (*genericResource)(nil) - _ resource.Identifier = (*genericResource)(nil) - _ fileInfo = (*genericResource)(nil) + _ resource.ContentResource = (*genericResource)(nil) + _ resource.ReadSeekCloserResource = (*genericResource)(nil) + _ resource.Resource = (*genericResource)(nil) + _ resource.Source = (*genericResource)(nil) + _ resource.Cloner = (*genericResource)(nil) + _ resource.ResourcesLanguageMerger = (*resource.Resources)(nil) + _ resource.Identifier = (*genericResource)(nil) + _ identity.IdentityGroupProvider = (*genericResource)(nil) + _ identity.DependencyManagerProvider = (*genericResource)(nil) + _ identity.Identity = (*genericResource)(nil) + _ fileInfo = (*genericResource)(nil) ) type ResourceSourceDescriptor struct { - // TargetPaths is a callback to fetch paths's relative to its owner. - TargetPaths func() page.TargetPaths - - // Need one of these to load the resource content. 
- SourceFile source.File - OpenReadSeekCloser resource.OpenReadSeekCloser - - FileInfo os.FileInfo - - // If OpenReadSeekerCloser is not set, we use this to open the file. - SourceFilename string + // The source content. + OpenReadSeekCloser hugio.OpenReadSeekCloser - Fs afero.Fs + // The canonical source path. + Path *paths.Path - Data map[string]any - - // Set when its known up front, else it's resolved from the target filename. - MediaType media.Type + // The name of the resource. + Name string - // The relative target filename without any language code. - RelTargetFilename string + // The name of the resource as it was read from the source. + NameOriginal string // Any base paths prepended to the target path. This will also typically be the // language code, but setting it here means that it should not have any effect on @@ -85,15 +72,109 @@ type ResourceSourceDescriptor struct { // multiple targets. TargetBasePaths []string + TargetPath string + BasePathRelPermalink string + BasePathTargetPath string + + // The Data to associate with this resource. + Data map[string]any + // Delay publishing until either Permalink or RelPermalink is called. Maybe never. LazyPublish bool + + // Set when its known up front, else it's resolved from the target filename. + MediaType media.Type + + // Used to track depenencies (e.g. imports). May be nil if that's of no concern. + DependencyManager identity.Manager + + // A shared identity for this resource and all its clones. + // If this is not set, an Identity is created. + GroupIdentity identity.Identity } -func (r ResourceSourceDescriptor) Filename() string { - if r.SourceFile != nil { - return r.SourceFile.Filename() +func (fd *ResourceSourceDescriptor) init(r *Spec) error { + if len(fd.TargetBasePaths) == 0 { + // If not set, we publish the same resource to all hosts. 
+ fd.TargetBasePaths = r.MultihostTargetBasePaths + } + + if fd.OpenReadSeekCloser == nil { + panic(errors.New("OpenReadSeekCloser is nil")) + } + + if fd.TargetPath == "" { + panic(errors.New("RelPath is empty")) + } + + if fd.Path == nil { + fd.Path = paths.Parse("", fd.TargetPath) + } + + if fd.TargetPath == "" { + fd.TargetPath = fd.Path.Path() + } else { + fd.TargetPath = paths.ToSlashPreserveLeading(fd.TargetPath) } - return r.SourceFilename + + fd.BasePathRelPermalink = paths.ToSlashPreserveLeading(fd.BasePathRelPermalink) + if fd.BasePathRelPermalink == "/" { + fd.BasePathRelPermalink = "" + } + fd.BasePathTargetPath = paths.ToSlashPreserveLeading(fd.BasePathTargetPath) + if fd.BasePathTargetPath == "/" { + fd.BasePathTargetPath = "" + } + + fd.TargetPath = paths.ToSlashPreserveLeading(fd.TargetPath) + for i, base := range fd.TargetBasePaths { + dir := paths.ToSlashPreserveLeading(base) + if dir == "/" { + dir = "" + } + fd.TargetBasePaths[i] = dir + } + + if fd.Name == "" { + fd.Name = fd.TargetPath + } + + if fd.NameOriginal == "" { + fd.NameOriginal = fd.Name + } + + mediaType := fd.MediaType + if mediaType.IsZero() { + ext := fd.Path.Ext() + var ( + found bool + suffixInfo media.SuffixInfo + ) + mediaType, suffixInfo, found = r.MediaTypes().GetFirstBySuffix(ext) + // TODO(bep) we need to handle these ambiguous types better, but in this context + // we most likely want the application/xml type. + if suffixInfo.Suffix == "xml" && mediaType.SubType == "rss" { + mediaType, found = r.MediaTypes().GetByType("application/xml") + } + + if !found { + // A fallback. Note that mime.TypeByExtension is slow by Hugo standards, + // so we should configure media types to avoid this lookup for most + // situations. + mimeStr := mime.TypeByExtension("." 
+ ext) + if mimeStr != "" { + mediaType, _ = media.FromStringAndExt(mimeStr, ext) + } + } + } + + fd.MediaType = mediaType + + if fd.DependencyManager == nil { + fd.DependencyManager = identity.NopManager + } + + return nil } type ResourceTransformer interface { @@ -147,23 +228,25 @@ type baseResourceResource interface { type baseResourceInternal interface { resource.Source + resource.NameOriginalProvider fileInfo - metaAssigner + mediaTypeAssigner targetPather ReadSeekCloser() (hugio.ReadSeekCloser, error) + identity.IdentityGroupProvider + identity.DependencyManagerProvider + // For internal use. cloneWithUpdates(*transformationUpdate) (baseResource, error) tryTransformedFileCache(key string, u *transformationUpdate) io.ReadCloser + getResourcePaths() internal.ResourcePaths + specProvider - getResourcePaths() *resourcePathDescriptor - getTargetFilenames() []string openPublishFileForWriting(relTargetPath string) (io.WriteCloser, error) - - relTargetPathForRel(rel string, addBaseTargetPath, isAbs, isURL bool) string } type specProvider interface { @@ -173,10 +256,10 @@ type specProvider interface { type baseResource interface { baseResourceResource baseResourceInternal + resource.Staler } -type commonResource struct { -} +type commonResource struct{} // Slice is for internal use. // for the template functions. See collections.Slice. @@ -201,60 +284,131 @@ func (commonResource) Slice(in any) (any, error) { } } -type dirFile struct { - // This is the directory component with Unix-style slashes. - dir string - // This is the file component. - file string +type fileInfo interface { + setOpenSource(hugio.OpenReadSeekCloser) + setSourceFilenameIsHash(bool) + setTargetPath(internal.ResourcePaths) + size() int64 + hashProvider +} + +type hashProvider interface { + hash() string } -func (d dirFile) path() string { - return path.Join(d.dir, d.file) +type StaleValue[V any] struct { + // The value. + Value V + + // IsStaleFunc reports whether the value is stale. 
+ IsStaleFunc func() bool } -type fileInfo interface { - getSourceFilename() string - setSourceFilename(string) - setSourceFilenameIsHash(bool) - setSourceFs(afero.Fs) - getFileInfo() hugofs.FileMetaInfo - hash() (string, error) - size() int +func (s *StaleValue[V]) IsStale() bool { + return s.IsStaleFunc() +} + +type AtomicStaler struct { + stale uint32 +} + +func (s *AtomicStaler) MarkStale() { + atomic.StoreUint32(&s.stale, 1) +} + +func (s *AtomicStaler) IsStale() bool { + return atomic.LoadUint32(&(s.stale)) > 0 +} + +// For internal use. +type GenericResourceTestInfo struct { + Paths internal.ResourcePaths +} + +// For internal use. +func GetTestInfoForResource(r resource.Resource) GenericResourceTestInfo { + var gr *genericResource + switch v := r.(type) { + case *genericResource: + gr = v + case *resourceAdapter: + gr = v.target.(*genericResource) + default: + panic(fmt.Sprintf("unknown resource type: %T", r)) + } + return GenericResourceTestInfo{ + Paths: gr.paths, + } } // genericResource represents a generic linkable resource. type genericResource struct { - *resourcePathDescriptor - *resourceFileInfo *resourceContent - spec *Spec + sd ResourceSourceDescriptor + paths internal.ResourcePaths + + sourceFilenameIsHash bool + + h *resourceHash // A hash of the source content. Is only calculated in caching situations. 
+ + resource.Staler title string name string params map[string]any - data map[string]any - resourceType string - mediaType media.Type + spec *Spec +} + +func (l *genericResource) IdentifierBase() string { + return l.sd.Path.IdentifierBase() +} + +func (l *genericResource) GetIdentityGroup() identity.Identity { + return l.sd.GroupIdentity +} + +func (l *genericResource) GetDependencyManager() identity.Manager { + return l.sd.DependencyManager +} + +func (l *genericResource) ReadSeekCloser() (hugio.ReadSeekCloser, error) { + return l.sd.OpenReadSeekCloser() } func (l *genericResource) Clone() resource.Resource { return l.clone() } -func (l *genericResource) cloneTo(targetPath string) resource.Resource { - c := l.clone() - - targetPath = helpers.ToSlashTrimLeading(targetPath) - dir, file := path.Split(targetPath) +func (l *genericResource) size() int64 { + l.hash() + return l.h.size +} - c.resourcePathDescriptor = &resourcePathDescriptor{ - relTargetDirFile: dirFile{dir: dir, file: file}, +func (l *genericResource) hash() string { + if err := l.h.init(l); err != nil { + panic(err) } + return l.h.value +} - return c +func (l *genericResource) setOpenSource(openSource hugio.OpenReadSeekCloser) { + l.sd.OpenReadSeekCloser = openSource +} +func (l *genericResource) setSourceFilenameIsHash(b bool) { + l.sourceFilenameIsHash = b +} + +func (l *genericResource) setTargetPath(d internal.ResourcePaths) { + l.paths = d +} + +func (l *genericResource) cloneTo(targetPath string) resource.Resource { + c := l.clone() + c.paths = c.paths.FromTargetPath(targetPath) + return c } func (l *genericResource) Content(context.Context) (any, error) { @@ -270,41 +424,50 @@ func (r *genericResource) Err() resource.ResourceError { } func (l *genericResource) Data() any { - return l.data + return l.sd.Data } func (l *genericResource) Key() string { - basePath := l.spec.Cfg.BaseURL().BasePath + basePath := l.spec.Cfg.BaseURL().BasePathNoTrailingSlash + var key string if basePath == "" { - return 
l.RelPermalink() + key = l.RelPermalink() + } else { + key = strings.TrimPrefix(l.RelPermalink(), basePath) } - return strings.TrimPrefix(l.RelPermalink(), basePath) + + if l.spec.Cfg.IsMultihost() { + key = l.spec.Lang() + key + } + + return key } func (l *genericResource) MediaType() media.Type { - return l.mediaType + return l.sd.MediaType } func (l *genericResource) setMediaType(mediaType media.Type) { - l.mediaType = mediaType + l.sd.MediaType = mediaType } func (l *genericResource) Name() string { return l.name } -func (l *genericResource) Params() maps.Params { - return l.params +func (l *genericResource) NameOriginal() string { + return l.sd.NameOriginal } -func (l *genericResource) Permalink() string { - return l.spec.PermalinkForBaseURL(l.relPermalinkForRel(l.relTargetDirFile.path(), true), l.spec.Cfg.BaseURL().HostURL()) +func (l *genericResource) Params() maps.Params { + return l.params } func (l *genericResource) Publish() error { var err error l.publishInit.Do(func() { - targetFilenames := l.getTargetFilenames() + targetFilenames := l.getResourcePaths().TargetFilenames() + if l.sourceFilenameIsHash { // This is a processed image. We want to avoid copying it if it hasn't changed. var changedFilenames []string @@ -340,40 +503,30 @@ func (l *genericResource) Publish() error { } func (l *genericResource) RelPermalink() string { - return l.relPermalinkFor(l.relTargetDirFile.path()) + return l.spec.PathSpec.GetBasePath(false) + paths.PathEscape(l.paths.TargetLink()) +} + +func (l *genericResource) Permalink() string { + return l.spec.Cfg.BaseURL().WithPathNoTrailingSlash + paths.PathEscape(l.paths.TargetPath()) } func (l *genericResource) ResourceType() string { - return l.resourceType + return l.MediaType().MainType } func (l *genericResource) String() string { - return fmt.Sprintf("Resource(%s: %s)", l.resourceType, l.name) + return fmt.Sprintf("Resource(%s: %s)", l.ResourceType(), l.name) } // Path is stored with Unix style slashes. 
func (l *genericResource) TargetPath() string { - return l.relTargetDirFile.path() + return l.paths.TargetPath() } func (l *genericResource) Title() string { return l.title } -func (l *genericResource) createBasePath(rel string, isURL bool) string { - if l.targetPathBuilder == nil { - return rel - } - tp := l.targetPathBuilder() - - if isURL { - return path.Join(tp.SubResourceBaseLink, rel) - } - - // TODO(bep) path - return path.Join(filepath.ToSlash(tp.SubResourceBaseTarget), rel) -} - func (l *genericResource) initContent() error { var err error l.contentInit.Do(func() { @@ -396,28 +549,12 @@ func (l *genericResource) initContent() error { return err } -func (l *genericResource) setName(name string) { - l.name = name -} - -func (l *genericResource) getResourcePaths() *resourcePathDescriptor { - return l.resourcePathDescriptor -} - func (l *genericResource) getSpec() *Spec { return l.spec } -func (l *genericResource) getTargetFilenames() []string { - paths := l.relTargetPaths() - for i, p := range paths { - paths[i] = filepath.Clean(p) - } - return paths -} - -func (l *genericResource) setTitle(title string) { - l.title = title +func (l *genericResource) getResourcePaths() internal.ResourcePaths { + return l.paths } func (r *genericResource) tryTransformedFileCache(key string, u *transformationUpdate) io.ReadCloser { @@ -437,12 +574,12 @@ func (r *genericResource) mergeData(in map[string]any) { if len(in) == 0 { return } - if r.data == nil { - r.data = make(map[string]any) + if r.sd.Data == nil { + r.sd.Data = make(map[string]any) } for k, v := range in { - if _, found := r.data[k]; !found { - r.data[k] = v + if _, found := r.sd.Data[k]; !found { + r.sd.Data[k] = v } } } @@ -453,142 +590,49 @@ func (rc *genericResource) cloneWithUpdates(u *transformationUpdate) (baseResour if u.content != nil { r.contentInit.Do(func() { r.content = *u.content - r.openReadSeekerCloser = func() (hugio.ReadSeekCloser, error) { + r.sd.OpenReadSeekCloser = func() 
(hugio.ReadSeekCloser, error) { return hugio.NewReadSeekerNoOpCloserFromString(r.content), nil } }) } - r.mediaType = u.mediaType + r.sd.MediaType = u.mediaType if u.sourceFilename != nil { - r.setSourceFilename(*u.sourceFilename) - } - - if u.sourceFs != nil { - r.setSourceFs(u.sourceFs) + if u.sourceFs == nil { + return nil, errors.New("sourceFs is nil") + } + r.setOpenSource(func() (hugio.ReadSeekCloser, error) { + return u.sourceFs.Open(*u.sourceFilename) + }) + } else if u.sourceFs != nil { + return nil, errors.New("sourceFs is set without sourceFilename") } if u.targetPath == "" { return nil, errors.New("missing targetPath") } - fpath, fname := path.Split(u.targetPath) - r.resourcePathDescriptor.relTargetDirFile = dirFile{dir: fpath, file: fname} - + r.setTargetPath(r.paths.FromTargetPath(u.targetPath)) r.mergeData(u.data) return r, nil } func (l genericResource) clone() *genericResource { - gi := *l.resourceFileInfo - rp := *l.resourcePathDescriptor - l.resourceFileInfo = &gi - l.resourcePathDescriptor = &rp l.resourceContent = &resourceContent{} return &l } func (r *genericResource) openPublishFileForWriting(relTargetPath string) (io.WriteCloser, error) { - return helpers.OpenFilesForWriting(r.spec.BaseFs.PublishFs, r.relTargetPathsFor(relTargetPath)...) 
-} - -func (l *genericResource) permalinkFor(target string) string { - return l.spec.PermalinkForBaseURL(l.relPermalinkForRel(target, true), l.spec.Cfg.BaseURL().HostURL()) -} - -func (l *genericResource) relPermalinkFor(target string) string { - return l.relPermalinkForRel(target, false) -} - -func (l *genericResource) relPermalinkForRel(rel string, isAbs bool) string { - return l.spec.PathSpec.URLizeFilename(l.relTargetPathForRel(rel, false, isAbs, true)) -} - -func (l *genericResource) relTargetPathForRel(rel string, addBaseTargetPath, isAbs, isURL bool) string { - if addBaseTargetPath && len(l.baseTargetPathDirs) > 1 { - panic("multiple baseTargetPathDirs") - } - var basePath string - if addBaseTargetPath && len(l.baseTargetPathDirs) > 0 { - basePath = l.baseTargetPathDirs[0] - } - - return l.relTargetPathForRelAndBasePath(rel, basePath, isAbs, isURL) -} - -func (l *genericResource) relTargetPathForRelAndBasePath(rel, basePath string, isAbs, isURL bool) string { - rel = l.createBasePath(rel, isURL) - - if basePath != "" { - rel = path.Join(basePath, rel) - } - - if l.baseOffset != "" { - rel = path.Join(l.baseOffset, rel) - } - - if isURL { - bp := l.spec.PathSpec.GetBasePath(!isAbs) - if bp != "" { - rel = path.Join(bp, rel) - } - } - - if len(rel) == 0 || rel[0] != '/' { - rel = "/" + rel - } - - return rel -} - -func (l *genericResource) relTargetPaths() []string { - return l.relTargetPathsForRel(l.TargetPath()) -} - -func (l *genericResource) relTargetPathsFor(target string) []string { - return l.relTargetPathsForRel(target) -} - -func (l *genericResource) relTargetPathsForRel(rel string) []string { - if len(l.baseTargetPathDirs) == 0 { - return []string{l.relTargetPathForRelAndBasePath(rel, "", false, false)} - } - - targetPaths := make([]string, len(l.baseTargetPathDirs)) - for i, dir := range l.baseTargetPathDirs { - targetPaths[i] = l.relTargetPathForRelAndBasePath(rel, dir, false, false) - } - return targetPaths -} - -func (l *genericResource) 
updateParams(params map[string]any) { - if l.params == nil { - l.params = params - return - } - - // Sets the params not already set - for k, v := range params { - if _, found := l.params[k]; !found { - l.params[k] = v - } - } + filenames := r.paths.FromTargetPath(relTargetPath).TargetFilenames() + return helpers.OpenFilesForWriting(r.spec.BaseFs.PublishFs, filenames...) } type targetPather interface { TargetPath() string } -type permalinker interface { - targetPather - permalinkFor(target string) string - relPermalinkFor(target string) string - relTargetPaths() []string - relTargetPathsFor(target string) []string -} - type resourceContent struct { content string contentInit sync.Once @@ -596,113 +640,31 @@ type resourceContent struct { publishInit sync.Once } -type resourceFileInfo struct { - // Will be set if this resource is backed by something other than a file. - openReadSeekerCloser resource.OpenReadSeekCloser - - // This may be set to tell us to look in another filesystem for this resource. - // We, by default, use the sourceFs filesystem in the spec below. - sourceFs afero.Fs - - // Absolute filename to the source, including any content folder path. - // Note that this is absolute in relation to the filesystem it is stored in. - // It can be a base path filesystem, and then this filename will not match - // the path to the file on the real filesystem. - sourceFilename string - - // For performance. This means that whenever the content changes, the filename changes. - sourceFilenameIsHash bool - - fi hugofs.FileMetaInfo - - // A hash of the source content. Is only calculated in caching situations. 
- h *resourceHash -} - -func (fi *resourceFileInfo) ReadSeekCloser() (hugio.ReadSeekCloser, error) { - if fi.openReadSeekerCloser != nil { - return fi.openReadSeekerCloser() - } - - f, err := fi.getSourceFs().Open(fi.getSourceFilename()) - if err != nil { - return nil, err - } - return f, nil -} - -func (fi *resourceFileInfo) getFileInfo() hugofs.FileMetaInfo { - return fi.fi -} - -func (fi *resourceFileInfo) getSourceFilename() string { - return fi.sourceFilename -} - -func (fi *resourceFileInfo) setSourceFilename(s string) { - // Make sure it's always loaded by sourceFilename. - fi.openReadSeekerCloser = nil - fi.sourceFilename = s -} - -func (fi *resourceFileInfo) setSourceFilenameIsHash(b bool) { - fi.sourceFilenameIsHash = b -} - -func (fi *resourceFileInfo) getSourceFs() afero.Fs { - return fi.sourceFs -} - -func (fi *resourceFileInfo) setSourceFs(fs afero.Fs) { - fi.sourceFs = fs +type resourceHash struct { + value string + size int64 + initOnce sync.Once } -func (fi *resourceFileInfo) hash() (string, error) { - var err error - fi.h.init.Do(func() { +func (r *resourceHash) init(l hugio.ReadSeekCloserProvider) error { + var initErr error + r.initOnce.Do(func() { var hash string - var f hugio.ReadSeekCloser - f, err = fi.ReadSeekCloser() + var size int64 + f, err := l.ReadSeekCloser() if err != nil { - err = fmt.Errorf("failed to open source file: %w", err) + initErr = fmt.Errorf("failed to open source: %w", err) return } defer f.Close() - - hash, err = helpers.MD5FromFileFast(f) + hash, size, err = helpers.MD5FromReaderFast(f) if err != nil { + initErr = fmt.Errorf("failed to calculate hash: %w", err) return } - fi.h.value = hash + r.value = hash + r.size = size }) - return fi.h.value, err -} - -func (fi *resourceFileInfo) size() int { - if fi.fi == nil { - return 0 - } - - return int(fi.fi.Size()) -} - -type resourceHash struct { - value string - init sync.Once -} - -type resourcePathDescriptor struct { - // The relative target directory and filename. 
- relTargetDirFile dirFile - - // Callback used to construct a target path relative to its owner. - targetPathBuilder func() page.TargetPaths - - // This will normally be the same as above, but this will only apply to publishing - // of resources. It may be multiple values when in multihost mode. - baseTargetPathDirs []string - - // baseOffset is set when the output format's path has a offset, e.g. for AMP. - baseOffset string + return initErr } diff --git a/resources/resource/dates.go b/resources/resource/dates.go index 6d19ca7b9..88968750d 100644 --- a/resources/resource/dates.go +++ b/resources/resource/dates.go @@ -45,6 +45,10 @@ type Dates struct { FExpiryDate time.Time } +func (d *Dates) IsDateOrLastModAfter(in Dated) bool { + return d.Date().After(in.Date()) || d.Lastmod().After(in.Lastmod()) +} + func (d *Dates) UpdateDateAndLastmodIfAfter(in Dated) { if in.Date().After(d.Date()) { d.FDate = in.Date() diff --git a/resources/resource/resources.go b/resources/resource/resources.go index 795fe1934..9f298b7a6 100644 --- a/resources/resource/resources.go +++ b/resources/resource/resources.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -18,6 +18,7 @@ import ( "fmt" "strings" + "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/hugofs/glob" "github.com/spf13/cast" ) @@ -54,16 +55,33 @@ func (r Resources) ByType(typ any) Resources { // Get locates the name given in Resources. // The search is case insensitive. func (r Resources) Get(name any) Resource { + if r == nil { + return nil + } namestr, err := cast.ToStringE(name) if err != nil { panic(err) } namestr = strings.ToLower(namestr) + + // First check the Name. 
+ // Note that this can be modified by the user in the front matter, + // also, it does not contain any language code. for _, resource := range r { if strings.EqualFold(namestr, resource.Name()) { return resource } } + + // Finally, check the original name. + for _, resource := range r { + if nop, ok := resource.(NameOriginalProvider); ok { + if strings.EqualFold(namestr, nop.NameOriginal()) { + return resource + } + } + } + return nil } @@ -75,13 +93,15 @@ func (r Resources) GetMatch(pattern any) Resource { panic(err) } + patternstr = paths.NormalizePathStringBasic(patternstr) + g, err := glob.GetGlob(patternstr) if err != nil { panic(err) } for _, resource := range r { - if g.Match(strings.ToLower(resource.Name())) { + if g.Match(paths.NormalizePathStringBasic(resource.Name())) { return resource } } @@ -163,7 +183,6 @@ type Source interface { // Note that GetRemote (as found in resources.GetRemote) is // not covered by this interface, as this is only available as a global template function. type ResourceFinder interface { - // Get locates the Resource with the given name in the current context (e.g. in .Page.Resources). // // It returns nil if no Resource could found, panics if name is invalid. diff --git a/resources/resource/resourcetypes.go b/resources/resource/resourcetypes.go index 9e550e252..43d0aa786 100644 --- a/resources/resource/resourcetypes.go +++ b/resources/resource/resourcetypes.go @@ -76,7 +76,7 @@ type Resource interface { ResourceTypeProvider MediaTypeProvider ResourceLinksProvider - ResourceMetaProvider + ResourceNameTitleProvider ResourceParamsProvider ResourceDataProvider ErrProvider @@ -107,19 +107,41 @@ type ResourceLinksProvider interface { RelPermalink() string } +// ResourceMetaProvider provides metadata about a resource. type ResourceMetaProvider interface { + ResourceNameTitleProvider + ResourceParamsProvider +} + +type WithResourceMetaProvider interface { + // WithResourceMeta creates a new Resource with the given metadata. 
+ // For internal use. + WithResourceMeta(ResourceMetaProvider) Resource +} + +type ResourceNameTitleProvider interface { // Name is the logical name of this resource. This can be set in the front matter // metadata for this resource. If not set, Hugo will assign a value. // This will in most cases be the base filename. // So, for the image "/some/path/sunset.jpg" this will be "sunset.jpg". // The value returned by this method will be used in the GetByPrefix and ByPrefix methods // on Resources. + // Note that for bundled content resources with language code in the filename, this will + // be the name without the language code. Name() string // Title returns the title if set in front matter. For content pages, this will be the expected value. Title() string } +type NameOriginalProvider interface { + // NameOriginal is the original name of this resource. + // Note that for bundled content resources with language code in the filename, this will + // be the name with the language code. + // For internal use (for now). + NameOriginal() string +} + type ResourceParamsProvider interface { // Params set in front matter for this resource. Params() maps.Params @@ -146,6 +168,17 @@ type Identifier interface { Key() string } +// WeightProvider provides a weight. +type WeightProvider interface { + Weight() int +} + +// Weight0Provider provides a weight that's considered before the WeightProvider in sorting. +// This allows the weight set on a given term to win. +type Weight0Provider interface { + Weight0() int +} + // ContentResource represents a Resource that provides a way to get to its content. // Most Resource types in Hugo implements this interface, including Page. type ContentResource interface { @@ -166,10 +199,6 @@ type ContentProvider interface { Content(context.Context) (any, error) } -// OpenReadSeekCloser allows setting some other way (than reading from a filesystem) -// to open or create a ReadSeekCloser. 
-type OpenReadSeekCloser func() (hugio.ReadSeekCloser, error) - // ReadSeekCloserResource is a Resource that supports loading its content. type ReadSeekCloserResource interface { MediaType() media.Type @@ -192,6 +221,41 @@ type TranslationKeyProvider interface { TranslationKey() string } +// Staler controls stale state of a Resource. A stale resource should be discarded. +type Staler interface { + StaleMarker + StaleInfo +} + +// StaleMarker marks a Resource as stale. +type StaleMarker interface { + MarkStale() +} + +// StaleInfo tells if a resource is marked as stale. +type StaleInfo interface { + IsStale() bool +} + +// IsStaleAny reports whether any of the os is marked as stale. +func IsStaleAny(os ...any) bool { + for _, o := range os { + if s, ok := o.(StaleInfo); ok && s.IsStale() { + return true + } + } + return false +} + +// MarkStale will mark any of the oses as stale, if possible. +func MarkStale(os ...any) { + for _, o := range os { + if s, ok := o.(Staler); ok { + s.MarkStale() + } + } +} + // UnmarshableResource represents a Resource that can be unmarshaled to some other format. 
type UnmarshableResource interface { ReadSeekCloserResource diff --git a/resources/resource_cache.go b/resources/resource_cache.go index 388e293e8..a76a51b1c 100644 --- a/resources/resource_cache.go +++ b/resources/resource_cache.go @@ -14,182 +14,69 @@ package resources import ( + "context" "encoding/json" "io" "path" "path/filepath" - "regexp" "strings" "sync" - "github.com/gohugoio/hugo/helpers" - - hglob "github.com/gohugoio/hugo/hugofs/glob" - "github.com/gohugoio/hugo/resources/resource" + "github.com/gohugoio/hugo/cache/dynacache" "github.com/gohugoio/hugo/cache/filecache" - - "github.com/BurntSushi/locker" ) -const ( - CACHE_CLEAR_ALL = "clear_all" - CACHE_OTHER = "other" -) +func newResourceCache(rs *Spec, memCache *dynacache.Cache) *ResourceCache { + return &ResourceCache{ + fileCache: rs.FileCaches.AssetsCache(), + cacheResource: dynacache.GetOrCreatePartition[string, resource.Resource]( + memCache, + "/res1", + dynacache.OptionsPartition{ClearWhen: dynacache.ClearOnChange, Weight: 40}, + ), + cacheResources: dynacache.GetOrCreatePartition[string, resource.Resources]( + memCache, + "/ress", + dynacache.OptionsPartition{ClearWhen: dynacache.ClearOnChange, Weight: 40}, + ), + cacheResourceTransformation: dynacache.GetOrCreatePartition[string, *resourceAdapterInner]( + memCache, + "/res1/tra", + dynacache.OptionsPartition{ClearWhen: dynacache.ClearOnChange, Weight: 40}, + ), + } +} type ResourceCache struct { sync.RWMutex - // Either resource.Resource or resource.Resources. - cache map[string]any + cacheResource *dynacache.Partition[string, resource.Resource] + cacheResources *dynacache.Partition[string, resource.Resources] + cacheResourceTransformation *dynacache.Partition[string, *resourceAdapterInner] fileCache *filecache.Cache - - // Provides named resource locks. - nlocker *locker.Locker -} - -// ResourceCacheKey converts the filename into the format used in the resource -// cache. 
-func ResourceCacheKey(filename string) string { - filename = filepath.ToSlash(filename) - return path.Join(resourceKeyPartition(filename), filename) -} - -func resourceKeyPartition(filename string) string { - ext := strings.TrimPrefix(path.Ext(filepath.ToSlash(filename)), ".") - if ext == "" { - ext = CACHE_OTHER - } - return ext -} - -// Commonly used aliases and directory names used for some types. -var extAliasKeywords = map[string][]string{ - "sass": {"scss"}, - "scss": {"sass"}, -} - -// ResourceKeyPartitions resolves a ordered slice of partitions that is -// used to do resource cache invalidations. -// -// We use the first directory path element and the extension, so: -// -// a/b.json => "a", "json" -// b.json => "json" -// -// For some of the extensions we will also map to closely related types, -// e.g. "scss" will also return "sass". -func ResourceKeyPartitions(filename string) []string { - var partitions []string - filename = hglob.NormalizePath(filename) - dir, name := path.Split(filename) - ext := strings.TrimPrefix(path.Ext(filepath.ToSlash(name)), ".") - - if dir != "" { - partitions = append(partitions, strings.Split(dir, "/")[0]) - } - - if ext != "" { - partitions = append(partitions, ext) - } - - if aliases, found := extAliasKeywords[ext]; found { - partitions = append(partitions, aliases...) - } - - if len(partitions) == 0 { - partitions = []string{CACHE_OTHER} - } - - return helpers.UniqueStringsSorted(partitions) -} - -// ResourceKeyContainsAny returns whether the key is a member of any of the -// given partitions. -// -// This is used for resource cache invalidation. 
-func ResourceKeyContainsAny(key string, partitions []string) bool { - parts := strings.Split(key, "/") - for _, p1 := range partitions { - for _, p2 := range parts { - if p1 == p2 { - return true - } - } - } - return false -} - -func (c *ResourceCache) clear() { - c.Lock() - defer c.Unlock() - - c.cache = make(map[string]any) - c.nlocker = locker.NewLocker() -} - -func (c *ResourceCache) Contains(key string) bool { - key = c.cleanKey(filepath.ToSlash(key)) - _, found := c.get(key) - return found } func (c *ResourceCache) cleanKey(key string) string { - return strings.TrimPrefix(path.Clean(strings.ToLower(key)), "/") + return strings.TrimPrefix(path.Clean(strings.ToLower(filepath.ToSlash(key))), "/") } -func (c *ResourceCache) get(key string) (any, bool) { - c.RLock() - defer c.RUnlock() - r, found := c.cache[key] - return r, found +func (c *ResourceCache) Get(ctx context.Context, key string) (resource.Resource, bool) { + return c.cacheResource.Get(ctx, key) } func (c *ResourceCache) GetOrCreate(key string, f func() (resource.Resource, error)) (resource.Resource, error) { - r, err := c.getOrCreate(key, func() (any, error) { return f() }) - if r == nil || err != nil { - return nil, err - } - return r.(resource.Resource), nil + return c.cacheResource.GetOrCreate(key, func(key string) (resource.Resource, error) { + return f() + }) } func (c *ResourceCache) GetOrCreateResources(key string, f func() (resource.Resources, error)) (resource.Resources, error) { - r, err := c.getOrCreate(key, func() (any, error) { return f() }) - if r == nil || err != nil { - return nil, err - } - return r.(resource.Resources), nil -} - -func (c *ResourceCache) getOrCreate(key string, f func() (any, error)) (any, error) { - key = c.cleanKey(key) - // First check in-memory cache. - r, found := c.get(key) - if found { - return r, nil - } - // This is a potentially long running operation, so get a named lock. - c.nlocker.Lock(key) - - // Double check in-memory cache. 
- r, found = c.get(key) - if found { - c.nlocker.Unlock(key) - return r, nil - } - - defer c.nlocker.Unlock(key) - - r, err := f() - if err != nil { - return nil, err - } - - c.set(key, r) - - return r, nil + return c.cacheResources.GetOrCreate(key, func(key string) (resource.Resources, error) { + return f() + }) } func (c *ResourceCache) getFilenames(key string) (string, string) { @@ -242,64 +129,3 @@ func (c *ResourceCache) writeMeta(key string, meta transformedResourceMetadata) return fi, fc, err } - -func (c *ResourceCache) set(key string, r any) { - c.Lock() - defer c.Unlock() - c.cache[key] = r -} - -func (c *ResourceCache) DeletePartitions(partitions ...string) { - partitionsSet := map[string]bool{ - // Always clear out the resources not matching any partition. - "other": true, - } - for _, p := range partitions { - partitionsSet[p] = true - } - - if partitionsSet[CACHE_CLEAR_ALL] { - c.clear() - return - } - - c.Lock() - defer c.Unlock() - - for k := range c.cache { - clear := false - for p := range partitionsSet { - if strings.Contains(k, p) { - // There will be some false positive, but that's fine. - clear = true - break - } - } - - if clear { - delete(c.cache, k) - } - } -} - -func (c *ResourceCache) DeleteMatchesRe(re *regexp.Regexp) { - c.Lock() - defer c.Unlock() - - for k := range c.cache { - if re.MatchString(k) { - delete(c.cache, k) - } - } -} - -func (c *ResourceCache) DeleteMatches(match func(string) bool) { - c.Lock() - defer c.Unlock() - - for k := range c.cache { - if match(k) { - delete(c.cache, k) - } - } -} diff --git a/resources/resource_cache_test.go b/resources/resource_cache_test.go deleted file mode 100644 index bcb241025..000000000 --- a/resources/resource_cache_test.go +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright 2019 The Hugo Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package resources - -import ( - "path/filepath" - "testing" - - qt "github.com/frankban/quicktest" -) - -func TestResourceKeyPartitions(t *testing.T) { - c := qt.New(t) - - for _, test := range []struct { - input string - expected []string - }{ - {"a.js", []string{"js"}}, - {"a.scss", []string{"sass", "scss"}}, - {"a.sass", []string{"sass", "scss"}}, - {"d/a.js", []string{"d", "js"}}, - {"js/a.js", []string{"js"}}, - {"D/a.JS", []string{"d", "js"}}, - {"d/a", []string{"d"}}, - {filepath.FromSlash("/d/a.js"), []string{"d", "js"}}, - {filepath.FromSlash("/d/e/a.js"), []string{"d", "js"}}, - } { - c.Assert(ResourceKeyPartitions(test.input), qt.DeepEquals, test.expected, qt.Commentf(test.input)) - } -} - -func TestResourceKeyContainsAny(t *testing.T) { - c := qt.New(t) - - for _, test := range []struct { - key string - filename string - expected bool - }{ - {"styles/css", "asdf.css", true}, - {"styles/css", "styles/asdf.scss", true}, - {"js/foo.bar", "asdf.css", false}, - } { - c.Assert(ResourceKeyContainsAny(test.key, ResourceKeyPartitions(test.filename)), qt.Equals, test.expected) - } -} diff --git a/resources/resource_factories/bundler/bundler.go b/resources/resource_factories/bundler/bundler.go index 67f1f90fa..c255da601 100644 --- a/resources/resource_factories/bundler/bundler.go +++ b/resources/resource_factories/bundler/bundler.go @@ -18,7 +18,6 @@ import ( "fmt" "io" "path" - "path/filepath" "github.com/gohugoio/hugo/common/hugio" "github.com/gohugoio/hugo/media" @@ -81,8 +80,8 @@ func (r *multiReadSeekCloser) Close() error { // Concat 
concatenates the list of Resource objects. func (c *Client) Concat(targetPath string, r resource.Resources) (resource.Resource, error) { - // The CACHE_OTHER will make sure this will be re-created and published on rebuilds. - return c.rs.ResourceCache.GetOrCreate(path.Join(resources.CACHE_OTHER, targetPath), func() (resource.Resource, error) { + targetPath = path.Clean(targetPath) + return c.rs.ResourceCache.GetOrCreate(targetPath, func() (resource.Resource, error) { var resolvedm media.Type // The given set of resources must be of the same Media Type. @@ -132,12 +131,11 @@ func (c *Client) Concat(targetPath string, r resource.Resources) (resource.Resou return newMultiReadSeekCloser(rcsources...), nil } - composite, err := c.rs.New( + composite, err := c.rs.NewResource( resources.ResourceSourceDescriptor{ - Fs: c.rs.FileCaches.AssetsCache().Fs, LazyPublish: true, OpenReadSeekCloser: concatr, - RelTargetFilename: filepath.Clean(targetPath), + TargetPath: targetPath, }) if err != nil { return nil, err diff --git a/resources/resource_factories/create/create.go b/resources/resource_factories/create/create.go index 2e4721299..e98eb7425 100644 --- a/resources/resource_factories/create/create.go +++ b/resources/resource_factories/create/create.go @@ -17,15 +17,19 @@ package create import ( "net/http" + "os" "path" "path/filepath" "strings" "time" + "github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/hugofs/glob" + "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/hugofs" + "github.com/gohugoio/hugo/cache/dynacache" "github.com/gohugoio/hugo/cache/filecache" "github.com/gohugoio/hugo/common/hugio" "github.com/gohugoio/hugo/resources" @@ -53,19 +57,44 @@ func New(rs *resources.Spec) *Client { // Copy copies r to the new targetPath. 
func (c *Client) Copy(r resource.Resource, targetPath string) (resource.Resource, error) { - return c.rs.ResourceCache.GetOrCreate(resources.ResourceCacheKey(targetPath), func() (resource.Resource, error) { + key := dynacache.CleanKey(targetPath) + return c.rs.ResourceCache.GetOrCreate(key, func() (resource.Resource, error) { return resources.Copy(r, targetPath), nil }) } -// Get creates a new Resource by opening the given filename in the assets filesystem. -func (c *Client) Get(filename string) (resource.Resource, error) { - filename = filepath.Clean(filename) - return c.rs.ResourceCache.GetOrCreate(resources.ResourceCacheKey(filename), func() (resource.Resource, error) { - return c.rs.New(resources.ResourceSourceDescriptor{ - Fs: c.rs.BaseFs.Assets.Fs, - LazyPublish: true, - SourceFilename: filename, +func (c *Client) newDependencyManager() identity.Manager { + if c.rs.Cfg.Running() { + return identity.NewManager("resources") + } + return identity.NopManager +} + +// Get creates a new Resource by opening the given pathname in the assets filesystem. +func (c *Client) Get(pathname string) (resource.Resource, error) { + pathname = path.Clean(pathname) + key := dynacache.CleanKey(pathname) + + return c.rs.ResourceCache.GetOrCreate(key, func() (resource.Resource, error) { + // The resource file will not be read before it gets used (e.g. in .Content), + // so we need to check that the file exists here. + filename := filepath.FromSlash(pathname) + if _, err := c.rs.BaseFs.Assets.Fs.Stat(filename); err != nil { + if os.IsNotExist(err) { + return nil, nil + } + // A real error. 
+ return nil, err + } + + return c.rs.NewResource(resources.ResourceSourceDescriptor{ + LazyPublish: true, + OpenReadSeekCloser: func() (hugio.ReadSeekCloser, error) { + return c.rs.BaseFs.Assets.Fs.Open(filename) + }, + GroupIdentity: identity.StringIdentity(key), + DependencyManager: c.newDependencyManager(), + TargetPath: pathname, }) }) } @@ -95,9 +124,6 @@ func (c *Client) GetMatch(pattern string) (resource.Resource, error) { func (c *Client) match(name, pattern string, matchFunc func(r resource.Resource) bool, firstOnly bool) (resource.Resources, error) { pattern = glob.NormalizePath(pattern) partitions := glob.FilterGlobParts(strings.Split(pattern, "/")) - if len(partitions) == 0 { - partitions = []string{resources.CACHE_OTHER} - } key := path.Join(name, path.Join(partitions...)) key = path.Join(key, pattern) @@ -106,13 +132,13 @@ func (c *Client) match(name, pattern string, matchFunc func(r resource.Resource) handle := func(info hugofs.FileMetaInfo) (bool, error) { meta := info.Meta() - r, err := c.rs.New(resources.ResourceSourceDescriptor{ + r, err := c.rs.NewResource(resources.ResourceSourceDescriptor{ LazyPublish: true, - FileInfo: info, OpenReadSeekCloser: func() (hugio.ReadSeekCloser, error) { return meta.Open() }, - RelTargetFilename: meta.Path, + GroupIdentity: meta.PathInfo, + TargetPath: meta.PathInfo.PathNoLang(), }) if err != nil { return true, err @@ -138,15 +164,19 @@ func (c *Client) match(name, pattern string, matchFunc func(r resource.Resource) // FromString creates a new Resource from a string with the given relative target path. // TODO(bep) see #10912; we currently emit a warning for this config scenario. 
func (c *Client) FromString(targetPath, content string) (resource.Resource, error) { - return c.rs.ResourceCache.GetOrCreate(path.Join(resources.CACHE_OTHER, targetPath), func() (resource.Resource, error) { - return c.rs.New( + targetPath = path.Clean(targetPath) + key := dynacache.CleanKey(targetPath) + helpers.MD5String(content) + r, err := c.rs.ResourceCache.GetOrCreate(key, func() (resource.Resource, error) { + return c.rs.NewResource( resources.ResourceSourceDescriptor{ - Fs: c.rs.FileCaches.AssetsCache().Fs, - LazyPublish: true, + LazyPublish: true, + GroupIdentity: identity.Anonymous, // All usage of this resource are tracked via its string content. OpenReadSeekCloser: func() (hugio.ReadSeekCloser, error) { return hugio.NewReadSeekerNoOpCloserFromString(content), nil }, - RelTargetFilename: filepath.Clean(targetPath), + TargetPath: targetPath, }) }) + + return r, err } diff --git a/resources/resource_factories/create/integration_test.go b/resources/resource_factories/create/integration_test.go index 140c5d091..61bc17adb 100644 --- a/resources/resource_factories/create/integration_test.go +++ b/resources/resource_factories/create/integration_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -25,7 +25,6 @@ import ( ) func TestGetRemoteHead(t *testing.T) { - files := ` -- config.toml -- [security] @@ -60,7 +59,6 @@ func TestGetRemoteHead(t *testing.T) { "Head Content: .", "Head Data: map[ContentLength:18210 ContentType:image/png Status:200 OK StatusCode:200 TransferEncoding:[]]", ) - } func TestGetRemoteRetry(t *testing.T) { @@ -133,14 +131,11 @@ mediaTypes = ['text/plain'] TxtarString: files, }, ).BuildE() - // This is hard to get stable on GitHub Actions, it sometimes succeeds due to timing issues. 
if err != nil { b.AssertLogContains("Got Err") b.AssertLogContains("Retry timeout") b.AssertLogContains("ContentLength:0") } - }) - } diff --git a/resources/resource_factories/create/remote.go b/resources/resource_factories/create/remote.go index d1fd2481d..c2d17e7a5 100644 --- a/resources/resource_factories/create/remote.go +++ b/resources/resource_factories/create/remote.go @@ -24,7 +24,6 @@ import ( "net/http/httputil" "net/url" "path" - "path/filepath" "strings" "time" @@ -253,15 +252,16 @@ func (c *Client) FromRemote(uri string, optionsm map[string]any) (resource.Resou resourceID = filename[:len(filename)-len(path.Ext(filename))] + "_" + resourceID + mediaType.FirstSuffix.FullSuffix data := responseToData(res, false) - return c.rs.New( + return c.rs.NewResource( resources.ResourceSourceDescriptor{ - MediaType: mediaType, - Data: data, - LazyPublish: true, + MediaType: mediaType, + Data: data, + GroupIdentity: identity.StringIdentity(resourceID), + LazyPublish: true, OpenReadSeekCloser: func() (hugio.ReadSeekCloser, error) { return hugio.NewReadSeekerNoOpCloser(bytes.NewReader(body)), nil }, - RelTargetFilename: filepath.Clean(resourceID), + TargetPath: resourceID, }) } diff --git a/resources/resource_metadata.go b/resources/resource_metadata.go index 8954a5109..869fc11bf 100644 --- a/resources/resource_metadata.go +++ b/resources/resource_metadata.go @@ -28,111 +28,161 @@ import ( ) var ( - _ metaAssigner = (*genericResource)(nil) - _ metaAssigner = (*imageResource)(nil) - _ metaAssignerProvider = (*resourceAdapter)(nil) + _ mediaTypeAssigner = (*genericResource)(nil) + _ mediaTypeAssigner = (*imageResource)(nil) + _ resource.Staler = (*genericResource)(nil) + _ resource.NameOriginalProvider = (*genericResource)(nil) ) -type metaAssignerProvider interface { - getMetaAssigner() metaAssigner -} - // metaAssigner allows updating metadata in resources that supports it. 
type metaAssigner interface { setTitle(title string) setName(name string) - setMediaType(mediaType media.Type) updateParams(params map[string]any) } +// metaAssigner allows updating the media type in resources that supports it. +type mediaTypeAssigner interface { + setMediaType(mediaType media.Type) +} + const counterPlaceHolder = ":counter" +var _ metaAssigner = (*metaResource)(nil) + +// metaResource is a resource with metadata that can be updated. +type metaResource struct { + changed bool + title string + name string + params maps.Params +} + +func (r *metaResource) Name() string { + return r.name +} + +func (r *metaResource) Title() string { + return r.title +} + +func (r *metaResource) Params() maps.Params { + return r.params +} + +func (r *metaResource) setTitle(title string) { + r.title = title + r.changed = true +} + +func (r *metaResource) setName(name string) { + r.name = name + r.changed = true +} + +func (r *metaResource) updateParams(params map[string]any) { + if r.params == nil { + r.params = make(map[string]interface{}) + } + for k, v := range params { + r.params[k] = v + } + r.changed = true +} + +func CloneWithMetadataIfNeeded(m []map[string]any, r resource.Resource) resource.Resource { + wmp, ok := r.(resource.WithResourceMetaProvider) + if !ok { + return r + } + + wrapped := &metaResource{ + name: r.Name(), + title: r.Title(), + params: r.Params(), + } + + assignMetadata(m, wrapped) + if !wrapped.changed { + return r + } + + return wmp.WithResourceMeta(wrapped) +} + // AssignMetadata assigns the given metadata to those resources that supports updates // and matching by wildcard given in `src` using `filepath.Match` with lower cased values. // This assignment is additive, but the most specific match needs to be first. // The `name` and `title` metadata field support shell-matched collection it got a match in. 
// See https://golang.org/pkg/path/#Match -func AssignMetadata(metadata []map[string]any, resources ...resource.Resource) error { +func assignMetadata(metadata []map[string]any, ma *metaResource) error { counters := make(map[string]int) - for _, r := range resources { - var ma metaAssigner - mp, ok := r.(metaAssignerProvider) - if ok { - ma = mp.getMetaAssigner() - } else { - ma, ok = r.(metaAssigner) - if !ok { - continue - } + var ( + nameSet, titleSet bool + nameCounter, titleCounter = 0, 0 + nameCounterFound, titleCounterFound bool + resourceSrcKey = strings.ToLower(ma.Name()) + ) + + for _, meta := range metadata { + src, found := meta["src"] + if !found { + return fmt.Errorf("missing 'src' in metadata for resource") } - var ( - nameSet, titleSet bool - nameCounter, titleCounter = 0, 0 - nameCounterFound, titleCounterFound bool - resourceSrcKey = strings.ToLower(r.Name()) - ) - - for _, meta := range metadata { - src, found := meta["src"] - if !found { - return fmt.Errorf("missing 'src' in metadata for resource") - } + srcKey := strings.ToLower(cast.ToString(src)) - srcKey := strings.ToLower(cast.ToString(src)) + glob, err := glob.GetGlob(srcKey) + if err != nil { + return fmt.Errorf("failed to match resource with metadata: %w", err) + } - glob, err := glob.GetGlob(srcKey) - if err != nil { - return fmt.Errorf("failed to match resource with metadata: %w", err) - } + match := glob.Match(resourceSrcKey) - match := glob.Match(resourceSrcKey) - - if match { - if !nameSet { - name, found := meta["name"] - if found { - name := cast.ToString(name) - if !nameCounterFound { - nameCounterFound = strings.Contains(name, counterPlaceHolder) - } - if nameCounterFound && nameCounter == 0 { - counterKey := "name_" + srcKey - nameCounter = counters[counterKey] + 1 - counters[counterKey] = nameCounter - } - - ma.setName(replaceResourcePlaceholders(name, nameCounter)) - nameSet = true + if match { + if !nameSet { + name, found := meta["name"] + if found { + name := 
cast.ToString(name) + if !nameCounterFound { + nameCounterFound = strings.Contains(name, counterPlaceHolder) } - } - - if !titleSet { - title, found := meta["title"] - if found { - title := cast.ToString(title) - if !titleCounterFound { - titleCounterFound = strings.Contains(title, counterPlaceHolder) - } - if titleCounterFound && titleCounter == 0 { - counterKey := "title_" + srcKey - titleCounter = counters[counterKey] + 1 - counters[counterKey] = titleCounter - } - ma.setTitle((replaceResourcePlaceholders(title, titleCounter))) - titleSet = true + if nameCounterFound && nameCounter == 0 { + counterKey := "name_" + srcKey + nameCounter = counters[counterKey] + 1 + counters[counterKey] = nameCounter } + + ma.setName(replaceResourcePlaceholders(name, nameCounter)) + nameSet = true } + } - params, found := meta["params"] + if !titleSet { + title, found := meta["title"] if found { - m := maps.ToStringMap(params) - // Needed for case insensitive fetching of params values - maps.PrepareParams(m) - ma.updateParams(m) + title := cast.ToString(title) + if !titleCounterFound { + titleCounterFound = strings.Contains(title, counterPlaceHolder) + } + if titleCounterFound && titleCounter == 0 { + counterKey := "title_" + srcKey + titleCounter = counters[counterKey] + 1 + counters[counterKey] = titleCounter + } + ma.setTitle((replaceResourcePlaceholders(title, titleCounter))) + titleSet = true } } + + params, found := meta["params"] + if found { + m := maps.ToStringMap(params) + // Needed for case insensitive fetching of params values + maps.PrepareParams(m) + ma.updateParams(m) + } } } diff --git a/resources/resource_spec.go b/resources/resource_spec.go index 3e1b53205..66f56d147 100644 --- a/resources/resource_spec.go +++ b/resources/resource_spec.go @@ -14,54 +14,44 @@ package resources import ( - "errors" - "fmt" - "mime" - "os" "path" - "path/filepath" - "strings" "sync" - "github.com/BurntSushi/locker" "github.com/gohugoio/hugo/config" 
"github.com/gohugoio/hugo/config/allconfig" "github.com/gohugoio/hugo/output" + "github.com/gohugoio/hugo/resources/internal" "github.com/gohugoio/hugo/resources/jsconfig" "github.com/gohugoio/hugo/common/herrors" "github.com/gohugoio/hugo/common/hexec" "github.com/gohugoio/hugo/common/loggers" + "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/helpers" - "github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/resources/postpub" + "github.com/gohugoio/hugo/cache/dynacache" "github.com/gohugoio/hugo/cache/filecache" "github.com/gohugoio/hugo/media" "github.com/gohugoio/hugo/resources/images" "github.com/gohugoio/hugo/resources/page" "github.com/gohugoio/hugo/resources/resource" "github.com/gohugoio/hugo/tpl" - "github.com/spf13/afero" ) func NewSpec( s *helpers.PathSpec, common *SpecCommon, // may be nil - imageCache *ImageCache, // may be nil + fileCaches filecache.Caches, + memCache *dynacache.Cache, incr identity.Incrementer, logger loggers.Logger, errorHandler herrors.ErrorSender, - execHelper *hexec.Exec) (*Spec, error) { - - fileCaches, err := filecache.NewCaches(s) - if err != nil { - return nil, fmt.Errorf("failed to create file caches from configuration: %w", err) - } - + execHelper *hexec.Exec, +) (*Spec, error) { conf := s.Cfg.GetConfig().(*allconfig.Config) imgConfig := conf.Imaging @@ -91,37 +81,28 @@ func NewSpec( PostProcessResources: make(map[string]postpub.PostPublishedResource), JSConfigBuilder: jsconfig.NewBuilder(), }, - ResourceCache: &ResourceCache{ - fileCache: fileCaches.AssetsCache(), - cache: make(map[string]any), - nlocker: locker.NewLocker(), - }, } } - if imageCache == nil { - imageCache = newImageCache( - fileCaches.ImageCache(), - s, - ) - } else { - imageCache = imageCache.WithPathSpec(s) - - } - rs := &Spec{ PathSpec: s, Logger: logger, ErrorSender: errorHandler, imaging: imaging, - ImageCache: imageCache, - ExecHelper: execHelper, + ImageCache: newImageCache( + 
fileCaches.ImageCache(), + memCache, + s, + ), + ExecHelper: execHelper, Permalinks: permalinks, SpecCommon: common, } + rs.ResourceCache = newResourceCache(rs, memCache) + return rs, nil } @@ -162,221 +143,65 @@ type PostBuildAssets struct { JSConfigBuilder *jsconfig.Builder } -func (r *Spec) New(fd ResourceSourceDescriptor) (resource.Resource, error) { - return r.newResourceFor(fd) -} - -func (r *Spec) MediaTypes() media.Types { - return r.Cfg.GetConfigSection("mediaTypes").(media.Types) -} - -func (r *Spec) OutputFormats() output.Formats { - return r.Cfg.GetConfigSection("outputFormats").(output.Formats) -} - -func (r *Spec) BuildConfig() config.BuildConfig { - return r.Cfg.GetConfigSection("build").(config.BuildConfig) -} - -func (r *Spec) CacheStats() string { - r.ImageCache.mu.RLock() - defer r.ImageCache.mu.RUnlock() - - s := fmt.Sprintf("Cache entries: %d", len(r.ImageCache.store)) - - count := 0 - for k := range r.ImageCache.store { - if count > 5 { - break - } - s += "\n" + k - count++ - } - - return s -} - -func (r *Spec) ClearCaches() { - r.ImageCache.clear() - r.ResourceCache.clear() -} - -func (r *Spec) DeleteBySubstring(s string) { - r.ImageCache.deleteIfContains(s) -} - -func (s *Spec) String() string { - return "spec" -} - -// TODO(bep) clean up below -func (r *Spec) newGenericResource(sourceFs afero.Fs, - targetPathBuilder func() page.TargetPaths, - osFileInfo os.FileInfo, - sourceFilename, - baseFilename string, - mediaType media.Type) *genericResource { - return r.newGenericResourceWithBase( - sourceFs, - nil, - nil, - targetPathBuilder, - osFileInfo, - sourceFilename, - baseFilename, - mediaType, - nil, - ) -} - -func (r *Spec) newGenericResourceWithBase( - sourceFs afero.Fs, - openReadSeekerCloser resource.OpenReadSeekCloser, - targetPathBaseDirs []string, - targetPathBuilder func() page.TargetPaths, - osFileInfo os.FileInfo, - sourceFilename, - baseFilename string, - mediaType media.Type, - data map[string]any, -) *genericResource { - if 
osFileInfo != nil && osFileInfo.IsDir() { - panic(fmt.Sprintf("dirs not supported resource types: %v", osFileInfo)) - } - - // This value is used both to construct URLs and file paths, but start - // with a Unix-styled path. - baseFilename = helpers.ToSlashTrimLeading(baseFilename) - fpath, fname := path.Split(baseFilename) - - resourceType := mediaType.MainType - - pathDescriptor := &resourcePathDescriptor{ - baseTargetPathDirs: helpers.UniqueStringsReuse(targetPathBaseDirs), - targetPathBuilder: targetPathBuilder, - relTargetDirFile: dirFile{dir: fpath, file: fname}, - } - - var fim hugofs.FileMetaInfo - if osFileInfo != nil { - fim = osFileInfo.(hugofs.FileMetaInfo) - } - - gfi := &resourceFileInfo{ - fi: fim, - openReadSeekerCloser: openReadSeekerCloser, - sourceFs: sourceFs, - sourceFilename: sourceFilename, - h: &resourceHash{}, - } - - g := &genericResource{ - resourceFileInfo: gfi, - resourcePathDescriptor: pathDescriptor, - mediaType: mediaType, - resourceType: resourceType, - spec: r, - params: make(map[string]any), - name: baseFilename, - title: baseFilename, - resourceContent: &resourceContent{}, - data: data, +// NewResource creates a new Resource from the given ResourceSourceDescriptor. 
+func (r *Spec) NewResource(rd ResourceSourceDescriptor) (resource.Resource, error) { + if err := rd.init(r); err != nil { + return nil, err } - return g -} - -func (r *Spec) newResource(sourceFs afero.Fs, fd ResourceSourceDescriptor) (resource.Resource, error) { - fi := fd.FileInfo - var sourceFilename string - - if fd.OpenReadSeekCloser != nil { - } else if fd.SourceFilename != "" { - var err error - fi, err = sourceFs.Stat(fd.SourceFilename) - if err != nil { - if herrors.IsNotExist(err) { - return nil, nil - } - return nil, err - } - sourceFilename = fd.SourceFilename - } else { - sourceFilename = fd.SourceFile.Filename() + dir, name := path.Split(rd.TargetPath) + dir = paths.ToSlashPreserveLeading(dir) + if dir == "/" { + dir = "" } - - if fd.RelTargetFilename == "" { - fd.RelTargetFilename = sourceFilename + rp := internal.ResourcePaths{ + File: name, + Dir: dir, + BaseDirTarget: rd.BasePathTargetPath, + BaseDirLink: rd.BasePathRelPermalink, + TargetBasePaths: rd.TargetBasePaths, } - mimeType := fd.MediaType - if mimeType.IsZero() { - ext := strings.ToLower(filepath.Ext(fd.RelTargetFilename)) - var ( - found bool - suffixInfo media.SuffixInfo - ) - mimeType, suffixInfo, found = r.MediaTypes().GetFirstBySuffix(strings.TrimPrefix(ext, ".")) - // TODO(bep) we need to handle these ambiguous types better, but in this context - // we most likely want the application/xml type. - if suffixInfo.Suffix == "xml" && mimeType.SubType == "rss" { - mimeType, found = r.MediaTypes().GetByType("application/xml") - } - - if !found { - // A fallback. Note that mime.TypeByExtension is slow by Hugo standards, - // so we should configure media types to avoid this lookup for most - // situations. 
- mimeStr := mime.TypeByExtension(ext) - if mimeStr != "" { - mimeType, _ = media.FromStringAndExt(mimeStr, ext) - } - } + gr := &genericResource{ + Staler: &AtomicStaler{}, + h: &resourceHash{}, + paths: rp, + spec: r, + sd: rd, + params: make(map[string]any), + name: rd.Name, + title: rd.Name, + resourceContent: &resourceContent{}, } - gr := r.newGenericResourceWithBase( - sourceFs, - fd.OpenReadSeekCloser, - fd.TargetBasePaths, - fd.TargetPaths, - fi, - sourceFilename, - fd.RelTargetFilename, - mimeType, - fd.Data) - - if mimeType.MainType == "image" { - imgFormat, ok := images.ImageFormatFromMediaSubType(mimeType.SubType) + if rd.MediaType.MainType == "image" { + imgFormat, ok := images.ImageFormatFromMediaSubType(rd.MediaType.SubType) if ok { ir := &imageResource{ Image: images.NewImage(imgFormat, r.imaging, nil, gr), baseResource: gr, } ir.root = ir - return newResourceAdapter(gr.spec, fd.LazyPublish, ir), nil + return newResourceAdapter(gr.spec, rd.LazyPublish, ir), nil } + } - return newResourceAdapter(gr.spec, fd.LazyPublish, gr), nil + return newResourceAdapter(gr.spec, rd.LazyPublish, gr), nil } -func (r *Spec) newResourceFor(fd ResourceSourceDescriptor) (resource.Resource, error) { - if fd.OpenReadSeekCloser == nil { - if fd.SourceFile != nil && fd.SourceFilename != "" { - return nil, errors.New("both SourceFile and AbsSourceFilename provided") - } else if fd.SourceFile == nil && fd.SourceFilename == "" { - return nil, errors.New("either SourceFile or AbsSourceFilename must be provided") - } - } +func (r *Spec) MediaTypes() media.Types { + return r.Cfg.GetConfigSection("mediaTypes").(media.Types) +} - if fd.RelTargetFilename == "" { - fd.RelTargetFilename = fd.Filename() - } +func (r *Spec) OutputFormats() output.Formats { + return r.Cfg.GetConfigSection("outputFormats").(output.Formats) +} - if len(fd.TargetBasePaths) == 0 { - // If not set, we publish the same resource to all hosts. 
- fd.TargetBasePaths = r.MultihostTargetBasePaths - } +func (r *Spec) BuildConfig() config.BuildConfig { + return r.Cfg.GetConfigSection("build").(config.BuildConfig) +} - return r.newResource(fd.Fs, fd) +func (s *Spec) String() string { + return "spec" } diff --git a/resources/resource_spec_test.go b/resources/resource_spec_test.go new file mode 100644 index 000000000..67fe09992 --- /dev/null +++ b/resources/resource_spec_test.go @@ -0,0 +1,48 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package resources_test + +import ( + "testing" + + qt "github.com/frankban/quicktest" + "github.com/gohugoio/hugo/common/hugio" + "github.com/gohugoio/hugo/identity" + "github.com/gohugoio/hugo/resources" +) + +func TestNewResource(t *testing.T) { + c := qt.New(t) + + spec := newTestResourceSpec(specDescriptor{c: c}) + + open := hugio.NewOpenReadSeekCloser(hugio.NewReadSeekerNoOpCloserFromString("content")) + + rd := resources.ResourceSourceDescriptor{ + OpenReadSeekCloser: open, + TargetPath: "a/b.txt", + BasePathRelPermalink: "c/d", + BasePathTargetPath: "e/f", + GroupIdentity: identity.Anonymous, + } + + r, err := spec.NewResource(rd) + c.Assert(err, qt.IsNil) + c.Assert(r, qt.Not(qt.IsNil)) + c.Assert(r.RelPermalink(), qt.Equals, "/c/d/a/b.txt") + + info := resources.GetTestInfoForResource(r) + c.Assert(info.Paths.TargetLink(), qt.Equals, "/c/d/a/b.txt") + c.Assert(info.Paths.TargetPath(), qt.Equals, "/e/f/a/b.txt") +} diff --git a/resources/resource_test.go b/resources/resource_test.go deleted file mode 100644 index d6065c248..000000000 --- a/resources/resource_test.go +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package resources_test - -import ( - "testing" - - "github.com/gohugoio/hugo/resources" - - "github.com/gohugoio/hugo/media" - - qt "github.com/frankban/quicktest" -) - -func TestNewResourceFromFilename(t *testing.T) { - c := qt.New(t) - spec := newTestResourceSpec(specDescriptor{c: c}) - - writeSource(t, spec.Fs, "assets/a/b/logo.png", "image") - writeSource(t, spec.Fs, "assets/a/b/data.json", "json") - - r, err := spec.New(resources.ResourceSourceDescriptor{Fs: spec.BaseFs.Assets.Fs, SourceFilename: "a/b/logo.png"}) - - c.Assert(err, qt.IsNil) - c.Assert(r, qt.Not(qt.IsNil)) - c.Assert(r.ResourceType(), qt.Equals, "image") - c.Assert(r.RelPermalink(), qt.Equals, "/a/b/logo.png") - c.Assert(r.Permalink(), qt.Equals, "https://example.com/a/b/logo.png") - - r, err = spec.New(resources.ResourceSourceDescriptor{Fs: spec.BaseFs.Assets.Fs, SourceFilename: "a/b/data.json"}) - - c.Assert(err, qt.IsNil) - c.Assert(r, qt.Not(qt.IsNil)) - c.Assert(r.ResourceType(), qt.Equals, "application") -} - -var pngType, _ = media.FromStringAndExt("image/png", "png") diff --git a/resources/resource_transformers/babel/babel.go b/resources/resource_transformers/babel/babel.go index 2999d73cb..212331d8e 100644 --- a/resources/resource_transformers/babel/babel.go +++ b/resources/resource_transformers/babel/babel.go @@ -140,7 +140,7 @@ func (t *babelTransformation) Transform(ctx *resources.ResourceTransformationCtx configFile = t.rs.BaseFs.ResolveJSConfigFile(configFile) if configFile == "" && t.options.Config != "" { // Only fail if the user specified config file is not found. 
- return fmt.Errorf("babel config %q not found:", configFile) + return fmt.Errorf("babel config %q not found", configFile) } } @@ -177,7 +177,6 @@ func (t *babelTransformation) Transform(ctx *resources.ResourceTransformationCtx // ARGA [--no-install babel --config-file /private/var/folders/_g/j3j21hts4fn7__h04w2x8gb40000gn/T/hugo-test-babel812882892/babel.config.js --source-maps --filename=js/main2.js --out-file=/var/folders/_g/j3j21hts4fn7__h04w2x8gb40000gn/T/compileOut-2237820197.js] // [--no-install babel --config-file /private/var/folders/_g/j3j21hts4fn7__h04w2x8gb40000gn/T/hugo-test-babel332846848/babel.config.js --filename=js/main.js --out-file=/var/folders/_g/j3j21hts4fn7__h04w2x8gb40000gn/T/compileOut-1451390834.js 0x10304ee60 0x10304ed60 0x10304f060] cmd, err := ex.Npx(binaryName, cmdArgs...) - if err != nil { if hexec.IsNotFound(err) { // This may be on a CI server etc. Will fall back to pre-built assets. @@ -187,7 +186,6 @@ func (t *babelTransformation) Transform(ctx *resources.ResourceTransformationCtx } stdin, err := cmd.StdinPipe() - if err != nil { return err } diff --git a/resources/resource_transformers/htesting/testhelpers.go b/resources/resource_transformers/htesting/testhelpers.go index b1feccc5f..c9382b828 100644 --- a/resources/resource_transformers/htesting/testhelpers.go +++ b/resources/resource_transformers/htesting/testhelpers.go @@ -16,54 +16,25 @@ package htesting import ( "path/filepath" - "github.com/gohugoio/hugo/config" - "github.com/gohugoio/hugo/config/testconfig" - "github.com/gohugoio/hugo/helpers" - "github.com/gohugoio/hugo/hugofs" + "github.com/gohugoio/hugo/common/hugio" + "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/resources" "github.com/spf13/afero" ) -func NewTestResourceSpec() (*resources.Spec, error) { - cfg := config.New() - - imagingCfg := map[string]any{ - "resampleFilter": "linear", - "quality": 68, - "anchor": "left", - } - - cfg.Set("imaging", imagingCfg) - afs := afero.NewMemMapFs() - - conf := 
testconfig.GetTestConfig(afs, cfg) - fs := hugofs.NewFrom(hugofs.NewBaseFileDecorator(afs), conf.BaseConfig()) - s, err := helpers.NewPathSpec(fs, conf, nil) - if err != nil { - return nil, err - } - - spec, err := resources.NewSpec(s, nil, nil, nil, nil, nil, nil) - return spec, err -} - -func NewResourceTransformer(filename, content string) (resources.ResourceTransformer, error) { - spec, err := NewTestResourceSpec() - if err != nil { - return nil, err - } - return NewResourceTransformerForSpec(spec, filename, content) -} - func NewResourceTransformerForSpec(spec *resources.Spec, filename, content string) (resources.ResourceTransformer, error) { filename = filepath.FromSlash(filename) fs := spec.Fs.Source - if err := afero.WriteFile(fs, filename, []byte(content), 0777); err != nil { + if err := afero.WriteFile(fs, filename, []byte(content), 0o777); err != nil { return nil, err } - r, err := spec.New(resources.ResourceSourceDescriptor{Fs: fs, SourceFilename: filename}) + var open hugio.OpenReadSeekCloser = func() (hugio.ReadSeekCloser, error) { + return fs.Open(filename) + } + + r, err := spec.NewResource(resources.ResourceSourceDescriptor{TargetPath: filepath.FromSlash(filename), OpenReadSeekCloser: open, GroupIdentity: identity.Anonymous}) if err != nil { return nil, err } diff --git a/resources/resource_transformers/integrity/integrity.go b/resources/resource_transformers/integrity/integrity.go index 63f4f4c76..aef744443 100644 --- a/resources/resource_transformers/integrity/integrity.go +++ b/resources/resource_transformers/integrity/integrity.go @@ -23,6 +23,7 @@ import ( "hash" "io" + "github.com/gohugoio/hugo/common/constants" "github.com/gohugoio/hugo/resources/internal" "github.com/gohugoio/hugo/resources" @@ -47,7 +48,7 @@ type fingerprintTransformation struct { } func (t *fingerprintTransformation) Key() internal.ResourceTransformationKey { - return internal.NewResourceTransformationKey("fingerprint", t.algo) + return 
internal.NewResourceTransformationKey(constants.ResourceTransformationFingerprint, t.algo) } // Transform creates a MD5 hash of the Resource content and inserts that hash before diff --git a/resources/resource_transformers/integrity/integrity_test.go b/resources/resource_transformers/integrity/integrity_test.go index 27e193618..e0af68ae9 100644 --- a/resources/resource_transformers/integrity/integrity_test.go +++ b/resources/resource_transformers/integrity/integrity_test.go @@ -17,6 +17,7 @@ import ( "context" "testing" + "github.com/gohugoio/hugo/config/testconfig" "github.com/gohugoio/hugo/resources/resource" qt "github.com/frankban/quicktest" @@ -51,11 +52,12 @@ func TestHashFromAlgo(t *testing.T) { func TestTransform(t *testing.T) { c := qt.New(t) - spec, err := htesting.NewTestResourceSpec() - c.Assert(err, qt.IsNil) - client := New(spec) + d := testconfig.GetTestDeps(nil, nil) + t.Cleanup(func() { c.Assert(d.Close(), qt.IsNil) }) + + client := New(d.ResourceSpec) - r, err := htesting.NewResourceTransformerForSpec(spec, "hugo.txt", "Hugo Rocks!") + r, err := htesting.NewResourceTransformerForSpec(d.ResourceSpec, "hugo.txt", "Hugo Rocks!") c.Assert(err, qt.IsNil) transformed, err := client.Fingerprint(r, "") diff --git a/resources/resource_transformers/js/build.go b/resources/resource_transformers/js/build.go index aa802d81e..cc68d2253 100644 --- a/resources/resource_transformers/js/build.go +++ b/resources/resource_transformers/js/build.go @@ -14,6 +14,7 @@ package js import ( + "errors" "fmt" "io" "os" @@ -22,8 +23,6 @@ import ( "regexp" "strings" - "errors" - "github.com/spf13/afero" "github.com/gohugoio/hugo/hugofs" @@ -93,7 +92,7 @@ func (t *buildTransformation) Transform(ctx *resources.ResourceTransformationCtx return err } - buildOptions.Plugins, err = createBuildPlugins(t.c, opts) + buildOptions.Plugins, err = createBuildPlugins(ctx.DependencyManager, t.c, opts) if err != nil { return err } diff --git 
a/resources/resource_transformers/js/integration_test.go b/resources/resource_transformers/js/integration_test.go index 0e311107b..304c51d33 100644 --- a/resources/resource_transformers/js/integration_test.go +++ b/resources/resource_transformers/js/integration_test.go @@ -29,6 +29,7 @@ func TestBuildVariants(t *testing.T) { mainWithImport := ` -- config.toml -- disableKinds=["page", "section", "taxonomy", "term", "sitemap", "robotsTXT"] +disableLiveReload = true -- assets/js/main.js -- import { hello1, hello2 } from './util1'; hello1(); @@ -61,7 +62,7 @@ JS Content:{{ $js.Content }}:End: b := hugolib.NewIntegrationTestBuilder(hugolib.IntegrationTestConfig{T: c, Running: true, NeedsOsFS: true, TxtarString: mainWithImport}).Build() b.AssertFileContent("public/index.html", `abcd`) - b.EditFileReplace("assets/js/util1.js", func(s string) string { return strings.ReplaceAll(s, "abcd", "1234") }).Build() + b.EditFileReplaceFunc("assets/js/util1.js", func(s string) string { return strings.ReplaceAll(s, "abcd", "1234") }).Build() b.AssertFileContent("public/index.html", `1234`) }) @@ -69,7 +70,7 @@ JS Content:{{ $js.Content }}:End: b := hugolib.NewIntegrationTestBuilder(hugolib.IntegrationTestConfig{T: c, Running: true, NeedsOsFS: true, TxtarString: mainWithImport}).Build() b.AssertFileContent("public/index.html", `efgh`) - b.EditFileReplace("assets/js/util2.js", func(s string) string { return strings.ReplaceAll(s, "efgh", "1234") }).Build() + b.EditFileReplaceFunc("assets/js/util2.js", func(s string) string { return strings.ReplaceAll(s, "efgh", "1234") }).Build() b.AssertFileContent("public/index.html", `1234`) }) } @@ -257,7 +258,6 @@ JS Content:{{ $js.Content }}:End: b.Assert(err, qt.IsNotNil) b.Assert(err.Error(), qt.Contains, `util1.js:4:17": No matching export in`) }) - } // See issue 10527. 
@@ -301,7 +301,6 @@ IMPORT_SRC_DIR:imp3/foo.ts b.AssertFileContent("public/js/main.js", expected) }) } - } // See https://github.com/evanw/esbuild/issues/2745 @@ -342,7 +341,6 @@ License util2 Main license `) - } // Issue #11232 diff --git a/resources/resource_transformers/js/options.go b/resources/resource_transformers/js/options.go index e9ffbabe4..df32e7012 100644 --- a/resources/resource_transformers/js/options.go +++ b/resources/resource_transformers/js/options.go @@ -21,11 +21,12 @@ import ( "strings" "github.com/gohugoio/hugo/common/maps" + "github.com/gohugoio/hugo/common/paths" + "github.com/gohugoio/hugo/identity" "github.com/spf13/afero" "github.com/evanw/esbuild/pkg/api" - "github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/media" "github.com/mitchellh/mapstructure" @@ -113,7 +114,7 @@ func decodeOptions(m map[string]any) (Options, error) { } if opts.TargetPath != "" { - opts.TargetPath = helpers.ToSlashTrimLeading(opts.TargetPath) + opts.TargetPath = paths.ToSlashTrimLeading(opts.TargetPath) } opts.Target = strings.ToLower(opts.Target) @@ -203,7 +204,7 @@ func resolveComponentInAssets(fs afero.Fs, impPath string) *hugofs.FileMeta { return m } -func createBuildPlugins(c *Client, opts Options) ([]api.Plugin, error) { +func createBuildPlugins(depsManager identity.Manager, c *Client, opts Options) ([]api.Plugin, error) { fs := c.rs.Assets resolveImport := func(args api.OnResolveArgs) (api.OnResolveResult, error) { @@ -224,6 +225,7 @@ func createBuildPlugins(c *Client, opts Options) ([]api.Plugin, error) { // ESBuild resolve this. 
return api.OnResolveResult{}, nil } + relDir = filepath.Dir(rel) } else { relDir = opts.sourceDir @@ -238,6 +240,8 @@ func createBuildPlugins(c *Client, opts Options) ([]api.Plugin, error) { m := resolveComponentInAssets(fs.Fs, impPath) if m != nil { + depsManager.AddIdentity(m.PathInfo) + // Store the source root so we can create a jsconfig.json // to help IntelliSense when the build is done. // This should be a small number of elements, and when diff --git a/resources/resource_transformers/js/options_test.go b/resources/resource_transformers/js/options_test.go index a76a24caa..b8b031b81 100644 --- a/resources/resource_transformers/js/options_test.go +++ b/resources/resource_transformers/js/options_test.go @@ -14,10 +14,15 @@ package js import ( + "path" "path/filepath" "testing" + "github.com/gohugoio/hugo/config" + "github.com/gohugoio/hugo/config/testconfig" "github.com/gohugoio/hugo/hugofs" + "github.com/gohugoio/hugo/hugolib/filesystems" + "github.com/gohugoio/hugo/hugolib/paths" "github.com/gohugoio/hugo/media" "github.com/spf13/afero" @@ -164,20 +169,27 @@ func TestResolveComponentInAssets(t *testing.T) { mfs := afero.NewMemMapFs() for _, filename := range test.files { - c.Assert(afero.WriteFile(mfs, filepath.Join(baseDir, filename), []byte("let foo='bar';"), 0777), qt.IsNil) + c.Assert(afero.WriteFile(mfs, filepath.Join(baseDir, filename), []byte("let foo='bar';"), 0o777), qt.IsNil) } - bfs := hugofs.DecorateBasePathFs(afero.NewBasePathFs(mfs, baseDir).(*afero.BasePathFs)) + conf := testconfig.GetTestConfig(mfs, config.New()) + fs := hugofs.NewFrom(mfs, conf.BaseConfig()) - got := resolveComponentInAssets(bfs, test.impPath) + p, err := paths.New(fs, conf) + c.Assert(err, qt.IsNil) + bfs, err := filesystems.NewBase(p, nil) + c.Assert(err, qt.IsNil) + + got := resolveComponentInAssets(bfs.Assets.Fs, test.impPath) gotPath := "" + expect := test.expect if got != nil { - gotPath = filepath.ToSlash(got.Path) + gotPath = filepath.ToSlash(got.Filename) + expect = 
path.Join(baseDir, test.expect) } - c.Assert(gotPath, qt.Equals, test.expect) + c.Assert(gotPath, qt.Equals, expect) }) - } } diff --git a/resources/resource_transformers/minifier/minify_test.go b/resources/resource_transformers/minifier/minify_test.go index b2d8ed734..030abf426 100644 --- a/resources/resource_transformers/minifier/minify_test.go +++ b/resources/resource_transformers/minifier/minify_test.go @@ -17,6 +17,7 @@ import ( "context" "testing" + "github.com/gohugoio/hugo/config/testconfig" "github.com/gohugoio/hugo/resources/resource" qt "github.com/frankban/quicktest" @@ -26,11 +27,11 @@ import ( func TestTransform(t *testing.T) { c := qt.New(t) - spec, err := htesting.NewTestResourceSpec() - c.Assert(err, qt.IsNil) - client, _ := New(spec) + d := testconfig.GetTestDeps(nil, nil) + t.Cleanup(func() { c.Assert(d.Close(), qt.IsNil) }) - r, err := htesting.NewResourceTransformerForSpec(spec, "hugo.html", "<h1> Hugo Rocks! </h1>") + client, _ := New(d.ResourceSpec) + r, err := htesting.NewResourceTransformerForSpec(d.ResourceSpec, "hugo.html", "<h1> Hugo Rocks! </h1>") c.Assert(err, qt.IsNil) transformed, err := client.Minify(r) diff --git a/resources/resource_transformers/postcss/integration_test.go b/resources/resource_transformers/postcss/integration_test.go index 74aaa2661..957e69403 100644 --- a/resources/resource_transformers/postcss/integration_test.go +++ b/resources/resource_transformers/postcss/integration_test.go @@ -139,7 +139,6 @@ Styles Content: Len: 770917| b.AssertLogContains("Hugo PublishDir: " + filepath.Join(tempDir, "public")) } } - } // 9880 @@ -149,7 +148,7 @@ func TestTransformPostCSSError(t *testing.T) { } if runtime.GOOS == "windows" { - //TODO(bep) This has started to fail on Windows with Go 1.19 on GitHub Actions for some mysterious reason. + // TODO(bep) This has started to fail on Windows with Go 1.19 on GitHub Actions for some mysterious reason. 
t.Skip("Skip on Windows") } @@ -165,7 +164,6 @@ func TestTransformPostCSSError(t *testing.T) { s.AssertIsFileError(err) c.Assert(err.Error(), qt.Contains, "a.css:4:2") - } func TestTransformPostCSSNotInstalledError(t *testing.T) { @@ -184,7 +182,6 @@ func TestTransformPostCSSNotInstalledError(t *testing.T) { s.AssertIsFileError(err) c.Assert(err.Error(), qt.Contains, `binary with name "npx" not found`) - } // #9895 @@ -206,8 +203,7 @@ func TestTransformPostCSSImportError(t *testing.T) { s.AssertIsFileError(err) c.Assert(err.Error(), qt.Contains, "styles.css:4:3") - c.Assert(err.Error(), qt.Contains, filepath.FromSlash(`failed to resolve CSS @import "css/components/doesnotexist.css"`)) - + c.Assert(err.Error(), qt.Contains, filepath.FromSlash(`failed to resolve CSS @import "/css/components/doesnotexist.css"`)) } func TestTransformPostCSSImporSkipInlineImportsNotFound(t *testing.T) { @@ -230,7 +226,6 @@ func TestTransformPostCSSImporSkipInlineImportsNotFound(t *testing.T) { }).Build() s.AssertFileContent("public/css/styles.css", `@import "components/doesnotexist.css";`) - } // Issue 9787 @@ -267,5 +262,4 @@ Styles Content: Len: 770917 `) } - } diff --git a/resources/resource_transformers/postcss/postcss.go b/resources/resource_transformers/postcss/postcss.go index a65fa3783..9015e120d 100644 --- a/resources/resource_transformers/postcss/postcss.go +++ b/resources/resource_transformers/postcss/postcss.go @@ -1,4 +1,4 @@ -// Copyright 2018 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -17,6 +17,7 @@ import ( "bytes" "crypto/sha256" "encoding/hex" + "errors" "fmt" "io" "path" @@ -30,6 +31,7 @@ import ( "github.com/gohugoio/hugo/common/loggers" "github.com/gohugoio/hugo/common/text" "github.com/gohugoio/hugo/hugofs" + "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/common/hugo" @@ -37,8 +39,6 @@ import ( "github.com/spf13/afero" "github.com/spf13/cast" - "errors" - "github.com/mitchellh/mapstructure" "github.com/gohugoio/hugo/common/herrors" @@ -86,7 +86,6 @@ func (c *Client) Process(res resources.ResourceTransformer, options map[string]a // Some of the options from https://github.com/postcss/postcss-cli type Options struct { - // Set a custom path to look for a config file. Config string @@ -151,7 +150,7 @@ func (t *postcssTransformation) Transform(ctx *resources.ResourceTransformationC const binaryName = "postcss" infol := t.rs.Logger.InfoCommand(binaryName) - infoW := loggers.LevelLoggerToWriter(infol) + infow := loggers.LevelLoggerToWriter(infol) ex := t.rs.ExecHelper @@ -179,7 +178,7 @@ func (t *postcssTransformation) Transform(ctx *resources.ResourceTransformationC configFile = t.rs.BaseFs.ResolveJSConfigFile(configFile) if configFile == "" && options.Config != "" { // Only fail if the user specified config file is not found. 
- return fmt.Errorf("postcss config %q not found:", options.Config) + return fmt.Errorf("postcss config %q not found", options.Config) } } @@ -196,7 +195,7 @@ func (t *postcssTransformation) Transform(ctx *resources.ResourceTransformationC var errBuf bytes.Buffer - stderr := io.MultiWriter(infoW, &errBuf) + stderr := io.MultiWriter(infow, &errBuf) cmdArgs = append(cmdArgs, hexec.WithStderr(stderr)) cmdArgs = append(cmdArgs, hexec.WithStdout(ctx.To)) cmdArgs = append(cmdArgs, hexec.WithEnviron(hugo.GetExecEnviron(t.rs.Cfg.BaseConfig().WorkingDir, t.rs.Cfg, t.rs.BaseFs.Assets.Fs))) @@ -221,7 +220,7 @@ func (t *postcssTransformation) Transform(ctx *resources.ResourceTransformationC ctx.From, ctx.InPath, options, - t.rs.Assets.Fs, t.rs.Logger, + t.rs.Assets.Fs, t.rs.Logger, ctx.DependencyManager, ) if options.InlineImports { @@ -260,17 +259,19 @@ type importResolver struct { inPath string opts Options - contentSeen map[string]bool - linemap map[int]fileOffset - fs afero.Fs - logger loggers.Logger + contentSeen map[string]bool + dependencyManager identity.Manager + linemap map[int]fileOffset + fs afero.Fs + logger loggers.Logger } -func newImportResolver(r io.Reader, inPath string, opts Options, fs afero.Fs, logger loggers.Logger) *importResolver { +func newImportResolver(r io.Reader, inPath string, opts Options, fs afero.Fs, logger loggers.Logger, dependencyManager identity.Manager) *importResolver { return &importResolver{ - r: r, - inPath: inPath, - fs: fs, logger: logger, + r: r, + dependencyManager: dependencyManager, + inPath: inPath, + fs: fs, logger: logger, linemap: make(map[int]fileOffset), contentSeen: make(map[string]bool), opts: opts, } @@ -289,7 +290,8 @@ func (imp *importResolver) contentHash(filename string) ([]byte, string) { func (imp *importResolver) importRecursive( lineNum int, content string, - inPath string) (int, string, error) { + inPath string, +) (int, string, error) { basePath := path.Dir(inPath) var replacements []string @@ -312,6 +314,7 @@ 
func (imp *importResolver) importRecursive( } else { path := strings.Trim(strings.TrimPrefix(line, importIdentifier), " \"';") filename := filepath.Join(basePath, path) + imp.dependencyManager.AddIdentity(identity.CleanStringIdentity(filename)) importContent, hash := imp.contentHash(filename) if importContent == nil { @@ -364,8 +367,6 @@ func (imp *importResolver) importRecursive( } func (imp *importResolver) resolve() (io.Reader, error) { - const importIdentifier = "@import" - content, err := io.ReadAll(imp.r) if err != nil { return nil, err @@ -438,6 +439,5 @@ func (imp *importResolver) toFileError(output string) error { pos.LineNumber = file.Offset + 1 return ferr.UpdatePosition(pos).UpdateContent(f, nil) - //return herrors.NewFileErrorFromFile(inErr, file.Filename, realFilename, hugofs.Os, herrors.SimpleLineMatcher) - + // return herrors.NewFileErrorFromFile(inErr, file.Filename, realFilename, hugofs.Os, herrors.SimpleLineMatcher) } diff --git a/resources/resource_transformers/postcss/postcss_test.go b/resources/resource_transformers/postcss/postcss_test.go index dd0695cd1..1edaaaaf5 100644 --- a/resources/resource_transformers/postcss/postcss_test.go +++ b/resources/resource_transformers/postcss/postcss_test.go @@ -20,6 +20,7 @@ import ( "github.com/gohugoio/hugo/common/loggers" "github.com/gohugoio/hugo/htesting/hqt" + "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/helpers" @@ -71,7 +72,7 @@ func TestImportResolver(t *testing.T) { fs := afero.NewMemMapFs() writeFile := func(name, content string) { - c.Assert(afero.WriteFile(fs, name, []byte(content), 0777), qt.IsNil) + c.Assert(afero.WriteFile(fs, name, []byte(content), 0o777), qt.IsNil) } writeFile("a.css", `@import "b.css"; @@ -96,6 +97,7 @@ LOCAL_STYLE "styles.css", Options{}, fs, loggers.NewDefault(), + identity.NopManager, ) r, err := imp.resolve() @@ -123,7 +125,7 @@ func BenchmarkImportResolver(b *testing.B) { fs := afero.NewMemMapFs() writeFile := func(name, content string) { - 
c.Assert(afero.WriteFile(fs, name, []byte(content), 0777), qt.IsNil) + c.Assert(afero.WriteFile(fs, name, []byte(content), 0o777), qt.IsNil) } writeFile("a.css", `@import "b.css"; @@ -153,6 +155,7 @@ LOCAL_STYLE "styles.css", Options{}, fs, logger, + identity.NopManager, ) b.StartTimer() diff --git a/resources/resource_transformers/templates/execute_as_template.go b/resources/resource_transformers/templates/execute_as_template.go index efe3e4c57..79d249bd6 100644 --- a/resources/resource_transformers/templates/execute_as_template.go +++ b/resources/resource_transformers/templates/execute_as_template.go @@ -18,6 +18,7 @@ import ( "context" "fmt" + "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/resources" "github.com/gohugoio/hugo/resources/internal" @@ -68,7 +69,7 @@ func (t *executeAsTemplateTransform) Transform(ctx *resources.ResourceTransforma func (c *Client) ExecuteAsTemplate(ctx context.Context, res resources.ResourceTransformer, targetPath string, data any) (resource.Resource, error) { return res.TransformWithContext(ctx, &executeAsTemplateTransform{ rs: c.rs, - targetPath: helpers.ToSlashTrimLeading(targetPath), + targetPath: paths.ToSlashTrimLeading(targetPath), t: c.t, data: data, }) diff --git a/resources/resource_transformers/tocss/dartsass/client.go b/resources/resource_transformers/tocss/dartsass/client.go index 929900ca8..4b8ca97eb 100644 --- a/resources/resource_transformers/tocss/dartsass/client.go +++ b/resources/resource_transformers/tocss/dartsass/client.go @@ -25,6 +25,7 @@ import ( "github.com/bep/logg" "github.com/gohugoio/hugo/common/herrors" "github.com/gohugoio/hugo/common/hugo" + "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/hugolib/filesystems" @@ -78,7 +79,6 @@ func New(fs *filesystems.SourceFilesystem, rs *resources.Spec) (*Client, error) } }, }) - } else { transpilerv1, err = 
godartsassv1.Start(godartsassv1.Options{ DartSassEmbeddedFilename: hugo.DartSassBinaryName, @@ -153,11 +153,11 @@ func (c *Client) toCSS(args godartsass.Args, src io.Reader) (godartsass.Result, } } else { res, err = c.transpiler.Execute(args) - } if err != nil { if err.Error() == "unexpected EOF" { + //lint:ignore ST1005 end user message. return res, fmt.Errorf("got unexpected EOF when executing %q. The user running hugo must have read and execute permissions on this program. With execute permissions only, this error is thrown.", hugo.DartSassBinaryName) } return res, herrors.NewFileErrorFromFileInErr(err, hugofs.Os, herrors.OffsetMatcher) @@ -167,7 +167,6 @@ func (c *Client) toCSS(args godartsass.Args, src io.Reader) (godartsass.Result, } type Options struct { - // Hugo, will by default, just replace the extension of the source // to .css, e.g. "scss/main.scss" becomes "scss/main.css". You can // control this by setting this, e.g. "styles/main.css" will create @@ -204,7 +203,7 @@ func decodeOptions(m map[string]any) (opts Options, err error) { err = mapstructure.WeakDecode(m, &opts) if opts.TargetPath != "" { - opts.TargetPath = helpers.ToSlashTrimLeading(opts.TargetPath) + opts.TargetPath = paths.ToSlashTrimLeading(opts.TargetPath) } return diff --git a/resources/resource_transformers/tocss/dartsass/transform.go b/resources/resource_transformers/tocss/dartsass/transform.go index 32855e1c5..73eca6a53 100644 --- a/resources/resource_transformers/tocss/dartsass/transform.go +++ b/resources/resource_transformers/tocss/dartsass/transform.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -23,6 +23,7 @@ import ( "github.com/gohugoio/hugo/common/hugo" "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/htesting" + "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/media" "github.com/gohugoio/hugo/resources" @@ -80,8 +81,9 @@ func (t *transform) Transform(ctx *resources.ResourceTransformationCtx) error { URL: filename, IncludePaths: t.c.sfs.RealDirs(baseDir), ImportResolver: importResolver{ - baseDir: baseDir, - c: t.c, + baseDir: baseDir, + c: t.c, + dependencyManager: ctx.DependencyManager, varsStylesheet: godartsass.Import{Content: sass.CreateVarsStyleSheet(opts.Vars)}, }, @@ -126,10 +128,10 @@ func (t *transform) Transform(ctx *resources.ResourceTransformationCtx) error { } type importResolver struct { - baseDir string - c *Client - - varsStylesheet godartsass.Import + baseDir string + c *Client + dependencyManager identity.Manager + varsStylesheet godartsass.Import } func (t importResolver) CanonicalizeURL(url string) (string, error) { @@ -172,6 +174,7 @@ func (t importResolver) CanonicalizeURL(url string) (string, error) { fi, err := t.c.sfs.Fs.Stat(filenameToCheck) if err == nil { if fim, ok := fi.(hugofs.FileMetaInfo); ok { + t.dependencyManager.AddIdentity(identity.CleanStringIdentity(filenameToCheck)) return "file://" + filepath.ToSlash(fim.Meta().Filename), nil } } @@ -196,7 +199,6 @@ func (t importResolver) Load(url string) (godartsass.Import, error) { } return godartsass.Import{Content: string(b), SourceSyntax: sourceSyntax}, err - } type importResolverV1 struct { diff --git a/resources/resource_transformers/tocss/internal/sass/helpers.go b/resources/resource_transformers/tocss/internal/sass/helpers.go index acd6d86d5..c1cef141e 100644 --- a/resources/resource_transformers/tocss/internal/sass/helpers.go +++ b/resources/resource_transformers/tocss/internal/sass/helpers.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. 
// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -56,7 +56,6 @@ func CreateVarsStyleSheet(vars map[string]any) string { sort.Strings(varsSlice) varsStylesheet = strings.Join(varsSlice, "\n") return varsStylesheet - } var ( diff --git a/resources/resource_transformers/tocss/internal/sass/helpers_test.go b/resources/resource_transformers/tocss/internal/sass/helpers_test.go index 56e73736e..ef31fdd8f 100644 --- a/resources/resource_transformers/tocss/internal/sass/helpers_test.go +++ b/resources/resource_transformers/tocss/internal/sass/helpers_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -40,5 +40,4 @@ func TestIsUnquotedCSSValue(t *testing.T) { } { c.Assert(isTypedCSSValue(test.in), qt.Equals, test.out) } - } diff --git a/resources/resource_transformers/tocss/scss/client.go b/resources/resource_transformers/tocss/scss/client.go index 2028163ff..aead6279b 100644 --- a/resources/resource_transformers/tocss/scss/client.go +++ b/resources/resource_transformers/tocss/scss/client.go @@ -16,7 +16,7 @@ package scss import ( "regexp" - "github.com/gohugoio/hugo/helpers" + "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/hugolib/filesystems" "github.com/gohugoio/hugo/resources" "github.com/spf13/afero" @@ -37,7 +37,6 @@ func New(fs *filesystems.SourceFilesystem, rs *resources.Spec) (*Client, error) } type Options struct { - // Hugo, will by default, just replace the extension of the source // to .css, e.g. "scss/main.scss" becomes "scss/main.css". You can // control this by setting this, e.g. 
"styles/main.css" will create @@ -73,7 +72,7 @@ func DecodeOptions(m map[string]any) (opts Options, err error) { err = mapstructure.WeakDecode(m, &opts) if opts.TargetPath != "" { - opts.TargetPath = helpers.ToSlashTrimLeading(opts.TargetPath) + opts.TargetPath = paths.ToSlashTrimLeading(opts.TargetPath) } return diff --git a/resources/resource_transformers/tocss/scss/tocss.go b/resources/resource_transformers/tocss/scss/tocss.go index 1018ea02e..a4c4e6d8e 100644 --- a/resources/resource_transformers/tocss/scss/tocss.go +++ b/resources/resource_transformers/tocss/scss/tocss.go @@ -20,7 +20,6 @@ import ( "fmt" "io" "path" - "path/filepath" "strings" @@ -29,6 +28,7 @@ import ( "github.com/gohugoio/hugo/common/herrors" "github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/hugofs" + "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/media" "github.com/gohugoio/hugo/resources" "github.com/gohugoio/hugo/resources/resource_transformers/tocss/internal/sass" @@ -115,6 +115,7 @@ func (t *toCSSTransformation) Transform(ctx *resources.ResourceTransformationCtx fi, err := t.c.sfs.Fs.Stat(filenameToCheck) if err == nil { if fim, ok := fi.(hugofs.FileMetaInfo); ok { + ctx.DependencyManager.AddIdentity(identity.CleanStringIdentity(filenameToCheck)) return fim.Meta().Filename, "", true } } diff --git a/resources/testhelpers_test.go b/resources/testhelpers_test.go index 1de2f54f6..028524619 100644 --- a/resources/testhelpers_test.go +++ b/resources/testhelpers_test.go @@ -2,23 +2,21 @@ package resources_test import ( "image" - "io" "os" "path/filepath" "runtime" "strings" - "testing" + "github.com/gohugoio/hugo/common/hugio" "github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/config/testconfig" "github.com/gohugoio/hugo/deps" + "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/resources" qt "github.com/frankban/quicktest" - "github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/resources/images" - 
"github.com/gohugoio/hugo/resources/page" "github.com/gohugoio/hugo/resources/resource" "github.com/spf13/afero" ) @@ -44,7 +42,7 @@ func newTestResourceSpec(desc specDescriptor) *resources.Spec { panic("osFs not supported for this test") } - if err := afs.MkdirAll("assets", 0755); err != nil { + if err := afs.MkdirAll("assets", 0o755); err != nil { panic(err) } @@ -64,16 +62,13 @@ func newTestResourceSpec(desc specDescriptor) *resources.Spec { func(d *deps.Deps) { d.Fs.PublishDir = hugofs.NewCreateCountingFs(d.Fs.PublishDir) }, ) - return d.ResourceSpec -} - -func newTargetPaths(link string) func() page.TargetPaths { - return func() page.TargetPaths { - return page.TargetPaths{ - SubResourceBaseTarget: filepath.FromSlash(link), - SubResourceBaseLink: link, + desc.c.Cleanup(func() { + if err := d.Close(); err != nil { + panic(err) } - } + }) + + return d.ResourceSpec } func newTestResourceOsFs(c *qt.C) (*resources.Spec, string) { @@ -92,7 +87,7 @@ func newTestResourceOsFs(c *qt.C) (*resources.Spec, string) { cfg.Set("workingDir", workDir) - os.MkdirAll(filepath.Join(workDir, "assets"), 0755) + os.MkdirAll(filepath.Join(workDir, "assets"), 0o755) d := testconfig.GetTestDeps(hugofs.Os, cfg) @@ -116,22 +111,16 @@ func fetchImageForSpec(spec *resources.Spec, c *qt.C, name string) images.ImageR } func fetchResourceForSpec(spec *resources.Spec, c *qt.C, name string, targetPathAddends ...string) resource.ContentResource { - src, err := os.Open(filepath.FromSlash("testdata/" + name)) - c.Assert(err, qt.IsNil) - if len(targetPathAddends) > 0 { - addends := strings.Join(targetPathAddends, "_") - name = addends + "_" + name - } - out, err := helpers.OpenFileForWriting(spec.Fs.WorkingDirWritable, filepath.Join(filepath.Join("assets", name))) - c.Assert(err, qt.IsNil) - _, err = io.Copy(out, src) - out.Close() - src.Close() + b, err := os.ReadFile(filepath.FromSlash("testdata/" + name)) c.Assert(err, qt.IsNil) - - factory := newTargetPaths("/a") - - r, err := 
spec.New(resources.ResourceSourceDescriptor{Fs: spec.BaseFs.Assets.Fs, TargetPaths: factory, LazyPublish: true, RelTargetFilename: name, SourceFilename: name}) + open := hugio.NewOpenReadSeekCloser(hugio.NewReadSeekerNoOpCloserFromBytes(b)) + targetPath := name + base := "/a/" + r, err := spec.NewResource(resources.ResourceSourceDescriptor{ + LazyPublish: true, + Name: name, TargetPath: targetPath, BasePathRelPermalink: base, BasePathTargetPath: base, OpenReadSeekCloser: open, + GroupIdentity: identity.Anonymous, + }) c.Assert(err, qt.IsNil) c.Assert(r, qt.Not(qt.IsNil)) @@ -150,17 +139,3 @@ func assertImageFile(c *qt.C, fs afero.Fs, filename string, width, height int) { c.Assert(config.Width, qt.Equals, width) c.Assert(config.Height, qt.Equals, height) } - -func assertFileCache(c *qt.C, fs afero.Fs, filename string, width, height int) { - assertImageFile(c, fs, filepath.Clean(filename), width, height) -} - -func writeSource(t testing.TB, fs *hugofs.Fs, filename, content string) { - writeToFs(t, fs.Source, filename, content) -} - -func writeToFs(t testing.TB, fs afero.Fs, filename, content string) { - if err := afero.WriteFile(fs, filepath.FromSlash(filename), []byte(content), 0755); err != nil { - t.Fatalf("Failed to write file: %s", err) - } -} diff --git a/resources/transform.go b/resources/transform.go index 0c38345ad..408decbb8 100644 --- a/resources/transform.go +++ b/resources/transform.go @@ -23,7 +23,9 @@ import ( "strings" "sync" + "github.com/gohugoio/hugo/common/constants" "github.com/gohugoio/hugo/common/paths" + "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/resources/images" "github.com/gohugoio/hugo/resources/images/exif" @@ -42,13 +44,18 @@ import ( ) var ( - _ resource.ContentResource = (*resourceAdapter)(nil) - _ resourceCopier = (*resourceAdapter)(nil) - _ resource.ReadSeekCloserResource = (*resourceAdapter)(nil) - _ resource.Resource = (*resourceAdapter)(nil) - _ resource.Source = (*resourceAdapter)(nil) - _ resource.Identifier = 
(*resourceAdapter)(nil) - _ resource.ResourceMetaProvider = (*resourceAdapter)(nil) + _ resource.ContentResource = (*resourceAdapter)(nil) + _ resourceCopier = (*resourceAdapter)(nil) + _ resource.ReadSeekCloserResource = (*resourceAdapter)(nil) + _ resource.Resource = (*resourceAdapter)(nil) + _ resource.Staler = (*resourceAdapterInner)(nil) + _ resource.Source = (*resourceAdapter)(nil) + _ resource.Identifier = (*resourceAdapter)(nil) + _ resource.ResourceNameTitleProvider = (*resourceAdapter)(nil) + _ resource.WithResourceMetaProvider = (*resourceAdapter)(nil) + _ identity.DependencyManagerProvider = (*resourceAdapter)(nil) + _ identity.IdentityGroupProvider = (*resourceAdapter)(nil) + _ resource.NameOriginalProvider = (*resourceAdapter)(nil) ) // These are transformations that need special support in Hugo that may not @@ -68,11 +75,13 @@ func newResourceAdapter(spec *Spec, lazyPublish bool, target transformableResour } return &resourceAdapter{ resourceTransformations: &resourceTransformations{}, + metaProvider: target, resourceAdapterInner: &resourceAdapterInner{ - ctx: context.TODO(), + ctx: context.Background(), spec: spec, publishOnce: po, target: target, + Staler: &AtomicStaler{}, }, } } @@ -88,6 +97,9 @@ type ResourceTransformationCtx struct { // The context that started the transformation. Ctx context.Context + // The dependency manager to use for dependency tracking. + DependencyManager identity.Manager + // The content to transform. 
From io.Reader @@ -162,8 +174,11 @@ type resourceAdapter struct { commonResource *resourceTransformations *resourceAdapterInner + metaProvider resource.ResourceMetaProvider } +var _ identity.ForEeachIdentityByNameProvider = (*resourceAdapter)(nil) + func (r *resourceAdapter) Content(ctx context.Context) (any, error) { r.init(false, true) if r.transformationsErr != nil { @@ -176,16 +191,41 @@ func (r *resourceAdapter) Err() resource.ResourceError { return nil } +func (r *resourceAdapter) GetIdentity() identity.Identity { + return identity.FirstIdentity(r.target) +} + func (r *resourceAdapter) Data() any { r.init(false, false) return r.target.Data() } +func (r *resourceAdapter) ForEeachIdentityByName(name string, f func(identity.Identity) bool) { + if constants.IsFieldRelOrPermalink(name) && !r.resourceTransformations.hasTransformationPermalinkHash() { + // Special case for links without any content hash in the URL. + // We don't need to rebuild all pages that use this resource, + // but we want to make sure that the resource is accessed at least once. 
+ f(identity.NewFindFirstManagerIdentityProvider(r.target.GetDependencyManager(), r.target.GetIdentityGroup())) + return + } + f(r.target.GetIdentityGroup()) + f(r.target.GetDependencyManager()) +} + +func (r *resourceAdapter) GetIdentityGroup() identity.Identity { + return r.target.GetIdentityGroup() +} + +func (r *resourceAdapter) GetDependencyManager() identity.Manager { + return r.target.GetDependencyManager() +} + func (r resourceAdapter) cloneTo(targetPath string) resource.Resource { newtTarget := r.target.cloneTo(targetPath) newInner := &resourceAdapterInner{ ctx: r.ctx, spec: r.spec, + Staler: r.Staler, target: newtTarget.(transformableResource), } if r.resourceAdapterInner.publishOnce != nil { @@ -239,12 +279,17 @@ func (r *resourceAdapter) MediaType() media.Type { func (r *resourceAdapter) Name() string { r.init(false, false) - return r.target.Name() + return r.metaProvider.Name() +} + +func (r *resourceAdapter) NameOriginal() string { + r.init(false, false) + return r.target.(resource.NameOriginalProvider).NameOriginal() } func (r *resourceAdapter) Params() maps.Params { r.init(false, false) - return r.target.Params() + return r.metaProvider.Params() } func (r *resourceAdapter) Permalink() string { @@ -283,7 +328,7 @@ func (r *resourceAdapter) String() string { func (r *resourceAdapter) Title() string { r.init(false, false) - return r.target.Title() + return r.metaProvider.Title() } func (r resourceAdapter) Transform(t ...ResourceTransformation) (ResourceTransformer, error) { @@ -298,6 +343,7 @@ func (r resourceAdapter) TransformWithContext(ctx context.Context, t ...Resource r.resourceAdapterInner = &resourceAdapterInner{ ctx: ctx, spec: r.spec, + Staler: r.Staler, publishOnce: &publishOnce{}, target: r.target, } @@ -313,6 +359,11 @@ func (r *resourceAdapter) DecodeImage() (image.Image, error) { return r.getImageOps().DecodeImage() } +func (r resourceAdapter) WithResourceMeta(mp resource.ResourceMetaProvider) resource.Resource { + r.metaProvider = mp + 
return &r +} + func (r *resourceAdapter) getImageOps() images.ImageResourceOps { img, ok := r.target.(images.ImageResourceOps) if !ok { @@ -326,14 +377,6 @@ func (r *resourceAdapter) getImageOps() images.ImageResourceOps { return img } -func (r *resourceAdapter) getMetaAssigner() metaAssigner { - return r.target -} - -func (r *resourceAdapter) getSpec() *Spec { - return r.spec -} - func (r *resourceAdapter) publish() { if r.publishOnce == nil { return @@ -349,41 +392,28 @@ func (r *resourceAdapter) publish() { } func (r *resourceAdapter) TransformationKey() string { - // Files with a suffix will be stored in cache (both on disk and in memory) - // partitioned by their suffix. var key string for _, tr := range r.transformations { key = key + "_" + tr.Key().Value() } - - base := ResourceCacheKey(r.target.Key()) - return r.spec.ResourceCache.cleanKey(base) + "_" + helpers.MD5String(key) + return r.spec.ResourceCache.cleanKey(r.target.Key()) + "_" + helpers.MD5String(key) } -func (r *resourceAdapter) transform(publish, setContent bool) error { - cache := r.spec.ResourceCache - +func (r *resourceAdapter) getOrTransform(publish, setContent bool) error { key := r.TransformationKey() - - cached, found := cache.get(key) - - if found { - r.resourceAdapterInner = cached.(*resourceAdapterInner) - return nil + res, err := r.spec.ResourceCache.cacheResourceTransformation.GetOrCreate(key, func(string) (*resourceAdapterInner, error) { + return r.transform(key, publish, setContent) + }) + if err != nil { + return err } - // Acquire a write lock for the named transformation. - cache.nlocker.Lock(key) - // Check the cache again. 
- cached, found = cache.get(key) - if found { - r.resourceAdapterInner = cached.(*resourceAdapterInner) - cache.nlocker.Unlock(key) - return nil - } + r.resourceAdapterInner = res + return nil +} - defer cache.nlocker.Unlock(key) - defer cache.set(key, r.resourceAdapterInner) +func (r *resourceAdapter) transform(key string, publish, setContent bool) (*resourceAdapterInner, error) { + cache := r.spec.ResourceCache b1 := bp.GetBuffer() b2 := bp.GetBuffer() @@ -394,6 +424,7 @@ func (r *resourceAdapter) transform(publish, setContent bool) error { Ctx: r.ctx, Data: make(map[string]any), OpenResourcePublisher: r.target.openPublishFileForWriting, + DependencyManager: r.target.GetDependencyManager(), } tctx.InMediaType = r.target.MediaType() @@ -406,7 +437,7 @@ func (r *resourceAdapter) transform(publish, setContent bool) error { contentrc, err := contentReadSeekerCloser(r.target) if err != nil { - return err + return nil, err } defer contentrc.Close() @@ -479,14 +510,14 @@ func (r *resourceAdapter) transform(publish, setContent bool) error { } else { err = tr.Transform(tctx) if err != nil && err != herrors.ErrFeatureNotAvailable { - return newErr(err) + return nil, newErr(err) } if mayBeCachedOnDisk { tryFileCache = bcfg.UseResourceCache(err) } if err != nil && !tryFileCache { - return newErr(err) + return nil, newErr(err) } } @@ -494,9 +525,9 @@ func (r *resourceAdapter) transform(publish, setContent bool) error { f := r.target.tryTransformedFileCache(key, updates) if f == nil { if err != nil { - return newErr(err) + return nil, newErr(err) } - return newErr(fmt.Errorf("resource %q not found in file cache", key)) + return nil, newErr(fmt.Errorf("resource %q not found in file cache", key)) } transformedContentr = f updates.sourceFs = cache.fileCache.Fs @@ -521,7 +552,7 @@ func (r *resourceAdapter) transform(publish, setContent bool) error { if publish { publicw, err := r.target.openPublishFileForWriting(updates.targetPath) if err != nil { - return err + return nil, err } 
publishwriters = append(publishwriters, publicw) } @@ -531,7 +562,7 @@ func (r *resourceAdapter) transform(publish, setContent bool) error { // Also write it to the cache fi, metaw, err := cache.writeMeta(key, updates.toTransformedResourceMetadata()) if err != nil { - return err + return nil, err } updates.sourceFilename = &fi.Name updates.sourceFs = cache.fileCache.Fs @@ -562,7 +593,7 @@ func (r *resourceAdapter) transform(publish, setContent bool) error { publishw := hugio.NewMultiWriteCloser(publishwriters...) _, err = io.Copy(publishw, transformedContentr) if err != nil { - return err + return nil, err } publishw.Close() @@ -573,11 +604,11 @@ func (r *resourceAdapter) transform(publish, setContent bool) error { newTarget, err := r.target.cloneWithUpdates(updates) if err != nil { - return err + return nil, err } r.target = newTarget - return nil + return r.resourceAdapterInner, nil } func (r *resourceAdapter) init(publish, setContent bool) { @@ -597,7 +628,7 @@ func (r *resourceAdapter) initTransform(publish, setContent bool) { r.publishOnce = nil } - r.transformationsErr = r.transform(publish, setContent) + r.transformationsErr = r.getOrTransform(publish, setContent) if r.transformationsErr != nil { if r.spec.ErrorSender != nil { r.spec.ErrorSender.SendError(r.transformationsErr) @@ -618,24 +649,42 @@ type resourceAdapterInner struct { target transformableResource + resource.Staler + spec *Spec // Handles publishing (to /public) if needed. *publishOnce } +func (r *resourceAdapterInner) IsStale() bool { + return r.Staler.IsStale() || r.target.IsStale() +} + type resourceTransformations struct { transformationsInit sync.Once transformationsErr error transformations []ResourceTransformation } +// hasTransformationPermalinkHash reports whether any of the transformations +// in the chain creates a permalink that's based on the content, e.g. fingerprint. 
+func (r *resourceTransformations) hasTransformationPermalinkHash() bool { + for _, t := range r.transformations { + if constants.IsResourceTransformationPermalinkHash(t.Key().Name) { + return true + } + } + return false +} + type transformableResource interface { baseResourceInternal resource.ContentProvider resource.Resource resource.Identifier + resource.Staler resourceCopier } diff --git a/resources/transform_integration_test.go b/resources/transform_integration_test.go new file mode 100644 index 000000000..4404f1642 --- /dev/null +++ b/resources/transform_integration_test.go @@ -0,0 +1,50 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package resources_test + +import ( + "testing" + + "github.com/gohugoio/hugo/hugolib" +) + +func TestTransformCached(t *testing.T) { + files := ` +-- hugo.toml -- +disableKinds = ["taxonomy", "term"] +-- assets/css/main.css -- +body { + background: #fff; +} +-- content/p1.md -- +--- +title: "P1" +--- +P1. +-- content/p2.md -- +--- +title: "P2" +--- +P2. +-- layouts/_default/list.html -- +List. 
+-- layouts/_default/single.html -- +{{ $css := resources.Get "css/main.css" | resources.Minify }} +CSS: {{ $css.Content }} +` + + b := hugolib.Test(t, files) + + b.AssertFileContent("public/p1/index.html", "CSS: body{background:#fff}") +} diff --git a/resources/transform_test.go b/resources/transform_test.go index d430bfb6c..fd152a47c 100644 --- a/resources/transform_test.go +++ b/resources/transform_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -25,10 +25,12 @@ import ( "testing" "github.com/gohugoio/hugo/htesting" + "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/media" "github.com/gohugoio/hugo/resources" "github.com/gohugoio/hugo/common/herrors" + "github.com/gohugoio/hugo/common/hugio" "github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/resources/images" @@ -47,12 +49,13 @@ const gopher = `iVBORw0KGgoAAAANSUhEUgAAAEsAAAA8CAAAAAALAhhPAAAFfUlEQVRYw62XeWwU func gopherPNG() io.Reader { return base64.NewDecoder(base64.StdEncoding, strings.NewReader(gopher)) } func TestTransform(t *testing.T) { - createTransformer := func(c *qt.C, spec *resources.Spec, filename, content string) resources.Transformer { - filename = filepath.FromSlash(filename) - err := afero.WriteFile(spec.Fs.Source, filepath.Join("assets", filename), []byte(content), 0777) - c.Assert(err, qt.IsNil) - r, err := spec.New(resources.ResourceSourceDescriptor{Fs: spec.BaseFs.Assets.Fs, SourceFilename: filename}) + targetPath := identity.CleanString(filename) + r, err := spec.NewResource(resources.ResourceSourceDescriptor{ + TargetPath: targetPath, + OpenReadSeekCloser: hugio.NewOpenReadSeekCloser(hugio.NewReadSeekerNoOpCloserFromString(content)), + GroupIdentity: identity.StringIdentity(targetPath), + }) c.Assert(err, qt.IsNil) c.Assert(r, 
qt.Not(qt.IsNil), qt.Commentf(filename)) return r.(resources.Transformer) @@ -310,8 +313,10 @@ func TestTransform(t *testing.T) { r := createTransformer(c, spec, "f1.txt", "color is blue") - tr1, _ := r.Transform(t1) - tr2, _ := tr1.Transform(t2) + tr1, err := r.Transform(t1) + c.Assert(err, qt.IsNil) + tr2, err := tr1.Transform(t2) + c.Assert(err, qt.IsNil) content1, err := tr1.(resource.ContentProvider).Content(context.Background()) c.Assert(err, qt.IsNil) diff --git a/scripts/fork_go_templates/main.go b/scripts/fork_go_templates/main.go index 8e14813ec..5b9262c0a 100644 --- a/scripts/fork_go_templates/main.go +++ b/scripts/fork_go_templates/main.go @@ -168,6 +168,9 @@ func doWithGoFiles(dir string, return } must(filepath.Walk(filepath.Join(forkRoot, dir), func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + } if info.IsDir() { return nil } diff --git a/source/content_directory_test.go b/source/content_directory_test.go index 7d1630529..96ee22bc7 100644 --- a/source/content_directory_test.go +++ b/source/content_directory_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/source/fileInfo.go b/source/fileInfo.go index 60c6e6ea8..5b24bbeb2 100644 --- a/source/fileInfo.go +++ b/source/fileInfo.go @@ -14,9 +14,7 @@ package source import ( - "fmt" "path/filepath" - "strings" "sync" "time" @@ -24,8 +22,6 @@ import ( "github.com/gohugoio/hugo/common/hugo" "github.com/gohugoio/hugo/common/paths" - "github.com/gohugoio/hugo/hugofs/files" - "github.com/gohugoio/hugo/common/hugio" "github.com/gohugoio/hugo/hugofs" @@ -33,269 +29,121 @@ import ( "github.com/gohugoio/hugo/helpers" ) -// fileInfo implements the File interface. -var ( - _ File = (*FileInfo)(nil) -) - -// File represents a source file. 
-// This is a temporary construct until we resolve page.Page conflicts. -// TODO(bep) remove this construct once we have resolved page deprecations -type File interface { - fileOverlap - FileWithoutOverlap -} - -// Temporary to solve duplicate/deprecated names in page.Page -type fileOverlap interface { - // Path gets the relative path including file name and extension. - // The directory is relative to the content root. - Path() string - - // Section is first directory below the content root. - // For page bundles in root, the Section will be empty. - Section() string - - // Lang is the language code for this page. It will be the - // same as the site's language code. - Lang() string - - IsZero() bool -} - -type FileWithoutOverlap interface { - // Filename gets the full path and filename to the file. - Filename() string - - // Dir gets the name of the directory that contains this file. - // The directory is relative to the content root. - Dir() string - - // Extension is an alias to Ext(). - // Deprecated: Use Ext instead. - Extension() string - - // Ext gets the file extension, i.e "myblogpost.md" will return "md". - Ext() string - - // LogicalName is filename and extension of the file. - LogicalName() string - - // BaseFileName is a filename without extension. - BaseFileName() string - - // TranslationBaseName is a filename with no extension, - // not even the optional language extension part. - TranslationBaseName() string - - // ContentBaseName is a either TranslationBaseName or name of containing folder - // if file is a leaf bundle. - ContentBaseName() string - - // UniqueID is the MD5 hash of the file's path and is for most practical applications, - // Hugo content files being one of them, considered to be unique. - UniqueID() string - - // For internal use only. - FileInfo() hugofs.FileMetaInfo -} - -// FileInfo describes a source file. -type FileInfo struct { - // Absolute filename to the file on disk. 
- filename string - - sp *SourceSpec - - fi hugofs.FileMetaInfo - - // Derived from filename - ext string // Extension without any "." - lang string - - name string - - dir string - relDir string - relPath string - baseName string - translationBaseName string - contentBaseName string - section string - classifier files.ContentClass +// File describes a source file. +type File struct { + fim hugofs.FileMetaInfo uniqueID string - lazyInit sync.Once } // Filename returns a file's absolute path and filename on disk. -func (fi *FileInfo) Filename() string { return fi.filename } +func (fi *File) Filename() string { return fi.fim.Meta().Filename } // Path gets the relative path including file name and extension. The directory // is relative to the content root. -func (fi *FileInfo) Path() string { return fi.relPath } +func (fi *File) Path() string { return filepath.Join(fi.p().Dir()[1:], fi.p().Name()) } // Dir gets the name of the directory that contains this file. The directory is // relative to the content root. -func (fi *FileInfo) Dir() string { return fi.relDir } +func (fi *File) Dir() string { + return fi.pathToDir(fi.p().Dir()) +} // Extension is an alias to Ext(). -func (fi *FileInfo) Extension() string { +func (fi *File) Extension() string { hugo.Deprecate(".File.Extension", "Use .File.Ext instead.", "v0.96.0") return fi.Ext() } -// Ext returns a file's extension without the leading period (ie. "md"). -func (fi *FileInfo) Ext() string { return fi.ext } +// Ext returns a file's extension without the leading period (e.g. "md"). +// Deprecated: Use Extension() instead. +func (fi *File) Ext() string { return fi.p().Ext() } -// Lang returns a file's language (ie. "sv"). -func (fi *FileInfo) Lang() string { return fi.lang } +// Lang returns a file's language (e.g. "sv"). +func (fi *File) Lang() string { + return fi.fim.Meta().Lang +} -// LogicalName returns a file's name and extension (ie. "page.sv.md"). 
-func (fi *FileInfo) LogicalName() string { return fi.name } +// LogicalName returns a file's name and extension (e.g. "page.sv.md"). +func (fi *File) LogicalName() string { + return fi.p().Name() +} -// BaseFileName returns a file's name without extension (ie. "page.sv"). -func (fi *FileInfo) BaseFileName() string { return fi.baseName } +// BaseFileName returns a file's name without extension (e.g. "page.sv"). +func (fi *File) BaseFileName() string { + return fi.p().NameNoExt() +} // TranslationBaseName returns a file's translation base name without the -// language segment (ie. "page"). -func (fi *FileInfo) TranslationBaseName() string { return fi.translationBaseName } +// language segment (e.g. "page"). +func (fi *File) TranslationBaseName() string { return fi.p().NameNoIdentifier() } // ContentBaseName is a either TranslationBaseName or name of containing folder -// if file is a leaf bundle. -func (fi *FileInfo) ContentBaseName() string { - fi.init() - return fi.contentBaseName +// if file is a bundle. +func (fi *File) ContentBaseName() string { + return fi.p().BaseNameNoIdentifier() } // Section returns a file's section. -func (fi *FileInfo) Section() string { - fi.init() - return fi.section +func (fi *File) Section() string { + return fi.p().Section() } // UniqueID returns a file's unique, MD5 hash identifier. -func (fi *FileInfo) UniqueID() string { +func (fi *File) UniqueID() string { fi.init() return fi.uniqueID } // FileInfo returns a file's underlying os.FileInfo. -// For internal use only. -func (fi *FileInfo) FileInfo() hugofs.FileMetaInfo { return fi.fi } +func (fi *File) FileInfo() hugofs.FileMetaInfo { return fi.fim } -func (fi *FileInfo) String() string { return fi.BaseFileName() } +func (fi *File) String() string { return fi.BaseFileName() } // Open implements ReadableFile. 
-func (fi *FileInfo) Open() (hugio.ReadSeekCloser, error) { - f, err := fi.fi.Meta().Open() +func (fi *File) Open() (hugio.ReadSeekCloser, error) { + f, err := fi.fim.Meta().Open() return f, err } -func (fi *FileInfo) IsZero() bool { +func (fi *File) IsZero() bool { return fi == nil } // We create a lot of these FileInfo objects, but there are parts of it used only // in some cases that is slightly expensive to construct. -func (fi *FileInfo) init() { +func (fi *File) init() { fi.lazyInit.Do(func() { - relDir := strings.Trim(fi.relDir, helpers.FilePathSeparator) - parts := strings.Split(relDir, helpers.FilePathSeparator) - var section string - if (fi.classifier != files.ContentClassLeaf && len(parts) == 1) || len(parts) > 1 { - section = parts[0] - } - fi.section = section - - if fi.classifier.IsBundle() && len(parts) > 0 { - fi.contentBaseName = parts[len(parts)-1] - } else { - fi.contentBaseName = fi.translationBaseName - } - - fi.uniqueID = helpers.MD5String(filepath.ToSlash(fi.relPath)) + fi.uniqueID = helpers.MD5String(filepath.ToSlash(fi.Path())) }) } -// NewTestFile creates a partially filled File used in unit tests. 
-// TODO(bep) improve this package -func NewTestFile(filename string) *FileInfo { - base := filepath.Base(filepath.Dir(filename)) - return &FileInfo{ - filename: filename, - translationBaseName: base, +func (fi *File) pathToDir(s string) string { + if s == "" { + return s } + return filepath.FromSlash(s[1:] + "/") } -func (sp *SourceSpec) NewFileInfoFrom(path, filename string) (*FileInfo, error) { +func (fi *File) p() *paths.Path { + return fi.fim.Meta().PathInfo +} + +func NewFileInfoFrom(path, filename string) *File { meta := &hugofs.FileMeta{ Filename: filename, - Path: path, + PathInfo: paths.Parse("", filepath.ToSlash(path)), } - return sp.NewFileInfo(hugofs.NewFileMetaInfo(nil, meta)) + return NewFileInfo(hugofs.NewFileMetaInfo(nil, meta)) } -func (sp *SourceSpec) NewFileInfo(fi hugofs.FileMetaInfo) (*FileInfo, error) { - m := fi.Meta() - - filename := m.Filename - relPath := m.Path - - if relPath == "" { - return nil, fmt.Errorf("no Path provided by %v (%T)", m, m.Fs) - } - - if filename == "" { - return nil, fmt.Errorf("no Filename provided by %v (%T)", m, m.Fs) +func NewFileInfo(fi hugofs.FileMetaInfo) *File { + return &File{ + fim: fi, } - - relDir := filepath.Dir(relPath) - if relDir == "." { - relDir = "" - } - if !strings.HasSuffix(relDir, helpers.FilePathSeparator) { - relDir = relDir + helpers.FilePathSeparator - } - - lang := m.Lang - translationBaseName := m.TranslationBaseName - - dir, name := filepath.Split(relPath) - if !strings.HasSuffix(dir, helpers.FilePathSeparator) { - dir = dir + helpers.FilePathSeparator - } - - ext := strings.ToLower(strings.TrimPrefix(filepath.Ext(name), ".")) - baseName := paths.Filename(name) - - if translationBaseName == "" { - // This is usually provided by the filesystem. But this FileInfo is also - // created in a standalone context when doing "hugo new". This is - // an approximate implementation, which is "good enough" in that case. 
- fileLangExt := filepath.Ext(baseName) - translationBaseName = strings.TrimSuffix(baseName, fileLangExt) - } - - f := &FileInfo{ - sp: sp, - filename: filename, - fi: fi, - lang: lang, - ext: ext, - dir: dir, - relDir: relDir, // Dir() - relPath: relPath, // Path() - name: name, - baseName: baseName, // BaseFileName() - translationBaseName: translationBaseName, - classifier: m.Classifier, - } - - return f, nil } func NewGitInfo(info gitmap.GitInfo) GitInfo { diff --git a/source/fileInfo_test.go b/source/fileInfo_test.go deleted file mode 100644 index e2a3edd30..000000000 --- a/source/fileInfo_test.go +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright 2017-present The Hugo Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package source_test - -import ( - "path/filepath" - "strings" - "testing" - - qt "github.com/frankban/quicktest" - "github.com/gohugoio/hugo/source" -) - -func TestFileInfo(t *testing.T) { - c := qt.New(t) - - s := newTestSourceSpec() - - for _, this := range []struct { - base string - filename string - assert func(f *source.FileInfo) - }{ - {filepath.FromSlash("/a/"), filepath.FromSlash("/a/b/page.md"), func(f *source.FileInfo) { - c.Assert(f.Filename(), qt.Equals, filepath.FromSlash("/a/b/page.md")) - c.Assert(f.Dir(), qt.Equals, filepath.FromSlash("b/")) - c.Assert(f.Path(), qt.Equals, filepath.FromSlash("b/page.md")) - c.Assert(f.Section(), qt.Equals, "b") - c.Assert(f.TranslationBaseName(), qt.Equals, filepath.FromSlash("page")) - c.Assert(f.BaseFileName(), qt.Equals, filepath.FromSlash("page")) - }}, - {filepath.FromSlash("/a/"), filepath.FromSlash("/a/b/c/d/page.md"), func(f *source.FileInfo) { - c.Assert(f.Section(), qt.Equals, "b") - }}, - {filepath.FromSlash("/a/"), filepath.FromSlash("/a/b/page.en.MD"), func(f *source.FileInfo) { - c.Assert(f.Section(), qt.Equals, "b") - c.Assert(f.Path(), qt.Equals, filepath.FromSlash("b/page.en.MD")) - c.Assert(f.TranslationBaseName(), qt.Equals, filepath.FromSlash("page")) - c.Assert(f.BaseFileName(), qt.Equals, filepath.FromSlash("page.en")) - }}, - } { - path := strings.TrimPrefix(this.filename, this.base) - f, err := s.NewFileInfoFrom(path, this.filename) - c.Assert(err, qt.IsNil) - this.assert(f) - } -} diff --git a/source/filesystem.go b/source/filesystem.go index 283863dbf..208f5036f 100644 --- a/source/filesystem.go +++ b/source/filesystem.go @@ -14,66 +14,27 @@ package source import ( - "fmt" "path/filepath" - "sync" "github.com/gohugoio/hugo/hugofs" + "github.com/spf13/afero" ) // Filesystem represents a source filesystem. 
 type Filesystem struct {
-	files        []File
-	filesInit    sync.Once
-	filesInitErr error
-
 	Base string
-
-	fi hugofs.FileMetaInfo
-
+	fs afero.Fs
+	fi hugofs.FileMetaInfo
 	SourceSpec
 }
 
-// NewFilesystem returns a new filesystem for a given source spec.
+// NewFilesystem returns a new filesystem for a given source spec.
 func (sp SourceSpec) NewFilesystem(base string) *Filesystem {
-	return &Filesystem{SourceSpec: sp, Base: base}
+	return &Filesystem{SourceSpec: sp, Base: base, fs: sp.Fs.Source}
 }
 
-func (sp SourceSpec) NewFilesystemFromFileMetaInfo(fi hugofs.FileMetaInfo) *Filesystem {
-	return &Filesystem{SourceSpec: sp, fi: fi}
-}
-
-// Files returns a slice of readable files.
-func (f *Filesystem) Files() ([]File, error) {
-	f.filesInit.Do(func() {
-		err := f.captureFiles()
-		if err != nil {
-			f.filesInitErr = fmt.Errorf("capture files: %w", err)
-		}
-	})
-	return f.files, f.filesInitErr
-}
-
-// add populates a file in the Filesystem.files
-func (f *Filesystem) add(name string, fi hugofs.FileMetaInfo) (err error) {
-	var file File
-
-	file, err = f.SourceSpec.NewFileInfo(fi)
-	if err != nil {
-		return err
-	}
-
-	f.files = append(f.files, file)
-
-	return err
-}
-
-func (f *Filesystem) captureFiles() error {
-	walker := func(path string, fi hugofs.FileMetaInfo, err error) error {
-		if err != nil {
-			return err
-		}
-
+func (f *Filesystem) Walk(addFile func(*File) error) error {
+	walker := func(path string, fi hugofs.FileMetaInfo) error {
 		if fi.IsDir() {
 			return nil
 		}
@@ -87,14 +48,16 @@ func (f *Filesystem) captureFiles() error {
 		}
 
 		if b {
-			err = f.add(filename, fi)
+			if err = addFile(NewFileInfo(fi)); err != nil {
+				return err
+			}
 		}
 
 		return err
 	}
 
 	w := hugofs.NewWalkway(hugofs.WalkwayConfig{
-		Fs:     f.SourceFs,
+		Fs:     f.fs,
 		Info:   f.fi,
 		Root:   f.Base,
 		WalkFn: walker,
diff --git a/source/filesystem_test.go b/source/filesystem_test.go
index 1067d5839..9118285da 100644
--- a/source/filesystem_test.go
+++ b/source/filesystem_test.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo 
Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -14,71 +14,35 @@ package source_test import ( - "fmt" - "path/filepath" "runtime" "testing" - "github.com/spf13/afero" - qt "github.com/frankban/quicktest" - "github.com/gohugoio/hugo/config" - "github.com/gohugoio/hugo/config/testconfig" - "github.com/gohugoio/hugo/helpers" - "github.com/gohugoio/hugo/hugofs" - "github.com/gohugoio/hugo/source" + "github.com/gohugoio/hugo/hugolib" + "golang.org/x/text/unicode/norm" ) -func TestEmptySourceFilesystem(t *testing.T) { - c := qt.New(t) - ss := newTestSourceSpec() - src := ss.NewFilesystem("") - files, err := src.Files() - c.Assert(err, qt.IsNil) - if len(files) != 0 { - t.Errorf("new filesystem should contain 0 files.") - } -} - func TestUnicodeNorm(t *testing.T) { if runtime.GOOS != "darwin" { - // Normalization code is only for Mac OS, since it is not necessary for other OSes. 
- return - } - - c := qt.New(t) - - paths := []struct { - NFC string - NFD string - }{ - {NFC: "å", NFD: "\x61\xcc\x8a"}, - {NFC: "é", NFD: "\x65\xcc\x81"}, - } - - ss := newTestSourceSpec() - - for i, path := range paths { - base := fmt.Sprintf("base%d", i) - c.Assert(afero.WriteFile(ss.Fs.Source, filepath.Join(base, path.NFD), []byte("some data"), 0777), qt.IsNil) - src := ss.NewFilesystem(base) - files, err := src.Files() - c.Assert(err, qt.IsNil) - f := files[0] - if f.BaseFileName() != path.NFC { - t.Fatalf("file %q name in NFD form should be normalized (%s)", f.BaseFileName(), path.NFC) - } + t.Skip("Skipping test on non-Darwin OS") } -} - -func newTestSourceSpec() *source.SourceSpec { - v := config.New() - afs := hugofs.NewBaseFileDecorator(afero.NewMemMapFs()) - conf := testconfig.GetTestConfig(afs, v) - fs := hugofs.NewFrom(afs, conf.BaseConfig()) - ps, err := helpers.NewPathSpec(fs, conf, nil) - if err != nil { - panic(err) + t.Parallel() + files := ` +-- hugo.toml -- +-- content/å.md -- +-- content/é.md -- +-- content/å/å.md -- +-- content/é/é.md -- +-- layouts/_default/single.html -- +Title: {{ .Title }}|File: {{ .File.Path}} +` + b := hugolib.Test(t, files, hugolib.TestOptWithNFDOnDarwin()) + + for _, p := range b.H.Sites[0].RegularPages() { + f := p.File() + b.Assert(norm.NFC.IsNormalString(f.Path()), qt.IsTrue) + b.Assert(norm.NFC.IsNormalString(f.Dir()), qt.IsTrue) + b.Assert(norm.NFC.IsNormalString(f.Filename()), qt.IsTrue) + b.Assert(norm.NFC.IsNormalString(f.BaseFileName()), qt.IsTrue) } - return source.NewSourceSpec(ps, nil, fs.Source) } diff --git a/source/sourceSpec.go b/source/sourceSpec.go index dc44994a8..ea1b977f3 100644 --- a/source/sourceSpec.go +++ b/source/sourceSpec.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -15,7 +15,6 @@ package source import ( - "os" "path/filepath" "runtime" @@ -38,7 +37,6 @@ type SourceSpec struct { // NewSourceSpec initializes SourceSpec using languages the given filesystem and PathSpec. func NewSourceSpec(ps *helpers.PathSpec, inclusionFilter *glob.FilenameFilter, fs afero.Fs) *SourceSpec { - shouldInclude := func(filename string) bool { if !inclusionFilter.Match(filename, false) { return false @@ -90,34 +88,3 @@ func (s *SourceSpec) IgnoreFile(filename string) bool { return false } - -// IsRegularSourceFile returns whether filename represents a regular file in the -// source filesystem. -func (s *SourceSpec) IsRegularSourceFile(filename string) (bool, error) { - fi, err := helpers.LstatIfPossible(s.SourceFs, filename) - if err != nil { - return false, err - } - - if fi.IsDir() { - return false, nil - } - - if fi.Mode()&os.ModeSymlink == os.ModeSymlink { - link, err := filepath.EvalSymlinks(filename) - if err != nil { - return false, err - } - - fi, err = helpers.LstatIfPossible(s.SourceFs, link) - if err != nil { - return false, err - } - - if fi.IsDir() { - return false, nil - } - } - - return true, nil -} diff --git a/testscripts/commands/hugo__processingstats.txt b/testscripts/commands/hugo__processingstats.txt index 0e700b607..3d30b8155 100644 --- a/testscripts/commands/hugo__processingstats.txt +++ b/testscripts/commands/hugo__processingstats.txt @@ -1,17 +1,32 @@ cp $SOURCE/resources/testdata/pix.gif content/en/bundle1/pix.gif cp $SOURCE/resources/testdata/pix.gif content/en/bundle2/pix.gif cp $SOURCE/resources/testdata/pix.gif content/fr/bundle1/pix.gif +mkdir static/images +cp $SOURCE/resources/testdata/pix.gif static/images/p1.gif +cp $SOURCE/resources/testdata/pix.gif static/images/p2.gif +cp $SOURCE/resources/testdata/pix.gif static/images/p3.gif +cp $SOURCE/resources/testdata/pix.gif static/images/p4.gif + hugo stdout 'Pages.*3.*2' stdout 'Processed images.*2.*1' +stdout 'Static files.*4 |' + +ls public/images +stdout 'p1.gif' 
+stdout 'p2.gif' +stdout 'p3.gif' +stdout 'p4.gif' -- content/en/bundle1/index.md -- -- content/en/bundle2/index.md -- -- content/fr/bundle1/index.md -- -- hugo.toml -- disableKinds = ["taxonomy", "term", "RSS", "sitemap", "robotsTXT", "404"] +defaultLanguage = "en" +defaultLanguageInSubdir = true baseURL = "https://example.com/" [languages] [languages.en] diff --git a/testscripts/commands/hugo__processingstats2.txt b/testscripts/commands/hugo__processingstats2.txt new file mode 100644 index 000000000..2f8226faa --- /dev/null +++ b/testscripts/commands/hugo__processingstats2.txt @@ -0,0 +1,16 @@ +cp $SOURCE/resources/testdata/pix.gif content/posts/post-1/p1.gif +cp $SOURCE/resources/testdata/pix.gif content/posts/post-1/p2.gif + +hugo + +stdout 'Pages.*/| 10\s' +stdout 'Non-page files.*/| 2\s' + +-- content/posts/post-1/index.md -- +-- hugo.toml -- +baseURL = "https://example.com/" +-- layouts/_default/list.html -- +List. +-- layouts/_default/single.html -- +Single. + diff --git a/testscripts/commands/mod_npm.txt b/testscripts/commands/mod_npm.txt index fb0aa38c8..32cc37f06 100644 --- a/testscripts/commands/mod_npm.txt +++ b/testscripts/commands/mod_npm.txt @@ -1,23 +1,43 @@ # Test mod npm. 
+dostounix golden/package.json + hugo mod npm pack -cmp package.hugo.json golden/package.hugo.json +cmp package.json golden/package.json -- hugo.toml -- baseURL = "https://example.org/" --- package.json -- -{ - "name": "test", - "version": "1.0.0", - "dependencies": { - "mod": "foo-bar" - } -} --- golden/package.hugo.json -- +[module] +[[module.imports]] +path="github.com/gohugoio/hugoTestModule2" + + +-- golden/package.json -- { - "name": "test", - "version": "1.0.0", + "comments": { + "dependencies": { + "react-dom": "github.com/gohugoio/hugoTestModule2" + }, + "devDependencies": { + "@babel/cli": "github.com/gohugoio/hugoTestModule2", + "@babel/core": "github.com/gohugoio/hugoTestModule2", + "@babel/preset-env": "github.com/gohugoio/hugoTestModule2", + "postcss-cli": "github.com/gohugoio/hugoTestModule2", + "tailwindcss": "github.com/gohugoio/hugoTestModule2" + } + }, "dependencies": { - "mod": "foo-bar" - } + "react-dom": "^16.13.1" + }, + "devDependencies": { + "@babel/cli": "7.8.4", + "@babel/core": "7.9.0", + "@babel/preset-env": "7.9.5", + "postcss-cli": "7.1.0", + "tailwindcss": "1.2.0" + }, + "name": "script-mod_npm", + "version": "0.1.0" } +-- go.mod -- +module github.com/gohugoio/hugoTestModule diff --git a/testscripts/commands/mod_npm_withexisting.txt b/testscripts/commands/mod_npm_withexisting.txt new file mode 100644 index 000000000..e92eba3fd --- /dev/null +++ b/testscripts/commands/mod_npm_withexisting.txt @@ -0,0 +1,57 @@ +# Test mod npm. 
+ +dostounix golden/package.json + +hugo mod npm pack +cmp package.json golden/package.json + +-- hugo.toml -- +baseURL = "https://example.org/" +[module] +[[module.imports]] +path="github.com/gohugoio/hugoTestModule2" +-- package.json -- +{ + "comments": { + "foo": { + "a": "b" + } + }, + "devDependencies": { + "tailwindcss": "2.2.0" + }, + "name": "mypackage", + "version": "1.1.0" +} +-- golden/package.json -- +{ + "comments": { + "dependencies": { + "react-dom": "github.com/gohugoio/hugoTestModule2" + }, + "devDependencies": { + "@babel/cli": "github.com/gohugoio/hugoTestModule2", + "@babel/core": "github.com/gohugoio/hugoTestModule2", + "@babel/preset-env": "github.com/gohugoio/hugoTestModule2", + "postcss-cli": "github.com/gohugoio/hugoTestModule2", + "tailwindcss": "project" + }, + "foo": { + "a": "b" + } + }, + "dependencies": { + "react-dom": "^16.13.1" + }, + "devDependencies": { + "@babel/cli": "7.8.4", + "@babel/core": "7.9.0", + "@babel/preset-env": "7.9.5", + "postcss-cli": "7.1.0", + "tailwindcss": "2.2.0" + }, + "name": "mypackage", + "version": "1.1.0" +} +-- go.mod -- +module github.com/gohugoio/hugoTestModule diff --git a/testscripts/commands/new_content_archetypedir.txt b/testscripts/commands/new_content_archetypedir.txt new file mode 100644 index 000000000..ccd85c999 --- /dev/null +++ b/testscripts/commands/new_content_archetypedir.txt @@ -0,0 +1,40 @@ +mkdir content +hugo new content --kind mybundle post/first-post +grep 'First Post' content/post/first-post/index.md +grep 'Site Lang: en' content/post/first-post/index.md +grep 'Site Lang: no' content/post/first-post/index.no.md +grep 'A text file.' 
content/post/first-post/file.txt + +-- hugo.toml -- +baseURL = "http://example.org/" +[languages] +[languages.en] +languageName = "English" +weight = 1 +[languages.no] +languageName = "Norsk" +weight = 2 + +-- archetypes/mybundle/index.md -- +--- +title: "{{ replace .Name "-" " " | title }}" +date: {{ .Date }} +draft: true +--- + +Site Lang: {{ site.Language.Lang }}. +-- archetypes/mybundle/index.no.md -- +--- +title: "{{ replace .Name "-" " " | title }}" +date: {{ .Date }} +draft: true +--- + +Site Lang: {{ site.Language.Lang }}. + +-- archetypes/mybundle/file.txt -- +A text file. + + + + diff --git a/testscripts/commands/server.txt b/testscripts/commands/server.txt index fd6b200bc..777a91454 100644 --- a/testscripts/commands/server.txt +++ b/testscripts/commands/server.txt @@ -7,6 +7,7 @@ waitServer httpget $HUGOTEST_BASEURL_0 'Title: Hugo Server Test' $HUGOTEST_BASEURL_0 'ServerPort: \d{4,5}' 'myenv: thedevelopment' 'livereload\.js' 'Env: development' 'IsServer: true' httpget ${HUGOTEST_BASEURL_0}doesnotexist 'custom 404' +httpget ${HUGOTEST_BASEURL_0}livereload.js 'function' # By defauilt, the server renders to memory. ! exists public/index.html diff --git a/tpl/collections/apply.go b/tpl/collections/apply.go index 397ba0fdb..3d50395b9 100644 --- a/tpl/collections/apply.go +++ b/tpl/collections/apply.go @@ -67,7 +67,7 @@ func (ns *Namespace) Apply(ctx context.Context, c any, fname string, args ...any func applyFnToThis(ctx context.Context, fn, this reflect.Value, args ...any) (reflect.Value, error) { num := fn.Type().NumIn() - if num > 0 && fn.Type().In(0).Implements(hreflect.ContextInterface) { + if num > 0 && hreflect.IsContextType(fn.Type().In(0)) { args = append([]any{ctx}, args...) 
} diff --git a/tpl/collections/apply_test.go b/tpl/collections/apply_test.go index aa39923b7..0a5764264 100644 --- a/tpl/collections/apply_test.go +++ b/tpl/collections/apply_test.go @@ -22,6 +22,7 @@ import ( qt "github.com/frankban/quicktest" "github.com/gohugoio/hugo/config/testconfig" + "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/output" "github.com/gohugoio/hugo/output/layouts" "github.com/gohugoio/hugo/tpl" @@ -29,6 +30,10 @@ import ( type templateFinder int +func (templateFinder) GetIdentity(string) (identity.Identity, bool) { + return identity.StringIdentity("test"), true +} + func (templateFinder) Lookup(name string) (tpl.Template, bool) { return nil, false } diff --git a/tpl/collections/collections.go b/tpl/collections/collections.go index e34753f17..61fd138e9 100644 --- a/tpl/collections/collections.go +++ b/tpl/collections/collections.go @@ -35,11 +35,6 @@ import ( "github.com/spf13/cast" ) -func init() { - // htime.Now cannot be used here - rand.Seed(time.Now().UTC().UnixNano()) -} - // New returns a new instance of the collections-namespaced template functions. func New(deps *deps.Deps) *Namespace { language := deps.Conf.Language() @@ -149,7 +144,7 @@ func (ns *Namespace) Delimit(ctx context.Context, l, sep any, last ...any) (stri } default: - return "", fmt.Errorf("can't iterate over %v", l) + return "", fmt.Errorf("can't iterate over %T", l) } return str, nil diff --git a/tpl/collections/collections_test.go b/tpl/collections/collections_test.go index dcdd3bd5c..7dd518759 100644 --- a/tpl/collections/collections_test.go +++ b/tpl/collections/collections_test.go @@ -699,7 +699,6 @@ func TestShuffleRandomising(t *testing.T) { // of the sequence happens to be the same as the original sequence. However // the probability of the event is 10^-158 which is negligible. 
seqLen := 100 - rand.Seed(time.Now().UTC().UnixNano()) for _, test := range []struct { seq []int @@ -895,6 +894,7 @@ func (x TstX) TstRv2() string { return "r" + x.B } +//lint:ignore U1000 reflect test func (x TstX) unexportedMethod() string { return x.unexported } @@ -923,7 +923,7 @@ func (x TstX) String() string { type TstX struct { A, B string - unexported string + unexported string //lint:ignore U1000 reflect test } type TstParams struct { diff --git a/tpl/collections/integration_test.go b/tpl/collections/integration_test.go index a443755f8..24727a12c 100644 --- a/tpl/collections/integration_test.go +++ b/tpl/collections/integration_test.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -97,11 +97,9 @@ func TestAppendSliceToASliceOfSlices(t *testing.T) { ).Build() b.AssertFileContent("public/index.html", "[[a] [b] [c]]") - } func TestAppendNilToSlice(t *testing.T) { - t.Parallel() files := ` @@ -123,11 +121,9 @@ func TestAppendNilToSlice(t *testing.T) { ).Build() b.AssertFileContent("public/index.html", "[a <nil>]") - } func TestAppendNilsToSliceWithNils(t *testing.T) { - t.Parallel() files := ` @@ -153,7 +149,6 @@ func TestAppendNilsToSliceWithNils(t *testing.T) { b.AssertFileContent("public/index.html", "[a <nil> c <nil>]") } - } // Issue 11234. 
diff --git a/tpl/collections/where.go b/tpl/collections/where.go index 07c2d3deb..bf3f75044 100644 --- a/tpl/collections/where.go +++ b/tpl/collections/where.go @@ -51,7 +51,7 @@ func (ns *Namespace) Where(ctx context.Context, c, key any, args ...any) (any, e case reflect.Map: return ns.checkWhereMap(ctxv, seqv, kv, mv, path, op) default: - return nil, fmt.Errorf("can't iterate over %v", c) + return nil, fmt.Errorf("can't iterate over %T", c) } } @@ -320,7 +320,7 @@ func evaluateSubElem(ctx, obj reflect.Value, elemName string) (reflect.Value, er mt := objPtr.Type().Method(index) num := mt.Type.NumIn() maxNumIn := 1 - if num > 1 && mt.Type.In(1).Implements(hreflect.ContextInterface) { + if num > 1 && hreflect.IsContextType(mt.Type.In(1)) { args = []reflect.Value{ctx} maxNumIn = 2 } diff --git a/tpl/data/data.go b/tpl/data/data.go index 380c25685..b6b0515e8 100644 --- a/tpl/data/data.go +++ b/tpl/data/data.go @@ -24,6 +24,7 @@ import ( "net/http" "strings" + "github.com/gohugoio/hugo/cache/filecache" "github.com/gohugoio/hugo/common/maps" "github.com/gohugoio/hugo/config/security" @@ -33,7 +34,6 @@ import ( "github.com/spf13/cast" - "github.com/gohugoio/hugo/cache/filecache" "github.com/gohugoio/hugo/deps" ) @@ -108,7 +108,7 @@ func (ns *Namespace) GetJSON(args ...any) (any, error) { req, err := http.NewRequest("GET", url, nil) if err != nil { - return nil, fmt.Errorf("Failed to create request for getJSON resource %s: %w", url, err) + return nil, fmt.Errorf("failed to create request for getJSON resource %s: %w", url, err) } unmarshal := func(b []byte) (bool, error) { diff --git a/tpl/data/resources.go b/tpl/data/resources.go index 45764dae7..3a3701d60 100644 --- a/tpl/data/resources.go +++ b/tpl/data/resources.go @@ -23,7 +23,6 @@ import ( "time" "github.com/gohugoio/hugo/cache/filecache" - "github.com/gohugoio/hugo/helpers" "github.com/spf13/afero" ) @@ -68,7 +67,7 @@ func (ns *Namespace) getRemote(cache *filecache.Cache, unmarshal func([]byte) (b res.Body.Close() 
if isHTTPError(res) { - return nil, fmt.Errorf("Failed to retrieve remote file: %s, body: %q", http.StatusText(res.StatusCode), b) + return nil, fmt.Errorf("failed to retrieve remote file: %s, body: %q", http.StatusText(res.StatusCode), b) } retry, err = unmarshal(b) diff --git a/tpl/data/resources_test.go b/tpl/data/resources_test.go index d452a2a43..b8003bf43 100644 --- a/tpl/data/resources_test.go +++ b/tpl/data/resources_test.go @@ -15,9 +15,6 @@ package data import ( "bytes" - - "github.com/gohugoio/hugo/common/loggers" - "net/http" "net/http/httptest" "net/url" @@ -26,12 +23,14 @@ import ( "testing" "time" + "github.com/gohugoio/hugo/cache/filecache" + "github.com/gohugoio/hugo/common/loggers" + "github.com/gohugoio/hugo/config/testconfig" "github.com/gohugoio/hugo/helpers" qt "github.com/frankban/quicktest" - "github.com/gohugoio/hugo/cache/filecache" "github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/deps" "github.com/gohugoio/hugo/hugofs" diff --git a/tpl/debug/integration_test.go b/tpl/debug/integration_test.go index 3d120580d..9a36e2d12 100644 --- a/tpl/debug/integration_test.go +++ b/tpl/debug/integration_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -41,5 +41,5 @@ disableKinds = ["taxonomy", "term"] }, ).Build() - b.AssertLogContains("imer: name \"foo\" count '\\x05' duration") + b.AssertLogContains("timer: name foo count 5 duration") } diff --git a/tpl/diagrams/diagrams.go b/tpl/diagrams/diagrams.go index dfa29a978..6a58bcfe4 100644 --- a/tpl/diagrams/diagrams.go +++ b/tpl/diagrams/diagrams.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. 
// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/tpl/diagrams/goat.go b/tpl/diagrams/goat.go index f3d4f4bfb..fe156f1e8 100644 --- a/tpl/diagrams/goat.go +++ b/tpl/diagrams/goat.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/tpl/diagrams/init.go b/tpl/diagrams/init.go index e6356ce9c..0cbec7e1b 100644 --- a/tpl/diagrams/init.go +++ b/tpl/diagrams/init.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/tpl/fmt/integration_test.go b/tpl/fmt/integration_test.go index 5010fa90e..40bfefcdc 100644 --- a/tpl/fmt/integration_test.go +++ b/tpl/fmt/integration_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -41,5 +41,4 @@ ignoreErrors = ['error-b'] b.BuildE() b.AssertLogMatches(`^ERROR a\nYou can suppress this error by adding the following to your site configuration:\nignoreErrors = \['error-a'\]\n$`) - } diff --git a/tpl/images/integration_test.go b/tpl/images/integration_test.go index ad810ad92..81f35e39c 100644 --- a/tpl/images/integration_test.go +++ b/tpl/images/integration_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. 
// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/tpl/internal/go_templates/staticcheck.conf b/tpl/internal/go_templates/staticcheck.conf new file mode 100644 index 000000000..9cf5a78a4 --- /dev/null +++ b/tpl/internal/go_templates/staticcheck.conf @@ -0,0 +1 @@ +checks = ["none"]
\ No newline at end of file diff --git a/tpl/internal/go_templates/texttemplate/hugo_template.go b/tpl/internal/go_templates/texttemplate/hugo_template.go index 78be55e18..4db40ce82 100644 --- a/tpl/internal/go_templates/texttemplate/hugo_template.go +++ b/tpl/internal/go_templates/texttemplate/hugo_template.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -59,23 +59,6 @@ func NewExecuter(helper ExecHelper) Executer { return &executer{helper: helper} } -type ( - pageContextKeyType string - hasLockContextKeyType string - stackContextKeyType string - callbackContextKeyType string -) - -const ( - // The data page passed to ExecuteWithContext gets stored with this key. - PageContextKey = pageContextKeyType("page") - // Used in partialCached to signal to nested templates that a lock is already taken. - HasLockContextKey = hasLockContextKeyType("hasLock") - - // Used to pass down a callback function to nested templates. - CallbackContextKey = callbackContextKeyType("callback") -) - // Note: The context is currently not fully implemented in Hugo. This is a work in progress. func (t *executer) ExecuteWithContext(ctx context.Context, p Preparer, wr io.Writer, data any) error { if ctx == nil { diff --git a/tpl/internal/go_templates/texttemplate/hugo_template_test.go b/tpl/internal/go_templates/texttemplate/hugo_template_test.go index cc88151e3..c68b747dd 100644 --- a/tpl/internal/go_templates/texttemplate/hugo_template_test.go +++ b/tpl/internal/go_templates/texttemplate/hugo_template_test.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. 
// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -56,7 +56,7 @@ func (e *execHelper) GetMapValue(ctx context.Context, tmpl Preparer, m, key refl return m.MapIndex(key), true } -func (e *execHelper) GetMethod(ctx context.Context, tmpl Preparer, receiver reflect.Value, name string) (method reflect.Value, firstArg reflect.Value) { +func (e *execHelper) GetMethod(ctx context.Context, tmpl Preparer, receiver reflect.Value, name string) (reflect.Value, reflect.Value) { if name != "Hello1" { return zero, zero } diff --git a/tpl/internal/templatefuncsRegistry.go b/tpl/internal/templatefuncsRegistry.go index c1b01f5a5..fc02a6ef9 100644 --- a/tpl/internal/templatefuncsRegistry.go +++ b/tpl/internal/templatefuncsRegistry.go @@ -170,7 +170,7 @@ func (namespaces TemplateFuncsNamespaces) MarshalJSON() ([]byte, error) { for i, ns := range namespaces { - b, err := ns.toJSON(context.TODO()) + b, err := ns.toJSON(context.Background()) if err != nil { return nil, err } diff --git a/tpl/js/js.go b/tpl/js/js.go index bb8d20966..63a676532 100644 --- a/tpl/js/js.go +++ b/tpl/js/js.go @@ -34,7 +34,6 @@ func New(deps *deps.Deps) *Namespace { // Namespace provides template functions for the "js" namespace. 
type Namespace struct { - deps *deps.Deps client *js.Client } diff --git a/tpl/lang/lang_test.go b/tpl/lang/lang_test.go index 8d5430f6f..6ec40cab3 100644 --- a/tpl/lang/lang_test.go +++ b/tpl/lang/lang_test.go @@ -41,8 +41,8 @@ func TestNumFmt(t *testing.T) { {6, -12345.6789, "-|,| ", "|", "-12 345,678900"}, // Arabic, ar_AE - {6, -12345.6789, "- ٫ ٬", "", "-12٬345٫678900"}, - {6, -12345.6789, "-|٫| ", "|", "-12 345٫678900"}, + {6, -12345.6789, "\u200f- ٫ ٬", "", "\u200f-12٬345٫678900"}, + {6, -12345.6789, "\u200f-|٫| ", "|", "\u200f-12 345٫678900"}, } for _, cas := range cases { @@ -65,7 +65,6 @@ func TestNumFmt(t *testing.T) { } func TestFormatNumbers(t *testing.T) { - c := qt.New(t) nsNn := New(&deps.Deps{}, translators.GetTranslator("nn")) @@ -103,12 +102,10 @@ func TestFormatNumbers(t *testing.T) { c.Assert(err, qt.IsNil) c.Assert(got, qt.Equals, "$20,000.00") }) - } // Issue 9446 func TestLanguageKeyFormat(t *testing.T) { - c := qt.New(t) nsUnderscoreUpper := New(&deps.Deps{}, translators.GetTranslator("es_ES")) @@ -134,7 +131,5 @@ func TestLanguageKeyFormat(t *testing.T) { got, err = nsHyphenLower.FormatNumber(3, pi) c.Assert(err, qt.IsNil) c.Assert(got, qt.Equals, "3,142") - }) - } diff --git a/tpl/math/math_test.go b/tpl/math/math_test.go index 5b54b6ac8..4cde3fb85 100644 --- a/tpl/math/math_test.go +++ b/tpl/math/math_test.go @@ -335,7 +335,7 @@ func TestRound(t *testing.T) { {0.5, 1.0}, {1.1, 1.0}, {1.5, 2.0}, - {-0.1, -0.0}, + {-0.1, 0.0}, {-0.5, -1.0}, {-1.1, -1.0}, {-1.5, -2.0}, @@ -524,7 +524,6 @@ func TestSum(t *testing.T) { _, err := ns.Sum() c.Assert(err, qt.Not(qt.IsNil)) - } func TestProduct(t *testing.T) { @@ -547,5 +546,4 @@ func TestProduct(t *testing.T) { _, err := ns.Product() c.Assert(err, qt.Not(qt.IsNil)) - } diff --git a/tpl/openapi/openapi3/integration_test.go b/tpl/openapi/openapi3/integration_test.go index d3be0eda9..6914a60b3 100644 --- a/tpl/openapi/openapi3/integration_test.go +++ b/tpl/openapi/openapi3/integration_test.go @@ 
-67,7 +67,7 @@ API: {{ $api.Info.Title | safeHTML }} b.AssertFileContent("public/index.html", `API: Sample API`) b. - EditFileReplace("assets/api/myapi.yaml", func(s string) string { return strings.ReplaceAll(s, "Sample API", "Hugo API") }). + EditFileReplaceFunc("assets/api/myapi.yaml", func(s string) string { return strings.ReplaceAll(s, "Sample API", "Hugo API") }). Build() b.AssertFileContent("public/index.html", `API: Hugo API`) diff --git a/tpl/openapi/openapi3/openapi3.go b/tpl/openapi/openapi3/openapi3.go index 38857dd98..f929c7f62 100644 --- a/tpl/openapi/openapi3/openapi3.go +++ b/tpl/openapi/openapi3/openapi3.go @@ -15,44 +15,42 @@ package openapi3 import ( + "errors" "fmt" "io" gyaml "github.com/ghodss/yaml" - "errors" - kopenapi3 "github.com/getkin/kin-openapi/openapi3" - "github.com/gohugoio/hugo/cache/namedmemcache" + "github.com/gohugoio/hugo/cache/dynacache" "github.com/gohugoio/hugo/deps" + "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/parser/metadecoders" "github.com/gohugoio/hugo/resources/resource" ) // New returns a new instance of the openapi3-namespaced template functions. func New(deps *deps.Deps) *Namespace { - // TODO(bep) consolidate when merging that "other branch" -- but be aware of the keys. - cache := namedmemcache.New() - deps.BuildStartListeners.Add( - func() { - cache.Clear() - }) - return &Namespace{ - cache: cache, + cache: dynacache.GetOrCreatePartition[string, *OpenAPIDocument](deps.MemCache, "/tmpl/openapi3", dynacache.OptionsPartition{Weight: 30, ClearWhen: dynacache.ClearOnChange}), deps: deps, } } // Namespace provides template functions for the "openapi3". type Namespace struct { - cache *namedmemcache.Cache + cache *dynacache.Partition[string, *OpenAPIDocument] deps *deps.Deps } // OpenAPIDocument represents an OpenAPI 3 document. 
type OpenAPIDocument struct { *kopenapi3.T + identityGroup identity.Identity +} + +func (o *OpenAPIDocument) GetIdentityGroup() identity.Identity { + return o.identityGroup } // Unmarshal unmarshals the given resource into an OpenAPI 3 document. @@ -62,7 +60,7 @@ func (ns *Namespace) Unmarshal(r resource.UnmarshableResource) (*OpenAPIDocument return nil, errors.New("no Key set in Resource") } - v, err := ns.cache.GetOrCreate(key, func() (any, error) { + v, err := ns.cache.GetOrCreate(key, func(string) (*OpenAPIDocument, error) { f := metadecoders.FormatFromStrings(r.MediaType().Suffixes()...) if f == "" { return nil, fmt.Errorf("MIME %q not supported", r.MediaType()) @@ -92,11 +90,11 @@ func (ns *Namespace) Unmarshal(r resource.UnmarshableResource) (*OpenAPIDocument err = kopenapi3.NewLoader().ResolveRefsIn(s, nil) - return &OpenAPIDocument{T: s}, err + return &OpenAPIDocument{T: s, identityGroup: identity.FirstIdentity(r)}, err }) if err != nil { return nil, err } - return v.(*OpenAPIDocument), nil + return v, nil } diff --git a/tpl/os/integration_test.go b/tpl/os/integration_test.go index d08374f8f..58e0ef70a 100644 --- a/tpl/os/integration_test.go +++ b/tpl/os/integration_test.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/tpl/page/init.go b/tpl/page/init.go index 52aeaafd6..826aa45d3 100644 --- a/tpl/page/init.go +++ b/tpl/page/init.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -32,7 +32,7 @@ func init() { ns := &internal.TemplateFuncsNamespace{ Name: name, Context: func(ctx context.Context, args ...interface{}) (interface{}, error) { - v := tpl.GetPageFromContext(ctx) + v := tpl.Context.Page.Get(ctx) if v == nil { // The multilingual sitemap does not have a page as its context. return nil, nil diff --git a/tpl/page/integration_test.go b/tpl/page/integration_test.go index 74788377d..632c3b64e 100644 --- a/tpl/page/integration_test.go +++ b/tpl/page/integration_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -112,11 +112,11 @@ Bundled page: {{ $p2_1.Content }} -- layouts/shortcodes/shortcode.html -- {{ if page.IsHome }}Shortcode {{ .Get 0 }} OK.{{ else }}Failed.{{ end }} -- layouts/sitemap.xml -- -HRE?{{ if eq page . }}Sitemap OK.{{ else }}Failed.{{ end }} +{{ if eq page . }}Sitemap OK.{{ else }}Failed.{{ end }} -- layouts/robots.txt -- {{ if eq page . }}Robots OK.{{ else }}Failed.{{ end }} -- layouts/sitemapindex.xml -- -{{ if not page }}SitemapIndex OK.{{ else }}Failed.{{ end }} +{{ with page }}SitemapIndex OK: {{ .Kind }}{{ else }}Failed.{{ end }} ` @@ -167,15 +167,12 @@ Shortcode in bundled page OK. b.AssertFileContent("public/page/1/index.html", `Alias OK.`) b.AssertFileContent("public/page/2/index.html", `Page OK.`) if multilingual { - b.AssertFileContent("public/sitemap.xml", `SitemapIndex OK.`) + b.AssertFileContent("public/sitemap.xml", `SitemapIndex OK: sitemapindex`) } else { b.AssertFileContent("public/sitemap.xml", `Sitemap OK.`) } - }) - } - } // Issue 10791. 
@@ -207,5 +204,23 @@ title: "P1" ).Build() b.AssertFileContent("public/p1/index.html", "<nav id=\"TableOfContents\"></nav> \n<h1 id=\"heading-1\">Heading 1</h1>") +} + +func TestFromStringRunning(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +disableLiveReload = true +-- layouts/index.html -- +{{ with resources.FromString "foo" "{{ seq 3 }}" }} +{{ with resources.ExecuteAsTemplate "bar" $ . }} + {{ .Content | safeHTML }} +{{ end }} +{{ end }} + ` + + b := hugolib.TestRunning(t, files) + b.AssertFileContent("public/index.html", "1\n2\n3") } diff --git a/tpl/partials/integration_test.go b/tpl/partials/integration_test.go index 3dbaf2ce4..e48f3bb20 100644 --- a/tpl/partials/integration_test.go +++ b/tpl/partials/integration_test.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -297,7 +297,6 @@ timeout = '200ms' b.Assert(err, qt.Not(qt.IsNil)) b.Assert(err.Error(), qt.Contains, "timed out") - } func TestIncludeCachedTimeout(t *testing.T) { @@ -322,7 +321,6 @@ timeout = '200ms' b.Assert(err, qt.Not(qt.IsNil)) b.Assert(err.Error(), qt.Contains, "timed out") - } // See Issue #10789 @@ -350,5 +348,4 @@ BAR ).Build() b.AssertFileContent("public/index.html", "OO:BAR") - } diff --git a/tpl/partials/partials.go b/tpl/partials/partials.go index 3834529ce..8e36e21b9 100644 --- a/tpl/partials/partials.go +++ b/tpl/partials/partials.go @@ -40,9 +40,10 @@ type partialCacheKey struct { Variants []any } type includeResult struct { - name string - result any - err error + name string + result any + mangager identity.Manager + err error } func (k partialCacheKey) Key() string { @@ -65,7 +66,7 @@ type partialCache struct { } func (p *partialCache) clear() { - p.cache.DeleteFunc(func(string, includeResult) bool { + p.cache.DeleteFunc(func(s string, r includeResult) bool { return true }) } @@ -75,7 +76,7 @@ func New(deps *deps.Deps) *Namespace { // This lazycache was introduced in Hugo 0.111.0. // We're going to expand and consolidate all memory caches in Hugo using this, // so just set a high limit for now. - lru := lazycache.New[string, includeResult](lazycache.Options{MaxEntries: 1000}) + lru := lazycache.New(lazycache.Options[string, includeResult]{MaxEntries: 1000}) cache := &partialCache{cache: lru} deps.BuildStartListeners.Add( @@ -142,11 +143,11 @@ func (ns *Namespace) includWithTimeout(ctx context.Context, name string, dataLis case <-timeoutCtx.Done(): err := timeoutCtx.Err() if err == context.DeadlineExceeded { + //lint:ignore ST1005 end user message. err = fmt.Errorf("partial %q timed out after %s. This is most likely due to infinite recursion. 
If this is just a slow template, you can try to increase the 'timeout' config setting.", name, ns.deps.Conf.Timeout()) } return includeResult{err: err} } - } // include is a helper function that lookups and executes the named partial. @@ -215,7 +216,6 @@ func (ns *Namespace) include(ctx context.Context, name string, dataList ...any) name: templ.Name(), result: result, } - } // IncludeCached executes and caches partial templates. The cache is created with name+variants as the key. @@ -226,12 +226,22 @@ func (ns *Namespace) IncludeCached(ctx context.Context, name string, context any Name: name, Variants: variants, } + depsManagerIn := tpl.Context.GetDependencyManagerInCurrentScope(ctx) r, found, err := ns.cachedPartials.cache.GetOrCreate(key.Key(), func(string) (includeResult, error) { + var depsManagerShared identity.Manager + if ns.deps.Conf.Watching() { + // We need to create a shared dependency manager to pass downwards + // and add those same dependencies to any cached invocation of this partial. + depsManagerShared = identity.NewManager("partials") + ctx = tpl.Context.DependencyManagerScopedProvider.Set(ctx, depsManagerShared.(identity.DependencyManagerScopedProvider)) + } r := ns.includWithTimeout(ctx, key.Name, context) + if ns.deps.Conf.Watching() { + r.mangager = depsManagerShared + } return r, r.err }) - if err != nil { return nil, err } @@ -242,10 +252,13 @@ func (ns *Namespace) IncludeCached(ctx context.Context, name string, context any // We need to track the time spent in the cache to // get the totals correct. 
ns.deps.Metrics.MeasureSince(key.templateName(), start) - } ns.deps.Metrics.TrackValue(key.templateName(), r.result, found) } + if r.mangager != nil && depsManagerIn != nil { + depsManagerIn.AddIdentity(r.mangager) + } + return r.result, nil } diff --git a/tpl/reflect/reflect_test.go b/tpl/reflect/reflect_test.go index f85af87dd..84ffe813b 100644 --- a/tpl/reflect/reflect_test.go +++ b/tpl/reflect/reflect_test.go @@ -21,8 +21,6 @@ import ( var ns = New() -type tstNoStringer struct{} - func TestIsMap(t *testing.T) { c := qt.New(t) for _, test := range []struct { diff --git a/tpl/resources/integration_test.go b/tpl/resources/integration_test.go index 0e0a29a98..02aa5d29d 100644 --- a/tpl/resources/integration_test.go +++ b/tpl/resources/integration_test.go @@ -72,10 +72,9 @@ Copy3: /blog/js/copies/moo.a677329fc6c4ad947e0c7116d91f37a2.min.js|text/javascri `) - b.AssertDestinationExists("images/copy2.png", true) + b.AssertFileExists("public/images/copy2.png", true) // No permalink used. - b.AssertDestinationExists("images/copy3.png", false) - + b.AssertFileExists("public/images/copy3.png", false) } func TestCopyPageShouldFail(t *testing.T) { @@ -96,7 +95,6 @@ func TestCopyPageShouldFail(t *testing.T) { }).BuildE() b.Assert(err, qt.IsNotNil) - } func TestGet(t *testing.T) { @@ -125,5 +123,4 @@ Image OK Empty string not found `) - } diff --git a/tpl/resources/resources.go b/tpl/resources/resources.go index d18797ebc..04af756ef 100644 --- a/tpl/resources/resources.go +++ b/tpl/resources/resources.go @@ -16,16 +16,15 @@ package resources import ( "context" + "errors" "fmt" "sync" - "errors" - "github.com/gohugoio/hugo/common/maps" + "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/tpl/internal/resourcehelpers" - "github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/resources/postpub" "github.com/gohugoio/hugo/deps" @@ -104,7 +103,6 @@ func (ns *Namespace) getscssClientDartSass() (*dartsass.Client, error) { return } 
ns.deps.BuildClosers.Add(ns.scssClientDartSass) - }) return ns.scssClientDartSass, err @@ -122,7 +120,6 @@ func (ns *Namespace) Copy(s any, r resource.Resource) (resource.Resource, error) // Get locates the filename given in Hugo's assets filesystem // and creates a Resource object that can be used for further transformations. func (ns *Namespace) Get(filename any) resource.Resource { - filenamestr, err := cast.ToStringE(filename) if err != nil { panic(err) @@ -172,7 +169,6 @@ func (ns *Namespace) GetRemote(args ...any) resource.Resource { } return ns.createClient.FromRemote(urlstr, options) - } r, err := get(args...) @@ -183,10 +179,8 @@ func (ns *Namespace) GetRemote(args ...any) resource.Resource { default: return resources.NewErrorResource(resource.NewResourceError(fmt.Errorf("error calling resources.GetRemote: %w", err), make(map[string]any))) } - } return r - } // GetMatch finds the first Resource matching the given pattern, or nil if none found. @@ -344,7 +338,6 @@ func (ns *Namespace) Minify(r resources.ResourceTransformer) (resource.Resource, // as second argument. As an option, you can e.g. specify e.g. the target path (string) // for the converted CSS resource. 
func (ns *Namespace) ToCSS(args ...any) (resource.Resource, error) { - if len(args) > 2 { return nil, errors.New("must not provide more arguments than resource object and options") } @@ -389,7 +382,7 @@ func (ns *Namespace) ToCSS(args ...any) (resource.Resource, error) { if transpiler == transpilerLibSass { var options scss.Options if targetPath != "" { - options.TargetPath = helpers.ToSlashTrimLeading(targetPath) + options.TargetPath = paths.ToSlashTrimLeading(targetPath) } else if m != nil { options, err = scss.DecodeOptions(m) if err != nil { @@ -413,12 +406,10 @@ func (ns *Namespace) ToCSS(args ...any) (resource.Resource, error) { } return client.ToCSS(r, m) - } // PostCSS processes the given Resource with PostCSS func (ns *Namespace) PostCSS(args ...any) (resource.Resource, error) { - if len(args) > 2 { return nil, errors.New("must not provide more arguments than resource object and options") } @@ -438,7 +429,6 @@ func (ns *Namespace) PostProcess(r resource.Resource) (postpub.PostPublishedReso // Babel processes the given Resource with Babel. func (ns *Namespace) Babel(args ...any) (resource.Resource, error) { - if len(args) > 2 { return nil, errors.New("must not provide more arguments than resource object and options") } diff --git a/tpl/safe/init.go b/tpl/safe/init.go index 8fc0e82ea..3b498e6df 100644 --- a/tpl/safe/init.go +++ b/tpl/safe/init.go @@ -70,11 +70,6 @@ func init() { }, ) - ns.AddMethodMapping(ctx.SanitizeURL, - []string{"sanitizeURL", "sanitizeurl"}, - [][2]string{}, - ) - return ns } diff --git a/tpl/safe/safe.go b/tpl/safe/safe.go index d1a2e8d4e..81b4e0480 100644 --- a/tpl/safe/safe.go +++ b/tpl/safe/safe.go @@ -18,7 +18,6 @@ package safe import ( "html/template" - "github.com/gohugoio/hugo/helpers" "github.com/spf13/cast" ) @@ -65,9 +64,3 @@ func (ns *Namespace) URL(s any) (template.URL, error) { ss, err := cast.ToStringE(s) return template.URL(ss), err } - -// SanitizeURL returns the string s as html/template URL content. 
-func (ns *Namespace) SanitizeURL(s any) (string, error) { - ss, err := cast.ToStringE(s) - return helpers.SanitizeURL(ss), err -} diff --git a/tpl/safe/safe_test.go b/tpl/safe/safe_test.go index 81fa40fd8..f2a54755d 100644 --- a/tpl/safe/safe_test.go +++ b/tpl/safe/safe_test.go @@ -182,30 +182,3 @@ func TestURL(t *testing.T) { c.Assert(result, qt.Equals, test.expect) } } - -func TestSanitizeURL(t *testing.T) { - t.Parallel() - c := qt.New(t) - - ns := New() - - for _, test := range []struct { - a any - expect any - }{ - {"http://foo/../../bar", "http://foo/bar"}, - // errors - {tstNoStringer{}, false}, - } { - - result, err := ns.SanitizeURL(test.a) - - if b, ok := test.expect.(bool); ok && !b { - c.Assert(err, qt.Not(qt.IsNil)) - continue - } - - c.Assert(err, qt.IsNil) - c.Assert(result, qt.Equals, test.expect) - } -} diff --git a/tpl/site/init.go b/tpl/site/init.go index 1c018e14e..1fcb309a0 100644 --- a/tpl/site/init.go +++ b/tpl/site/init.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/tpl/strings/strings.go b/tpl/strings/strings.go index 9f16f1581..cd233b0a4 100644 --- a/tpl/strings/strings.go +++ b/tpl/strings/strings.go @@ -47,7 +47,7 @@ type Namespace struct { func (ns *Namespace) CountRunes(s any) (int, error) { ss, err := cast.ToStringE(s) if err != nil { - return 0, fmt.Errorf("Failed to convert content to string: %w", err) + return 0, fmt.Errorf("failed to convert content to string: %w", err) } counter := 0 @@ -64,7 +64,7 @@ func (ns *Namespace) CountRunes(s any) (int, error) { func (ns *Namespace) RuneCount(s any) (int, error) { ss, err := cast.ToStringE(s) if err != nil { - return 0, fmt.Errorf("Failed to convert content to string: %w", err) + return 0, fmt.Errorf("failed to convert content to string: %w", err) } return utf8.RuneCountInString(ss), nil } @@ -73,12 +73,12 @@ func (ns *Namespace) RuneCount(s any) (int, error) { func (ns *Namespace) CountWords(s any) (int, error) { ss, err := cast.ToStringE(s) if err != nil { - return 0, fmt.Errorf("Failed to convert content to string: %w", err) + return 0, fmt.Errorf("failed to convert content to string: %w", err) } isCJKLanguage, err := regexp.MatchString(`\p{Han}|\p{Hangul}|\p{Hiragana}|\p{Katakana}`, ss) if err != nil { - return 0, fmt.Errorf("Failed to match regex pattern against string: %w", err) + return 0, fmt.Errorf("failed to match regex pattern against string: %w", err) } if !isCJKLanguage { @@ -103,11 +103,11 @@ func (ns *Namespace) CountWords(s any) (int, error) { func (ns *Namespace) Count(substr, s any) (int, error) { substrs, err := cast.ToStringE(substr) if err != nil { - return 0, fmt.Errorf("Failed to convert substr to string: %w", err) + return 0, fmt.Errorf("failed to convert substr to string: %w", err) } ss, err := cast.ToStringE(s) if err != nil { - return 0, fmt.Errorf("Failed to convert s to string: %w", err) + return 0, fmt.Errorf("failed to convert s to string: %w", err) } return strings.Count(ss, substrs), nil } diff --git a/tpl/template.go 
b/tpl/template.go index 1f0127c66..e9725bd74 100644 --- a/tpl/template.go +++ b/tpl/template.go @@ -23,6 +23,8 @@ import ( "unicode" bp "github.com/gohugoio/hugo/bufferpool" + "github.com/gohugoio/hugo/common/hcontext" + "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/output/layouts" "github.com/gohugoio/hugo/output" @@ -69,6 +71,7 @@ type TemplateHandler interface { ExecuteWithContext(ctx context.Context, t Template, wr io.Writer, data any) error LookupLayout(d layouts.LayoutDescriptor, f output.Format) (Template, bool, error) HasTemplate(name string) bool + GetIdentity(name string) (identity.Identity, bool) } type TemplateLookup interface { @@ -95,6 +98,27 @@ type Template interface { Prepare() (*texttemplate.Template, error) } +// AddIdentity checks if t is an identity.Identity and returns it if so. +// Else it wraps it in a templateIdentity using its name as the base. +func AddIdentity(t Template) Template { + if _, ok := t.(identity.IdentityProvider); ok { + return t + } + return templateIdentityProvider{ + Template: t, + id: identity.StringIdentity(t.Name()), + } +} + +type templateIdentityProvider struct { + Template + id identity.Identity +} + +func (t templateIdentityProvider) GetIdentity() identity.Identity { + return t.id +} + // TemplateParser is used to parse ad-hoc templates, e.g. in the Resource chain. type TemplateParser interface { Parse(name, tpl string) (Template, error) @@ -111,18 +135,6 @@ type TemplateDebugger interface { Debug() } -// templateInfo wraps a Template with some additional information. -type templateInfo struct { - Template - Info -} - -// templateInfo wraps a Template with some additional information. -type templateInfoManager struct { - Template - InfoManager -} - // TemplatesProvider as implemented by deps.Deps. 
type TemplatesProvider interface { Tmpl() TemplateHandler @@ -144,34 +156,38 @@ type TemplateFuncGetter interface { GetFunc(name string) (reflect.Value, bool) } -// GetPageFromContext returns the top level Page. -func GetPageFromContext(ctx context.Context) any { - return ctx.Value(texttemplate.PageContextKey) +type contextKey string + +// Context manages values passed in the context to templates. +var Context = struct { + DependencyManagerScopedProvider hcontext.ContextDispatcher[identity.DependencyManagerScopedProvider] + GetDependencyManagerInCurrentScope func(context.Context) identity.Manager + SetDependencyManagerInCurrentScope func(context.Context, identity.Manager) context.Context + DependencyScope hcontext.ContextDispatcher[int] + Page hcontext.ContextDispatcher[page] +}{ + DependencyManagerScopedProvider: hcontext.NewContextDispatcher[identity.DependencyManagerScopedProvider](contextKey("DependencyManagerScopedProvider")), + DependencyScope: hcontext.NewContextDispatcher[int](contextKey("DependencyScope")), + Page: hcontext.NewContextDispatcher[page](contextKey("Page")), } -// SetPageInContext sets the top level Page. 
-func SetPageInContext(ctx context.Context, p page) context.Context { - return context.WithValue(ctx, texttemplate.PageContextKey, p) +func init() { + Context.GetDependencyManagerInCurrentScope = func(ctx context.Context) identity.Manager { + idmsp := Context.DependencyManagerScopedProvider.Get(ctx) + if idmsp != nil { + return idmsp.GetDependencyManagerForScope(Context.DependencyScope.Get(ctx)) + } + return nil + } } type page interface { IsNode() bool } -func GetCallbackFunctionFromContext(ctx context.Context) any { - return ctx.Value(texttemplate.CallbackContextKey) -} - -func SetCallbackFunctionInContext(ctx context.Context, fn any) context.Context { - return context.WithValue(ctx, texttemplate.CallbackContextKey, fn) -} - const hugoNewLinePlaceholder = "___hugonl_" -var ( - stripHTMLReplacerPre = strings.NewReplacer("\n", " ", "</p>", hugoNewLinePlaceholder, "<br>", hugoNewLinePlaceholder, "<br />", hugoNewLinePlaceholder) - whitespaceRe = regexp.MustCompile(`\s+`) -) +var stripHTMLReplacerPre = strings.NewReplacer("\n", " ", "</p>", hugoNewLinePlaceholder, "<br>", hugoNewLinePlaceholder, "<br />", hugoNewLinePlaceholder) // StripHTML strips out all HTML tags in s. func StripHTML(s string) string { diff --git a/tpl/template_info.go b/tpl/template_info.go index 5f748d682..b27debf1f 100644 --- a/tpl/template_info.go +++ b/tpl/template_info.go @@ -13,18 +13,11 @@ package tpl -import ( - "github.com/gohugoio/hugo/identity" -) - // Increments on breaking changes. const TemplateVersion = 2 type Info interface { ParseInfo() ParseInfo - - // Identifies this template and its dependencies. - identity.Provider } type FileInfo interface { @@ -32,13 +25,6 @@ type FileInfo interface { Filename() string } -type InfoManager interface { - ParseInfo() ParseInfo - - // Identifies and manages this template and its dependencies. 
- identity.Manager -} - type ParseInfo struct { // Set for shortcode templates with any {{ .Inner }} IsInner bool diff --git a/tpl/template_test.go b/tpl/template_test.go index d989b7158..333513a3d 100644 --- a/tpl/template_test.go +++ b/tpl/template_test.go @@ -67,5 +67,3 @@ More text here.</p> } } } - -const tstHTMLContent = "<!DOCTYPE html><html><head><script src=\"http://two/foobar.js\"></script></head><body><nav><ul><li hugo-nav=\"section_0\"></li><li hugo-nav=\"section_1\"></li></ul></nav><article>content <a href=\"http://two/foobar\">foobar</a>. Follow up</article><p>This is some text.<br>And some more.</p></body></html>" diff --git a/tpl/templates/integration_test.go b/tpl/templates/integration_test.go index 7935fa5e3..7e0bcc824 100644 --- a/tpl/templates/integration_test.go +++ b/tpl/templates/integration_test.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/tpl/time/init.go b/tpl/time/init.go index 01783270f..5f9dd77bf 100644 --- a/tpl/time/init.go +++ b/tpl/time/init.go @@ -51,7 +51,7 @@ func init() { // 3 or more arguments. Currently not supported. default: - return nil, errors.New("Invalid arguments supplied to `time`. 
Refer to time documentation: https://gohugo.io/functions/time/") + return nil, errors.New("invalid arguments supplied to `time`") } }, } diff --git a/tpl/time/time.go b/tpl/time/time.go index cd78b83aa..57b115f35 100644 --- a/tpl/time/time.go +++ b/tpl/time/time.go @@ -17,7 +17,6 @@ package time import ( "fmt" "time" - _time "time" "github.com/gohugoio/hugo/common/htime" @@ -47,14 +46,13 @@ func (ns *Namespace) AsTime(v any, args ...any) (any, error) { if err != nil { return nil, err } - loc, err = _time.LoadLocation(locStr) + loc, err = time.LoadLocation(locStr) if err != nil { return nil, err } } return htime.ToTimeInDefaultLocationE(v, loc) - } // Format converts the textual representation of the datetime string in v into @@ -69,7 +67,7 @@ func (ns *Namespace) Format(layout string, v any) (string, error) { } // Now returns the current local time or `clock` time -func (ns *Namespace) Now() _time.Time { +func (ns *Namespace) Now() time.Time { return htime.Now() } @@ -79,34 +77,34 @@ func (ns *Namespace) Now() _time.Time { // such as "300ms", "-1.5h" or "2h45m". // Valid time units are "ns", "us" (or "µs"), "ms", "s", "m", "h". 
// See https://golang.org/pkg/time/#ParseDuration -func (ns *Namespace) ParseDuration(s any) (_time.Duration, error) { +func (ns *Namespace) ParseDuration(s any) (time.Duration, error) { ss, err := cast.ToStringE(s) if err != nil { return 0, err } - return _time.ParseDuration(ss) + return time.ParseDuration(ss) } -var durationUnits = map[string]_time.Duration{ - "nanosecond": _time.Nanosecond, - "ns": _time.Nanosecond, - "microsecond": _time.Microsecond, - "us": _time.Microsecond, - "µs": _time.Microsecond, - "millisecond": _time.Millisecond, - "ms": _time.Millisecond, - "second": _time.Second, - "s": _time.Second, - "minute": _time.Minute, - "m": _time.Minute, - "hour": _time.Hour, - "h": _time.Hour, +var durationUnits = map[string]time.Duration{ + "nanosecond": time.Nanosecond, + "ns": time.Nanosecond, + "microsecond": time.Microsecond, + "us": time.Microsecond, + "µs": time.Microsecond, + "millisecond": time.Millisecond, + "ms": time.Millisecond, + "second": time.Second, + "s": time.Second, + "minute": time.Minute, + "m": time.Minute, + "hour": time.Hour, + "h": time.Hour, } // Duration converts the given number to a time.Duration. // Unit is one of nanosecond/ns, microsecond/us/µs, millisecond/ms, second/s, minute/m or hour/h. 
-func (ns *Namespace) Duration(unit any, number any) (_time.Duration, error) { +func (ns *Namespace) Duration(unit any, number any) (time.Duration, error) { unitStr, err := cast.ToStringE(unit) if err != nil { return 0, err @@ -119,5 +117,5 @@ func (ns *Namespace) Duration(unit any, number any) (_time.Duration, error) { if err != nil { return 0, err } - return _time.Duration(n) * unitDuration, nil + return time.Duration(n) * unitDuration, nil } diff --git a/tpl/tplimpl/template.go b/tpl/tplimpl/template.go index 053b53b53..a8ba6815d 100644 --- a/tpl/tplimpl/template.go +++ b/tpl/tplimpl/template.go @@ -42,7 +42,6 @@ import ( "github.com/gohugoio/hugo/common/herrors" "github.com/gohugoio/hugo/hugofs" - "github.com/gohugoio/hugo/hugofs/files" htmltemplate "github.com/gohugoio/hugo/tpl/internal/go_templates/htmltemplate" texttemplate "github.com/gohugoio/hugo/tpl/internal/go_templates/texttemplate" @@ -121,10 +120,6 @@ func needsBaseTemplate(templ string) bool { return baseTemplateDefineRe.MatchString(templ[idx:]) } -func newIdentity(name string) identity.Manager { - return identity.NewManager(identity.NewPathIdentity(files.ComponentFolderLayouts, name)) -} - func newStandaloneTextTemplate(funcs map[string]any) tpl.TemplateParseFinder { return &textTemplateWrapperWithLock{ RWMutex: &sync.RWMutex{}, @@ -147,7 +142,6 @@ func newTemplateHandlers(d *deps.Deps) (*tpl.TemplateHandlers, error) { h := &templateHandler{ nameBaseTemplateName: make(map[string]string), transformNotFound: make(map[string]*templateState), - identityNotFound: make(map[string][]identity.Manager), shortcodes: make(map[string]*shortcodeTemplates), templateInfo: make(map[string]tpl.Info), @@ -187,7 +181,6 @@ func newTemplateHandlers(d *deps.Deps) (*tpl.TemplateHandlers, error) { Tmpl: e, TxtTmpl: newStandaloneTextTemplate(funcMap), }, nil - } func newTemplateNamespace(funcs map[string]any) *templateNamespace { @@ -200,13 +193,16 @@ func newTemplateNamespace(funcs map[string]any) *templateNamespace { } } 
-func newTemplateState(templ tpl.Template, info templateInfo) *templateState { +func newTemplateState(templ tpl.Template, info templateInfo, id identity.Identity) *templateState { + if id == nil { + id = info + } return &templateState{ info: info, typ: info.resolveType(), Template: templ, - Manager: newIdentity(info.name), parseInfo: tpl.DefaultParseInfo, + id: id, } } @@ -288,7 +284,7 @@ func (t *templateExec) UnusedTemplates() []tpl.FileInfo { for _, ts := range t.main.templates { ti := ts.info - if strings.HasPrefix(ti.name, "_internal/") || ti.realFilename == "" { + if strings.HasPrefix(ti.name, "_internal/") || ti.meta == nil { continue } @@ -346,9 +342,6 @@ type templateHandler struct { // AST transformation pass. transformNotFound map[string]*templateState - // Holds identities of templates not found during first pass. - identityNotFound map[string][]identity.Manager - // shortcodes maps shortcode name to template variants // (language, output format etc.) of that shortcode. shortcodes map[string]*shortcodeTemplates @@ -405,7 +398,6 @@ func (t *templateHandler) LookupLayout(d layouts.LayoutDescriptor, f output.Form cacheVal := layoutCacheEntry{found: found, templ: templ, err: err} t.layoutTemplateCache[key] = cacheVal return cacheVal.templ, cacheVal.found, cacheVal.err - } // This currently only applies to shortcodes and what we get here is the @@ -456,6 +448,22 @@ func (t *templateHandler) HasTemplate(name string) bool { return found } +func (t *templateHandler) GetIdentity(name string) (identity.Identity, bool) { + if _, found := t.needsBaseof[name]; found { + return identity.StringIdentity(name), true + } + + if _, found := t.baseof[name]; found { + return identity.StringIdentity(name), true + } + + tt, found := t.Lookup(name) + if !found { + return nil, false + } + return tt.(identity.IdentityProvider).GetIdentity(), found +} + func (t *templateHandler) findLayout(d layouts.LayoutDescriptor, f output.Format) (tpl.Template, bool, error) { 
d.OutputFormatName = f.Name d.Suffix = f.MediaType.FirstSuffix.Suffix @@ -488,13 +496,10 @@ func (t *templateHandler) findLayout(d layouts.LayoutDescriptor, f output.Format return nil, false, err } - ts := newTemplateState(templ, overlay) + ts := newTemplateState(templ, overlay, identity.Or(base, overlay)) if found { ts.baseInfo = base - - // Add the base identity to detect changes - ts.Add(identity.NewPathIdentity(files.ComponentFolderLayouts, base.name)) } t.applyTemplateTransformers(t.main, ts) @@ -510,13 +515,6 @@ func (t *templateHandler) findLayout(d layouts.LayoutDescriptor, f output.Format return nil, false, nil } -func (t *templateHandler) findTemplate(name string) *templateState { - if templ, found := t.Lookup(name); found { - return templ.(*templateState) - } - return nil -} - func (t *templateHandler) newTemplateInfo(name, tpl string) templateInfo { var isText bool name, isText = t.nameIsText(name) @@ -539,9 +537,8 @@ func (t *templateHandler) addFileContext(templ tpl.Template, inerr error) error identifiers := t.extractIdentifiers(inerr.Error()) - //lint:ignore ST1008 the error is the main result checkFilename := func(info templateInfo, inErr error) (error, bool) { - if info.filename == "" { + if info.meta == nil { return inErr, false } @@ -560,13 +557,13 @@ func (t *templateHandler) addFileContext(templ tpl.Template, inerr error) error return -1 } - f, err := t.layoutsFs.Open(info.filename) + f, err := info.meta.Open() if err != nil { return inErr, false } defer f.Close() - fe := herrors.NewFileErrorFromName(inErr, info.realFilename) + fe := herrors.NewFileErrorFromName(inErr, info.meta.Filename) fe.UpdateContent(f, lineMatcher) if !fe.ErrorContext().Position.IsValid() { @@ -621,37 +618,33 @@ func (t *templateHandler) addShortcodeVariant(ts *templateState) { } } -func (t *templateHandler) addTemplateFile(name, path string) error { - getTemplate := func(filename string) (templateInfo, error) { - fs := t.Layouts.Fs - b, err := afero.ReadFile(fs, 
filename) +func (t *templateHandler) addTemplateFile(name string, fim hugofs.FileMetaInfo) error { + getTemplate := func(fim hugofs.FileMetaInfo) (templateInfo, error) { + meta := fim.Meta() + f, err := meta.Open() if err != nil { - return templateInfo{filename: filename, fs: fs}, err + return templateInfo{meta: meta}, err + } + defer f.Close() + b, err := io.ReadAll(f) + if err != nil { + return templateInfo{meta: meta}, err } s := removeLeadingBOM(string(b)) - realFilename := filename - if fi, err := fs.Stat(filename); err == nil { - if fim, ok := fi.(hugofs.FileMetaInfo); ok { - realFilename = fim.Meta().Filename - } - } - var isText bool name, isText = t.nameIsText(name) return templateInfo{ - name: name, - isText: isText, - template: s, - filename: filename, - realFilename: realFilename, - fs: fs, + name: name, + isText: isText, + template: s, + meta: meta, }, nil } - tinfo, err := getTemplate(path) + tinfo, err := getTemplate(fim) if err != nil { return err } @@ -741,11 +734,6 @@ func (t *templateHandler) applyTemplateTransformers(ns *templateNamespace, ts *t for k := range c.templateNotFound { t.transformNotFound[k] = ts - t.identityNotFound[k] = append(t.identityNotFound[k], c.t) - } - - for k := range c.identityNotFound { - t.identityNotFound[k] = append(t.identityNotFound[k], c.t) } return c, err @@ -804,9 +792,9 @@ func (t *templateHandler) loadEmbedded() error { } func (t *templateHandler) loadTemplates() error { - walker := func(path string, fi hugofs.FileMetaInfo, err error) error { - if err != nil || fi.IsDir() { - return err + walker := func(path string, fi hugofs.FileMetaInfo) error { + if fi.IsDir() { + return nil } if isDotFile(path) || isBackupFile(path) { @@ -822,14 +810,14 @@ func (t *templateHandler) loadTemplates() error { name = textTmplNamePrefix + name } - if err := t.addTemplateFile(name, path); err != nil { + if err := t.addTemplateFile(name, fi); err != nil { return err } return nil } - if err := helpers.SymbolicWalk(t.Layouts.Fs, "", 
walker); err != nil { + if err := helpers.Walk(t.Layouts.Fs, "", walker); err != nil { if !herrors.IsNotExist(err) { return err } @@ -861,7 +849,7 @@ func (t *templateHandler) extractPartials(templ tpl.Template) error { continue } - ts := newTemplateState(templ, templateInfo{name: templ.Name()}) + ts := newTemplateState(templ, templateInfo{name: templ.Name()}, nil) ts.typ = templatePartial t.main.mu.RLock() @@ -927,15 +915,6 @@ func (t *templateHandler) postTransform() error { } } - for k, v := range t.identityNotFound { - ts := t.findTemplate(k) - if ts != nil { - for _, im := range v { - im.Add(ts) - } - } - } - for _, v := range t.shortcodes { sort.Slice(v.variants, func(i, j int) bool { v1, v2 := v.variants[i], v.variants[j] @@ -1008,7 +987,7 @@ func (t *templateNamespace) newTemplateLookup(in *templateState) func(name strin return templ } if templ, found := findTemplateIn(name, in); found { - return newTemplateState(templ, templateInfo{name: templ.Name()}) + return newTemplateState(templ, templateInfo{name: templ.Name()}, nil) } return nil } @@ -1026,7 +1005,7 @@ func (t *templateNamespace) parse(info templateInfo) (*templateState, error) { return nil, err } - ts := newTemplateState(templ, info) + ts := newTemplateState(templ, info, nil) t.templates[info.name] = ts @@ -1040,7 +1019,7 @@ func (t *templateNamespace) parse(info templateInfo) (*templateState, error) { return nil, err } - ts := newTemplateState(templ, info) + ts := newTemplateState(templ, info, nil) t.templates[info.name] = ts @@ -1052,12 +1031,16 @@ type templateState struct { typ templateType parseInfo tpl.ParseInfo - identity.Manager + id identity.Identity info templateInfo baseInfo templateInfo // Set when a base template is used. 
} +func (t *templateState) GetIdentity() identity.Identity { + return t.id +} + func (t *templateState) ParseInfo() tpl.ParseInfo { return t.parseInfo } @@ -1066,6 +1049,10 @@ func (t *templateState) isText() bool { return isText(t.Template) } +func (t *templateState) String() string { + return t.Name() +} + func isText(templ tpl.Template) bool { _, isText := templ.(*texttemplate.Template) return isText @@ -1076,11 +1063,6 @@ type templateStateMap struct { templates map[string]*templateState } -type templateWrapperWithLock struct { - *sync.RWMutex - tpl.Template -} - type textTemplateWrapperWithLock struct { *sync.RWMutex *texttemplate.Template diff --git a/tpl/tplimpl/template_ast_transformers.go b/tpl/tplimpl/template_ast_transformers.go index 8d5d8d1b3..92558a903 100644 --- a/tpl/tplimpl/template_ast_transformers.go +++ b/tpl/tplimpl/template_ast_transformers.go @@ -14,17 +14,14 @@ package tplimpl import ( + "errors" "fmt" - "regexp" - "strings" htmltemplate "github.com/gohugoio/hugo/tpl/internal/go_templates/htmltemplate" texttemplate "github.com/gohugoio/hugo/tpl/internal/go_templates/texttemplate" "github.com/gohugoio/hugo/tpl/internal/go_templates/texttemplate/parse" - "errors" - "github.com/gohugoio/hugo/common/maps" "github.com/gohugoio/hugo/tpl" "github.com/mitchellh/mapstructure" @@ -41,7 +38,6 @@ const ( type templateContext struct { visited map[string]bool templateNotFound map[string]bool - identityNotFound map[string]bool lookupFn func(name string) *templateState // The last error encountered. 
@@ -74,19 +70,20 @@ func (c templateContext) getIfNotVisited(name string) *templateState { func newTemplateContext( t *templateState, - lookupFn func(name string) *templateState) *templateContext { + lookupFn func(name string) *templateState, +) *templateContext { return &templateContext{ t: t, lookupFn: lookupFn, visited: make(map[string]bool), templateNotFound: make(map[string]bool), - identityNotFound: make(map[string]bool), } } func applyTemplateTransformers( t *templateState, - lookupFn func(name string) *templateState) (*templateContext, error) { + lookupFn func(name string) *templateState, +) (*templateContext, error) { if t == nil { return nil, errors.New("expected template, but none provided") } @@ -179,7 +176,6 @@ func (c *templateContext) applyTransformations(n parse.Node) (bool, error) { } case *parse.CommandNode: - c.collectPartialInfo(x) c.collectInner(x) keep := c.collectReturnNode(x) @@ -280,39 +276,6 @@ func (c *templateContext) collectInner(n *parse.CommandNode) { } } -var partialRe = regexp.MustCompile(`^partial(Cached)?$|^partials\.Include(Cached)?$`) - -func (c *templateContext) collectPartialInfo(x *parse.CommandNode) { - if len(x.Args) < 2 { - return - } - - first := x.Args[0] - var id string - switch v := first.(type) { - case *parse.IdentifierNode: - id = v.Ident - case *parse.ChainNode: - id = v.String() - } - - if partialRe.MatchString(id) { - partialName := strings.Trim(x.Args[1].String(), "\"") - if !strings.Contains(partialName, ".") { - partialName += ".html" - } - partialName = "partials/" + partialName - info := c.lookupFn(partialName) - - if info != nil { - c.t.Add(info) - } else { - // Delay for later - c.identityNotFound[partialName] = true - } - } -} - func (c *templateContext) collectReturnNode(n *parse.CommandNode) bool { if c.t.typ != templatePartial || c.returnNode != nil { return true diff --git a/tpl/tplimpl/template_ast_transformers_test.go b/tpl/tplimpl/template_ast_transformers_test.go index 90ca325ab..bd889b832 100644 
--- a/tpl/tplimpl/template_ast_transformers_test.go +++ b/tpl/tplimpl/template_ast_transformers_test.go @@ -52,6 +52,7 @@ func newTestTemplate(templ tpl.Template) *templateState { templateInfo{ name: templ.Name(), }, + nil, ) } diff --git a/tpl/tplimpl/template_errors.go b/tpl/tplimpl/template_errors.go index ac8a72df5..34e73a07a 100644 --- a/tpl/tplimpl/template_errors.go +++ b/tpl/tplimpl/template_errors.go @@ -17,22 +17,22 @@ import ( "fmt" "github.com/gohugoio/hugo/common/herrors" - "github.com/spf13/afero" + "github.com/gohugoio/hugo/hugofs" + "github.com/gohugoio/hugo/identity" ) +var _ identity.Identity = (*templateInfo)(nil) + type templateInfo struct { name string template string isText bool // HTML or plain text template. - // Used to create some error context in error situations - fs afero.Fs - - // The filename relative to the fs above. - filename string + meta *hugofs.FileMeta +} - // The real filename (if possible). Used for logging. - realFilename string +func (t templateInfo) IdentifierBase() string { + return t.name } func (t templateInfo) Name() string { @@ -40,7 +40,7 @@ func (t templateInfo) Name() string { } func (t templateInfo) Filename() string { - return t.realFilename + return t.meta.Filename } func (t templateInfo) IsZero() bool { @@ -53,12 +53,11 @@ func (t templateInfo) resolveType() templateType { func (info templateInfo) errWithFileContext(what string, err error) error { err = fmt.Errorf(what+": %w", err) - fe := herrors.NewFileErrorFromName(err, info.realFilename) - f, err := info.fs.Open(info.filename) + fe := herrors.NewFileErrorFromName(err, info.meta.Filename) + f, err := info.meta.Open() if err != nil { return err } defer f.Close() return fe.UpdateContent(f, nil) - } diff --git a/tpl/tplimpl/template_funcs.go b/tpl/tplimpl/template_funcs.go index 97d1b40dd..8997c83d6 100644 --- a/tpl/tplimpl/template_funcs.go +++ b/tpl/tplimpl/template_funcs.go @@ -22,6 +22,7 @@ import ( "github.com/gohugoio/hugo/common/hreflect" 
"github.com/gohugoio/hugo/common/maps" + "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/tpl" template "github.com/gohugoio/hugo/tpl/internal/go_templates/htmltemplate" @@ -65,9 +66,8 @@ import ( ) var ( - _ texttemplate.ExecHelper = (*templateExecHelper)(nil) - zero reflect.Value - contextInterface = reflect.TypeOf((*context.Context)(nil)).Elem() + _ texttemplate.ExecHelper = (*templateExecHelper)(nil) + zero reflect.Value ) type templateExecHelper struct { @@ -81,7 +81,7 @@ func (t *templateExecHelper) GetFunc(ctx context.Context, tmpl texttemplate.Prep if fn, found := t.funcs[name]; found { if fn.Type().NumIn() > 0 { first := fn.Type().In(0) - if first.Implements(contextInterface) { + if hreflect.IsContextType(first) { // TODO(bep) check if we can void this conversion every time -- and if that matters. // The first argument may be context.Context. This is never provided by the end user, but it's used to pass down // contextual information, e.g. the top level data context (e.g. Page). @@ -95,6 +95,13 @@ func (t *templateExecHelper) GetFunc(ctx context.Context, tmpl texttemplate.Prep } func (t *templateExecHelper) Init(ctx context.Context, tmpl texttemplate.Preparer) { + if t.running { + _, ok := tmpl.(identity.IdentityProvider) + if ok { + t.trackDependencies(ctx, tmpl, "", reflect.Value{}) + } + + } } func (t *templateExecHelper) GetMapValue(ctx context.Context, tmpl texttemplate.Preparer, receiver, key reflect.Value) (reflect.Value, bool) { @@ -116,22 +123,14 @@ func (t *templateExecHelper) GetMapValue(ctx context.Context, tmpl texttemplate. 
var typeParams = reflect.TypeOf(maps.Params{}) func (t *templateExecHelper) GetMethod(ctx context.Context, tmpl texttemplate.Preparer, receiver reflect.Value, name string) (method reflect.Value, firstArg reflect.Value) { - if t.running { - switch name { - case "GetPage", "Render": - if info, ok := tmpl.(tpl.Info); ok { - if m := receiver.MethodByName(name + "WithTemplateInfo"); m.IsValid() { - return m, reflect.ValueOf(info) - } - } - } - } - if strings.EqualFold(name, "mainsections") && receiver.Type() == typeParams && receiver.Pointer() == t.siteParams.Pointer() { - // MOved to site.MainSections in Hugo 0.112.0. + // Moved to site.MainSections in Hugo 0.112.0. receiver = t.site name = "MainSections" + } + if t.running { + ctx = t.trackDependencies(ctx, tmpl, name, receiver) } fn := hreflect.GetMethodByName(receiver, name) @@ -141,7 +140,7 @@ func (t *templateExecHelper) GetMethod(ctx context.Context, tmpl texttemplate.Pr if fn.Type().NumIn() > 0 { first := fn.Type().In(0) - if first.Implements(contextInterface) { + if hreflect.IsContextType(first) { // The first argument may be context.Context. This is never provided by the end user, but it's used to pass down // contextual information, e.g. the top level data context (e.g. Page). return fn, reflect.ValueOf(ctx) @@ -151,6 +150,43 @@ func (t *templateExecHelper) GetMethod(ctx context.Context, tmpl texttemplate.Pr return fn, zero } +func (t *templateExecHelper) trackDependencies(ctx context.Context, tmpl texttemplate.Preparer, name string, receiver reflect.Value) context.Context { + if tmpl == nil { + panic("must provide a template") + } + + idm := tpl.Context.GetDependencyManagerInCurrentScope(ctx) + if idm == nil { + return ctx + } + + if info, ok := tmpl.(identity.IdentityProvider); ok { + idm.AddIdentity(info.GetIdentity()) + } + + // The receive is the "." in the method execution or map lookup, e.g. the Page in .Resources. 
+ if hreflect.IsValid(receiver) { + in := receiver.Interface() + + if idlp, ok := in.(identity.ForEeachIdentityByNameProvider); ok { + // This will skip repeated .RelPermalink usage on transformed resources + // which is not fingerprinted, e.g. to + // prevent all HTML pages to be re-rendered on a small CSS change. + idlp.ForEeachIdentityByName(name, func(id identity.Identity) bool { + idm.AddIdentity(id) + return false + }) + } else { + identity.WalkIdentitiesShallow(in, func(level int, id identity.Identity) bool { + idm.AddIdentity(id) + return false + }) + } + } + + return ctx +} + func newTemplateExecuter(d *deps.Deps) (texttemplate.Executer, map[string]reflect.Value) { funcs := createFuncMap(d) funcsv := make(map[string]reflect.Value) diff --git a/tpl/transform/integration_test.go b/tpl/transform/integration_test.go index f035ec719..351420a67 100644 --- a/tpl/transform/integration_test.go +++ b/tpl/transform/integration_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -77,7 +77,7 @@ disableKinds = ['section','sitemap','taxonomy','term'] --- title: p1 --- -a **b** c +a **b** ` + "\v" + ` c <!--more--> ` b := hugolib.Test(t, files) diff --git a/tpl/transform/transform.go b/tpl/transform/transform.go index 8078bc0ce..7054c6988 100644 --- a/tpl/transform/transform.go +++ b/tpl/transform/transform.go @@ -22,10 +22,11 @@ import ( "html/template" "strings" - "github.com/gohugoio/hugo/cache/namedmemcache" + "github.com/gohugoio/hugo/cache/dynacache" "github.com/gohugoio/hugo/markup/converter/hooks" "github.com/gohugoio/hugo/markup/highlight" "github.com/gohugoio/hugo/markup/highlight/chromalexers" + "github.com/gohugoio/hugo/resources" "github.com/gohugoio/hugo/tpl" "github.com/gohugoio/hugo/deps" @@ -35,21 +36,23 @@ import ( // New returns a new instance of the transform-namespaced template functions. func New(deps *deps.Deps) *Namespace { - cache := namedmemcache.New() - deps.BuildStartListeners.Add( - func() { - cache.Clear() - }) + if deps.MemCache == nil { + panic("must provide MemCache") + } return &Namespace{ - cache: cache, - deps: deps, + deps: deps, + cache: dynacache.GetOrCreatePartition[string, *resources.StaleValue[any]]( + deps.MemCache, + "/tmpl/transform", + dynacache.OptionsPartition{Weight: 30, ClearWhen: dynacache.ClearOnChange}, + ), } } // Namespace provides template functions for the "transform" namespace. type Namespace struct { - cache *namedmemcache.Cache + cache *dynacache.Partition[string, *resources.StaleValue[any]] deps *deps.Deps } @@ -154,7 +157,6 @@ func (ns *Namespace) XMLEscape(s any) (string, error) { // Markdownify renders s from Markdown to HTML. 
func (ns *Namespace) Markdownify(ctx context.Context, s any) (template.HTML, error) { - home := ns.deps.Site.Home() if home == nil { panic("home must not be nil") diff --git a/tpl/transform/unmarshal.go b/tpl/transform/unmarshal.go index 3936126ca..d876c88d7 100644 --- a/tpl/transform/unmarshal.go +++ b/tpl/transform/unmarshal.go @@ -14,18 +14,18 @@ package transform import ( + "errors" "fmt" "io" "strings" + "github.com/gohugoio/hugo/resources" "github.com/gohugoio/hugo/resources/resource" "github.com/gohugoio/hugo/common/types" "github.com/mitchellh/mapstructure" - "errors" - "github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/parser/metadecoders" @@ -71,7 +71,7 @@ func (ns *Namespace) Unmarshal(args ...any) (any, error) { key += decoder.OptionsKey() } - return ns.cache.GetOrCreate(key, func() (any, error) { + v, err := ns.cache.GetOrCreate(key, func(string) (*resources.StaleValue[any], error) { f := metadecoders.FormatFromStrings(r.MediaType().Suffixes()...) if f == "" { return nil, fmt.Errorf("MIME %q not supported", r.MediaType()) @@ -88,8 +88,24 @@ func (ns *Namespace) Unmarshal(args ...any) (any, error) { return nil, err } - return decoder.Unmarshal(b, f) + v, err := decoder.Unmarshal(b, f) + if err != nil { + return nil, err + } + + return &resources.StaleValue[any]{ + Value: v, + IsStaleFunc: func() bool { + return resource.IsStaleAny(r) + }, + }, nil }) + if err != nil { + return nil, err + } + + return v.Value, nil + } dataStr, err := types.ToStringE(data) @@ -103,14 +119,29 @@ func (ns *Namespace) Unmarshal(args ...any) (any, error) { key := helpers.MD5String(dataStr) - return ns.cache.GetOrCreate(key, func() (any, error) { + v, err := ns.cache.GetOrCreate(key, func(string) (*resources.StaleValue[any], error) { f := decoder.FormatFromContentString(dataStr) if f == "" { return nil, errors.New("unknown format") } - return decoder.Unmarshal([]byte(dataStr), f) + v, err := decoder.Unmarshal([]byte(dataStr), f) + if err != nil { + return nil, err + 
} + + return &resources.StaleValue[any]{ + Value: v, + IsStaleFunc: func() bool { + return false + }, + }, nil }) + if err != nil { + return nil, err + } + + return v.Value, nil } func decodeDecoder(m map[string]any) (metadecoders.Decoder, error) { diff --git a/tpl/transform/unmarshal_test.go b/tpl/transform/unmarshal_test.go index 12774298a..1b976c449 100644 --- a/tpl/transform/unmarshal_test.go +++ b/tpl/transform/unmarshal_test.go @@ -14,6 +14,7 @@ package transform_test import ( + "context" "fmt" "math/rand" "strings" @@ -193,9 +194,11 @@ func BenchmarkUnmarshalString(b *testing.B) { jsons[i] = strings.Replace(testJSON, "ROOT_KEY", fmt.Sprintf("root%d", i), 1) } + ctx := context.Background() + b.ResetTimer() for i := 0; i < b.N; i++ { - result, err := ns.Unmarshal(jsons[rand.Intn(numJsons)]) + result, err := ns.Unmarshal(ctx, jsons[rand.Intn(numJsons)]) if err != nil { b.Fatal(err) } @@ -220,9 +223,11 @@ func BenchmarkUnmarshalResource(b *testing.B) { jsons[i] = testContentResource{key: key, content: strings.Replace(testJSON, "ROOT_KEY", key, 1), mime: media.Builtin.JSONType} } + ctx := context.Background() + b.ResetTimer() for i := 0; i < b.N; i++ { - result, err := ns.Unmarshal(jsons[rand.Intn(numJsons)]) + result, err := ns.Unmarshal(ctx, jsons[rand.Intn(numJsons)]) if err != nil { b.Fatal(err) } diff --git a/transform/livereloadinject/livereloadinject.go b/transform/livereloadinject/livereloadinject.go index 1e21a92e6..e88e3895b 100644 --- a/transform/livereloadinject/livereloadinject.go +++ b/transform/livereloadinject/livereloadinject.go @@ -36,7 +36,7 @@ var ( // New creates a function that can be used to inject a script tag for // the livereload JavaScript at the start of an HTML document's head. 
-func New(baseURL url.URL) transform.Transformer { +func New(baseURL *url.URL) transform.Transformer { return func(ft transform.FromTo) error { b := ft.From().Bytes() diff --git a/transform/livereloadinject/livereloadinject_test.go b/transform/livereloadinject/livereloadinject_test.go index dc8740208..d406b9c4d 100644 --- a/transform/livereloadinject/livereloadinject_test.go +++ b/transform/livereloadinject/livereloadinject_test.go @@ -37,7 +37,7 @@ func TestLiveReloadInject(t *testing.T) { out := new(bytes.Buffer) in := strings.NewReader(s) - tr := transform.New(New(*lrurl)) + tr := transform.New(New(lrurl)) tr.Apply(out, in) return out.String() @@ -134,7 +134,7 @@ func BenchmarkLiveReloadInject(b *testing.B) { if err != nil { b.Fatalf("Parsing test URL failed") } - tr := transform.New(New(*lrurl)) + tr := transform.New(New(lrurl)) b.ResetTimer() for i := 0; i < b.N; i++ { diff --git a/unused.sh b/unused.sh deleted file mode 100755 index 351892368..000000000 --- a/unused.sh +++ /dev/null @@ -1 +0,0 @@ -deadcode -test ./... | grep -v go_templ
\ No newline at end of file diff --git a/watcher/filenotify/poller_test.go b/watcher/filenotify/poller_test.go index 8dadec1af..9b52b9780 100644 --- a/watcher/filenotify/poller_test.go +++ b/watcher/filenotify/poller_test.go @@ -20,9 +20,8 @@ const ( ) var ( - isMacOs = runtime.GOOS == "darwin" - isWindows = runtime.GOOS == "windows" - isCI = htesting.IsCI() + isMacOs = runtime.GOOS == "darwin" + isCI = htesting.IsCI() ) func TestPollerAddRemove(t *testing.T) { |