author     Bjørn Erik Pedersen <[email protected]>  2018-11-23 09:16:42 +0100
committer  Bjørn Erik Pedersen <[email protected]>  2018-11-23 21:09:00 +0100
commit     f9b4eb4f3968d32f45e0168c854e6b0c7f3a90b0
tree       bca87a80e39fe35b57fcb36b75fa34e6a1da677a /cache
parent     e82b2dc8c1628f2da33e5fb0bae1b03e0594ad2c
Handle themes in the new file cache (for images, assets)
In the newly consolidated file cache implementation, we forgot that we also look in the theme(s) for assets (SCSS transformations etc.), which is not good for Netlify and the demo sites. Fixes #5460
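The core of the change below is that the caches rooted in resources/_gen (assets, images) are now mounted on p.BaseFs.Resources.Fs, a composite filesystem that also checks any theme, and they are rooted at the relative _gen path rather than an absolute resources path. Here is a minimal standalone sketch of that idea using only afero; NewCopyOnWriteFs and the file names are illustrative stand-ins, not Hugo's actual wiring.

```go
// Standalone illustration only; Hugo's real resources filesystem is richer.
package main

import (
	"fmt"

	"github.com/spf13/afero"
)

func main() {
	// A theme can ship pre-built resources under resources/_gen.
	theme := afero.NewMemMapFs()
	afero.WriteFile(theme, "_gen/assets/scss/site.css", []byte("body{}"), 0666)

	// The project has its own resources dir; new cache entries are written here.
	project := afero.NewMemMapFs()

	// Composite view: reads fall back to the (read-only) theme,
	// writes always land in the project layer.
	resources := afero.NewCopyOnWriteFs(afero.NewReadOnlyFs(theme), project)

	// The assets/images caches are rooted at a relative "_gen" on that
	// composite fs instead of an absolute path on the OS filesystem.
	cacheFs := afero.NewBasePathFs(resources, "_gen")

	// Theme-provided assets are visible to the cache ...
	b, err := afero.ReadFile(cacheFs, "assets/scss/site.css")
	fmt.Println(string(b), err)

	// ... and new entries end up in the project's resources dir.
	afero.WriteFile(cacheFs, "assets/scss/new.css", []byte("p{}"), 0666)
	b, err = afero.ReadFile(project, "_gen/assets/scss/new.css")
	fmt.Println(string(b), err)
}
```

Because these caches live on a relative path inside a composite filesystem, the absolute-path and root-directory checks in decodeConfig are skipped for isResourceDir entries, as the filecache_config.go hunks below show.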
Diffstat (limited to 'cache')
-rw-r--r--  cache/filecache/filecache.go               25
-rw-r--r--  cache/filecache/filecache_config.go        37
-rw-r--r--  cache/filecache/filecache_config_test.go   61
-rw-r--r--  cache/filecache/filecache_pruner_test.go  116
-rw-r--r--  cache/filecache/filecache_test.go          28
5 files changed, 178 insertions(+), 89 deletions(-)
diff --git a/cache/filecache/filecache.go b/cache/filecache/filecache.go
index 9f50ad6b6..d4e3f5d6a 100644
--- a/cache/filecache/filecache.go
+++ b/cache/filecache/filecache.go
@@ -17,6 +17,7 @@ import (
"bytes"
"io"
"io/ioutil"
+ "os"
"path/filepath"
"strings"
"sync"
@@ -26,8 +27,6 @@ import (
"github.com/gohugoio/hugo/helpers"
- "github.com/gohugoio/hugo/hugolib/paths"
-
"github.com/BurntSushi/locker"
"github.com/spf13/afero"
)
@@ -305,22 +304,28 @@ func (f Caches) Get(name string) *Cache {
return f[strings.ToLower(name)]
}
-// NewCachesFromPaths creates a new set of file caches from the given
+// NewCaches creates a new set of file caches from the given
// configuration.
-func NewCachesFromPaths(p *paths.Paths) (Caches, error) {
+func NewCaches(p *helpers.PathSpec) (Caches, error) {
dcfg, err := decodeConfig(p)
if err != nil {
return nil, err
}
- genDir := filepath.FromSlash("/_gen")
-
fs := p.Fs.Source
m := make(Caches)
for k, v := range dcfg {
+ var cfs afero.Fs
+
+ if v.isResourceDir {
+ cfs = p.BaseFs.Resources.Fs
+ } else {
+ cfs = fs
+ }
+
var baseDir string
- if !strings.Contains(v.Dir, genDir) {
+ if !strings.HasPrefix(v.Dir, "_gen") {
// We do cache eviction (file removes) and since the user can set
// their own cache directory, we really want to make sure
// we do not delete any files that do not belong to this cache.
@@ -331,10 +336,12 @@ func NewCachesFromPaths(p *paths.Paths) (Caches, error) {
} else {
baseDir = filepath.Join(v.Dir, k)
}
- if err = fs.MkdirAll(baseDir, 0777); err != nil {
+ if err = cfs.MkdirAll(baseDir, 0777); err != nil && !os.IsExist(err) {
return nil, err
}
- bfs := afero.NewBasePathFs(fs, baseDir)
+
+ bfs := afero.NewBasePathFs(cfs, baseDir)
+
m[k] = NewCache(bfs, v.MaxAge)
}
diff --git a/cache/filecache/filecache_config.go b/cache/filecache/filecache_config.go
index bb2cc36e7..c451eaf85 100644
--- a/cache/filecache/filecache_config.go
+++ b/cache/filecache/filecache_config.go
@@ -20,7 +20,6 @@ import (
"time"
"github.com/gohugoio/hugo/helpers"
- "github.com/gohugoio/hugo/hugolib/paths"
"github.com/bep/mapstructure"
"github.com/pkg/errors"
@@ -68,6 +67,10 @@ type cacheConfig struct {
// The directory where files are stored.
Dir string
+
+ // Whether resources/_gen gets its own composite filesystem that
+ // also checks any theme.
+ isResourceDir bool
}
// GetJSONCache gets the file cache for getJSON.
@@ -90,7 +93,7 @@ func (f Caches) AssetsCache() *Cache {
return f[cacheKeyAssets]
}
-func decodeConfig(p *paths.Paths) (cachesConfig, error) {
+func decodeConfig(p *helpers.PathSpec) (cachesConfig, error) {
c := make(cachesConfig)
valid := make(map[string]bool)
// Add defaults
@@ -145,10 +148,13 @@ func decodeConfig(p *paths.Paths) (cachesConfig, error) {
for i, part := range parts {
if strings.HasPrefix(part, ":") {
- resolved, err := resolveDirPlaceholder(p, part)
+ resolved, isResource, err := resolveDirPlaceholder(p, part)
if err != nil {
return c, err
}
+ if isResource {
+ v.isResourceDir = true
+ }
parts[i] = resolved
}
}
@@ -159,13 +165,15 @@ func decodeConfig(p *paths.Paths) (cachesConfig, error) {
}
v.Dir = filepath.Clean(filepath.FromSlash(dir))
- if isOsFs && !filepath.IsAbs(v.Dir) {
- return c, errors.Errorf("%q must resolve to an absolute directory", v.Dir)
- }
+ if !v.isResourceDir {
+ if isOsFs && !filepath.IsAbs(v.Dir) {
+ return c, errors.Errorf("%q must resolve to an absolute directory", v.Dir)
+ }
- // Avoid cache in root, e.g. / (Unix) or c:\ (Windows)
- if len(strings.TrimPrefix(v.Dir, filepath.VolumeName(v.Dir))) == 1 {
- return c, errors.Errorf("%q is a root folder and not allowed as cache dir", v.Dir)
+ // Avoid cache in root, e.g. / (Unix) or c:\ (Windows)
+ if len(strings.TrimPrefix(v.Dir, filepath.VolumeName(v.Dir))) == 1 {
+ return c, errors.Errorf("%q is a root folder and not allowed as cache dir", v.Dir)
+ }
}
if disabled {
@@ -179,15 +187,16 @@ func decodeConfig(p *paths.Paths) (cachesConfig, error) {
}
// Resolves :resourceDir => /myproject/resources etc., :cacheDir => ...
-func resolveDirPlaceholder(p *paths.Paths, placeholder string) (string, error) {
+func resolveDirPlaceholder(p *helpers.PathSpec, placeholder string) (cacheDir string, isResource bool, err error) {
switch strings.ToLower(placeholder) {
case ":resourcedir":
- return p.AbsResourcesDir, nil
+ return "", true, nil
case ":cachedir":
- return helpers.GetCacheDir(p.Fs.Source, p.Cfg)
+ d, err := helpers.GetCacheDir(p.Fs.Source, p.Cfg)
+ return d, false, err
case ":project":
- return filepath.Base(p.WorkingDir), nil
+ return filepath.Base(p.WorkingDir), false, nil
}
- return "", errors.Errorf("%q is not a valid placeholder (valid values are :cacheDir or :resourceDir)", placeholder)
+ return "", false, errors.Errorf("%q is not a valid placeholder (valid values are :cacheDir or :resourceDir)", placeholder)
}
diff --git a/cache/filecache/filecache_config_test.go b/cache/filecache/filecache_config_test.go
index 51126f080..b0f5d2dc0 100644
--- a/cache/filecache/filecache_config_test.go
+++ b/cache/filecache/filecache_config_test.go
@@ -20,9 +20,10 @@ import (
"testing"
"time"
+ "github.com/gohugoio/hugo/helpers"
+
"github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/hugofs"
- "github.com/gohugoio/hugo/hugolib/paths"
"github.com/spf13/viper"
"github.com/stretchr/testify/require"
@@ -35,6 +36,13 @@ func TestDecodeConfig(t *testing.T) {
configStr := `
resourceDir = "myresources"
+contentDir = "content"
+dataDir = "data"
+i18nDir = "i18n"
+layoutDir = "layouts"
+assetDir = "assets"
+archetypeDir = "archetypes"
+
[caches]
[caches.getJSON]
maxAge = "10m"
@@ -50,7 +58,7 @@ dir = "/path/to/c3"
cfg, err := config.FromConfigString(configStr, "toml")
assert.NoError(err)
fs := hugofs.NewMem(cfg)
- p, err := paths.New(fs, cfg)
+ p, err := helpers.NewPathSpec(fs, cfg)
assert.NoError(err)
decoded, err := decodeConfig(p)
@@ -75,6 +83,13 @@ func TestDecodeConfigIgnoreCache(t *testing.T) {
configStr := `
resourceDir = "myresources"
+contentDir = "content"
+dataDir = "data"
+i18nDir = "i18n"
+layoutDir = "layouts"
+assetDir = "assets"
+archeTypedir = "archetypes"
+
ignoreCache = true
[caches]
[caches.getJSON]
@@ -91,7 +106,7 @@ dir = "/path/to/c3"
cfg, err := config.FromConfigString(configStr, "toml")
assert.NoError(err)
fs := hugofs.NewMem(cfg)
- p, err := paths.New(fs, cfg)
+ p, err := helpers.NewPathSpec(fs, cfg)
assert.NoError(err)
decoded, err := decodeConfig(p)
@@ -107,8 +122,7 @@ dir = "/path/to/c3"
func TestDecodeConfigDefault(t *testing.T) {
assert := require.New(t)
- cfg := viper.New()
- cfg.Set("workingDir", filepath.FromSlash("/my/cool/hugoproject"))
+ cfg := newTestConfig()
if runtime.GOOS == "windows" {
cfg.Set("resourceDir", "c:\\cache\\resources")
@@ -120,7 +134,7 @@ func TestDecodeConfigDefault(t *testing.T) {
}
fs := hugofs.NewMem(cfg)
- p, err := paths.New(fs, cfg)
+ p, err := helpers.NewPathSpec(fs, cfg)
assert.NoError(err)
decoded, err := decodeConfig(p)
@@ -129,12 +143,18 @@ func TestDecodeConfigDefault(t *testing.T) {
assert.Equal(4, len(decoded))
+ imgConfig := decoded[cacheKeyImages]
+ jsonConfig := decoded[cacheKeyGetJSON]
+
if runtime.GOOS == "windows" {
- assert.Equal("c:\\cache\\resources\\_gen", decoded[cacheKeyImages].Dir)
+ assert.Equal("_gen", imgConfig.Dir)
} else {
- assert.Equal("/cache/resources/_gen", decoded[cacheKeyImages].Dir)
- assert.Equal("/cache/thecache/hugoproject", decoded[cacheKeyGetJSON].Dir)
+ assert.Equal("_gen", imgConfig.Dir)
+ assert.Equal("/cache/thecache/hugoproject", jsonConfig.Dir)
}
+
+ assert.True(imgConfig.isResourceDir)
+ assert.False(jsonConfig.isResourceDir)
}
func TestDecodeConfigInvalidDir(t *testing.T) {
@@ -144,6 +164,13 @@ func TestDecodeConfigInvalidDir(t *testing.T) {
configStr := `
resourceDir = "myresources"
+contentDir = "content"
+dataDir = "data"
+i18nDir = "i18n"
+layoutDir = "layouts"
+assetDir = "assets"
+archeTypedir = "archetypes"
+
[caches]
[caches.getJSON]
maxAge = "10m"
@@ -157,10 +184,24 @@ dir = "/"
cfg, err := config.FromConfigString(configStr, "toml")
assert.NoError(err)
fs := hugofs.NewMem(cfg)
- p, err := paths.New(fs, cfg)
+ p, err := helpers.NewPathSpec(fs, cfg)
assert.NoError(err)
_, err = decodeConfig(p)
assert.Error(err)
}
+
+func newTestConfig() *viper.Viper {
+ cfg := viper.New()
+ cfg.Set("workingDir", filepath.FromSlash("/my/cool/hugoproject"))
+ cfg.Set("contentDir", "content")
+ cfg.Set("dataDir", "data")
+ cfg.Set("resourceDir", "resources")
+ cfg.Set("i18nDir", "i18n")
+ cfg.Set("layoutDir", "layouts")
+ cfg.Set("archetypeDir", "archetypes")
+ cfg.Set("assetDir", "assets")
+
+ return cfg
+}
diff --git a/cache/filecache/filecache_pruner_test.go b/cache/filecache/filecache_pruner_test.go
index 64d10149d..e62a6315a 100644
--- a/cache/filecache/filecache_pruner_test.go
+++ b/cache/filecache/filecache_pruner_test.go
@@ -19,8 +19,8 @@ import (
"time"
"github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/hugofs"
- "github.com/gohugoio/hugo/hugolib/paths"
"github.com/stretchr/testify/require"
)
@@ -32,69 +32,87 @@ func TestPrune(t *testing.T) {
configStr := `
resourceDir = "myresources"
+contentDir = "content"
+dataDir = "data"
+i18nDir = "i18n"
+layoutDir = "layouts"
+assetDir = "assets"
+archeTypedir = "archetypes"
+
[caches]
[caches.getjson]
maxAge = "200ms"
dir = "/cache/c"
-
+[caches.getcsv]
+maxAge = "200ms"
+dir = "/cache/d"
+[caches.assets]
+maxAge = "200ms"
+dir = ":resourceDir/_gen"
+[caches.images]
+maxAge = "200ms"
+dir = ":resourceDir/_gen"
`
cfg, err := config.FromConfigString(configStr, "toml")
assert.NoError(err)
- fs := hugofs.NewMem(cfg)
- p, err := paths.New(fs, cfg)
- assert.NoError(err)
-
- caches, err := NewCachesFromPaths(p)
- assert.NoError(err)
- jsonCache := caches.GetJSONCache()
- for i := 0; i < 10; i++ {
- id := fmt.Sprintf("i%d", i)
- jsonCache.GetOrCreateBytes(id, func() ([]byte, error) {
- return []byte("abc"), nil
- })
- if i == 4 {
- // This will expire the first 5
- time.Sleep(201 * time.Millisecond)
+ for _, name := range []string{cacheKeyGetCSV, cacheKeyGetJSON, cacheKeyAssets, cacheKeyImages} {
+ msg := fmt.Sprintf("cache: %s", name)
+ fs := hugofs.NewMem(cfg)
+ p, err := helpers.NewPathSpec(fs, cfg)
+ assert.NoError(err)
+ caches, err := NewCaches(p)
+ assert.NoError(err)
+ cache := caches[name]
+ for i := 0; i < 10; i++ {
+ id := fmt.Sprintf("i%d", i)
+ cache.GetOrCreateBytes(id, func() ([]byte, error) {
+ return []byte("abc"), nil
+ })
+ if i == 4 {
+ // This will expire the first 5
+ time.Sleep(201 * time.Millisecond)
+ }
}
- }
- count, err := caches.Prune()
- assert.NoError(err)
- assert.Equal(5, count)
-
- for i := 0; i < 10; i++ {
- id := fmt.Sprintf("i%d", i)
- v := jsonCache.getString(id)
- if i < 5 {
- assert.Equal("", v, id)
- } else {
- assert.Equal("abc", v, id)
+ count, err := caches.Prune()
+ assert.NoError(err)
+ assert.Equal(5, count, msg)
+
+ for i := 0; i < 10; i++ {
+ id := fmt.Sprintf("i%d", i)
+ v := cache.getString(id)
+ if i < 5 {
+ assert.Equal("", v, id)
+ } else {
+ assert.Equal("abc", v, id)
+ }
}
- }
- caches, err = NewCachesFromPaths(p)
- assert.NoError(err)
- jsonCache = caches.GetJSONCache()
- // Touch one and then prune.
- jsonCache.GetOrCreateBytes("i5", func() ([]byte, error) {
- return []byte("abc"), nil
- })
+ caches, err = NewCaches(p)
+ assert.NoError(err)
+ cache = caches[name]
+ // Touch one and then prune.
+ cache.GetOrCreateBytes("i5", func() ([]byte, error) {
+ return []byte("abc"), nil
+ })
- count, err = caches.Prune()
- assert.NoError(err)
- assert.Equal(4, count)
-
- // Now only the i5 should be left.
- for i := 0; i < 10; i++ {
- id := fmt.Sprintf("i%d", i)
- v := jsonCache.getString(id)
- if i != 5 {
- assert.Equal("", v, id)
- } else {
- assert.Equal("abc", v, id)
+ count, err = caches.Prune()
+ assert.NoError(err)
+ assert.Equal(4, count)
+
+ // Now only the i5 should be left.
+ for i := 0; i < 10; i++ {
+ id := fmt.Sprintf("i%d", i)
+ v := cache.getString(id)
+ if i != 5 {
+ assert.Equal("", v, id)
+ } else {
+ assert.Equal("abc", v, id)
+ }
}
+
}
}
diff --git a/cache/filecache/filecache_test.go b/cache/filecache/filecache_test.go
index 9696b6143..ff7b989f3 100644
--- a/cache/filecache/filecache_test.go
+++ b/cache/filecache/filecache_test.go
@@ -25,10 +25,10 @@ import (
"time"
"github.com/gohugoio/hugo/common/hugio"
-
"github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/helpers"
+
"github.com/gohugoio/hugo/hugofs"
- "github.com/gohugoio/hugo/hugolib/paths"
"github.com/spf13/afero"
"github.com/stretchr/testify/require"
@@ -44,6 +44,13 @@ func TestFileCache(t *testing.T) {
workingDir = "/my/work"
resourceDir = "resources"
cacheDir = "CACHEDIR"
+contentDir = "content"
+dataDir = "data"
+i18nDir = "i18n"
+layoutDir = "layouts"
+assetDir = "assets"
+archeTypedir = "archetypes"
+
[caches]
[caches.getJSON]
maxAge = "10h"
@@ -56,10 +63,10 @@ dir = ":cacheDir/c"
assert.NoError(err)
fs := hugofs.NewMem(cfg)
- p, err := paths.New(fs, cfg)
+ p, err := helpers.NewPathSpec(fs, cfg)
assert.NoError(err)
- caches, err := NewCachesFromPaths(p)
+ caches, err := NewCaches(p)
assert.NoError(err)
c := caches.Get("GetJSON")
@@ -83,7 +90,7 @@ dir = ":cacheDir/c"
bfs, ok = c.Fs.(*afero.BasePathFs)
assert.True(ok)
filename, _ = bfs.RealPath("key")
- assert.Equal(filepath.FromSlash("/my/work/resources/_gen/images/key"), filename)
+ assert.Equal(filepath.FromSlash("_gen/images/key"), filename)
rf := func(s string) func() (io.ReadCloser, error) {
return func() (io.ReadCloser, error) {
@@ -160,6 +167,13 @@ func TestFileCacheConcurrent(t *testing.T) {
configStr := `
resourceDir = "myresources"
+contentDir = "content"
+dataDir = "data"
+i18nDir = "i18n"
+layoutDir = "layouts"
+assetDir = "assets"
+archeTypedir = "archetypes"
+
[caches]
[caches.getjson]
maxAge = "1s"
@@ -170,10 +184,10 @@ dir = "/cache/c"
cfg, err := config.FromConfigString(configStr, "toml")
assert.NoError(err)
fs := hugofs.NewMem(cfg)
- p, err := paths.New(fs, cfg)
+ p, err := helpers.NewPathSpec(fs, cfg)
assert.NoError(err)
- caches, err := NewCachesFromPaths(p)
+ caches, err := NewCaches(p)
assert.NoError(err)
const cacheName = "getjson"