author | Bjørn Erik Pedersen <[email protected]> | 2019-01-02 11:58:32 +0100
committer | Bjørn Erik Pedersen <[email protected]> | 2019-01-02 14:25:37 +0100
commit | ce8a09a4c0661dece931ab1173e4f09e8e04aa38 (patch)
tree | 870372211e342312d0ab3034ee03285c6f698f9d /resources
parent | 669ada436787311cc5d02dae5b88e60a09adda58 (diff)
resources: Move resource interfaces into its own package
Diffstat (limited to 'resources')
29 files changed, 4786 insertions, 0 deletions
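Before the diff itself, here is a minimal sketch (not part of this commit) of how calling code might consume the relocated interfaces from their new import path, `github.com/gohugoio/hugo/resources/resource`. The `printImages` helper and the way the `Resources` slice is obtained are hypothetical; the methods used (`ByType`, `Name`, `RelPermalink`, `Content`) are the ones declared in the files added below.

```go
package example

import (
	"fmt"

	"github.com/gohugoio/hugo/resources/resource"
)

// printImages lists the image resources in a bundle and, where a resource
// also exposes its content, reports the content's dynamic type.
func printImages(rs resource.Resources) error {
	// ByType filters on the resource type, e.g. "image" for image files.
	for _, r := range rs.ByType("image") {
		fmt.Println(r.Name(), r.RelPermalink())

		// Most file-backed resources also implement ContentResource.
		if cr, ok := r.(resource.ContentResource); ok {
			content, err := cr.Content()
			if err != nil {
				return err
			}
			fmt.Printf("  content is a %T\n", content)
		}
	}
	return nil
}
```

The point of the refactor is that consumers can now depend on this small interface package alone, while the concrete implementations (`genericResource`, `Image`, the caches) stay in the `resources` package shown in the diff.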
diff --git a/resources/image.go b/resources/image.go new file mode 100644 index 000000000..d46facac5 --- /dev/null +++ b/resources/image.go @@ -0,0 +1,603 @@ +// Copyright 2019 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package resources + +import ( + "errors" + "fmt" + "image" + "image/color" + "image/draw" + "image/jpeg" + "io" + "io/ioutil" + "os" + "strconv" + "strings" + "sync" + + "github.com/gohugoio/hugo/resources/resource" + + _errors "github.com/pkg/errors" + + "github.com/disintegration/imaging" + "github.com/gohugoio/hugo/common/hugio" + "github.com/gohugoio/hugo/helpers" + "github.com/mitchellh/mapstructure" + + // Blind import for image.Decode + _ "image/gif" + _ "image/png" + + // Blind import for image.Decode + _ "golang.org/x/image/webp" +) + +var ( + _ resource.Resource = (*Image)(nil) + _ resource.Source = (*Image)(nil) + _ resource.Cloner = (*Image)(nil) +) + +// Imaging contains default image processing configuration. This will be fetched +// from site (or language) config. +type Imaging struct { + // Default image quality setting (1-100). Only used for JPEG images. + Quality int + + // Resample filter used. See https://github.com/disintegration/imaging + ResampleFilter string + + // The anchor used in Fill. Default is "smart", i.e. Smart Crop. + Anchor string +} + +const ( + defaultJPEGQuality = 75 + defaultResampleFilter = "box" +) + +var ( + imageFormats = map[string]imaging.Format{ + ".jpg": imaging.JPEG, + ".jpeg": imaging.JPEG, + ".png": imaging.PNG, + ".tif": imaging.TIFF, + ".tiff": imaging.TIFF, + ".bmp": imaging.BMP, + ".gif": imaging.GIF, + } + + // Add or increment if changes to an image format's processing requires + // re-generation. + imageFormatsVersions = map[imaging.Format]int{ + imaging.PNG: 2, // Floyd Steinberg dithering + } + + // Increment to mark all processed images as stale. Only use when absolutely needed. + // See the finer grained smartCropVersionNumber and imageFormatsVersions. 
+ mainImageVersionNumber = 0 +) + +var anchorPositions = map[string]imaging.Anchor{ + strings.ToLower("Center"): imaging.Center, + strings.ToLower("TopLeft"): imaging.TopLeft, + strings.ToLower("Top"): imaging.Top, + strings.ToLower("TopRight"): imaging.TopRight, + strings.ToLower("Left"): imaging.Left, + strings.ToLower("Right"): imaging.Right, + strings.ToLower("BottomLeft"): imaging.BottomLeft, + strings.ToLower("Bottom"): imaging.Bottom, + strings.ToLower("BottomRight"): imaging.BottomRight, +} + +var imageFilters = map[string]imaging.ResampleFilter{ + strings.ToLower("NearestNeighbor"): imaging.NearestNeighbor, + strings.ToLower("Box"): imaging.Box, + strings.ToLower("Linear"): imaging.Linear, + strings.ToLower("Hermite"): imaging.Hermite, + strings.ToLower("MitchellNetravali"): imaging.MitchellNetravali, + strings.ToLower("CatmullRom"): imaging.CatmullRom, + strings.ToLower("BSpline"): imaging.BSpline, + strings.ToLower("Gaussian"): imaging.Gaussian, + strings.ToLower("Lanczos"): imaging.Lanczos, + strings.ToLower("Hann"): imaging.Hann, + strings.ToLower("Hamming"): imaging.Hamming, + strings.ToLower("Blackman"): imaging.Blackman, + strings.ToLower("Bartlett"): imaging.Bartlett, + strings.ToLower("Welch"): imaging.Welch, + strings.ToLower("Cosine"): imaging.Cosine, +} + +// Image represents an image resource. +type Image struct { + config image.Config + configInit sync.Once + configLoaded bool + + copyToDestinationInit sync.Once + + imaging *Imaging + + format imaging.Format + + *genericResource +} + +// Width returns i's width. +func (i *Image) Width() int { + i.initConfig() + return i.config.Width +} + +// Height returns i's height. +func (i *Image) Height() int { + i.initConfig() + return i.config.Height +} + +// WithNewBase implements the Cloner interface. +func (i *Image) WithNewBase(base string) resource.Resource { + return &Image{ + imaging: i.imaging, + format: i.format, + genericResource: i.genericResource.WithNewBase(base).(*genericResource)} +} + +// Resize resizes the image to the specified width and height using the specified resampling +// filter and returns the transformed image. If one of width or height is 0, the image aspect +// ratio is preserved. +func (i *Image) Resize(spec string) (*Image, error) { + return i.doWithImageConfig("resize", spec, func(src image.Image, conf imageConfig) (image.Image, error) { + return imaging.Resize(src, conf.Width, conf.Height, conf.Filter), nil + }) +} + +// Fit scales down the image using the specified resample filter to fit the specified +// maximum width and height. +func (i *Image) Fit(spec string) (*Image, error) { + return i.doWithImageConfig("fit", spec, func(src image.Image, conf imageConfig) (image.Image, error) { + return imaging.Fit(src, conf.Width, conf.Height, conf.Filter), nil + }) +} + +// Fill scales the image to the smallest possible size that will cover the specified dimensions, +// crops the resized image to the specified dimensions using the given anchor point. +// Space delimited config: 200x300 TopLeft +func (i *Image) Fill(spec string) (*Image, error) { + return i.doWithImageConfig("fill", spec, func(src image.Image, conf imageConfig) (image.Image, error) { + if conf.AnchorStr == smartCropIdentifier { + return smartCrop(src, conf.Width, conf.Height, conf.Anchor, conf.Filter) + } + return imaging.Fill(src, conf.Width, conf.Height, conf.Anchor, conf.Filter), nil + }) +} + +// Holds configuration to create a new image from an existing one, resize etc. 
+type imageConfig struct { + Action string + + // Quality ranges from 1 to 100 inclusive, higher is better. + // This is only relevant for JPEG images. + // Default is 75. + Quality int + + // Rotate rotates an image by the given angle counter-clockwise. + // The rotation will be performed first. + Rotate int + + Width int + Height int + + Filter imaging.ResampleFilter + FilterStr string + + Anchor imaging.Anchor + AnchorStr string +} + +func (i *Image) isJPEG() bool { + name := strings.ToLower(i.relTargetDirFile.file) + return strings.HasSuffix(name, ".jpg") || strings.HasSuffix(name, ".jpeg") +} + +// Serialize image processing. The imaging library spins up its own set of Go routines, +// so there is not much to gain from adding more load to the mix. That +// can even have negative effect in low resource scenarios. +// Note that this only effects the non-cached scenario. Once the processed +// image is written to disk, everything is fast, fast fast. +const imageProcWorkers = 1 + +var imageProcSem = make(chan bool, imageProcWorkers) + +func (i *Image) doWithImageConfig(action, spec string, f func(src image.Image, conf imageConfig) (image.Image, error)) (*Image, error) { + conf, err := parseImageConfig(spec) + if err != nil { + return nil, err + } + conf.Action = action + + if conf.Quality <= 0 && i.isJPEG() { + // We need a quality setting for all JPEGs + conf.Quality = i.imaging.Quality + } + + if conf.FilterStr == "" { + conf.FilterStr = i.imaging.ResampleFilter + conf.Filter = imageFilters[conf.FilterStr] + } + + if conf.AnchorStr == "" { + conf.AnchorStr = i.imaging.Anchor + if !strings.EqualFold(conf.AnchorStr, smartCropIdentifier) { + conf.Anchor = anchorPositions[conf.AnchorStr] + } + } + + return i.spec.imageCache.getOrCreate(i, conf, func() (*Image, image.Image, error) { + imageProcSem <- true + defer func() { + <-imageProcSem + }() + + ci := i.clone() + + errOp := action + errPath := i.sourceFilename + + ci.setBasePath(conf) + + src, err := i.decodeSource() + if err != nil { + return nil, nil, &os.PathError{Op: errOp, Path: errPath, Err: err} + } + + if conf.Rotate != 0 { + // Rotate it before any scaling to get the dimensions correct. 
+ src = imaging.Rotate(src, float64(conf.Rotate), color.Transparent) + } + + converted, err := f(src, conf) + if err != nil { + return ci, nil, &os.PathError{Op: errOp, Path: errPath, Err: err} + } + + if i.format == imaging.PNG { + // Apply the colour palette from the source + if paletted, ok := src.(*image.Paletted); ok { + tmp := image.NewPaletted(converted.Bounds(), paletted.Palette) + draw.FloydSteinberg.Draw(tmp, tmp.Bounds(), converted, converted.Bounds().Min) + converted = tmp + } + } + + b := converted.Bounds() + ci.config = image.Config{Width: b.Max.X, Height: b.Max.Y} + ci.configLoaded = true + + return ci, converted, nil + }) + +} + +func (i imageConfig) key(format imaging.Format) string { + k := strconv.Itoa(i.Width) + "x" + strconv.Itoa(i.Height) + if i.Action != "" { + k += "_" + i.Action + } + if i.Quality > 0 { + k += "_q" + strconv.Itoa(i.Quality) + } + if i.Rotate != 0 { + k += "_r" + strconv.Itoa(i.Rotate) + } + anchor := i.AnchorStr + if anchor == smartCropIdentifier { + anchor = anchor + strconv.Itoa(smartCropVersionNumber) + } + + k += "_" + i.FilterStr + + if strings.EqualFold(i.Action, "fill") { + k += "_" + anchor + } + + if v, ok := imageFormatsVersions[format]; ok { + k += "_" + strconv.Itoa(v) + } + + if mainImageVersionNumber > 0 { + k += "_" + strconv.Itoa(mainImageVersionNumber) + } + + return k +} + +func newImageConfig(width, height, quality, rotate int, filter, anchor string) imageConfig { + var c imageConfig + + c.Width = width + c.Height = height + c.Quality = quality + c.Rotate = rotate + + if filter != "" { + filter = strings.ToLower(filter) + if v, ok := imageFilters[filter]; ok { + c.Filter = v + c.FilterStr = filter + } + } + + if anchor != "" { + anchor = strings.ToLower(anchor) + if v, ok := anchorPositions[anchor]; ok { + c.Anchor = v + c.AnchorStr = anchor + } + } + + return c +} + +func parseImageConfig(config string) (imageConfig, error) { + var ( + c imageConfig + err error + ) + + if config == "" { + return c, errors.New("image config cannot be empty") + } + + parts := strings.Fields(config) + for _, part := range parts { + part = strings.ToLower(part) + + if part == smartCropIdentifier { + c.AnchorStr = smartCropIdentifier + } else if pos, ok := anchorPositions[part]; ok { + c.Anchor = pos + c.AnchorStr = part + } else if filter, ok := imageFilters[part]; ok { + c.Filter = filter + c.FilterStr = part + } else if part[0] == 'q' { + c.Quality, err = strconv.Atoi(part[1:]) + if err != nil { + return c, err + } + if c.Quality < 1 || c.Quality > 100 { + return c, errors.New("quality ranges from 1 to 100 inclusive") + } + } else if part[0] == 'r' { + c.Rotate, err = strconv.Atoi(part[1:]) + if err != nil { + return c, err + } + } else if strings.Contains(part, "x") { + widthHeight := strings.Split(part, "x") + if len(widthHeight) <= 2 { + first := widthHeight[0] + if first != "" { + c.Width, err = strconv.Atoi(first) + if err != nil { + return c, err + } + } + + if len(widthHeight) == 2 { + second := widthHeight[1] + if second != "" { + c.Height, err = strconv.Atoi(second) + if err != nil { + return c, err + } + } + } + } else { + return c, errors.New("invalid image dimensions") + } + + } + } + + if c.Width == 0 && c.Height == 0 { + return c, errors.New("must provide Width or Height") + } + + return c, nil +} + +func (i *Image) initConfig() error { + var err error + i.configInit.Do(func() { + if i.configLoaded { + return + } + + var ( + f hugio.ReadSeekCloser + config image.Config + ) + + f, err = i.ReadSeekCloser() + if err != nil { + return + 
} + defer f.Close() + + config, _, err = image.DecodeConfig(f) + if err != nil { + return + } + i.config = config + }) + + if err != nil { + return _errors.Wrap(err, "failed to load image config") + } + + return nil +} + +func (i *Image) decodeSource() (image.Image, error) { + f, err := i.ReadSeekCloser() + if err != nil { + return nil, _errors.Wrap(err, "failed to open image for decode") + } + defer f.Close() + img, _, err := image.Decode(f) + return img, err +} + +func (i *Image) openDestinationsForWriting() (io.WriteCloser, error) { + targetFilenames := i.targetFilenames() + var changedFilenames []string + + // Fast path: + // This is a processed version of the original. + // If it exists on destination with the same filename and file size, it is + // the same file, so no need to transfer it again. + for _, targetFilename := range targetFilenames { + if fi, err := i.spec.BaseFs.PublishFs.Stat(targetFilename); err == nil && fi.Size() == i.osFileInfo.Size() { + continue + } + changedFilenames = append(changedFilenames, targetFilename) + } + + if len(changedFilenames) == 0 { + return struct { + io.Writer + io.Closer + }{ + ioutil.Discard, + ioutil.NopCloser(nil), + }, nil + + } + + return helpers.OpenFilesForWriting(i.spec.BaseFs.PublishFs, changedFilenames...) + +} + +func (i *Image) encodeTo(conf imageConfig, img image.Image, w io.Writer) error { + switch i.format { + case imaging.JPEG: + + var rgba *image.RGBA + quality := conf.Quality + + if nrgba, ok := img.(*image.NRGBA); ok { + if nrgba.Opaque() { + rgba = &image.RGBA{ + Pix: nrgba.Pix, + Stride: nrgba.Stride, + Rect: nrgba.Rect, + } + } + } + if rgba != nil { + return jpeg.Encode(w, rgba, &jpeg.Options{Quality: quality}) + } + return jpeg.Encode(w, img, &jpeg.Options{Quality: quality}) + default: + return imaging.Encode(w, img, i.format) + } +} + +func (i *Image) clone() *Image { + g := *i.genericResource + g.resourceContent = &resourceContent{} + + return &Image{ + imaging: i.imaging, + format: i.format, + genericResource: &g} +} + +func (i *Image) setBasePath(conf imageConfig) { + i.relTargetDirFile = i.relTargetPathFromConfig(conf) +} + +func (i *Image) relTargetPathFromConfig(conf imageConfig) dirFile { + p1, p2 := helpers.FileAndExt(i.relTargetDirFile.file) + + idStr := fmt.Sprintf("_hu%s_%d", i.hash, i.osFileInfo.Size()) + + // Do not change for no good reason. + const md5Threshold = 100 + + key := conf.key(i.format) + + // It is useful to have the key in clear text, but when nesting transforms, it + // can easily be too long to read, and maybe even too long + // for the different OSes to handle. + if len(p1)+len(idStr)+len(p2) > md5Threshold { + key = helpers.MD5String(p1 + key + p2) + huIdx := strings.Index(p1, "_hu") + if huIdx != -1 { + p1 = p1[:huIdx] + } else { + // This started out as a very long file name. Making it even longer + // could melt ice in the Arctic. + p1 = "" + } + } else if strings.Contains(p1, idStr) { + // On scaling an already scaled image, we get the file info from the original. + // Repeating the same info in the filename makes it stuttery for no good reason. 
+ idStr = "" + } + + return dirFile{ + dir: i.relTargetDirFile.dir, + file: fmt.Sprintf("%s%s_%s%s", p1, idStr, key, p2), + } + +} + +func decodeImaging(m map[string]interface{}) (Imaging, error) { + var i Imaging + if err := mapstructure.WeakDecode(m, &i); err != nil { + return i, err + } + + if i.Quality == 0 { + i.Quality = defaultJPEGQuality + } else if i.Quality < 0 || i.Quality > 100 { + return i, errors.New("JPEG quality must be a number between 1 and 100") + } + + if i.Anchor == "" || strings.EqualFold(i.Anchor, smartCropIdentifier) { + i.Anchor = smartCropIdentifier + } else { + i.Anchor = strings.ToLower(i.Anchor) + if _, found := anchorPositions[i.Anchor]; !found { + return i, errors.New("invalid anchor value in imaging config") + } + } + + if i.ResampleFilter == "" { + i.ResampleFilter = defaultResampleFilter + } else { + filter := strings.ToLower(i.ResampleFilter) + _, found := imageFilters[filter] + if !found { + return i, fmt.Errorf("%q is not a valid resample filter", filter) + } + i.ResampleFilter = filter + } + + return i, nil +} diff --git a/resources/image_cache.go b/resources/image_cache.go new file mode 100644 index 000000000..58be839b3 --- /dev/null +++ b/resources/image_cache.go @@ -0,0 +1,164 @@ +// Copyright 2019 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package resources + +import ( + "fmt" + "image" + "io" + "path/filepath" + "strings" + "sync" + "time" + + "github.com/gohugoio/hugo/common/hugio" + + "github.com/gohugoio/hugo/cache/filecache" + "github.com/gohugoio/hugo/helpers" +) + +type imageCache struct { + pathSpec *helpers.PathSpec + + fileCache *filecache.Cache + + mu sync.RWMutex + store map[string]*Image +} + +func (c *imageCache) isInCache(key string) bool { + c.mu.RLock() + _, found := c.store[c.normalizeKey(key)] + c.mu.RUnlock() + return found +} + +func (c *imageCache) deleteByPrefix(prefix string) { + c.mu.Lock() + defer c.mu.Unlock() + prefix = c.normalizeKey(prefix) + for k := range c.store { + if strings.HasPrefix(k, prefix) { + delete(c.store, k) + } + } +} + +func (c *imageCache) normalizeKey(key string) string { + // It is a path with Unix style slashes and it always starts with a leading slash. + key = filepath.ToSlash(key) + if !strings.HasPrefix(key, "/") { + key = "/" + key + } + + return key +} + +func (c *imageCache) clear() { + c.mu.Lock() + defer c.mu.Unlock() + c.store = make(map[string]*Image) +} + +func (c *imageCache) getOrCreate( + parent *Image, conf imageConfig, createImage func() (*Image, image.Image, error)) (*Image, error) { + + relTarget := parent.relTargetPathFromConfig(conf) + key := parent.relTargetPathForRel(relTarget.path(), false, false, false) + + // First check the in-memory store, then the disk. + c.mu.RLock() + img, found := c.store[key] + c.mu.RUnlock() + + if found { + return img, nil + } + + // These funcs are protected by a named lock. + // read clones the parent to its new name and copies + // the content to the destinations. 
+ read := func(info filecache.ItemInfo, r io.Reader) error { + img = parent.clone() + img.relTargetDirFile.file = relTarget.file + img.sourceFilename = info.Name + + w, err := img.openDestinationsForWriting() + if err != nil { + return err + } + + defer w.Close() + _, err = io.Copy(w, r) + return err + } + + // create creates the image and encodes it to w (cache) and to its destinations. + create := func(info filecache.ItemInfo, w io.WriteCloser) (err error) { + var conv image.Image + img, conv, err = createImage() + if err != nil { + w.Close() + return + } + img.relTargetDirFile.file = relTarget.file + img.sourceFilename = info.Name + + destinations, err := img.openDestinationsForWriting() + if err != nil { + w.Close() + return err + } + + mw := hugio.NewMultiWriteCloser(w, destinations) + defer mw.Close() + + return img.encodeTo(conf, conv, mw) + } + + // Now look in the file cache. + + // The definition of this counter is not that we have processed that amount + // (e.g. resized etc.), it can be fetched from file cache, + // but the count of processed image variations for this site. + c.pathSpec.ProcessingStats.Incr(&c.pathSpec.ProcessingStats.ProcessedImages) + + _, err := c.fileCache.ReadOrCreate(key, read, create) + if err != nil { + return nil, err + } + + // The file is now stored in this cache. + img.overriddenSourceFs = c.fileCache.Fs + + c.mu.Lock() + if img2, found := c.store[key]; found { + c.mu.Unlock() + return img2, nil + } + c.store[key] = img + c.mu.Unlock() + + return img, nil + +} + +func newImageCache(fileCache *filecache.Cache, ps *helpers.PathSpec) *imageCache { + return &imageCache{fileCache: fileCache, pathSpec: ps, store: make(map[string]*Image)} +} + +func timeTrack(start time.Time, name string) { + elapsed := time.Since(start) + fmt.Printf("%s took %s\n", name, elapsed) +} diff --git a/resources/image_test.go b/resources/image_test.go new file mode 100644 index 000000000..ffa482296 --- /dev/null +++ b/resources/image_test.go @@ -0,0 +1,356 @@ +// Copyright 2019 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package resources + +import ( + "fmt" + "math/rand" + "path/filepath" + "strconv" + "testing" + + "github.com/disintegration/imaging" + + "sync" + + "github.com/stretchr/testify/require" +) + +func TestParseImageConfig(t *testing.T) { + for i, this := range []struct { + in string + expect interface{} + }{ + {"300x400", newImageConfig(300, 400, 0, 0, "", "")}, + {"100x200 bottomRight", newImageConfig(100, 200, 0, 0, "", "BottomRight")}, + {"10x20 topleft Lanczos", newImageConfig(10, 20, 0, 0, "Lanczos", "topleft")}, + {"linear left 10x r180", newImageConfig(10, 0, 0, 180, "linear", "left")}, + {"x20 riGht Cosine q95", newImageConfig(0, 20, 95, 0, "cosine", "right")}, + + {"", false}, + {"foo", false}, + } { + result, err := parseImageConfig(this.in) + if b, ok := this.expect.(bool); ok && !b { + if err == nil { + t.Errorf("[%d] parseImageConfig didn't return an expected error", i) + } + } else { + if err != nil { + t.Fatalf("[%d] err: %s", i, err) + } + if fmt.Sprint(result) != fmt.Sprint(this.expect) { + t.Fatalf("[%d] got\n%v\n but expected\n%v", i, result, this.expect) + } + } + } +} + +func TestImageTransformBasic(t *testing.T) { + + assert := require.New(t) + + image := fetchSunset(assert) + fileCache := image.spec.FileCaches.ImageCache().Fs + + assert.Equal("/a/sunset.jpg", image.RelPermalink()) + assert.Equal("image", image.ResourceType()) + + resized, err := image.Resize("300x200") + assert.NoError(err) + assert.True(image != resized) + assert.True(image.genericResource != resized.genericResource) + assert.True(image.sourceFilename != resized.sourceFilename) + + resized0x, err := image.Resize("x200") + assert.NoError(err) + assert.Equal(320, resized0x.Width()) + assert.Equal(200, resized0x.Height()) + + assertFileCache(assert, fileCache, resized0x.RelPermalink(), 320, 200) + + resizedx0, err := image.Resize("200x") + assert.NoError(err) + assert.Equal(200, resizedx0.Width()) + assert.Equal(125, resizedx0.Height()) + assertFileCache(assert, fileCache, resizedx0.RelPermalink(), 200, 125) + + resizedAndRotated, err := image.Resize("x200 r90") + assert.NoError(err) + assert.Equal(125, resizedAndRotated.Width()) + assert.Equal(200, resizedAndRotated.Height()) + assertFileCache(assert, fileCache, resizedAndRotated.RelPermalink(), 125, 200) + + assert.Equal("/a/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_300x200_resize_q68_linear.jpg", resized.RelPermalink()) + assert.Equal(300, resized.Width()) + assert.Equal(200, resized.Height()) + + fitted, err := resized.Fit("50x50") + assert.NoError(err) + assert.Equal("/a/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_625708021e2bb281c9f1002f88e4753f.jpg", fitted.RelPermalink()) + assert.Equal(50, fitted.Width()) + assert.Equal(33, fitted.Height()) + + // Check the MD5 key threshold + fittedAgain, _ := fitted.Fit("10x20") + fittedAgain, err = fittedAgain.Fit("10x20") + assert.NoError(err) + assert.Equal("/a/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_3f65ba24dc2b7fba0f56d7f104519157.jpg", fittedAgain.RelPermalink()) + assert.Equal(10, fittedAgain.Width()) + assert.Equal(6, fittedAgain.Height()) + + filled, err := image.Fill("200x100 bottomLeft") + assert.NoError(err) + assert.Equal("/a/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x100_fill_q68_linear_bottomleft.jpg", filled.RelPermalink()) + assert.Equal(200, filled.Width()) + assert.Equal(100, filled.Height()) + assertFileCache(assert, fileCache, filled.RelPermalink(), 200, 100) + + smart, err := image.Fill("200x100 smart") + assert.NoError(err) + 
assert.Equal(fmt.Sprintf("/a/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x100_fill_q68_linear_smart%d.jpg", smartCropVersionNumber), smart.RelPermalink()) + assert.Equal(200, smart.Width()) + assert.Equal(100, smart.Height()) + assertFileCache(assert, fileCache, smart.RelPermalink(), 200, 100) + + // Check cache + filledAgain, err := image.Fill("200x100 bottomLeft") + assert.NoError(err) + assert.True(filled == filledAgain) + assert.True(filled.sourceFilename == filledAgain.sourceFilename) + assertFileCache(assert, fileCache, filledAgain.RelPermalink(), 200, 100) + +} + +// https://github.com/gohugoio/hugo/issues/4261 +func TestImageTransformLongFilename(t *testing.T) { + assert := require.New(t) + + image := fetchImage(assert, "1234567890qwertyuiopasdfghjklzxcvbnm5to6eeeeee7via8eleph.jpg") + assert.NotNil(image) + + resized, err := image.Resize("200x") + assert.NoError(err) + assert.NotNil(resized) + assert.Equal(200, resized.Width()) + assert.Equal("/a/_hu59e56ffff1bc1d8d122b1403d34e039f_90587_65b757a6e14debeae720fe8831f0a9bc.jpg", resized.RelPermalink()) + resized, err = resized.Resize("100x") + assert.NoError(err) + assert.NotNil(resized) + assert.Equal(100, resized.Width()) + assert.Equal("/a/_hu59e56ffff1bc1d8d122b1403d34e039f_90587_c876768085288f41211f768147ba2647.jpg", resized.RelPermalink()) +} + +func TestImageTransformConcurrent(t *testing.T) { + + var wg sync.WaitGroup + + assert := require.New(t) + + spec := newTestResourceOsFs(assert) + + image := fetchImageForSpec(spec, assert, "sunset.jpg") + + for i := 0; i < 4; i++ { + wg.Add(1) + go func(id int) { + defer wg.Done() + for j := 0; j < 5; j++ { + img := image + for k := 0; k < 2; k++ { + r1, err := img.Resize(fmt.Sprintf("%dx", id-k)) + if err != nil { + t.Fatal(err) + } + + if r1.Width() != id-k { + t.Fatalf("Width: %d:%d", r1.Width(), j) + } + + r2, err := r1.Resize(fmt.Sprintf("%dx", id-k-1)) + if err != nil { + t.Fatal(err) + } + + _, err = r2.decodeSource() + if err != nil { + t.Fatal("Err decode:", err) + } + + img = r1 + } + } + }(i + 20) + } + + wg.Wait() +} + +func TestDecodeImaging(t *testing.T) { + assert := require.New(t) + m := map[string]interface{}{ + "quality": 42, + "resampleFilter": "NearestNeighbor", + "anchor": "topLeft", + } + + imaging, err := decodeImaging(m) + + assert.NoError(err) + assert.Equal(42, imaging.Quality) + assert.Equal("nearestneighbor", imaging.ResampleFilter) + assert.Equal("topleft", imaging.Anchor) + + m = map[string]interface{}{} + + imaging, err = decodeImaging(m) + assert.NoError(err) + assert.Equal(defaultJPEGQuality, imaging.Quality) + assert.Equal("box", imaging.ResampleFilter) + assert.Equal("smart", imaging.Anchor) + + _, err = decodeImaging(map[string]interface{}{ + "quality": 123, + }) + assert.Error(err) + + _, err = decodeImaging(map[string]interface{}{ + "resampleFilter": "asdf", + }) + assert.Error(err) + + _, err = decodeImaging(map[string]interface{}{ + "anchor": "asdf", + }) + assert.Error(err) + + imaging, err = decodeImaging(map[string]interface{}{ + "anchor": "Smart", + }) + assert.NoError(err) + assert.Equal("smart", imaging.Anchor) + +} + +func TestImageWithMetadata(t *testing.T) { + assert := require.New(t) + + image := fetchSunset(assert) + + var meta = []map[string]interface{}{ + { + "title": "My Sunset", + "name": "Sunset #:counter", + "src": "*.jpg", + }, + } + + assert.NoError(AssignMetadata(meta, image)) + assert.Equal("Sunset #1", image.Name()) + + resized, err := image.Resize("200x") + assert.NoError(err) + assert.Equal("Sunset #1", 
resized.Name()) + +} + +func TestImageResize8BitPNG(t *testing.T) { + + assert := require.New(t) + + image := fetchImage(assert, "gohugoio.png") + + assert.Equal(imaging.PNG, image.format) + assert.Equal("/a/gohugoio.png", image.RelPermalink()) + assert.Equal("image", image.ResourceType()) + + resized, err := image.Resize("800x") + assert.NoError(err) + assert.Equal(imaging.PNG, resized.format) + assert.Equal("/a/gohugoio_hu0e1b9e4a4be4d6f86c7b37b9ccce3fbc_73886_800x0_resize_linear_2.png", resized.RelPermalink()) + assert.Equal(800, resized.Width()) + +} + +func TestImageResizeInSubPath(t *testing.T) { + + assert := require.New(t) + + image := fetchImage(assert, "sub/gohugoio2.png") + fileCache := image.spec.FileCaches.ImageCache().Fs + + assert.Equal(imaging.PNG, image.format) + assert.Equal("/a/sub/gohugoio2.png", image.RelPermalink()) + assert.Equal("image", image.ResourceType()) + + resized, err := image.Resize("101x101") + assert.NoError(err) + assert.Equal(imaging.PNG, resized.format) + assert.Equal("/a/sub/gohugoio2_hu0e1b9e4a4be4d6f86c7b37b9ccce3fbc_73886_101x101_resize_linear_2.png", resized.RelPermalink()) + assert.Equal(101, resized.Width()) + + assertFileCache(assert, fileCache, resized.RelPermalink(), 101, 101) + publishedImageFilename := filepath.Clean(resized.RelPermalink()) + assertImageFile(assert, image.spec.BaseFs.PublishFs, publishedImageFilename, 101, 101) + assert.NoError(image.spec.BaseFs.PublishFs.Remove(publishedImageFilename)) + + // Cleare mem cache to simulate reading from the file cache. + resized.spec.imageCache.clear() + + resizedAgain, err := image.Resize("101x101") + assert.NoError(err) + assert.Equal("/a/sub/gohugoio2_hu0e1b9e4a4be4d6f86c7b37b9ccce3fbc_73886_101x101_resize_linear_2.png", resizedAgain.RelPermalink()) + assert.Equal(101, resizedAgain.Width()) + assertFileCache(assert, fileCache, resizedAgain.RelPermalink(), 101, 101) + assertImageFile(assert, image.spec.BaseFs.PublishFs, publishedImageFilename, 101, 101) + +} + +func TestSVGImage(t *testing.T) { + assert := require.New(t) + spec := newTestResourceSpec(assert) + svg := fetchResourceForSpec(spec, assert, "circle.svg") + assert.NotNil(svg) +} + +func TestSVGImageContent(t *testing.T) { + assert := require.New(t) + spec := newTestResourceSpec(assert) + svg := fetchResourceForSpec(spec, assert, "circle.svg") + assert.NotNil(svg) + + content, err := svg.Content() + assert.NoError(err) + assert.IsType("", content) + assert.Contains(content.(string), `<svg height="100" width="100">`) +} + +func BenchmarkResizeParallel(b *testing.B) { + assert := require.New(b) + img := fetchSunset(assert) + + b.RunParallel(func(pb *testing.PB) { + for pb.Next() { + w := rand.Intn(10) + 10 + resized, err := img.Resize(strconv.Itoa(w) + "x") + if err != nil { + b.Fatal(err) + } + _, err = resized.Resize(strconv.Itoa(w-1) + "x") + if err != nil { + b.Fatal(err) + } + } + }) +} diff --git a/resources/internal/glob.go b/resources/internal/glob.go new file mode 100644 index 000000000..a87a23f13 --- /dev/null +++ b/resources/internal/glob.go @@ -0,0 +1,48 @@ +// Copyright 2019 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package internal + +import ( + "strings" + "sync" + + "github.com/gobwas/glob" +) + +var ( + globCache = make(map[string]glob.Glob) + globMu sync.RWMutex +) + +func GetGlob(pattern string) (glob.Glob, error) { + var g glob.Glob + + globMu.RLock() + g, found := globCache[pattern] + globMu.RUnlock() + if !found { + var err error + g, err = glob.Compile(strings.ToLower(pattern), '/') + if err != nil { + return nil, err + } + + globMu.Lock() + globCache[pattern] = g + globMu.Unlock() + } + + return g, nil + +} diff --git a/resources/resource.go b/resources/resource.go new file mode 100644 index 000000000..742903e80 --- /dev/null +++ b/resources/resource.go @@ -0,0 +1,741 @@ +// Copyright 2019 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package resources + +import ( + "fmt" + "io" + "io/ioutil" + "mime" + "os" + "path" + "path/filepath" + "strings" + "sync" + + "github.com/gohugoio/hugo/media" + + "github.com/gohugoio/hugo/output" + "github.com/gohugoio/hugo/tpl" + "github.com/pkg/errors" + + "github.com/gohugoio/hugo/cache/filecache" + "github.com/gohugoio/hugo/common/collections" + "github.com/gohugoio/hugo/common/hugio" + "github.com/gohugoio/hugo/common/loggers" + "github.com/gohugoio/hugo/resources/resource" + + "github.com/spf13/afero" + + "github.com/gohugoio/hugo/helpers" + "github.com/gohugoio/hugo/source" +) + +var ( + _ resource.ContentResource = (*genericResource)(nil) + _ resource.ReadSeekCloserResource = (*genericResource)(nil) + _ resource.Resource = (*genericResource)(nil) + _ resource.Source = (*genericResource)(nil) + _ resource.Cloner = (*genericResource)(nil) + _ resource.ResourcesLanguageMerger = (*resource.Resources)(nil) + _ permalinker = (*genericResource)(nil) + _ collections.Slicer = (*genericResource)(nil) + _ resource.Identifier = (*genericResource)(nil) +) + +var noData = make(map[string]interface{}) + +type permalinker interface { + relPermalinkFor(target string) string + permalinkFor(target string) string + relTargetPathsFor(target string) []string + relTargetPaths() []string + targetPath() string +} + +type Spec struct { + *helpers.PathSpec + + MediaTypes media.Types + OutputFormats output.Formats + + Logger *loggers.Logger + + TextTemplates tpl.TemplateParseFinder + + // Holds default filter settings etc. 
+ imaging *Imaging + + imageCache *imageCache + ResourceCache *ResourceCache + FileCaches filecache.Caches +} + +func NewSpec( + s *helpers.PathSpec, + fileCaches filecache.Caches, + logger *loggers.Logger, + outputFormats output.Formats, + mimeTypes media.Types) (*Spec, error) { + + imaging, err := decodeImaging(s.Cfg.GetStringMap("imaging")) + if err != nil { + return nil, err + } + + if logger == nil { + logger = loggers.NewErrorLogger() + } + + rs := &Spec{PathSpec: s, + Logger: logger, + imaging: &imaging, + MediaTypes: mimeTypes, + OutputFormats: outputFormats, + FileCaches: fileCaches, + imageCache: newImageCache( + fileCaches.ImageCache(), + + s, + )} + + rs.ResourceCache = newResourceCache(rs) + + return rs, nil + +} + +type ResourceSourceDescriptor struct { + // TargetPathBuilder is a callback to create target paths's relative to its owner. + TargetPathBuilder func(base string) string + + // Need one of these to load the resource content. + SourceFile source.File + OpenReadSeekCloser resource.OpenReadSeekCloser + + // If OpenReadSeekerCloser is not set, we use this to open the file. + SourceFilename string + + // The relative target filename without any language code. + RelTargetFilename string + + // Any base path prepeneded to the permalink. + // Typically the language code if this resource should be published to its sub-folder. + URLBase string + + // Any base paths prepended to the target path. This will also typically be the + // language code, but setting it here means that it should not have any effect on + // the permalink. + // This may be several values. In multihost mode we may publish the same resources to + // multiple targets. + TargetBasePaths []string + + // Delay publishing until either Permalink or RelPermalink is called. Maybe never. + LazyPublish bool +} + +func (r ResourceSourceDescriptor) Filename() string { + if r.SourceFile != nil { + return r.SourceFile.Filename() + } + return r.SourceFilename +} + +func (r *Spec) sourceFs() afero.Fs { + return r.PathSpec.BaseFs.Content.Fs +} + +func (r *Spec) New(fd ResourceSourceDescriptor) (resource.Resource, error) { + return r.newResourceForFs(r.sourceFs(), fd) +} + +func (r *Spec) NewForFs(sourceFs afero.Fs, fd ResourceSourceDescriptor) (resource.Resource, error) { + return r.newResourceForFs(sourceFs, fd) +} + +func (r *Spec) newResourceForFs(sourceFs afero.Fs, fd ResourceSourceDescriptor) (resource.Resource, error) { + if fd.OpenReadSeekCloser == nil { + if fd.SourceFile != nil && fd.SourceFilename != "" { + return nil, errors.New("both SourceFile and AbsSourceFilename provided") + } else if fd.SourceFile == nil && fd.SourceFilename == "" { + return nil, errors.New("either SourceFile or AbsSourceFilename must be provided") + } + } + + if fd.RelTargetFilename == "" { + fd.RelTargetFilename = fd.Filename() + } + + if len(fd.TargetBasePaths) == 0 { + // If not set, we publish the same resource to all hosts. 
+ fd.TargetBasePaths = r.MultihostTargetBasePaths + } + + return r.newResource(sourceFs, fd) +} + +func (r *Spec) newResource(sourceFs afero.Fs, fd ResourceSourceDescriptor) (resource.Resource, error) { + var fi os.FileInfo + var sourceFilename string + + if fd.OpenReadSeekCloser != nil { + + } else if fd.SourceFilename != "" { + var err error + fi, err = sourceFs.Stat(fd.SourceFilename) + if err != nil { + return nil, err + } + sourceFilename = fd.SourceFilename + } else { + fi = fd.SourceFile.FileInfo() + sourceFilename = fd.SourceFile.Filename() + } + + if fd.RelTargetFilename == "" { + fd.RelTargetFilename = sourceFilename + } + + ext := filepath.Ext(fd.RelTargetFilename) + mimeType, found := r.MediaTypes.GetFirstBySuffix(strings.TrimPrefix(ext, ".")) + // TODO(bep) we need to handle these ambigous types better, but in this context + // we most likely want the application/xml type. + if mimeType.Suffix() == "xml" && mimeType.SubType == "rss" { + mimeType, found = r.MediaTypes.GetByType("application/xml") + } + + if !found { + mimeStr := mime.TypeByExtension(ext) + if mimeStr != "" { + mimeType, _ = media.FromStringAndExt(mimeStr, ext) + } + } + + gr := r.newGenericResourceWithBase( + sourceFs, + fd.LazyPublish, + fd.OpenReadSeekCloser, + fd.URLBase, + fd.TargetBasePaths, + fd.TargetPathBuilder, + fi, + sourceFilename, + fd.RelTargetFilename, + mimeType) + + if mimeType.MainType == "image" { + ext := strings.ToLower(helpers.Ext(sourceFilename)) + + imgFormat, ok := imageFormats[ext] + if !ok { + // This allows SVG etc. to be used as resources. They will not have the methods of the Image, but + // that would not (currently) have worked. + return gr, nil + } + + if err := gr.initHash(); err != nil { + return nil, err + } + + return &Image{ + format: imgFormat, + imaging: r.imaging, + genericResource: gr}, nil + } + return gr, nil + +} + +// TODO(bep) unify +func (r *Spec) IsInImageCache(key string) bool { + // This is used for cache pruning. We currently only have images, but we could + // imagine expanding on this. + return r.imageCache.isInCache(key) +} + +func (r *Spec) DeleteCacheByPrefix(prefix string) { + r.imageCache.deleteByPrefix(prefix) +} + +func (r *Spec) ClearCaches() { + r.imageCache.clear() + r.ResourceCache.clear() +} + +func (r *Spec) CacheStats() string { + r.imageCache.mu.RLock() + defer r.imageCache.mu.RUnlock() + + s := fmt.Sprintf("Cache entries: %d", len(r.imageCache.store)) + + count := 0 + for k := range r.imageCache.store { + if count > 5 { + break + } + s += "\n" + k + count++ + } + + return s +} + +type dirFile struct { + // This is the directory component with Unix-style slashes. + dir string + // This is the file component. + file string +} + +func (d dirFile) path() string { + return path.Join(d.dir, d.file) +} + +type resourcePathDescriptor struct { + // The relative target directory and filename. + relTargetDirFile dirFile + + // Callback used to construct a target path relative to its owner. + targetPathBuilder func(rel string) string + + // baseURLDir is the fixed sub-folder for a resource in permalinks. This will typically + // be the language code if we publish to the language's sub-folder. + baseURLDir string + + // This will normally be the same as above, but this will only apply to publishing + // of resources. It may be mulltiple values when in multihost mode. + baseTargetPathDirs []string + + // baseOffset is set when the output format's path has a offset, e.g. for AMP. 
+ baseOffset string +} + +type resourceContent struct { + content string + contentInit sync.Once +} + +type resourceHash struct { + hash string + hashInit sync.Once +} + +type publishOnce struct { + publisherInit sync.Once + publisherErr error + logger *loggers.Logger +} + +func (l *publishOnce) publish(s resource.Source) error { + l.publisherInit.Do(func() { + l.publisherErr = s.Publish() + if l.publisherErr != nil { + l.logger.ERROR.Printf("failed to publish Resource: %s", l.publisherErr) + } + }) + return l.publisherErr +} + +// genericResource represents a generic linkable resource. +type genericResource struct { + commonResource + resourcePathDescriptor + + title string + name string + params map[string]interface{} + + // Absolute filename to the source, including any content folder path. + // Note that this is absolute in relation to the filesystem it is stored in. + // It can be a base path filesystem, and then this filename will not match + // the path to the file on the real filesystem. + sourceFilename string + + // Will be set if this resource is backed by something other than a file. + openReadSeekerCloser resource.OpenReadSeekCloser + + // A hash of the source content. Is only calculated in caching situations. + *resourceHash + + // This may be set to tell us to look in another filesystem for this resource. + // We, by default, use the sourceFs filesystem in the spec below. + overriddenSourceFs afero.Fs + + spec *Spec + + resourceType string + mediaType media.Type + + osFileInfo os.FileInfo + + // We create copies of this struct, so this needs to be a pointer. + *resourceContent + + // May be set to signal lazy/delayed publishing. + *publishOnce +} + +type commonResource struct { +} + +func (l *genericResource) Data() interface{} { + return noData +} + +func (l *genericResource) Content() (interface{}, error) { + if err := l.initContent(); err != nil { + return nil, err + } + + return l.content, nil +} + +func (l *genericResource) ReadSeekCloser() (hugio.ReadSeekCloser, error) { + if l.openReadSeekerCloser != nil { + return l.openReadSeekerCloser() + } + f, err := l.sourceFs().Open(l.sourceFilename) + if err != nil { + return nil, err + } + return f, nil + +} + +func (l *genericResource) MediaType() media.Type { + return l.mediaType +} + +// Implement the Cloner interface. +func (l genericResource) WithNewBase(base string) resource.Resource { + l.baseOffset = base + l.resourceContent = &resourceContent{} + return &l +} + +// Slice is not meant to be used externally. It's a bridge function +// for the template functions. See collections.Slice. 
+func (commonResource) Slice(in interface{}) (interface{}, error) { + switch items := in.(type) { + case resource.Resources: + return items, nil + case []interface{}: + groups := make(resource.Resources, len(items)) + for i, v := range items { + g, ok := v.(resource.Resource) + if !ok { + return nil, fmt.Errorf("type %T is not a Resource", v) + } + groups[i] = g + } + return groups, nil + default: + return nil, fmt.Errorf("invalid slice type %T", items) + } +} + +func (l *genericResource) initHash() error { + var err error + l.hashInit.Do(func() { + var hash string + var f hugio.ReadSeekCloser + f, err = l.ReadSeekCloser() + if err != nil { + err = errors.Wrap(err, "failed to open source file") + return + } + defer f.Close() + + hash, err = helpers.MD5FromFileFast(f) + if err != nil { + return + } + l.hash = hash + + }) + + return err +} + +func (l *genericResource) initContent() error { + var err error + l.contentInit.Do(func() { + var r hugio.ReadSeekCloser + r, err = l.ReadSeekCloser() + if err != nil { + return + } + defer r.Close() + + var b []byte + b, err = ioutil.ReadAll(r) + if err != nil { + return + } + + l.content = string(b) + + }) + + return err +} + +func (l *genericResource) sourceFs() afero.Fs { + if l.overriddenSourceFs != nil { + return l.overriddenSourceFs + } + return l.spec.sourceFs() +} + +func (l *genericResource) publishIfNeeded() { + if l.publishOnce != nil { + l.publishOnce.publish(l) + } +} + +func (l *genericResource) Permalink() string { + l.publishIfNeeded() + return l.spec.PermalinkForBaseURL(l.relPermalinkForRel(l.relTargetDirFile.path(), true), l.spec.BaseURL.HostURL()) +} + +func (l *genericResource) RelPermalink() string { + l.publishIfNeeded() + return l.relPermalinkFor(l.relTargetDirFile.path()) +} + +func (l *genericResource) Key() string { + return l.relTargetDirFile.path() +} + +func (l *genericResource) relPermalinkFor(target string) string { + return l.relPermalinkForRel(target, false) + +} +func (l *genericResource) permalinkFor(target string) string { + return l.spec.PermalinkForBaseURL(l.relPermalinkForRel(target, true), l.spec.BaseURL.HostURL()) + +} +func (l *genericResource) relTargetPathsFor(target string) []string { + return l.relTargetPathsForRel(target) +} + +func (l *genericResource) relTargetPaths() []string { + return l.relTargetPathsForRel(l.targetPath()) +} + +func (l *genericResource) Name() string { + return l.name +} + +func (l *genericResource) Title() string { + return l.title +} + +func (l *genericResource) Params() map[string]interface{} { + return l.params +} + +func (l *genericResource) setTitle(title string) { + l.title = title +} + +func (l *genericResource) setName(name string) { + l.name = name +} + +func (l *genericResource) updateParams(params map[string]interface{}) { + if l.params == nil { + l.params = params + return + } + + // Sets the params not already set + for k, v := range params { + if _, found := l.params[k]; !found { + l.params[k] = v + } + } +} + +func (l *genericResource) relPermalinkForRel(rel string, isAbs bool) string { + return l.spec.PathSpec.URLizeFilename(l.relTargetPathForRel(rel, false, isAbs, true)) +} + +func (l *genericResource) relTargetPathsForRel(rel string) []string { + if len(l.baseTargetPathDirs) == 0 { + return []string{l.relTargetPathForRelAndBasePath(rel, "", false, false)} + } + + var targetPaths = make([]string, len(l.baseTargetPathDirs)) + for i, dir := range l.baseTargetPathDirs { + targetPaths[i] = l.relTargetPathForRelAndBasePath(rel, dir, false, false) + } + return targetPaths 
+} + +func (l *genericResource) relTargetPathForRel(rel string, addBaseTargetPath, isAbs, isURL bool) string { + if addBaseTargetPath && len(l.baseTargetPathDirs) > 1 { + panic("multiple baseTargetPathDirs") + } + var basePath string + if addBaseTargetPath && len(l.baseTargetPathDirs) > 0 { + basePath = l.baseTargetPathDirs[0] + } + + return l.relTargetPathForRelAndBasePath(rel, basePath, isAbs, isURL) +} + +func (l *genericResource) relTargetPathForRelAndBasePath(rel, basePath string, isAbs, isURL bool) string { + if l.targetPathBuilder != nil { + rel = l.targetPathBuilder(rel) + } + + if isURL && l.baseURLDir != "" { + rel = path.Join(l.baseURLDir, rel) + } + + if basePath != "" { + rel = path.Join(basePath, rel) + } + + if l.baseOffset != "" { + rel = path.Join(l.baseOffset, rel) + } + + if isURL { + bp := l.spec.PathSpec.GetBasePath(!isAbs) + if bp != "" { + rel = path.Join(bp, rel) + } + } + + if len(rel) == 0 || rel[0] != '/' { + rel = "/" + rel + } + + return rel +} + +func (l *genericResource) ResourceType() string { + return l.resourceType +} + +func (l *genericResource) String() string { + return fmt.Sprintf("Resource(%s: %s)", l.resourceType, l.name) +} + +func (l *genericResource) Publish() error { + fr, err := l.ReadSeekCloser() + if err != nil { + return err + } + defer fr.Close() + fw, err := helpers.OpenFilesForWriting(l.spec.BaseFs.PublishFs, l.targetFilenames()...) + if err != nil { + return err + } + defer fw.Close() + + _, err = io.Copy(fw, fr) + return err +} + +// Path is stored with Unix style slashes. +func (l *genericResource) targetPath() string { + return l.relTargetDirFile.path() +} + +func (l *genericResource) targetFilenames() []string { + paths := l.relTargetPaths() + for i, p := range paths { + paths[i] = filepath.Clean(p) + } + return paths +} + +// TODO(bep) clean up below +func (r *Spec) newGenericResource(sourceFs afero.Fs, + targetPathBuilder func(base string) string, + osFileInfo os.FileInfo, + sourceFilename, + baseFilename string, + mediaType media.Type) *genericResource { + return r.newGenericResourceWithBase( + sourceFs, + false, + nil, + "", + nil, + targetPathBuilder, + osFileInfo, + sourceFilename, + baseFilename, + mediaType, + ) + +} + +func (r *Spec) newGenericResourceWithBase( + sourceFs afero.Fs, + lazyPublish bool, + openReadSeekerCloser resource.OpenReadSeekCloser, + urlBaseDir string, + targetPathBaseDirs []string, + targetPathBuilder func(base string) string, + osFileInfo os.FileInfo, + sourceFilename, + baseFilename string, + mediaType media.Type) *genericResource { + + // This value is used both to construct URLs and file paths, but start + // with a Unix-styled path. 
+ baseFilename = helpers.ToSlashTrimLeading(baseFilename) + fpath, fname := path.Split(baseFilename) + + var resourceType string + if mediaType.MainType == "image" { + resourceType = mediaType.MainType + } else { + resourceType = mediaType.SubType + } + + pathDescriptor := resourcePathDescriptor{ + baseURLDir: urlBaseDir, + baseTargetPathDirs: targetPathBaseDirs, + targetPathBuilder: targetPathBuilder, + relTargetDirFile: dirFile{dir: fpath, file: fname}, + } + + var po *publishOnce + if lazyPublish { + po = &publishOnce{logger: r.Logger} + } + + return &genericResource{ + openReadSeekerCloser: openReadSeekerCloser, + publishOnce: po, + resourcePathDescriptor: pathDescriptor, + overriddenSourceFs: sourceFs, + osFileInfo: osFileInfo, + sourceFilename: sourceFilename, + mediaType: mediaType, + resourceType: resourceType, + spec: r, + params: make(map[string]interface{}), + name: baseFilename, + title: baseFilename, + resourceContent: &resourceContent{}, + resourceHash: &resourceHash{}, + } +} diff --git a/resources/resource/resources.go b/resources/resource/resources.go new file mode 100644 index 000000000..5c661c24e --- /dev/null +++ b/resources/resource/resources.go @@ -0,0 +1,123 @@ +// Copyright 2019 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package resource + +import ( + "fmt" + "strings" + + "github.com/gohugoio/hugo/resources/internal" +) + +// Resources represents a slice of resources, which can be a mix of different types. +// I.e. both pages and images etc. +type Resources []Resource + +// ResourcesConverter converts a given slice of Resource objects to Resources. +type ResourcesConverter interface { + ToResources() Resources +} + +// ByType returns resources of a given resource type (ie. "image"). +func (r Resources) ByType(tp string) Resources { + var filtered Resources + + for _, resource := range r { + if resource.ResourceType() == tp { + filtered = append(filtered, resource) + } + } + return filtered +} + +// GetMatch finds the first Resource matching the given pattern, or nil if none found. +// See Match for a more complete explanation about the rules used. +func (r Resources) GetMatch(pattern string) Resource { + g, err := internal.GetGlob(pattern) + if err != nil { + return nil + } + + for _, resource := range r { + if g.Match(strings.ToLower(resource.Name())) { + return resource + } + } + + return nil +} + +// Match gets all resources matching the given base filename prefix, e.g +// "*.png" will match all png files. The "*" does not match path delimiters (/), +// so if you organize your resources in sub-folders, you need to be explicit about it, e.g.: +// "images/*.png". To match any PNG image anywhere in the bundle you can do "**.png", and +// to match all PNG images below the images folder, use "images/**.jpg". +// The matching is case insensitive. 
+// Match matches by using the value of Resource.Name, which, by default, is a filename with +// path relative to the bundle root with Unix style slashes (/) and no leading slash, e.g. "images/logo.png". +// See https://github.com/gobwas/glob for the full rules set. +func (r Resources) Match(pattern string) Resources { + g, err := internal.GetGlob(pattern) + if err != nil { + return nil + } + + var matches Resources + for _, resource := range r { + if g.Match(strings.ToLower(resource.Name())) { + matches = append(matches, resource) + } + } + return matches +} + +type translatedResource interface { + TranslationKey() string +} + +// MergeByLanguage adds missing translations in r1 from r2. +func (r Resources) MergeByLanguage(r2 Resources) Resources { + result := append(Resources(nil), r...) + m := make(map[string]bool) + for _, rr := range r { + if translated, ok := rr.(translatedResource); ok { + m[translated.TranslationKey()] = true + } + } + + for _, rr := range r2 { + if translated, ok := rr.(translatedResource); ok { + if _, found := m[translated.TranslationKey()]; !found { + result = append(result, rr) + } + } + } + return result +} + +// MergeByLanguageInterface is the generic version of MergeByLanguage. It +// is here just so it can be called from the tpl package. +func (r Resources) MergeByLanguageInterface(in interface{}) (interface{}, error) { + r2, ok := in.(Resources) + if !ok { + return nil, fmt.Errorf("%T cannot be merged by language", in) + } + return r.MergeByLanguage(r2), nil +} + +// Source is an internal template and not meant for use in the templates. It +// may change without notice. +type Source interface { + Publish() error +} diff --git a/resources/resource/resourcetypes.go b/resources/resource/resourcetypes.go new file mode 100644 index 000000000..120d753e4 --- /dev/null +++ b/resources/resource/resourcetypes.go @@ -0,0 +1,106 @@ +// Copyright 2019 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package resource + +import ( + "github.com/gohugoio/hugo/media" + + "github.com/gohugoio/hugo/common/hugio" +) + +// Cloner is an internal template and not meant for use in the templates. It +// may change without notice. +type Cloner interface { + WithNewBase(base string) Resource +} + +// Resource represents a linkable resource, i.e. a content page, image etc. +type Resource interface { + resourceBase + + // Permalink represents the absolute link to this resource. + Permalink() string + + // RelPermalink represents the host relative link to this resource. + RelPermalink() string + + // ResourceType is the resource type. For most file types, this is the main + // part of the MIME type, e.g. "image", "application", "text" etc. + // For content pages, this value is "page". + ResourceType() string + + // Name is the logical name of this resource. This can be set in the front matter + // metadata for this resource. If not set, Hugo will assign a value. + // This will in most cases be the base filename. 
+ // So, for the image "/some/path/sunset.jpg" this will be "sunset.jpg". + // The value returned by this method will be used in the GetByPrefix and ByPrefix methods + // on Resources. + Name() string + + // Title returns the title if set in front matter. For content pages, this will be the expected value. + Title() string + + // Resource specific data set by Hugo. + // One example would be.Data.Digest for fingerprinted resources. + Data() interface{} + + // Params set in front matter for this resource. + Params() map[string]interface{} +} + +// resourceBase pulls out the minimal set of operations to define a Resource, +// to simplify testing etc. +type resourceBase interface { + // MediaType is this resource's MIME type. + MediaType() media.Type +} + +// ResourcesLanguageMerger describes an interface for merging resources from a +// different language. +type ResourcesLanguageMerger interface { + MergeByLanguage(other Resources) Resources + // Needed for integration with the tpl package. + MergeByLanguageInterface(other interface{}) (interface{}, error) +} + +// Identifier identifies a resource. +type Identifier interface { + Key() string +} + +// ContentResource represents a Resource that provides a way to get to its content. +// Most Resource types in Hugo implements this interface, including Page. +// This should be used with care, as it will read the file content into memory, but it +// should be cached as effectively as possible by the implementation. +type ContentResource interface { + resourceBase + + // Content returns this resource's content. It will be equivalent to reading the content + // that RelPermalink points to in the published folder. + // The return type will be contextual, and should be what you would expect: + // * Page: template.HTML + // * JSON: String + // * Etc. + Content() (interface{}, error) +} + +// OpenReadSeekCloser allows setting some other way (than reading from a filesystem) +// to open or create a ReadSeekCloser. +type OpenReadSeekCloser func() (hugio.ReadSeekCloser, error) + +// ReadSeekCloserResource is a Resource that supports loading its content. +type ReadSeekCloserResource interface { + resourceBase + ReadSeekCloser() (hugio.ReadSeekCloser, error) +} diff --git a/resources/resource_cache.go b/resources/resource_cache.go new file mode 100644 index 000000000..8ff63beb0 --- /dev/null +++ b/resources/resource_cache.go @@ -0,0 +1,217 @@ +// Copyright 2019 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package resources + +import ( + "encoding/json" + "io" + "path" + "path/filepath" + "strings" + "sync" + + "github.com/gohugoio/hugo/resources/resource" + + "github.com/gohugoio/hugo/cache/filecache" + + "github.com/BurntSushi/locker" +) + +const ( + CACHE_CLEAR_ALL = "clear_all" + CACHE_OTHER = "other" +) + +type ResourceCache struct { + rs *Spec + + sync.RWMutex + cache map[string]resource.Resource + + fileCache *filecache.Cache + + // Provides named resource locks. 
+ nlocker *locker.Locker +} + +// ResourceKeyPartition returns a partition name +// to allow for more fine grained cache flushes. +// It will return the file extension without the leading ".". If no +// extension, it will return "other". +func ResourceKeyPartition(filename string) string { + ext := strings.TrimPrefix(path.Ext(filepath.ToSlash(filename)), ".") + if ext == "" { + ext = CACHE_OTHER + } + return ext +} + +func newResourceCache(rs *Spec) *ResourceCache { + return &ResourceCache{ + rs: rs, + fileCache: rs.FileCaches.AssetsCache(), + cache: make(map[string]resource.Resource), + nlocker: locker.NewLocker(), + } +} + +func (c *ResourceCache) clear() { + c.Lock() + defer c.Unlock() + + c.cache = make(map[string]resource.Resource) + c.nlocker = locker.NewLocker() +} + +func (c *ResourceCache) Contains(key string) bool { + key = c.cleanKey(filepath.ToSlash(key)) + _, found := c.get(key) + return found +} + +func (c *ResourceCache) cleanKey(key string) string { + return strings.TrimPrefix(path.Clean(key), "/") +} + +func (c *ResourceCache) get(key string) (resource.Resource, bool) { + c.RLock() + defer c.RUnlock() + r, found := c.cache[key] + return r, found +} + +func (c *ResourceCache) GetOrCreate(partition, key string, f func() (resource.Resource, error)) (resource.Resource, error) { + key = c.cleanKey(path.Join(partition, key)) + // First check in-memory cache. + r, found := c.get(key) + if found { + return r, nil + } + // This is a potentially long running operation, so get a named lock. + c.nlocker.Lock(key) + + // Double check in-memory cache. + r, found = c.get(key) + if found { + c.nlocker.Unlock(key) + return r, nil + } + + defer c.nlocker.Unlock(key) + + r, err := f() + if err != nil { + return nil, err + } + + c.set(key, r) + + return r, nil + +} + +func (c *ResourceCache) getFilenames(key string) (string, string) { + filenameMeta := key + ".json" + filenameContent := key + ".content" + + return filenameMeta, filenameContent +} + +func (c *ResourceCache) getFromFile(key string) (filecache.ItemInfo, io.ReadCloser, transformedResourceMetadata, bool) { + c.RLock() + defer c.RUnlock() + + var meta transformedResourceMetadata + filenameMeta, filenameContent := c.getFilenames(key) + + _, jsonContent, _ := c.fileCache.GetBytes(filenameMeta) + if jsonContent == nil { + return filecache.ItemInfo{}, nil, meta, false + } + + if err := json.Unmarshal(jsonContent, &meta); err != nil { + return filecache.ItemInfo{}, nil, meta, false + } + + fi, rc, _ := c.fileCache.Get(filenameContent) + + return fi, rc, meta, rc != nil + +} + +// writeMeta writes the metadata to file and returns a writer for the content part. +func (c *ResourceCache) writeMeta(key string, meta transformedResourceMetadata) (filecache.ItemInfo, io.WriteCloser, error) { + filenameMeta, filenameContent := c.getFilenames(key) + raw, err := json.Marshal(meta) + if err != nil { + return filecache.ItemInfo{}, nil, err + } + + _, fm, err := c.fileCache.WriteCloser(filenameMeta) + if err != nil { + return filecache.ItemInfo{}, nil, err + } + defer fm.Close() + + if _, err := fm.Write(raw); err != nil { + return filecache.ItemInfo{}, nil, err + } + + fi, fc, err := c.fileCache.WriteCloser(filenameContent) + + return fi, fc, err + +} + +func (c *ResourceCache) set(key string, r resource.Resource) { + c.Lock() + defer c.Unlock() + c.cache[key] = r +} + +func (c *ResourceCache) DeletePartitions(partitions ...string) { + partitionsSet := map[string]bool{ + // Always clear out the resources not matching the partition. 
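For callers, the cache is keyed by a partition (typically the file extension, via ResourceKeyPartition) plus a resource-specific key, and GetOrCreate runs the creator function at most once per key while concurrent callers wait on the named lock. A minimal sketch of the intended call pattern, written as if it lived inside package resources; getOrCreateByFilename is a hypothetical helper, not part of this change.

package resources

import "github.com/gohugoio/hugo/resources/resource"

// getOrCreateByFilename shows the call pattern: the partition groups keys by
// extension so DeletePartitions("css") can drop only CSS-derived entries on a
// rebuild, and the creator function runs at most once per key, with concurrent
// callers blocking on the per-key named lock.
func getOrCreateByFilename(c *ResourceCache, filename string, create func() (resource.Resource, error)) (resource.Resource, error) {
    partition := ResourceKeyPartition(filename) // "css" for "styles/main.css", "other" if no extension
    return c.GetOrCreate(partition, filename, create)
}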
+ "other": true, + } + for _, p := range partitions { + partitionsSet[p] = true + } + + if partitionsSet[CACHE_CLEAR_ALL] { + c.clear() + return + } + + c.Lock() + defer c.Unlock() + + for k := range c.cache { + clear := false + partIdx := strings.Index(k, "/") + if partIdx == -1 { + clear = true + } else { + partition := k[:partIdx] + if partitionsSet[partition] { + clear = true + } + } + + if clear { + delete(c.cache, k) + } + } + +} diff --git a/resources/resource_factories/bundler/bundler.go b/resources/resource_factories/bundler/bundler.go new file mode 100644 index 000000000..ca0ccf86e --- /dev/null +++ b/resources/resource_factories/bundler/bundler.go @@ -0,0 +1,122 @@ +// Copyright 2019 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package bundler contains functions for concatenation etc. of Resource objects. +package bundler + +import ( + "fmt" + "io" + "path/filepath" + + "github.com/gohugoio/hugo/common/hugio" + "github.com/gohugoio/hugo/media" + "github.com/gohugoio/hugo/resources" + "github.com/gohugoio/hugo/resources/resource" +) + +// Client contains methods perform concatenation and other bundling related +// tasks to Resource objects. +type Client struct { + rs *resources.Spec +} + +// New creates a new Client with the given specification. +func New(rs *resources.Spec) *Client { + return &Client{rs: rs} +} + +type multiReadSeekCloser struct { + mr io.Reader + sources []hugio.ReadSeekCloser +} + +func (r *multiReadSeekCloser) Read(p []byte) (n int, err error) { + return r.mr.Read(p) +} + +func (r *multiReadSeekCloser) Seek(offset int64, whence int) (newOffset int64, err error) { + for _, s := range r.sources { + newOffset, err = s.Seek(offset, whence) + if err != nil { + return + } + } + return +} + +func (r *multiReadSeekCloser) Close() error { + for _, s := range r.sources { + s.Close() + } + return nil +} + +// Concat concatenates the list of Resource objects. +func (c *Client) Concat(targetPath string, r resource.Resources) (resource.Resource, error) { + // The CACHE_OTHER will make sure this will be re-created and published on rebuilds. + return c.rs.ResourceCache.GetOrCreate(resources.CACHE_OTHER, targetPath, func() (resource.Resource, error) { + var resolvedm media.Type + + // The given set of resources must be of the same Media Type. + // We may improve on that in the future, but then we need to know more. + for i, r := range r { + if i > 0 && r.MediaType().Type() != resolvedm.Type() { + return nil, fmt.Errorf("resources in Concat must be of the same Media Type, got %q and %q", r.MediaType().Type(), resolvedm.Type()) + } + resolvedm = r.MediaType() + } + + concatr := func() (hugio.ReadSeekCloser, error) { + var rcsources []hugio.ReadSeekCloser + for _, s := range r { + rcr, ok := s.(resource.ReadSeekCloserResource) + if !ok { + return nil, fmt.Errorf("resource %T does not implement resource.ReadSeekerCloserResource", s) + } + rc, err := rcr.ReadSeekCloser() + if err != nil { + // Close the already opened. 
+ for _, rcs := range rcsources { + rcs.Close() + } + return nil, err + } + rcsources = append(rcsources, rc) + } + + readers := make([]io.Reader, len(rcsources)) + for i := 0; i < len(rcsources); i++ { + readers[i] = rcsources[i] + } + + mr := io.MultiReader(readers...) + + return &multiReadSeekCloser{mr: mr, sources: rcsources}, nil + } + + composite, err := c.rs.NewForFs( + c.rs.FileCaches.AssetsCache().Fs, + resources.ResourceSourceDescriptor{ + LazyPublish: true, + OpenReadSeekCloser: concatr, + RelTargetFilename: filepath.Clean(targetPath)}) + + if err != nil { + return nil, err + } + + return composite, nil + }) + +} diff --git a/resources/resource_factories/create/create.go b/resources/resource_factories/create/create.go new file mode 100644 index 000000000..dc565056d --- /dev/null +++ b/resources/resource_factories/create/create.go @@ -0,0 +1,65 @@ +// Copyright 2019 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package create contains functions for to create Resource objects. This will +// typically non-files. +package create + +import ( + "path/filepath" + + "github.com/spf13/afero" + + "github.com/gohugoio/hugo/common/hugio" + "github.com/gohugoio/hugo/resources" + "github.com/gohugoio/hugo/resources/resource" +) + +// Client contains methods to create Resource objects. +// tasks to Resource objects. +type Client struct { + rs *resources.Spec +} + +// New creates a new Client with the given specification. +func New(rs *resources.Spec) *Client { + return &Client{rs: rs} +} + +// Get creates a new Resource by opening the given filename in the given filesystem. +func (c *Client) Get(fs afero.Fs, filename string) (resource.Resource, error) { + filename = filepath.Clean(filename) + return c.rs.ResourceCache.GetOrCreate(resources.ResourceKeyPartition(filename), filename, func() (resource.Resource, error) { + return c.rs.NewForFs(fs, + resources.ResourceSourceDescriptor{ + LazyPublish: true, + SourceFilename: filename}) + }) + +} + +// FromString creates a new Resource from a string with the given relative target path. +func (c *Client) FromString(targetPath, content string) (resource.Resource, error) { + return c.rs.ResourceCache.GetOrCreate(resources.CACHE_OTHER, targetPath, func() (resource.Resource, error) { + return c.rs.NewForFs( + c.rs.FileCaches.AssetsCache().Fs, + resources.ResourceSourceDescriptor{ + LazyPublish: true, + OpenReadSeekCloser: func() (hugio.ReadSeekCloser, error) { + return hugio.NewReadSeekerNoOpCloserFromString(content), nil + }, + RelTargetFilename: filepath.Clean(targetPath)}) + + }) + +} diff --git a/resources/resource_metadata.go b/resources/resource_metadata.go new file mode 100644 index 000000000..0830dfc59 --- /dev/null +++ b/resources/resource_metadata.go @@ -0,0 +1,133 @@ +// Copyright 2019 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
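Taken together, the create and bundler clients compose: resources can be created from strings and then concatenated. A standalone sketch under the assumption that a fully wired *resources.Spec (filesystems, caches, etc.) is available; the package name and function are illustrative only.

package bundles

import (
    "github.com/gohugoio/hugo/resources"
    "github.com/gohugoio/hugo/resources/resource"
    "github.com/gohugoio/hugo/resources/resource_factories/bundler"
    "github.com/gohugoio/hugo/resources/resource_factories/create"
)

// buildCSSBundle is a sketch only; spec is assumed to be a configured
// *resources.Spec, which is site-specific to set up.
func buildCSSBundle(spec *resources.Spec) (resource.Resource, error) {
    c := create.New(spec)
    b := bundler.New(spec)

    // Both resources are lazily published and keyed in the resource cache.
    r1, err := c.FromString("css/a.css", "body { color: red; }")
    if err != nil {
        return nil, err
    }
    r2, err := c.FromString("css/b.css", "p { margin: 0; }")
    if err != nil {
        return nil, err
    }

    // Concat requires all inputs to share a media type; both are .css here.
    return b.Concat("css/bundle.css", resource.Resources{r1, r2})
}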
+// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package resources + +import ( + "fmt" + "strconv" + + "github.com/gohugoio/hugo/resources/internal" + "github.com/gohugoio/hugo/resources/resource" + + "github.com/pkg/errors" + "github.com/spf13/cast" + + "strings" + + "github.com/gohugoio/hugo/common/maps" +) + +var ( + _ metaAssigner = (*genericResource)(nil) +) + +// metaAssigner allows updating metadata in resources that supports it. +type metaAssigner interface { + setTitle(title string) + setName(name string) + updateParams(params map[string]interface{}) +} + +const counterPlaceHolder = ":counter" + +// AssignMetadata assigns the given metadata to those resources that supports updates +// and matching by wildcard given in `src` using `filepath.Match` with lower cased values. +// This assignment is additive, but the most specific match needs to be first. +// The `name` and `title` metadata field support shell-matched collection it got a match in. +// See https://golang.org/pkg/path/#Match +func AssignMetadata(metadata []map[string]interface{}, resources ...resource.Resource) error { + + counters := make(map[string]int) + + for _, r := range resources { + if _, ok := r.(metaAssigner); !ok { + continue + } + + var ( + nameSet, titleSet bool + nameCounter, titleCounter = 0, 0 + nameCounterFound, titleCounterFound bool + resourceSrcKey = strings.ToLower(r.Name()) + ) + + ma := r.(metaAssigner) + for _, meta := range metadata { + src, found := meta["src"] + if !found { + return fmt.Errorf("missing 'src' in metadata for resource") + } + + srcKey := strings.ToLower(cast.ToString(src)) + + glob, err := internal.GetGlob(srcKey) + if err != nil { + return errors.Wrap(err, "failed to match resource with metadata") + } + + match := glob.Match(resourceSrcKey) + + if match { + if !nameSet { + name, found := meta["name"] + if found { + name := cast.ToString(name) + if !nameCounterFound { + nameCounterFound = strings.Contains(name, counterPlaceHolder) + } + if nameCounterFound && nameCounter == 0 { + counterKey := "name_" + srcKey + nameCounter = counters[counterKey] + 1 + counters[counterKey] = nameCounter + } + + ma.setName(replaceResourcePlaceholders(name, nameCounter)) + nameSet = true + } + } + + if !titleSet { + title, found := meta["title"] + if found { + title := cast.ToString(title) + if !titleCounterFound { + titleCounterFound = strings.Contains(title, counterPlaceHolder) + } + if titleCounterFound && titleCounter == 0 { + counterKey := "title_" + srcKey + titleCounter = counters[counterKey] + 1 + counters[counterKey] = titleCounter + } + ma.setTitle((replaceResourcePlaceholders(title, titleCounter))) + titleSet = true + } + } + + params, found := meta["params"] + if found { + m := cast.ToStringMap(params) + // Needed for case insensitive fetching of params values + maps.ToLower(m) + ma.updateParams(m) + } + } + } + } + + return nil +} + +func replaceResourcePlaceholders(in string, counter int) string { + return strings.Replace(in, counterPlaceHolder, strconv.Itoa(counter), -1) +} diff --git a/resources/resource_metadata_test.go b/resources/resource_metadata_test.go new file mode 100644 index 000000000..a1a2a738c --- 
/dev/null +++ b/resources/resource_metadata_test.go @@ -0,0 +1,231 @@ +// Copyright 2019 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package resources + +import ( + "testing" + + "github.com/gohugoio/hugo/media" + "github.com/gohugoio/hugo/resources/resource" + + "github.com/stretchr/testify/require" +) + +func TestAssignMetadata(t *testing.T) { + assert := require.New(t) + spec := newTestResourceSpec(assert) + + var foo1, foo2, foo3, logo1, logo2, logo3 resource.Resource + var resources resource.Resources + + for _, this := range []struct { + metaData []map[string]interface{} + assertFunc func(err error) + }{ + {[]map[string]interface{}{ + { + "title": "My Resource", + "name": "My Name", + "src": "*", + }, + }, func(err error) { + assert.Equal("My Resource", logo1.Title()) + assert.Equal("My Name", logo1.Name()) + assert.Equal("My Name", foo2.Name()) + + }}, + {[]map[string]interface{}{ + { + "title": "My Logo", + "src": "*loGo*", + }, + { + "title": "My Resource", + "name": "My Name", + "src": "*", + }, + }, func(err error) { + assert.Equal("My Logo", logo1.Title()) + assert.Equal("My Logo", logo2.Title()) + assert.Equal("My Name", logo1.Name()) + assert.Equal("My Name", foo2.Name()) + assert.Equal("My Name", foo3.Name()) + assert.Equal("My Resource", foo3.Title()) + + }}, + {[]map[string]interface{}{ + { + "title": "My Logo", + "src": "*loGo*", + "params": map[string]interface{}{ + "Param1": true, + "icon": "logo", + }, + }, + { + "title": "My Resource", + "src": "*", + "params": map[string]interface{}{ + "Param2": true, + "icon": "resource", + }, + }, + }, func(err error) { + assert.NoError(err) + assert.Equal("My Logo", logo1.Title()) + assert.Equal("My Resource", foo3.Title()) + _, p1 := logo2.Params()["param1"] + _, p2 := foo2.Params()["param2"] + _, p1_2 := foo2.Params()["param1"] + _, p2_2 := logo2.Params()["param2"] + + icon1, _ := logo2.Params()["icon"] + icon2, _ := foo2.Params()["icon"] + + assert.True(p1) + assert.True(p2) + + // Check merge + assert.True(p2_2) + assert.False(p1_2) + + assert.Equal("logo", icon1) + assert.Equal("resource", icon2) + + }}, + {[]map[string]interface{}{ + { + "name": "Logo Name #:counter", + "src": "*logo*", + }, + { + "title": "Resource #:counter", + "name": "Name #:counter", + "src": "*", + }, + }, func(err error) { + assert.NoError(err) + assert.Equal("Resource #2", logo2.Title()) + assert.Equal("Logo Name #1", logo2.Name()) + assert.Equal("Resource #4", logo1.Title()) + assert.Equal("Logo Name #2", logo1.Name()) + assert.Equal("Resource #1", foo2.Title()) + assert.Equal("Resource #3", foo1.Title()) + assert.Equal("Name #2", foo1.Name()) + assert.Equal("Resource #5", foo3.Title()) + + assert.Equal(logo2, resources.GetMatch("logo name #1*")) + + }}, + {[]map[string]interface{}{ + { + "title": "Third Logo #:counter", + "src": "logo3.png", + }, + { + "title": "Other Logo #:counter", + "name": "Name #:counter", + "src": "logo*", + }, + }, func(err error) { + assert.NoError(err) + 
assert.Equal("Third Logo #1", logo3.Title()) + assert.Equal("Name #3", logo3.Name()) + assert.Equal("Other Logo #1", logo2.Title()) + assert.Equal("Name #1", logo2.Name()) + assert.Equal("Other Logo #2", logo1.Title()) + assert.Equal("Name #2", logo1.Name()) + + }}, + {[]map[string]interface{}{ + { + "title": "Third Logo", + "src": "logo3.png", + }, + { + "title": "Other Logo #:counter", + "name": "Name #:counter", + "src": "logo*", + }, + }, func(err error) { + assert.NoError(err) + assert.Equal("Third Logo", logo3.Title()) + assert.Equal("Name #3", logo3.Name()) + assert.Equal("Other Logo #1", logo2.Title()) + assert.Equal("Name #1", logo2.Name()) + assert.Equal("Other Logo #2", logo1.Title()) + assert.Equal("Name #2", logo1.Name()) + + }}, + {[]map[string]interface{}{ + { + "name": "third-logo", + "src": "logo3.png", + }, + { + "title": "Logo #:counter", + "name": "Name #:counter", + "src": "logo*", + }, + }, func(err error) { + assert.NoError(err) + assert.Equal("Logo #3", logo3.Title()) + assert.Equal("third-logo", logo3.Name()) + assert.Equal("Logo #1", logo2.Title()) + assert.Equal("Name #1", logo2.Name()) + assert.Equal("Logo #2", logo1.Title()) + assert.Equal("Name #2", logo1.Name()) + + }}, + {[]map[string]interface{}{ + { + "title": "Third Logo #:counter", + }, + }, func(err error) { + // Missing src + assert.Error(err) + + }}, + {[]map[string]interface{}{ + { + "title": "Title", + "src": "[]", + }, + }, func(err error) { + // Invalid pattern + assert.Error(err) + + }}, + } { + + foo2 = spec.newGenericResource(nil, nil, nil, "/b/foo2.css", "foo2.css", media.CSSType) + logo2 = spec.newGenericResource(nil, nil, nil, "/b/Logo2.png", "Logo2.png", pngType) + foo1 = spec.newGenericResource(nil, nil, nil, "/a/foo1.css", "foo1.css", media.CSSType) + logo1 = spec.newGenericResource(nil, nil, nil, "/a/logo1.png", "logo1.png", pngType) + foo3 = spec.newGenericResource(nil, nil, nil, "/b/foo3.css", "foo3.css", media.CSSType) + logo3 = spec.newGenericResource(nil, nil, nil, "/b/logo3.png", "logo3.png", pngType) + + resources = resource.Resources{ + foo2, + logo2, + foo1, + logo1, + foo3, + logo3, + } + + this.assertFunc(AssignMetadata(this.metaData, resources...)) + } + +} diff --git a/resources/resource_test.go b/resources/resource_test.go new file mode 100644 index 000000000..be2706e45 --- /dev/null +++ b/resources/resource_test.go @@ -0,0 +1,279 @@ +// Copyright 2019 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package resources + +import ( + "fmt" + "math/rand" + "path" + "path/filepath" + "strings" + "testing" + "time" + + "github.com/gohugoio/hugo/resources/resource" + + "github.com/gohugoio/hugo/media" + + "github.com/stretchr/testify/require" +) + +func TestGenericResource(t *testing.T) { + assert := require.New(t) + spec := newTestResourceSpec(assert) + + r := spec.newGenericResource(nil, nil, nil, "/a/foo.css", "foo.css", media.CSSType) + + assert.Equal("https://example.com/foo.css", r.Permalink()) + assert.Equal("/foo.css", r.RelPermalink()) + assert.Equal("css", r.ResourceType()) + +} + +func TestGenericResourceWithLinkFacory(t *testing.T) { + assert := require.New(t) + spec := newTestResourceSpec(assert) + + factory := func(s string) string { + return path.Join("/foo", s) + } + r := spec.newGenericResource(nil, factory, nil, "/a/foo.css", "foo.css", media.CSSType) + + assert.Equal("https://example.com/foo/foo.css", r.Permalink()) + assert.Equal("/foo/foo.css", r.RelPermalink()) + assert.Equal("foo.css", r.Key()) + assert.Equal("css", r.ResourceType()) +} + +func TestNewResourceFromFilename(t *testing.T) { + assert := require.New(t) + spec := newTestResourceSpec(assert) + + writeSource(t, spec.Fs, "content/a/b/logo.png", "image") + writeSource(t, spec.Fs, "content/a/b/data.json", "json") + + r, err := spec.New(ResourceSourceDescriptor{SourceFilename: "a/b/logo.png"}) + + assert.NoError(err) + assert.NotNil(r) + assert.Equal("image", r.ResourceType()) + assert.Equal("/a/b/logo.png", r.RelPermalink()) + assert.Equal("https://example.com/a/b/logo.png", r.Permalink()) + + r, err = spec.New(ResourceSourceDescriptor{SourceFilename: "a/b/data.json"}) + + assert.NoError(err) + assert.NotNil(r) + assert.Equal("json", r.ResourceType()) + + cloned := r.(resource.Cloner).WithNewBase("aceof") + assert.Equal(r.ResourceType(), cloned.ResourceType()) + assert.Equal("/aceof/a/b/data.json", cloned.RelPermalink()) +} + +func TestNewResourceFromFilenameSubPathInBaseURL(t *testing.T) { + assert := require.New(t) + spec := newTestResourceSpecForBaseURL(assert, "https://example.com/docs") + + writeSource(t, spec.Fs, "content/a/b/logo.png", "image") + + r, err := spec.New(ResourceSourceDescriptor{SourceFilename: filepath.FromSlash("a/b/logo.png")}) + + assert.NoError(err) + assert.NotNil(r) + assert.Equal("image", r.ResourceType()) + assert.Equal("/docs/a/b/logo.png", r.RelPermalink()) + assert.Equal("https://example.com/docs/a/b/logo.png", r.Permalink()) + img := r.(*Image) + assert.Equal(filepath.FromSlash("/a/b/logo.png"), img.targetFilenames()[0]) + +} + +var pngType, _ = media.FromStringAndExt("image/png", "png") + +func TestResourcesByType(t *testing.T) { + assert := require.New(t) + spec := newTestResourceSpec(assert) + resources := resource.Resources{ + spec.newGenericResource(nil, nil, nil, "/a/foo1.css", "foo1.css", media.CSSType), + spec.newGenericResource(nil, nil, nil, "/a/logo.png", "logo.css", pngType), + spec.newGenericResource(nil, nil, nil, "/a/foo2.css", "foo2.css", media.CSSType), + spec.newGenericResource(nil, nil, nil, "/a/foo3.css", "foo3.css", media.CSSType)} + + assert.Len(resources.ByType("css"), 3) + assert.Len(resources.ByType("image"), 1) + +} + +func TestResourcesGetByPrefix(t *testing.T) { + assert := require.New(t) + spec := newTestResourceSpec(assert) + resources := resource.Resources{ + spec.newGenericResource(nil, nil, nil, "/a/foo1.css", "foo1.css", media.CSSType), + spec.newGenericResource(nil, nil, nil, "/a/logo1.png", "logo1.png", pngType), + 
spec.newGenericResource(nil, nil, nil, "/b/Logo2.png", "Logo2.png", pngType), + spec.newGenericResource(nil, nil, nil, "/b/foo2.css", "foo2.css", media.CSSType), + spec.newGenericResource(nil, nil, nil, "/b/foo3.css", "foo3.css", media.CSSType)} + + assert.Nil(resources.GetMatch("asdf*")) + assert.Equal("/logo1.png", resources.GetMatch("logo*").RelPermalink()) + assert.Equal("/logo1.png", resources.GetMatch("loGo*").RelPermalink()) + assert.Equal("/Logo2.png", resources.GetMatch("logo2*").RelPermalink()) + assert.Equal("/foo2.css", resources.GetMatch("foo2*").RelPermalink()) + assert.Equal("/foo1.css", resources.GetMatch("foo1*").RelPermalink()) + assert.Equal("/foo1.css", resources.GetMatch("foo1*").RelPermalink()) + assert.Nil(resources.GetMatch("asdfasdf*")) + + assert.Equal(2, len(resources.Match("logo*"))) + assert.Equal(1, len(resources.Match("logo2*"))) + + logo := resources.GetMatch("logo*") + assert.NotNil(logo.Params()) + assert.Equal("logo1.png", logo.Name()) + assert.Equal("logo1.png", logo.Title()) + +} + +func TestResourcesGetMatch(t *testing.T) { + assert := require.New(t) + spec := newTestResourceSpec(assert) + resources := resource.Resources{ + spec.newGenericResource(nil, nil, nil, "/a/foo1.css", "foo1.css", media.CSSType), + spec.newGenericResource(nil, nil, nil, "/a/logo1.png", "logo1.png", pngType), + spec.newGenericResource(nil, nil, nil, "/b/Logo2.png", "Logo2.png", pngType), + spec.newGenericResource(nil, nil, nil, "/b/foo2.css", "foo2.css", media.CSSType), + spec.newGenericResource(nil, nil, nil, "/b/foo3.css", "foo3.css", media.CSSType), + spec.newGenericResource(nil, nil, nil, "/b/c/foo4.css", "c/foo4.css", media.CSSType), + spec.newGenericResource(nil, nil, nil, "/b/c/foo5.css", "c/foo5.css", media.CSSType), + spec.newGenericResource(nil, nil, nil, "/b/c/d/foo6.css", "c/d/foo6.css", media.CSSType), + } + + assert.Equal("/logo1.png", resources.GetMatch("logo*").RelPermalink()) + assert.Equal("/logo1.png", resources.GetMatch("loGo*").RelPermalink()) + assert.Equal("/Logo2.png", resources.GetMatch("logo2*").RelPermalink()) + assert.Equal("/foo2.css", resources.GetMatch("foo2*").RelPermalink()) + assert.Equal("/foo1.css", resources.GetMatch("foo1*").RelPermalink()) + assert.Equal("/foo1.css", resources.GetMatch("foo1*").RelPermalink()) + assert.Equal("/c/foo4.css", resources.GetMatch("*/foo*").RelPermalink()) + + assert.Nil(resources.GetMatch("asdfasdf")) + + assert.Equal(2, len(resources.Match("Logo*"))) + assert.Equal(1, len(resources.Match("logo2*"))) + assert.Equal(2, len(resources.Match("c/*"))) + + assert.Equal(6, len(resources.Match("**.css"))) + assert.Equal(3, len(resources.Match("**/*.css"))) + assert.Equal(1, len(resources.Match("c/**/*.css"))) + + // Matches only CSS files in c/ + assert.Equal(3, len(resources.Match("c/**.css"))) + + // Matches all CSS files below c/ (including in c/d/) + assert.Equal(3, len(resources.Match("c/**.css"))) + + // Patterns beginning with a slash will not match anything. + // We could maybe consider trimming that slash, but let's be explicit about this. + // (it is possible for users to do a rename) + // This is analogous to standing in a directory and doing "ls *.*". 
+ assert.Equal(0, len(resources.Match("/c/**.css"))) + +} + +func BenchmarkResourcesMatch(b *testing.B) { + resources := benchResources(b) + prefixes := []string{"abc*", "jkl*", "nomatch*", "sub/*"} + rnd := rand.New(rand.NewSource(time.Now().Unix())) + + b.RunParallel(func(pb *testing.PB) { + for pb.Next() { + resources.Match(prefixes[rnd.Intn(len(prefixes))]) + } + }) +} + +// This adds a benchmark for the a100 test case as described by Russ Cox here: +// https://research.swtch.com/glob (really interesting article) +// I don't expect Hugo users to "stumble upon" this problem, so this is more to satisfy +// my own curiosity. +func BenchmarkResourcesMatchA100(b *testing.B) { + assert := require.New(b) + spec := newTestResourceSpec(assert) + a100 := strings.Repeat("a", 100) + pattern := "a*a*a*a*a*a*a*a*b" + + resources := resource.Resources{spec.newGenericResource(nil, nil, nil, "/a/"+a100, a100, media.CSSType)} + + b.ResetTimer() + for i := 0; i < b.N; i++ { + resources.Match(pattern) + } + +} + +func benchResources(b *testing.B) resource.Resources { + assert := require.New(b) + spec := newTestResourceSpec(assert) + var resources resource.Resources + + for i := 0; i < 30; i++ { + name := fmt.Sprintf("abcde%d_%d.css", i%5, i) + resources = append(resources, spec.newGenericResource(nil, nil, nil, "/a/"+name, name, media.CSSType)) + } + + for i := 0; i < 30; i++ { + name := fmt.Sprintf("efghi%d_%d.css", i%5, i) + resources = append(resources, spec.newGenericResource(nil, nil, nil, "/a/"+name, name, media.CSSType)) + } + + for i := 0; i < 30; i++ { + name := fmt.Sprintf("jklmn%d_%d.css", i%5, i) + resources = append(resources, spec.newGenericResource(nil, nil, nil, "/b/sub/"+name, "sub/"+name, media.CSSType)) + } + + return resources + +} + +func BenchmarkAssignMetadata(b *testing.B) { + assert := require.New(b) + spec := newTestResourceSpec(assert) + + for i := 0; i < b.N; i++ { + b.StopTimer() + var resources resource.Resources + var meta = []map[string]interface{}{ + { + "title": "Foo #:counter", + "name": "Foo Name #:counter", + "src": "foo1*", + }, + { + "title": "Rest #:counter", + "name": "Rest Name #:counter", + "src": "*", + }, + } + for i := 0; i < 20; i++ { + name := fmt.Sprintf("foo%d_%d.css", i%5, i) + resources = append(resources, spec.newGenericResource(nil, nil, nil, "/a/"+name, name, media.CSSType)) + } + b.StartTimer() + + if err := AssignMetadata(meta, resources...); err != nil { + b.Fatal(err) + } + + } +} diff --git a/resources/resource_transformers/integrity/integrity.go b/resources/resource_transformers/integrity/integrity.go new file mode 100644 index 000000000..90afafb88 --- /dev/null +++ b/resources/resource_transformers/integrity/integrity.go @@ -0,0 +1,106 @@ +// Copyright 2018 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package integrity + +import ( + "crypto/md5" + "crypto/sha256" + "crypto/sha512" + "encoding/base64" + "encoding/hex" + "fmt" + "hash" + "html/template" + "io" + + "github.com/gohugoio/hugo/resources" + "github.com/gohugoio/hugo/resources/resource" +) + +const defaultHashAlgo = "sha256" + +// Client contains methods to fingerprint (cachebusting) and other integrity-related +// methods. +type Client struct { + rs *resources.Spec +} + +// New creates a new Client with the given specification. +func New(rs *resources.Spec) *Client { + return &Client{rs: rs} +} + +type fingerprintTransformation struct { + algo string +} + +func (t *fingerprintTransformation) Key() resources.ResourceTransformationKey { + return resources.NewResourceTransformationKey("fingerprint", t.algo) +} + +// Transform creates a MD5 hash of the Resource content and inserts that hash before +// the extension in the filename. +func (t *fingerprintTransformation) Transform(ctx *resources.ResourceTransformationCtx) error { + algo := t.algo + + var h hash.Hash + + switch algo { + case "md5": + h = md5.New() + case "sha256": + h = sha256.New() + case "sha512": + h = sha512.New() + default: + return fmt.Errorf("unsupported crypto algo: %q, use either md5, sha256 or sha512", algo) + } + + io.Copy(io.MultiWriter(h, ctx.To), ctx.From) + d, err := digest(h) + if err != nil { + return err + } + + ctx.Data["Integrity"] = integrity(algo, d) + ctx.AddOutPathIdentifier("." + hex.EncodeToString(d[:])) + return nil +} + +// Fingerprint applies fingerprinting of the given resource and hash algorithm. +// It defaults to sha256 if none given, and the options are md5, sha256 or sha512. +// The same algo is used for both the fingerprinting part (aka cache busting) and +// the base64-encoded Subresource Integrity hash, so you will have to stay away from +// md5 if you plan to use both. +// See https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity +func (c *Client) Fingerprint(res resource.Resource, algo string) (resource.Resource, error) { + if algo == "" { + algo = defaultHashAlgo + } + + return c.rs.Transform( + res, + &fingerprintTransformation{algo: algo}, + ) +} + +func integrity(algo string, sum []byte) template.HTMLAttr { + encoded := base64.StdEncoding.EncodeToString(sum) + return template.HTMLAttr(algo + "-" + encoded) +} + +func digest(h hash.Hash) ([]byte, error) { + sum := h.Sum(nil) + return sum, nil +} diff --git a/resources/resource_transformers/minifier/minify.go b/resources/resource_transformers/minifier/minify.go new file mode 100644 index 000000000..952c6a99c --- /dev/null +++ b/resources/resource_transformers/minifier/minify.go @@ -0,0 +1,59 @@ +// Copyright 2018 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package minifier + +import ( + "github.com/gohugoio/hugo/minifiers" + "github.com/gohugoio/hugo/resources" + "github.com/gohugoio/hugo/resources/resource" +) + +// Client for minification of Resource objects. 
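The integrity value produced above is just the algorithm name plus the base64 of the raw digest, while the output-path identifier uses the hex form of the same digest. A standalone sketch of that arithmetic with the standard library, not Hugo code, just the format it yields.

package main

import (
    "crypto/sha256"
    "encoding/base64"
    "encoding/hex"
    "fmt"
)

func main() {
    content := []byte("body { color: #333; }")

    h := sha256.New()
    h.Write(content)
    sum := h.Sum(nil)

    // What ends up in .Data.Integrity, suitable for an integrity="..." attribute:
    integrity := "sha256-" + base64.StdEncoding.EncodeToString(sum)

    // What gets inserted into the published filename before the extension:
    fingerprint := hex.EncodeToString(sum)

    fmt.Println(integrity)
    fmt.Println(fingerprint) // e.g. styles.<fingerprint>.css
}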
Supported minfiers are: +// css, html, js, json, svg and xml. +type Client struct { + rs *resources.Spec + m minifiers.Client +} + +// New creates a new Client given a specification. Note that it is the media types +// configured for the site that is used to match files to the correct minifier. +func New(rs *resources.Spec) *Client { + return &Client{rs: rs, m: minifiers.New(rs.MediaTypes, rs.OutputFormats)} +} + +type minifyTransformation struct { + rs *resources.Spec + m minifiers.Client +} + +func (t *minifyTransformation) Key() resources.ResourceTransformationKey { + return resources.NewResourceTransformationKey("minify") +} + +func (t *minifyTransformation) Transform(ctx *resources.ResourceTransformationCtx) error { + if err := t.m.Minify(ctx.InMediaType, ctx.To, ctx.From); err != nil { + return err + } + ctx.AddOutPathIdentifier(".min") + return nil +} + +func (c *Client) Minify(res resource.Resource) (resource.Resource, error) { + return c.rs.Transform( + res, + &minifyTransformation{ + rs: c.rs, + m: c.m}, + ) +} diff --git a/resources/resource_transformers/postcss/postcss.go b/resources/resource_transformers/postcss/postcss.go new file mode 100644 index 000000000..5350eebc5 --- /dev/null +++ b/resources/resource_transformers/postcss/postcss.go @@ -0,0 +1,185 @@ +// Copyright 2018 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package postcss + +import ( + "io" + "path/filepath" + + "github.com/gohugoio/hugo/hugofs" + "github.com/pkg/errors" + + "os" + "os/exec" + + "github.com/mitchellh/mapstructure" + + "github.com/gohugoio/hugo/common/herrors" + "github.com/gohugoio/hugo/resources" + "github.com/gohugoio/hugo/resources/resource" +) + +// Some of the options from https://github.com/postcss/postcss-cli +type Options struct { + + // Set a custom path to look for a config file. + Config string + + NoMap bool `mapstructure:"no-map"` // Disable the default inline sourcemaps + + // Options for when not using a config file + Use string // List of postcss plugins to use + Parser string // Custom postcss parser + Stringifier string // Custom postcss stringifier + Syntax string // Custom postcss syntax +} + +func DecodeOptions(m map[string]interface{}) (opts Options, err error) { + if m == nil { + return + } + err = mapstructure.WeakDecode(m, &opts) + return +} + +func (opts Options) toArgs() []string { + var args []string + if opts.NoMap { + args = append(args, "--no-map") + } + if opts.Use != "" { + args = append(args, "--use", opts.Use) + } + if opts.Parser != "" { + args = append(args, "--parser", opts.Parser) + } + if opts.Stringifier != "" { + args = append(args, "--stringifier", opts.Stringifier) + } + if opts.Syntax != "" { + args = append(args, "--syntax", opts.Syntax) + } + return args +} + +// Client is the client used to do PostCSS transformations. +type Client struct { + rs *resources.Spec +} + +// New creates a new Client with the given specification. 
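The PostCSS option names map to postcss-cli flags one-to-one via the mapstructure tags, so a map coming from a template call decodes and serializes as below. The sketch is written as if inside the postcss package, since toArgs is unexported; the helper name is illustrative.

// hypotheticalArgs shows the option-to-flag mapping.
func hypotheticalArgs() ([]string, error) {
    m := map[string]interface{}{
        "no-map": true, // mapstructure tag "no-map" -> NoMap
        "use":    "autoprefixer",
    }
    opts, err := DecodeOptions(m)
    if err != nil {
        return nil, err
    }
    return opts.toArgs(), nil // []string{"--no-map", "--use", "autoprefixer"}
    // Config is handled separately in Transform and becomes "--config <path>".
}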
+func New(rs *resources.Spec) *Client { + return &Client{rs: rs} +} + +type postcssTransformation struct { + options Options + rs *resources.Spec +} + +func (t *postcssTransformation) Key() resources.ResourceTransformationKey { + return resources.NewResourceTransformationKey("postcss", t.options) +} + +// Transform shells out to postcss-cli to do the heavy lifting. +// For this to work, you need some additional tools. To install them globally: +// npm install -g postcss-cli +// npm install -g autoprefixer +func (t *postcssTransformation) Transform(ctx *resources.ResourceTransformationCtx) error { + + const localPostCSSPath = "node_modules/postcss-cli/bin/" + const binaryName = "postcss" + + // Try first in the project's node_modules. + csiBinPath := filepath.Join(t.rs.WorkingDir, localPostCSSPath, binaryName) + + binary := csiBinPath + + if _, err := exec.LookPath(binary); err != nil { + // Try PATH + binary = binaryName + if _, err := exec.LookPath(binary); err != nil { + // This may be on a CI server etc. Will fall back to pre-built assets. + return herrors.ErrFeatureNotAvailable + } + } + + var configFile string + logger := t.rs.Logger + + if t.options.Config != "" { + configFile = t.options.Config + } else { + configFile = "postcss.config.js" + } + + configFile = filepath.Clean(configFile) + + // We need an abolute filename to the config file. + if !filepath.IsAbs(configFile) { + // We resolve this against the virtual Work filesystem, to allow + // this config file to live in one of the themes if needed. + fi, err := t.rs.BaseFs.Work.Fs.Stat(configFile) + if err != nil { + if t.options.Config != "" { + // Only fail if the user specificed config file is not found. + return errors.Wrapf(err, "postcss config %q not found:", configFile) + } + configFile = "" + } else { + configFile = fi.(hugofs.RealFilenameInfo).RealFilename() + } + } + + var cmdArgs []string + + if configFile != "" { + logger.INFO.Println("postcss: use config file", configFile) + cmdArgs = []string{"--config", configFile} + } + + if optArgs := t.options.toArgs(); len(optArgs) > 0 { + cmdArgs = append(cmdArgs, optArgs...) + } + + cmd := exec.Command(binary, cmdArgs...) + + cmd.Stdout = ctx.To + cmd.Stderr = os.Stderr + + stdin, err := cmd.StdinPipe() + if err != nil { + return err + } + + go func() { + defer stdin.Close() + io.Copy(stdin, ctx.From) + }() + + err = cmd.Run() + if err != nil { + return err + } + + return nil +} + +// Process transforms the given Resource with the PostCSS processor. +func (c *Client) Process(res resource.Resource, options Options) (resource.Resource, error) { + return c.rs.Transform( + res, + &postcssTransformation{rs: c.rs, options: options}, + ) +} diff --git a/resources/resource_transformers/templates/execute_as_template.go b/resources/resource_transformers/templates/execute_as_template.go new file mode 100644 index 000000000..b3ec3cf43 --- /dev/null +++ b/resources/resource_transformers/templates/execute_as_template.go @@ -0,0 +1,76 @@ +// Copyright 2018 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +// Package templates contains functions for template processing of Resource objects. +package templates + +import ( + "github.com/gohugoio/hugo/helpers" + "github.com/gohugoio/hugo/resources" + "github.com/gohugoio/hugo/resources/resource" + "github.com/gohugoio/hugo/tpl" + "github.com/pkg/errors" +) + +// Client contains methods to perform template processing of Resource objects. +type Client struct { + rs *resources.Spec + + textTemplate tpl.TemplateParseFinder +} + +// New creates a new Client with the given specification. +func New(rs *resources.Spec, textTemplate tpl.TemplateParseFinder) *Client { + if rs == nil { + panic("must provice a resource Spec") + } + if textTemplate == nil { + panic("must provide a textTemplate") + } + return &Client{rs: rs, textTemplate: textTemplate} +} + +type executeAsTemplateTransform struct { + rs *resources.Spec + textTemplate tpl.TemplateParseFinder + targetPath string + data interface{} +} + +func (t *executeAsTemplateTransform) Key() resources.ResourceTransformationKey { + return resources.NewResourceTransformationKey("execute-as-template", t.targetPath) +} + +func (t *executeAsTemplateTransform) Transform(ctx *resources.ResourceTransformationCtx) error { + tplStr := helpers.ReaderToString(ctx.From) + templ, err := t.textTemplate.Parse(ctx.InPath, tplStr) + if err != nil { + return errors.Wrapf(err, "failed to parse Resource %q as Template:", ctx.InPath) + } + + ctx.OutPath = t.targetPath + + return templ.Execute(ctx.To, t.data) +} + +func (c *Client) ExecuteAsTemplate(res resource.Resource, targetPath string, data interface{}) (resource.Resource, error) { + return c.rs.Transform( + res, + &executeAsTemplateTransform{ + rs: c.rs, + targetPath: helpers.ToSlashTrimLeading(targetPath), + textTemplate: c.textTemplate, + data: data, + }, + ) +} diff --git a/resources/resource_transformers/tocss/scss/client.go b/resources/resource_transformers/tocss/scss/client.go new file mode 100644 index 000000000..41ff67433 --- /dev/null +++ b/resources/resource_transformers/tocss/scss/client.go @@ -0,0 +1,111 @@ +// Copyright 2018 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package scss + +import ( + "github.com/bep/go-tocss/scss" + "github.com/gohugoio/hugo/helpers" + "github.com/gohugoio/hugo/hugolib/filesystems" + "github.com/gohugoio/hugo/resources" + "github.com/gohugoio/hugo/resources/resource" + + "github.com/mitchellh/mapstructure" +) + +type Client struct { + rs *resources.Spec + sfs *filesystems.SourceFilesystem + workFs *filesystems.SourceFilesystem +} + +func New(fs *filesystems.SourceFilesystem, rs *resources.Spec) (*Client, error) { + return &Client{sfs: fs, workFs: rs.BaseFs.Work, rs: rs}, nil +} + +type Options struct { + + // Hugo, will by default, just replace the extension of the source + // to .css, e.g. "scss/main.scss" becomes "scss/main.css". 
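The execute-as-template transformation above boils down to: parse the resource body as a template, execute it with the given data, and write the output to the new target path. The standard-library equivalent of that inner step looks like this; it is a sketch only, since the real code goes through Hugo's tpl.TemplateParseFinder rather than text/template directly.

package main

import (
    "os"
    "text/template"
)

func main() {
    // Resource body, e.g. an asset CSS file treated as a template.
    body := "body { background: {{ .bg }}; }"

    tmpl, err := template.New("css/theme.css").Parse(body)
    if err != nil {
        panic(err)
    }

    // data is whatever gets passed to ExecuteAsTemplate, often the Site
    // or a dict built in the calling template.
    data := map[string]interface{}{"bg": "#fff"}

    if err := tmpl.Execute(os.Stdout, data); err != nil {
        panic(err)
    }
    // Output: body { background: #fff; }
}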
You can + // control this by setting this, e.g. "styles/main.css" will create + // a Resource with that as a base for RelPermalink etc. + TargetPath string + + // Hugo automatically adds the entry directories (where the main.scss lives) + // for project and themes to the list of include paths sent to LibSASS. + // Any paths set in this setting will be appended. Note that these will be + // treated as relative to the working dir, i.e. no include paths outside the + // project/themes. + IncludePaths []string + + // Default is nested. + // One of nested, expanded, compact, compressed. + OutputStyle string + + // Precision of floating point math. + Precision int + + // When enabled, Hugo will generate a source map. + EnableSourceMap bool +} + +type options struct { + // The options we receive from the end user. + from Options + + // The options we send to the SCSS library. + to scss.Options +} + +func (c *Client) ToCSS(res resource.Resource, opts Options) (resource.Resource, error) { + internalOptions := options{ + from: opts, + } + + // Transfer values from client. + internalOptions.to.Precision = opts.Precision + internalOptions.to.OutputStyle = scss.OutputStyleFromString(opts.OutputStyle) + + if internalOptions.to.Precision == 0 { + // bootstrap-sass requires 8 digits precision. The libsass default is 5. + // https://github.com/twbs/bootstrap-sass/blob/master/README.md#sass-number-precision + internalOptions.to.Precision = 8 + } + + return c.rs.Transform( + res, + &toCSSTransformation{c: c, options: internalOptions}, + ) +} + +type toCSSTransformation struct { + c *Client + options options +} + +func (t *toCSSTransformation) Key() resources.ResourceTransformationKey { + return resources.NewResourceTransformationKey("tocss", t.options.from) +} + +func DecodeOptions(m map[string]interface{}) (opts Options, err error) { + if m == nil { + return + } + err = mapstructure.WeakDecode(m, &opts) + + if opts.TargetPath != "" { + opts.TargetPath = helpers.ToSlashTrimLeading(opts.TargetPath) + } + + return +} diff --git a/resources/resource_transformers/tocss/scss/tocss.go b/resources/resource_transformers/tocss/scss/tocss.go new file mode 100644 index 000000000..17c32ea8e --- /dev/null +++ b/resources/resource_transformers/tocss/scss/tocss.go @@ -0,0 +1,173 @@ +// Copyright 2018 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// +build extended + +package scss + +import ( + "fmt" + "io" + "path" + "path/filepath" + "strings" + + "github.com/bep/go-tocss/scss" + "github.com/bep/go-tocss/scss/libsass" + "github.com/bep/go-tocss/tocss" + "github.com/gohugoio/hugo/helpers" + "github.com/gohugoio/hugo/hugofs" + "github.com/gohugoio/hugo/media" + "github.com/gohugoio/hugo/resources" + "github.com/pkg/errors" +) + +// Used in tests. This feature requires Hugo to be built with the extended tag. 
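From a template these options arrive as a map and DecodeOptions turns that into the struct above, trimming any leading slash off targetPath. A small sketch under the assumption that a configured *scss.Client and an SCSS-typed resource are already available; the package and function names are illustrative.

package styles

import (
    "github.com/gohugoio/hugo/resources/resource"
    "github.com/gohugoio/hugo/resources/resource_transformers/tocss/scss"
)

// compileMainSCSS is a sketch only: c and res are assumed to be set up elsewhere.
func compileMainSCSS(c *scss.Client, res resource.Resource) (resource.Resource, error) {
    opts, err := scss.DecodeOptions(map[string]interface{}{
        "targetPath":      "/css/main.css", // leading slash is trimmed -> "css/main.css"
        "outputStyle":     "compressed",
        "precision":       8,
        "enableSourceMap": true,
    })
    if err != nil {
        return nil, err
    }
    return c.ToCSS(res, opts)
}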
+func Supports() bool { + return true +} + +func (t *toCSSTransformation) Transform(ctx *resources.ResourceTransformationCtx) error { + ctx.OutMediaType = media.CSSType + + var outName string + if t.options.from.TargetPath != "" { + ctx.OutPath = t.options.from.TargetPath + } else { + ctx.ReplaceOutPathExtension(".css") + } + + outName = path.Base(ctx.OutPath) + + options := t.options + baseDir := path.Dir(ctx.SourcePath) + options.to.IncludePaths = t.c.sfs.RealDirs(baseDir) + + // Append any workDir relative include paths + for _, ip := range options.from.IncludePaths { + options.to.IncludePaths = append(options.to.IncludePaths, t.c.workFs.RealDirs(filepath.Clean(ip))...) + } + + // To allow for overrides of SCSS files anywhere in the project/theme hierarchy, we need + // to help libsass revolve the filename by looking in the composite filesystem first. + // We add the entry directories for both project and themes to the include paths list, but + // that only work for overrides on the top level. + options.to.ImportResolver = func(url string, prev string) (newUrl string, body string, resolved bool) { + // We get URL paths from LibSASS, but we need file paths. + url = filepath.FromSlash(url) + prev = filepath.FromSlash(prev) + + var basePath string + urlDir := filepath.Dir(url) + var prevDir string + if prev == "stdin" { + prevDir = baseDir + } else { + prevDir = t.c.sfs.MakePathRelative(filepath.Dir(prev)) + if prevDir == "" { + // Not a member of this filesystem. Let LibSASS handle it. + return "", "", false + } + } + + basePath = filepath.Join(prevDir, urlDir) + name := filepath.Base(url) + + // Libsass throws an error in cases where you have several possible candidates. + // We make this simpler and pick the first match. + var namePatterns []string + if strings.Contains(name, ".") { + namePatterns = []string{"_%s", "%s"} + } else if strings.HasPrefix(name, "_") { + namePatterns = []string{"_%s.scss", "_%s.sass"} + } else { + namePatterns = []string{"_%s.scss", "%s.scss", "_%s.sass", "%s.sass"} + } + + name = strings.TrimPrefix(name, "_") + + for _, namePattern := range namePatterns { + filenameToCheck := filepath.Join(basePath, fmt.Sprintf(namePattern, name)) + fi, err := t.c.sfs.Fs.Stat(filenameToCheck) + if err == nil { + if fir, ok := fi.(hugofs.RealFilenameInfo); ok { + return fir.RealFilename(), "", true + } + } + } + + // Not found, let LibSASS handle it + return "", "", false + } + + if ctx.InMediaType.SubType == media.SASSType.SubType { + options.to.SassSyntax = true + } + + if options.from.EnableSourceMap { + + options.to.SourceMapFilename = outName + ".map" + options.to.SourceMapRoot = t.c.rs.WorkingDir + + // Setting this to the relative input filename will get the source map + // more correct for the main entry path (main.scss typically), but + // it will mess up the import mappings. As a workaround, we do a replacement + // in the source map itself (see below). + //options.InputPath = inputPath + options.to.OutputPath = outName + options.to.SourceMapContents = true + options.to.OmitSourceMapURL = false + options.to.EnableEmbeddedSourceMap = false + } + + res, err := t.c.toCSS(options.to, ctx.To, ctx.From) + if err != nil { + return err + } + + if options.from.EnableSourceMap && res.SourceMapContent != "" { + sourcePath := t.c.sfs.RealFilename(ctx.SourcePath) + + if strings.HasPrefix(sourcePath, t.c.rs.WorkingDir) { + sourcePath = strings.TrimPrefix(sourcePath, t.c.rs.WorkingDir+helpers.FilePathSeparator) + } + + // This needs to be Unix-style slashes, even on Windows. 
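The candidate list the import resolver walks follows SASS partial conventions: an explicit extension only toggles the "_" partial prefix, a "_" prefix restricts the search to partials, and a bare name tries both .scss and .sass, partial first. Pulled out on its own, that selection looks like the illustrative helper below (not part of the change).

package main

import (
    "fmt"
    "path/filepath"
    "strings"
)

// importCandidates mirrors the namePatterns logic in the resolver above.
func importCandidates(baseDir, name string) []string {
    var patterns []string
    switch {
    case strings.Contains(name, "."):
        patterns = []string{"_%s", "%s"}
    case strings.HasPrefix(name, "_"):
        patterns = []string{"_%s.scss", "_%s.sass"}
    default:
        patterns = []string{"_%s.scss", "%s.scss", "_%s.sass", "%s.sass"}
    }

    name = strings.TrimPrefix(name, "_")

    var candidates []string
    for _, p := range patterns {
        candidates = append(candidates, filepath.Join(baseDir, fmt.Sprintf(p, name)))
    }
    return candidates
}

func main() {
    fmt.Println(importCandidates("scss", "variables"))
    // e.g. [scss/_variables.scss scss/variables.scss scss/_variables.sass scss/variables.sass]
}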
+ // See https://github.com/gohugoio/hugo/issues/4968 + sourcePath = filepath.ToSlash(sourcePath) + + // This is a workaround for what looks like a bug in Libsass. But + // getting this resolution correct in tools like Chrome Workspaces + // is important enough to go this extra mile. + mapContent := strings.Replace(res.SourceMapContent, `stdin",`, fmt.Sprintf("%s\",", sourcePath), 1) + + return ctx.PublishSourceMap(mapContent) + } + return nil +} + +func (c *Client) toCSS(options scss.Options, dst io.Writer, src io.Reader) (tocss.Result, error) { + var res tocss.Result + + transpiler, err := libsass.New(options) + if err != nil { + return res, err + } + + res, err = transpiler.Execute(dst, src) + if err != nil { + return res, errors.Wrap(err, "SCSS processing failed") + } + + return res, nil +} diff --git a/resources/resource_transformers/tocss/scss/tocss_notavailable.go b/resources/resource_transformers/tocss/scss/tocss_notavailable.go new file mode 100644 index 000000000..ad6b42b98 --- /dev/null +++ b/resources/resource_transformers/tocss/scss/tocss_notavailable.go @@ -0,0 +1,30 @@ +// Copyright 2018 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// +build !extended + +package scss + +import ( + "github.com/gohugoio/hugo/common/herrors" + "github.com/gohugoio/hugo/resources" +) + +// Used in tests. +func Supports() bool { + return false +} + +func (t *toCSSTransformation) Transform(ctx *resources.ResourceTransformationCtx) error { + return herrors.ErrFeatureNotAvailable +} diff --git a/resources/smartcrop.go b/resources/smartcrop.go new file mode 100644 index 000000000..05bc55cd7 --- /dev/null +++ b/resources/smartcrop.go @@ -0,0 +1,80 @@ +// Copyright 2019 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package resources + +import ( + "image" + + "github.com/disintegration/imaging" + "github.com/muesli/smartcrop" +) + +const ( + // Do not change. + smartCropIdentifier = "smart" + + // This is just a increment, starting on 1. If Smart Crop improves its cropping, we + // need a way to trigger a re-generation of the crops in the wild, so increment this. 
+ smartCropVersionNumber = 1 +) + +// Needed by smartcrop +type imagingResizer struct { + filter imaging.ResampleFilter +} + +func (r imagingResizer) Resize(img image.Image, width, height uint) image.Image { + return imaging.Resize(img, int(width), int(height), r.filter) +} + +func newSmartCropAnalyzer(filter imaging.ResampleFilter) smartcrop.Analyzer { + return smartcrop.NewAnalyzer(imagingResizer{filter: filter}) +} + +func smartCrop(img image.Image, width, height int, anchor imaging.Anchor, filter imaging.ResampleFilter) (*image.NRGBA, error) { + + if width <= 0 || height <= 0 { + return &image.NRGBA{}, nil + } + + srcBounds := img.Bounds() + srcW := srcBounds.Dx() + srcH := srcBounds.Dy() + + if srcW <= 0 || srcH <= 0 { + return &image.NRGBA{}, nil + } + + if srcW == width && srcH == height { + return imaging.Clone(img), nil + } + + smart := newSmartCropAnalyzer(filter) + + rect, err := smart.FindBestCrop(img, width, height) + + if err != nil { + return nil, err + } + + b := img.Bounds().Intersect(rect) + + cropped, err := imaging.Crop(img, b), nil + if err != nil { + return nil, err + } + + return imaging.Resize(cropped, width, height, filter), nil + +} diff --git a/resources/testdata/1234567890qwertyuiopasdfghjklzxcvbnm5to6eeeeee7via8eleph.jpg b/resources/testdata/1234567890qwertyuiopasdfghjklzxcvbnm5to6eeeeee7via8eleph.jpg Binary files differnew file mode 100644 index 000000000..7d7307bed --- /dev/null +++ b/resources/testdata/1234567890qwertyuiopasdfghjklzxcvbnm5to6eeeeee7via8eleph.jpg diff --git a/resources/testdata/circle.svg b/resources/testdata/circle.svg new file mode 100644 index 000000000..2759ae703 --- /dev/null +++ b/resources/testdata/circle.svg @@ -0,0 +1,5 @@ +<svg height="100" width="100"> + <circle cx="50" cy="50" r="40" stroke="black" stroke-width="3" fill="red" /> + Sorry, your browser does not support inline SVG. +</svg> +
\ No newline at end of file diff --git a/resources/testdata/gohugoio.png b/resources/testdata/gohugoio.png Binary files differnew file mode 100644 index 000000000..0591db959 --- /dev/null +++ b/resources/testdata/gohugoio.png diff --git a/resources/testdata/sub/gohugoio2.png b/resources/testdata/sub/gohugoio2.png Binary files differnew file mode 100644 index 000000000..0591db959 --- /dev/null +++ b/resources/testdata/sub/gohugoio2.png diff --git a/resources/testdata/sunset.jpg b/resources/testdata/sunset.jpg Binary files differnew file mode 100644 index 000000000..7d7307bed --- /dev/null +++ b/resources/testdata/sunset.jpg diff --git a/resources/testhelpers_test.go b/resources/testhelpers_test.go new file mode 100644 index 000000000..d0fcb59e7 --- /dev/null +++ b/resources/testhelpers_test.go @@ -0,0 +1,183 @@ +package resources + +import ( + "path/filepath" + "testing" + + "fmt" + "image" + "io" + "io/ioutil" + "os" + "path" + "runtime" + "strings" + + "github.com/gohugoio/hugo/cache/filecache" + "github.com/gohugoio/hugo/helpers" + "github.com/gohugoio/hugo/hugofs" + "github.com/gohugoio/hugo/media" + "github.com/gohugoio/hugo/output" + "github.com/gohugoio/hugo/resources/resource" + "github.com/spf13/afero" + "github.com/spf13/viper" + "github.com/stretchr/testify/require" +) + +func newTestResourceSpec(assert *require.Assertions) *Spec { + return newTestResourceSpecForBaseURL(assert, "https://example.com/") +} + +func newTestResourceSpecForBaseURL(assert *require.Assertions, baseURL string) *Spec { + cfg := viper.New() + cfg.Set("baseURL", baseURL) + cfg.Set("resourceDir", "resources") + cfg.Set("contentDir", "content") + cfg.Set("dataDir", "data") + cfg.Set("i18nDir", "i18n") + cfg.Set("layoutDir", "layouts") + cfg.Set("assetDir", "assets") + cfg.Set("archetypeDir", "archetypes") + cfg.Set("publishDir", "public") + + imagingCfg := map[string]interface{}{ + "resampleFilter": "linear", + "quality": 68, + "anchor": "left", + } + + cfg.Set("imaging", imagingCfg) + + fs := hugofs.NewMem(cfg) + + s, err := helpers.NewPathSpec(fs, cfg) + assert.NoError(err) + + filecaches, err := filecache.NewCaches(s) + assert.NoError(err) + + spec, err := NewSpec(s, filecaches, nil, output.DefaultFormats, media.DefaultTypes) + assert.NoError(err) + return spec +} + +func newTestResourceOsFs(assert *require.Assertions) *Spec { + cfg := viper.New() + cfg.Set("baseURL", "https://example.com") + + workDir, err := ioutil.TempDir("", "hugores") + + if runtime.GOOS == "darwin" && !strings.HasPrefix(workDir, "/private") { + // To get the entry folder in line with the rest. This its a little bit + // mysterious, but so be it. 
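		// On macOS, ioutil.TempDir returns a path under /var/folders, and /var is
		// a symlink to /private/var; prepending /private keeps workDir comparable
		// with the symlink-resolved real paths used by the filesystem helpers.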
+ workDir = "/private" + workDir + } + + cfg.Set("workingDir", workDir) + cfg.Set("resourceDir", "resources") + cfg.Set("contentDir", "content") + cfg.Set("dataDir", "data") + cfg.Set("i18nDir", "i18n") + cfg.Set("layoutDir", "layouts") + cfg.Set("assetDir", "assets") + cfg.Set("archetypeDir", "archetypes") + cfg.Set("publishDir", "public") + + fs := hugofs.NewFrom(hugofs.Os, cfg) + fs.Destination = &afero.MemMapFs{} + + s, err := helpers.NewPathSpec(fs, cfg) + assert.NoError(err) + + filecaches, err := filecache.NewCaches(s) + assert.NoError(err) + + spec, err := NewSpec(s, filecaches, nil, output.DefaultFormats, media.DefaultTypes) + assert.NoError(err) + return spec + +} + +func fetchSunset(assert *require.Assertions) *Image { + return fetchImage(assert, "sunset.jpg") +} + +func fetchImage(assert *require.Assertions, name string) *Image { + spec := newTestResourceSpec(assert) + return fetchImageForSpec(spec, assert, name) +} + +func fetchImageForSpec(spec *Spec, assert *require.Assertions, name string) *Image { + r := fetchResourceForSpec(spec, assert, name) + assert.IsType(&Image{}, r) + return r.(*Image) +} + +func fetchResourceForSpec(spec *Spec, assert *require.Assertions, name string) resource.ContentResource { + src, err := os.Open(filepath.FromSlash("testdata/" + name)) + assert.NoError(err) + + out, err := helpers.OpenFileForWriting(spec.BaseFs.Content.Fs, name) + assert.NoError(err) + _, err = io.Copy(out, src) + out.Close() + src.Close() + assert.NoError(err) + + factory := func(s string) string { + return path.Join("/a", s) + } + + r, err := spec.New(ResourceSourceDescriptor{TargetPathBuilder: factory, SourceFilename: name}) + assert.NoError(err) + + return r.(resource.ContentResource) +} + +func assertImageFile(assert *require.Assertions, fs afero.Fs, filename string, width, height int) { + f, err := fs.Open(filename) + if err != nil { + printFs(fs, "", os.Stdout) + } + assert.NoError(err) + defer f.Close() + + config, _, err := image.DecodeConfig(f) + assert.NoError(err) + + assert.Equal(width, config.Width) + assert.Equal(height, config.Height) +} + +func assertFileCache(assert *require.Assertions, fs afero.Fs, filename string, width, height int) { + assertImageFile(assert, fs, filepath.Clean(filename), width, height) +} + +func writeSource(t testing.TB, fs *hugofs.Fs, filename, content string) { + writeToFs(t, fs.Source, filename, content) +} + +func writeToFs(t testing.TB, fs afero.Fs, filename, content string) { + if err := afero.WriteFile(fs, filepath.FromSlash(filename), []byte(content), 0755); err != nil { + t.Fatalf("Failed to write file: %s", err) + } +} + +func printFs(fs afero.Fs, path string, w io.Writer) { + if fs == nil { + return + } + afero.Walk(fs, path, func(path string, info os.FileInfo, err error) error { + if info != nil && !info.IsDir() { + s := path + if lang, ok := info.(hugofs.LanguageAnnouncer); ok { + s = s + "\t" + lang.Lang() + } + if fp, ok := info.(hugofs.FilePather); ok { + s += "\tFilename: " + fp.Filename() + "\tBase: " + fp.BaseDir() + } + fmt.Fprintln(w, " ", s) + } + return nil + }) +} diff --git a/resources/transform.go b/resources/transform.go new file mode 100644 index 000000000..fd3ae1ae6 --- /dev/null +++ b/resources/transform.go @@ -0,0 +1,554 @@ +// Copyright 2019 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package resources + +import ( + "bytes" + "path" + "strconv" + "strings" + + "github.com/gohugoio/hugo/common/collections" + "github.com/gohugoio/hugo/common/herrors" + "github.com/gohugoio/hugo/common/hugio" + "github.com/gohugoio/hugo/helpers" + "github.com/gohugoio/hugo/resources/resource" + "github.com/mitchellh/hashstructure" + + "fmt" + "io" + "sync" + + "github.com/gohugoio/hugo/media" + + bp "github.com/gohugoio/hugo/bufferpool" +) + +var ( + _ resource.ContentResource = (*transformedResource)(nil) + _ resource.ReadSeekCloserResource = (*transformedResource)(nil) + _ collections.Slicer = (*transformedResource)(nil) + _ resource.Identifier = (*transformedResource)(nil) +) + +func (s *Spec) Transform(r resource.Resource, t ResourceTransformation) (resource.Resource, error) { + return &transformedResource{ + Resource: r, + transformation: t, + transformedResourceMetadata: transformedResourceMetadata{MetaData: make(map[string]interface{})}, + cache: s.ResourceCache}, nil +} + +type ResourceTransformationCtx struct { + // The content to transform. + From io.Reader + + // The target of content transformation. + // The current implementation requires that r is written to w + // even if no transformation is performed. + To io.Writer + + // This is the relative path to the original source. Unix styled slashes. + SourcePath string + + // This is the relative target path to the resource. Unix styled slashes. + InPath string + + // The relative target path to the transformed resource. Unix styled slashes. + OutPath string + + // The input media type + InMediaType media.Type + + // The media type of the transformed resource. + OutMediaType media.Type + + // Data data can be set on the transformed Resource. Not that this need + // to be simple types, as it needs to be serialized to JSON and back. + Data map[string]interface{} + + // This is used to publis additional artifacts, e.g. source maps. + // We may improve this. + OpenResourcePublisher func(relTargetPath string) (io.WriteCloser, error) +} + +// AddOutPathIdentifier transforming InPath to OutPath adding an identifier, +// eg '.min' before any extension. +func (ctx *ResourceTransformationCtx) AddOutPathIdentifier(identifier string) { + ctx.OutPath = ctx.addPathIdentifier(ctx.InPath, identifier) +} + +func (ctx *ResourceTransformationCtx) addPathIdentifier(inPath, identifier string) string { + dir, file := path.Split(inPath) + base, ext := helpers.PathAndExt(file) + return path.Join(dir, (base + identifier + ext)) +} + +// ReplaceOutPathExtension transforming InPath to OutPath replacing the file +// extension, e.g. ".scss" +func (ctx *ResourceTransformationCtx) ReplaceOutPathExtension(newExt string) { + dir, file := path.Split(ctx.InPath) + base, _ := helpers.PathAndExt(file) + ctx.OutPath = path.Join(dir, (base + newExt)) +} + +// PublishSourceMap writes the content to the target folder of the main resource +// with the ".map" extension added. 
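// For example, an OutPath of "scss/main.css" publishes the map as
// "scss/main.css.map" alongside the stylesheet.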
+func (ctx *ResourceTransformationCtx) PublishSourceMap(content string) error { + target := ctx.OutPath + ".map" + f, err := ctx.OpenResourcePublisher(target) + if err != nil { + return err + } + defer f.Close() + _, err = f.Write([]byte(content)) + return err +} + +// ResourceTransformationKey are provided by the different transformation implementations. +// It identifies the transformation (name) and its configuration (elements). +// We combine this in a chain with the rest of the transformations +// with the target filename and a content hash of the origin to use as cache key. +type ResourceTransformationKey struct { + name string + elements []interface{} +} + +// NewResourceTransformationKey creates a new ResourceTransformationKey from the transformation +// name and elements. We will create a 64 bit FNV hash from the elements, which when combined +// with the other key elements should be unique for all practical applications. +func NewResourceTransformationKey(name string, elements ...interface{}) ResourceTransformationKey { + return ResourceTransformationKey{name: name, elements: elements} +} + +// Do not change this without good reasons. +func (k ResourceTransformationKey) key() string { + if len(k.elements) == 0 { + return k.name + } + + sb := bp.GetBuffer() + defer bp.PutBuffer(sb) + + sb.WriteString(k.name) + for _, element := range k.elements { + hash, err := hashstructure.Hash(element, nil) + if err != nil { + panic(err) + } + sb.WriteString("_") + sb.WriteString(strconv.FormatUint(hash, 10)) + } + + return sb.String() +} + +// ResourceTransformation is the interface that a resource transformation step +// needs to implement. +type ResourceTransformation interface { + Key() ResourceTransformationKey + Transform(ctx *ResourceTransformationCtx) error +} + +// We will persist this information to disk. +type transformedResourceMetadata struct { + Target string `json:"Target"` + MediaTypeV string `json:"MediaType"` + MetaData map[string]interface{} `json:"Data"` +} + +type transformedResource struct { + commonResource + + cache *ResourceCache + + // This is the filename inside resources/_gen/assets + sourceFilename string + + linker permalinker + + // The transformation to apply. + transformation ResourceTransformation + + // We apply the tranformations lazily. + transformInit sync.Once + transformErr error + + // We delay publishing until either .RelPermalink or .Permalink + // is invoked. 
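	// publishInit guards that copy to /public so it runs at most once.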
+ publishInit sync.Once + published bool + + // The transformed values + content string + contentInit sync.Once + transformedResourceMetadata + + // The source + resource.Resource +} + +func (r *transformedResource) ReadSeekCloser() (hugio.ReadSeekCloser, error) { + if err := r.initContent(); err != nil { + return nil, err + } + return hugio.NewReadSeekerNoOpCloserFromString(r.content), nil +} + +func (r *transformedResource) transferTransformedValues(another *transformedResource) { + if another.content != "" { + r.contentInit.Do(func() { + r.content = another.content + }) + } + r.transformedResourceMetadata = another.transformedResourceMetadata +} + +func (r *transformedResource) tryTransformedFileCache(key string) io.ReadCloser { + fi, f, meta, found := r.cache.getFromFile(key) + if !found { + return nil + } + r.transformedResourceMetadata = meta + r.sourceFilename = fi.Name + + return f +} + +func (r *transformedResource) Content() (interface{}, error) { + if err := r.initTransform(true, false); err != nil { + return nil, err + } + if err := r.initContent(); err != nil { + return "", err + } + return r.content, nil +} + +func (r *transformedResource) Data() interface{} { + if err := r.initTransform(false, false); err != nil { + return noData + } + return r.MetaData +} + +func (r *transformedResource) MediaType() media.Type { + if err := r.initTransform(false, false); err != nil { + return media.Type{} + } + m, _ := r.cache.rs.MediaTypes.GetByType(r.MediaTypeV) + return m +} + +func (r *transformedResource) Key() string { + if err := r.initTransform(false, false); err != nil { + return "" + } + return r.linker.relPermalinkFor(r.Target) +} + +func (r *transformedResource) Permalink() string { + if err := r.initTransform(false, true); err != nil { + return "" + } + return r.linker.permalinkFor(r.Target) +} + +func (r *transformedResource) RelPermalink() string { + if err := r.initTransform(false, true); err != nil { + return "" + } + return r.linker.relPermalinkFor(r.Target) +} + +func (r *transformedResource) initContent() error { + var err error + r.contentInit.Do(func() { + var b []byte + _, b, err = r.cache.fileCache.GetBytes(r.sourceFilename) + if err != nil { + return + } + r.content = string(b) + }) + return err +} + +func (r *transformedResource) openPublishFileForWriting(relTargetPath string) (io.WriteCloser, error) { + return helpers.OpenFilesForWriting(r.cache.rs.PublishFs, r.linker.relTargetPathsFor(relTargetPath)...) +} + +func (r *transformedResource) transform(setContent, publish bool) (err error) { + + // This can be the last resource in a chain. + // Rewind and create a processing chain. + var chain []resource.Resource + current := r + for { + rr := current.Resource + chain = append(chain[:0], append([]resource.Resource{rr}, chain[0:]...)...) + if tr, ok := rr.(*transformedResource); ok { + current = tr + } else { + break + } + } + + // Append the current transformer at the end + chain = append(chain, r) + + first := chain[0] + + // Files with a suffix will be stored in cache (both on disk and in memory) + // partitioned by their suffix. There will be other files below /other. + // This partition is also how we determine what to delete on server reloads. 
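	// As a sketch: for a target path like "scss/main.scss" the partition is
	// presumably its suffix ("scss"), giving a base of "scss/scss/main.scss";
	// the final cache key then appends an MD5 of the concatenated
	// transformation keys.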
+ var key, base string + for _, element := range chain { + switch v := element.(type) { + case *transformedResource: + key = key + "_" + v.transformation.Key().key() + case permalinker: + r.linker = v + p := v.targetPath() + if p == "" { + panic("target path needed for key creation") + } + partition := ResourceKeyPartition(p) + base = partition + "/" + p + default: + return fmt.Errorf("transformation not supported for type %T", element) + } + } + + key = r.cache.cleanKey(base + "_" + helpers.MD5String(key)) + + cached, found := r.cache.get(key) + if found { + r.transferTransformedValues(cached.(*transformedResource)) + return + } + + // Acquire a write lock for the named transformation. + r.cache.nlocker.Lock(key) + // Check the cache again. + cached, found = r.cache.get(key) + if found { + r.transferTransformedValues(cached.(*transformedResource)) + r.cache.nlocker.Unlock(key) + return + } + + defer r.cache.nlocker.Unlock(key) + defer r.cache.set(key, r) + + b1 := bp.GetBuffer() + b2 := bp.GetBuffer() + defer bp.PutBuffer(b1) + defer bp.PutBuffer(b2) + + tctx := &ResourceTransformationCtx{ + Data: r.transformedResourceMetadata.MetaData, + OpenResourcePublisher: r.openPublishFileForWriting, + } + + tctx.InMediaType = first.MediaType() + tctx.OutMediaType = first.MediaType() + + contentrc, err := contentReadSeekerCloser(first) + if err != nil { + return err + } + defer contentrc.Close() + + tctx.From = contentrc + tctx.To = b1 + + if r.linker != nil { + tctx.InPath = r.linker.targetPath() + tctx.SourcePath = tctx.InPath + } + + counter := 0 + + var transformedContentr io.Reader + + for _, element := range chain { + tr, ok := element.(*transformedResource) + if !ok { + continue + } + counter++ + if counter != 1 { + tctx.InMediaType = tctx.OutMediaType + } + if counter%2 == 0 { + tctx.From = b1 + b2.Reset() + tctx.To = b2 + } else { + if counter != 1 { + // The first reader is the file. + tctx.From = b2 + } + b1.Reset() + tctx.To = b1 + } + + if err := tr.transformation.Transform(tctx); err != nil { + if err == herrors.ErrFeatureNotAvailable { + // This transformation is not available in this + // Hugo installation (scss not compiled in, PostCSS not available etc.) + // If a prepared bundle for this transformation chain is available, use that. + f := r.tryTransformedFileCache(key) + if f == nil { + errMsg := err.Error() + if tr.transformation.Key().name == "postcss" { + errMsg = "PostCSS not found; install with \"npm install postcss-cli\". See https://gohugo.io/hugo-pipes/postcss/" + } + return fmt.Errorf("%s: failed to transform %q (%s): %s", strings.ToUpper(tr.transformation.Key().name), tctx.InPath, tctx.InMediaType.Type(), errMsg) + } + transformedContentr = f + defer f.Close() + + // The reader above is all we need. + break + } + + // Abort. 
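			// Any error other than ErrFeatureNotAvailable is treated as fatal
			// for the whole transformation chain.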
+ return err + } + + if tctx.OutPath != "" { + tctx.InPath = tctx.OutPath + tctx.OutPath = "" + } + } + + if transformedContentr == nil { + r.Target = tctx.InPath + r.MediaTypeV = tctx.OutMediaType.Type() + } + + var publishwriters []io.WriteCloser + + if publish { + publicw, err := r.openPublishFileForWriting(r.Target) + if err != nil { + r.transformErr = err + return err + } + defer publicw.Close() + + publishwriters = append(publishwriters, publicw) + } + + if transformedContentr == nil { + // Also write it to the cache + fi, metaw, err := r.cache.writeMeta(key, r.transformedResourceMetadata) + if err != nil { + return err + } + r.sourceFilename = fi.Name + + publishwriters = append(publishwriters, metaw) + + if counter > 0 { + transformedContentr = tctx.To.(*bytes.Buffer) + } else { + transformedContentr = contentrc + } + } + + // Also write it to memory + var contentmemw *bytes.Buffer + + if setContent { + contentmemw = bp.GetBuffer() + defer bp.PutBuffer(contentmemw) + publishwriters = append(publishwriters, hugio.ToWriteCloser(contentmemw)) + } + + publishw := hugio.NewMultiWriteCloser(publishwriters...) + _, r.transformErr = io.Copy(publishw, transformedContentr) + publishw.Close() + + if setContent { + r.contentInit.Do(func() { + r.content = contentmemw.String() + }) + } + + return nil +} + +func (r *transformedResource) initTransform(setContent, publish bool) error { + r.transformInit.Do(func() { + r.published = publish + if err := r.transform(setContent, publish); err != nil { + r.transformErr = err + r.cache.rs.Logger.ERROR.Println("error: failed to transform resource:", err) + } + + }) + + if !publish { + return r.transformErr + } + + r.publishInit.Do(func() { + if r.published { + return + } + + r.published = true + + // Copy the file from cache to /public + _, src, err := r.cache.fileCache.Get(r.sourceFilename) + + if err == nil { + defer src.Close() + + var dst io.WriteCloser + dst, err = r.openPublishFileForWriting(r.Target) + if err == nil { + defer dst.Close() + io.Copy(dst, src) + } + } + + if err != nil { + r.transformErr = err + r.cache.rs.Logger.ERROR.Println("error: failed to publish resource:", err) + return + } + + }) + + return r.transformErr +} + +// contentReadSeekerCloser returns a ReadSeekerCloser if possible for a given Resource. +func contentReadSeekerCloser(r resource.Resource) (hugio.ReadSeekCloser, error) { + switch rr := r.(type) { + case resource.ReadSeekCloserResource: + rc, err := rr.ReadSeekCloser() + if err != nil { + return nil, err + } + return rc, nil + default: + return nil, fmt.Errorf("cannot transform content of Resource of type %T", r) + + } +} diff --git a/resources/transform_test.go b/resources/transform_test.go new file mode 100644 index 000000000..ed462cd2a --- /dev/null +++ b/resources/transform_test.go @@ -0,0 +1,36 @@ +// Copyright 2019 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+
+package resources
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/require"
+)
+
+type testStruct struct {
+	Name string
+	V1   int64
+	V2   int32
+	V3   int
+	V4   uint64
+}
+
+func TestResourceTransformationKey(t *testing.T) {
+	// We really need this key to be portable across OSes.
+	key := NewResourceTransformationKey("testing",
+		testStruct{Name: "test", V1: int64(10), V2: int32(20), V3: 30, V4: uint64(40)})
+	assert := require.New(t)
+	assert.Equal(key.key(), "testing_518996646957295636")
+}
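The expected string in the test above follows the "name_hash" shape produced by ResourceTransformationKey.key(). A minimal, standalone sketch of that derivation, assuming nothing beyond the hashstructure call the production code already makes:

package main

import (
	"fmt"

	"github.com/mitchellh/hashstructure"
)

// testStruct mirrors the struct used in TestResourceTransformationKey above.
type testStruct struct {
	Name string
	V1   int64
	V2   int32
	V3   int
	V4   uint64
}

func main() {
	// hashstructure.Hash produces the 64-bit FNV hash that key() relies on.
	h, err := hashstructure.Hash(testStruct{Name: "test", V1: 10, V2: 20, V3: 30, V4: 40}, nil)
	if err != nil {
		panic(err)
	}
	// key() is the transformation name plus one "_<hash>" per element, so this
	// should print "testing_518996646957295636" if it matches the test above.
	fmt.Printf("testing_%d\n", h)
}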
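For orientation on how a step plugs into the pipeline defined in transform.go, here is a minimal sketch of a hypothetical transformation (the "uppercase" name and struct are invented for illustration, not part of Hugo). A step only needs to implement Key and Transform, reading from ctx.From and writing to ctx.To:

package resources

import (
	"io/ioutil"
	"strings"
)

// upperCaseTransformation is a made-up example step.
type upperCaseTransformation struct{}

func (t upperCaseTransformation) Key() ResourceTransformationKey {
	// No configuration elements, so the key is just the name.
	return NewResourceTransformationKey("uppercase")
}

func (t upperCaseTransformation) Transform(ctx *ResourceTransformationCtx) error {
	// Tag the output path so it does not collide with the source,
	// e.g. "css/main.css" becomes "css/main.upper.css".
	ctx.AddOutPathIdentifier(".upper")

	b, err := ioutil.ReadAll(ctx.From)
	if err != nil {
		return err
	}
	_, err = ctx.To.Write([]byte(strings.ToUpper(string(b))))
	return err
}

A caller would then wrap a resource with it via Spec.Transform, e.g. transformed, err := spec.Transform(r, upperCaseTransformation{}), and the chain machinery above takes care of caching and publishing.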