Diffstat (limited to 'resources')
-rw-r--r--  resources/docs.go | 2
-rw-r--r--  resources/image.go | 61
-rw-r--r--  resources/image_cache.go | 185
-rw-r--r--  resources/image_extended_test.go | 2
-rw-r--r--  resources/image_test.go | 77
-rw-r--r--  resources/images/auto_orient.go | 2
-rw-r--r--  resources/images/exif/exif.go | 2
-rw-r--r--  resources/images/exif/exif_test.go | 15
-rw-r--r--  resources/images/image_resource.go | 2
-rw-r--r--  resources/images/opacity.go | 2
-rw-r--r--  resources/images/padding.go | 2
-rw-r--r--  resources/images/process.go | 2
-rw-r--r--  resources/integration_test.go | 4
-rw-r--r--  resources/internal/resourcepaths.go | 107
-rw-r--r--  resources/kinds/kinds.go | 23
-rw-r--r--  resources/kinds/kinds_test.go | 2
-rw-r--r--  resources/page/page.go | 47
-rw-r--r--  resources/page/page_generate/generate_page_wrappers.go | 96
-rw-r--r--  resources/page/page_lazy_contentprovider.go | 4
-rw-r--r--  resources/page/page_marshaljson.autogen.go | 180
-rw-r--r--  resources/page/page_matcher.go | 4
-rw-r--r--  resources/page/page_nop.go | 42
-rw-r--r--  resources/page/page_paths.go | 381
-rw-r--r--  resources/page/page_paths_test.go | 295
-rw-r--r--  resources/page/pagegroup.go | 4
-rw-r--r--  resources/page/pagemeta/page_frontmatter.go | 12
-rw-r--r--  resources/page/pagemeta/page_frontmatter_test.go | 69
-rw-r--r--  resources/page/pages.go | 4
-rw-r--r--  resources/page/pages_related.go | 3
-rw-r--r--  resources/page/pages_sort.go | 26
-rw-r--r--  resources/page/pages_sort_test.go | 3
-rw-r--r--  resources/page/permalinks.go | 14
-rw-r--r--  resources/page/permalinks_integration_test.go | 5
-rw-r--r--  resources/page/permalinks_test.go | 5
-rw-r--r--  resources/page/site.go | 50
-rw-r--r--  resources/page/siteidentities/identities.go | 34
-rw-r--r--  resources/page/taxonomy.go | 2
-rw-r--r--  resources/page/testhelpers_page_test.go | 38
-rw-r--r--  resources/page/testhelpers_test.go | 40
-rw-r--r--  resources/page/zero_file.autogen.go | 72
-rw-r--r--  resources/postpub/postpub.go | 2
-rw-r--r--  resources/resource.go | 626
-rw-r--r--  resources/resource/dates.go | 4
-rw-r--r--  resources/resource/resources.go | 25
-rw-r--r--  resources/resource/resourcetypes.go | 74
-rw-r--r--  resources/resource_cache.go | 242
-rw-r--r--  resources/resource_cache_test.go | 58
-rw-r--r--  resources/resource_factories/bundler/bundler.go | 10
-rw-r--r--  resources/resource_factories/create/create.go | 70
-rw-r--r--  resources/resource_factories/create/integration_test.go | 7
-rw-r--r--  resources/resource_factories/create/remote.go | 12
-rw-r--r--  resources/resource_metadata.go | 200
-rw-r--r--  resources/resource_spec.go | 285
-rw-r--r--  resources/resource_spec_test.go | 48
-rw-r--r--  resources/resource_test.go | 48
-rw-r--r--  resources/resource_transformers/babel/babel.go | 4
-rw-r--r--  resources/resource_transformers/htesting/testhelpers.go | 45
-rw-r--r--  resources/resource_transformers/integrity/integrity.go | 3
-rw-r--r--  resources/resource_transformers/integrity/integrity_test.go | 10
-rw-r--r--  resources/resource_transformers/js/build.go | 5
-rw-r--r--  resources/resource_transformers/js/integration_test.go | 8
-rw-r--r--  resources/resource_transformers/js/options.go | 10
-rw-r--r--  resources/resource_transformers/js/options_test.go | 24
-rw-r--r--  resources/resource_transformers/minifier/minify_test.go | 9
-rw-r--r--  resources/resource_transformers/postcss/integration_test.go | 10
-rw-r--r--  resources/resource_transformers/postcss/postcss.go | 42
-rw-r--r--  resources/resource_transformers/postcss/postcss_test.go | 7
-rw-r--r--  resources/resource_transformers/templates/execute_as_template.go | 3
-rw-r--r--  resources/resource_transformers/tocss/dartsass/client.go | 7
-rw-r--r--  resources/resource_transformers/tocss/dartsass/transform.go | 18
-rw-r--r--  resources/resource_transformers/tocss/internal/sass/helpers.go | 3
-rw-r--r--  resources/resource_transformers/tocss/internal/sass/helpers_test.go | 3
-rw-r--r--  resources/resource_transformers/tocss/scss/client.go | 5
-rw-r--r--  resources/resource_transformers/tocss/scss/tocss.go | 3
-rw-r--r--  resources/testhelpers_test.go | 63
-rw-r--r--  resources/transform.go | 159
-rw-r--r--  resources/transform_integration_test.go | 50
-rw-r--r--  resources/transform_test.go | 21
78 files changed, 1712 insertions, 2431 deletions
diff --git a/resources/docs.go b/resources/docs.go
index f992893da..16fe34027 100644
--- a/resources/docs.go
+++ b/resources/docs.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/resources/image.go b/resources/image.go
index 6c34795f8..2e351bd28 100644
--- a/resources/image.go
+++ b/resources/image.go
@@ -20,25 +20,23 @@ import (
"image/color"
"image/draw"
"image/gif"
- _ "image/gif"
_ "image/png"
"io"
"os"
- "path"
- "path/filepath"
"strings"
"sync"
color_extractor "github.com/marekm4/color-extractor"
+ "github.com/gohugoio/hugo/cache/filecache"
"github.com/gohugoio/hugo/common/hstrings"
"github.com/gohugoio/hugo/common/paths"
"github.com/gohugoio/hugo/identity"
"github.com/disintegration/gift"
- "github.com/gohugoio/hugo/cache/filecache"
"github.com/gohugoio/hugo/resources/images/exif"
+ "github.com/gohugoio/hugo/resources/internal"
"github.com/gohugoio/hugo/resources/resource"
@@ -50,9 +48,10 @@ import (
)
var (
- _ images.ImageResource = (*imageResource)(nil)
- _ resource.Source = (*imageResource)(nil)
- _ resource.Cloner = (*imageResource)(nil)
+ _ images.ImageResource = (*imageResource)(nil)
+ _ resource.Source = (*imageResource)(nil)
+ _ resource.Cloner = (*imageResource)(nil)
+ _ resource.NameOriginalProvider = (*imageResource)(nil)
)
// imageResource represents an image resource.
@@ -107,6 +106,7 @@ func (i *imageResource) getExif() *exif.ExifInfo {
}
create := func(info filecache.ItemInfo, w io.WriteCloser) (err error) {
+ defer w.Close()
f, err := i.root.ReadSeekCloser()
if err != nil {
i.metaInitErr = err
@@ -127,7 +127,7 @@ func (i *imageResource) getExif() *exif.ExifInfo {
return enc.Encode(i.meta)
}
- _, i.metaInitErr = i.getSpec().ImageCache.fileCache.ReadOrCreate(key, read, create)
+ _, i.metaInitErr = i.getSpec().ImageCache.fcache.ReadOrCreate(key, read, create)
})
if i.metaInitErr != nil {
@@ -369,17 +369,14 @@ func (i *imageResource) doWithImageConfig(conf images.ImageConfig, f func(src im
<-imageProcSem
}()
- errOp := conf.Action
- errPath := i.getSourceFilename()
-
src, err := i.DecodeImage()
if err != nil {
- return nil, nil, &os.PathError{Op: errOp, Path: errPath, Err: err}
+ return nil, nil, &os.PathError{Op: conf.Action, Path: i.TargetPath(), Err: err}
}
converted, err := f(src)
if err != nil {
- return nil, nil, &os.PathError{Op: errOp, Path: errPath, Err: err}
+ return nil, nil, &os.PathError{Op: conf.Action, Path: i.TargetPath(), Err: err}
}
hasAlpha := !images.IsOpaque(converted)
@@ -414,16 +411,15 @@ func (i *imageResource) doWithImageConfig(conf images.ImageConfig, f func(src im
}
ci := i.clone(converted)
- ci.setBasePath(conf)
+ targetPath := i.relTargetPathFromConfig(conf)
+ ci.setTargetPath(targetPath)
ci.Format = conf.TargetFormat
ci.setMediaType(conf.TargetFormat.MediaType())
return ci, converted, nil
})
if err != nil {
- if i.root != nil && i.root.getFileInfo() != nil {
- return nil, fmt.Errorf("image %q: %w", i.root.getFileInfo().Meta().Filename, err)
- }
+ return nil, err
}
return img, nil
}
@@ -474,32 +470,25 @@ func (i *imageResource) clone(img image.Image) *imageResource {
}
}
-func (i *imageResource) setBasePath(conf images.ImageConfig) {
- i.getResourcePaths().relTargetDirFile = i.relTargetPathFromConfig(conf)
-}
-
func (i *imageResource) getImageMetaCacheTargetPath() string {
const imageMetaVersionNumber = 1 // Increment to invalidate the meta cache
cfgHash := i.getSpec().imaging.Cfg.SourceHash
- df := i.getResourcePaths().relTargetDirFile
- if fi := i.getFileInfo(); fi != nil {
- df.dir = filepath.Dir(fi.Meta().Path)
- }
- p1, _ := paths.FileAndExt(df.file)
- h, _ := i.hash()
+ df := i.getResourcePaths()
+ p1, _ := paths.FileAndExt(df.File)
+ h := i.hash()
idStr := identity.HashString(h, i.size(), imageMetaVersionNumber, cfgHash)
- p := path.Join(df.dir, fmt.Sprintf("%s_%s.json", p1, idStr))
- return p
+ df.File = fmt.Sprintf("%s_%s.json", p1, idStr)
+ return df.TargetPath()
}
-func (i *imageResource) relTargetPathFromConfig(conf images.ImageConfig) dirFile {
- p1, p2 := paths.FileAndExt(i.getResourcePaths().relTargetDirFile.file)
+func (i *imageResource) relTargetPathFromConfig(conf images.ImageConfig) internal.ResourcePaths {
+ p1, p2 := paths.FileAndExt(i.getResourcePaths().File)
if conf.TargetFormat != i.Format {
p2 = conf.TargetFormat.DefaultExtension()
}
- h, _ := i.hash()
+ h := i.hash()
idStr := fmt.Sprintf("_hu%s_%d", h, i.size())
// Do not change for no good reason.
@@ -526,8 +515,8 @@ func (i *imageResource) relTargetPathFromConfig(conf images.ImageConfig) dirFile
idStr = ""
}
- return dirFile{
- dir: i.getResourcePaths().relTargetDirFile.dir,
- file: fmt.Sprintf("%s%s_%s%s", p1, idStr, key, p2),
- }
+ rp := i.getResourcePaths()
+ rp.File = fmt.Sprintf("%s%s_%s%s", p1, idStr, key, p2)
+
+ return rp
}
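
Note: the reworked relTargetPathFromConfig keeps the derived-image naming scheme that the golden permalinks later in this commit rely on: `<base>_hu<hash>_<size>_<operation-key><ext>`. A minimal, self-contained sketch of that shape (the hash, size and operation key are copied from the test permalinks below; the helper names are hypothetical, loosely mirroring paths.FileAndExt):

```go
package main

import (
	"fmt"
	"path"
	"strings"
)

// fileAndExt splits "sunset.jpg" into ("sunset", ".jpg").
func fileAndExt(name string) (string, string) {
	ext := path.Ext(name)
	return strings.TrimSuffix(name, ext), ext
}

func main() {
	base, ext := fileAndExt("sunset.jpg")
	hash := "59e56ffff1bc1d8d122b1403d34e039f" // content hash
	size := int64(90587)                       // source size in bytes
	opKey := "225x0_resize_linear"             // operation key

	// <base>_hu<hash>_<size>_<operation-key><ext>
	fmt.Printf("%s_hu%s_%d_%s%s\n", base, hash, size, opKey, ext)
	// sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_225x0_resize_linear.jpg
}
```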
diff --git a/resources/image_cache.go b/resources/image_cache.go
index f416f0230..f9770ffc1 100644
--- a/resources/image_cache.go
+++ b/resources/image_cache.go
@@ -16,12 +16,11 @@ package resources
import (
"image"
"io"
- "path/filepath"
- "strings"
- "sync"
+ "github.com/gohugoio/hugo/common/hugio"
"github.com/gohugoio/hugo/resources/images"
+ "github.com/gohugoio/hugo/cache/dynacache"
"github.com/gohugoio/hugo/cache/filecache"
"github.com/gohugoio/hugo/helpers"
)
@@ -30,132 +29,88 @@ import (
type ImageCache struct {
pathSpec *helpers.PathSpec
- fileCache *filecache.Cache
-
- *imageCacheStore
-}
-
-type imageCacheStore struct {
- mu sync.RWMutex
- store map[string]*resourceAdapter
-}
-
-// WithPathSpec returns a copy of the ImageCache with the given PathSpec set.
-func (c ImageCache) WithPathSpec(ps *helpers.PathSpec) *ImageCache {
- c.pathSpec = ps
- return &c
-}
-
-func (c *ImageCache) deleteIfContains(s string) {
- c.mu.Lock()
- defer c.mu.Unlock()
- s = c.normalizeKeyBase(s)
- for k := range c.store {
- if strings.Contains(k, s) {
- delete(c.store, k)
- }
- }
-}
-
-// The cache key is a lowercase path with Unix style slashes and it always starts with
-// a leading slash.
-func (c *ImageCache) normalizeKey(key string) string {
- return "/" + c.normalizeKeyBase(key)
-}
-
-func (c *ImageCache) normalizeKeyBase(key string) string {
- return strings.Trim(strings.ToLower(filepath.ToSlash(key)), "/")
-}
-
-func (c *ImageCache) clear() {
- c.mu.Lock()
- defer c.mu.Unlock()
- c.store = make(map[string]*resourceAdapter)
+ fcache *filecache.Cache
+ mcache *dynacache.Partition[string, *resourceAdapter]
}
func (c *ImageCache) getOrCreate(
parent *imageResource, conf images.ImageConfig,
- createImage func() (*imageResource, image.Image, error)) (*resourceAdapter, error) {
+ createImage func() (*imageResource, image.Image, error),
+) (*resourceAdapter, error) {
relTarget := parent.relTargetPathFromConfig(conf)
- memKey := parent.relTargetPathForRel(relTarget.path(), false, false, false)
- memKey = c.normalizeKey(memKey)
-
- // For the file cache we want to generate and store it once if possible.
- fileKeyPath := relTarget
- if fi := parent.root.getFileInfo(); fi != nil {
- fileKeyPath.dir = filepath.ToSlash(filepath.Dir(fi.Meta().Path))
- }
- fileKey := fileKeyPath.path()
-
- // First check the in-memory store, then the disk.
- c.mu.RLock()
- cachedImage, found := c.store[memKey]
- c.mu.RUnlock()
-
- if found {
- return cachedImage, nil
- }
-
- var img *imageResource
+ relTargetPath := relTarget.TargetPath()
+ memKey := dynacache.CleanKey(relTargetPath)
+
+ v, err := c.mcache.GetOrCreate(memKey, func(key string) (*resourceAdapter, error) {
+ var img *imageResource
+
+ // These funcs are protected by a named lock.
+ // read clones the parent to its new name and copies
+ // the content to the destinations.
+ read := func(info filecache.ItemInfo, r io.ReadSeeker) error {
+ img = parent.clone(nil)
+ targetPath := img.getResourcePaths()
+ targetPath.File = relTarget.File
+ img.setTargetPath(targetPath)
+ img.setOpenSource(func() (hugio.ReadSeekCloser, error) {
+ return c.fcache.Fs.Open(info.Name)
+ })
+ img.setSourceFilenameIsHash(true)
+ img.setMediaType(conf.TargetFormat.MediaType())
+
+ if err := img.InitConfig(r); err != nil {
+ return err
+ }
+
+ return nil
+ }
- // These funcs are protected by a named lock.
- // read clones the parent to its new name and copies
- // the content to the destinations.
- read := func(info filecache.ItemInfo, r io.ReadSeeker) error {
- img = parent.clone(nil)
- rp := img.getResourcePaths()
- rp.relTargetDirFile.file = relTarget.file
- img.setSourceFilename(info.Name)
- img.setSourceFilenameIsHash(true)
- img.setMediaType(conf.TargetFormat.MediaType())
+ // create creates the image and encodes it to the cache (w).
+ create := func(info filecache.ItemInfo, w io.WriteCloser) (err error) {
+ defer w.Close()
+
+ var conv image.Image
+ img, conv, err = createImage()
+ if err != nil {
+ return
+ }
+ targetPath := img.getResourcePaths()
+ targetPath.File = relTarget.File
+ img.setTargetPath(targetPath)
+ img.setOpenSource(func() (hugio.ReadSeekCloser, error) {
+ return c.fcache.Fs.Open(info.Name)
+ })
+ return img.EncodeTo(conf, conv, w)
+ }
- return img.InitConfig(r)
- }
+ // Now look in the file cache.
- // create creates the image and encodes it to the cache (w).
- create := func(info filecache.ItemInfo, w io.WriteCloser) (err error) {
- defer w.Close()
+ // The definition of this counter is not that we have processed that amount
+ // (e.g. resized etc.), it can be fetched from file cache,
+ // but the count of processed image variations for this site.
+ c.pathSpec.ProcessingStats.Incr(&c.pathSpec.ProcessingStats.ProcessedImages)
- var conv image.Image
- img, conv, err = createImage()
+ _, err := c.fcache.ReadOrCreate(relTargetPath, read, create)
if err != nil {
- return
+ return nil, err
}
- rp := img.getResourcePaths()
- rp.relTargetDirFile.file = relTarget.file
- img.setSourceFilename(info.Name)
-
- return img.EncodeTo(conf, conv, w)
- }
-
- // Now look in the file cache.
-
- // The definition of this counter is not that we have processed that amount
- // (e.g. resized etc.), it can be fetched from file cache,
- // but the count of processed image variations for this site.
- c.pathSpec.ProcessingStats.Incr(&c.pathSpec.ProcessingStats.ProcessedImages)
-
- _, err := c.fileCache.ReadOrCreate(fileKey, read, create)
- if err != nil {
- return nil, err
- }
-
- // The file is now stored in this cache.
- img.setSourceFs(c.fileCache.Fs)
- c.mu.Lock()
- if cachedImage, found = c.store[memKey]; found {
- c.mu.Unlock()
- return cachedImage, nil
- }
+ imgAdapter := newResourceAdapter(parent.getSpec(), true, img)
- imgAdapter := newResourceAdapter(parent.getSpec(), true, img)
- c.store[memKey] = imgAdapter
- c.mu.Unlock()
+ return imgAdapter, nil
+ })
- return imgAdapter, nil
+ return v, err
}
-func newImageCache(fileCache *filecache.Cache, ps *helpers.PathSpec) *ImageCache {
- return &ImageCache{fileCache: fileCache, pathSpec: ps, imageCacheStore: &imageCacheStore{store: make(map[string]*resourceAdapter)}}
+func newImageCache(fileCache *filecache.Cache, memCache *dynacache.Cache, ps *helpers.PathSpec) *ImageCache {
+ return &ImageCache{
+ fcache: fileCache,
+ mcache: dynacache.GetOrCreatePartition[string, *resourceAdapter](
+ memCache,
+ "/imgs",
+ dynacache.OptionsPartition{ClearWhen: dynacache.ClearOnChange, Weight: 70},
+ ),
+ pathSpec: ps,
+ }
}
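
For readers unfamiliar with dynacache: the new getOrCreate above is a two-level lookup, an in-memory partition keyed on the cleaned target path first, falling back to the file cache (fcache.ReadOrCreate) on a miss. A rough, self-contained sketch of that memoization contract, using a single mutex and a plain map instead of dynacache's partitioned, weighted cache (all names hypothetical):

```go
package main

import (
	"fmt"
	"sync"
)

// memoCache is a minimal stand-in for a memory-cache partition:
// the create func runs only on a miss, and the result is stored.
type memoCache[K comparable, V any] struct {
	mu    sync.Mutex
	store map[K]V
}

func (c *memoCache[K, V]) GetOrCreate(key K, create func(K) (V, error)) (V, error) {
	c.mu.Lock()
	defer c.mu.Unlock()
	if v, ok := c.store[key]; ok {
		return v, nil
	}
	v, err := create(key)
	if err != nil {
		var zero V
		return zero, err
	}
	c.store[key] = v
	return v, nil
}

func main() {
	c := &memoCache[string, string]{store: map[string]string{}}
	v, _ := c.GetOrCreate("/imgs/sunset_x200.jpg", func(k string) (string, error) {
		// On a miss this is where the image cache falls through to the
		// file cache and, if that also misses, processes the image.
		return "processed " + k, nil
	})
	fmt.Println(v) // processed /imgs/sunset_x200.jpg
}
```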
diff --git a/resources/image_extended_test.go b/resources/image_extended_test.go
index 4da603fc4..429e51fb6 100644
--- a/resources/image_extended_test.go
+++ b/resources/image_extended_test.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/resources/image_test.go b/resources/image_test.go
index 96cc07b3b..44861d629 100644
--- a/resources/image_test.go
+++ b/resources/image_test.go
@@ -22,7 +22,6 @@ import (
"math/big"
"math/rand"
"os"
- "path"
"path/filepath"
"runtime"
"strconv"
@@ -31,7 +30,6 @@ import (
"testing"
"time"
- "github.com/gohugoio/hugo/resources"
"github.com/gohugoio/hugo/resources/images/webp"
"github.com/gohugoio/hugo/common/paths"
@@ -80,8 +78,7 @@ var eq = qt.CmpEquals(
func TestImageTransformBasic(t *testing.T) {
c := qt.New(t)
- spec, image := fetchSunset(c)
- fileCache := spec.FileCaches.ImageCache().Fs
+ _, image := fetchSunset(c)
assertWidthHeight := func(img images.ImageResource, w, h int) {
assertWidthHeight(c, img, w, h)
@@ -104,12 +101,10 @@ func TestImageTransformBasic(t *testing.T) {
resized0x, err := image.Resize("x200")
c.Assert(err, qt.IsNil)
assertWidthHeight(resized0x, 320, 200)
- assertFileCache(c, fileCache, path.Base(resized0x.RelPermalink()), 320, 200)
resizedx0, err := image.Resize("200x")
c.Assert(err, qt.IsNil)
assertWidthHeight(resizedx0, 200, 125)
- assertFileCache(c, fileCache, path.Base(resizedx0.RelPermalink()), 200, 125)
resizedAndRotated, err := image.Resize("x200 r90")
c.Assert(err, qt.IsNil)
@@ -203,8 +198,7 @@ func TestImageProcess(t *testing.T) {
func TestImageTransformFormat(t *testing.T) {
c := qt.New(t)
- spec, image := fetchSunset(c)
- fileCache := spec.FileCaches.ImageCache().Fs
+ _, image := fetchSunset(c)
assertExtWidthHeight := func(img images.ImageResource, ext string, w, h int) {
c.Helper()
@@ -226,8 +220,6 @@ func TestImageTransformFormat(t *testing.T) {
c.Assert(imagePng.Name(), qt.Equals, "sunset.jpg")
c.Assert(imagePng.MediaType().String(), qt.Equals, "image/png")
- assertFileCache(c, fileCache, path.Base(imagePng.RelPermalink()), 450, 281)
-
imageGif, err := image.Resize("225x gif")
c.Assert(err, qt.IsNil)
c.Assert(imageGif.RelPermalink(), qt.Equals, "/a/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_225x0_resize_linear.gif")
@@ -235,8 +227,6 @@ func TestImageTransformFormat(t *testing.T) {
assertExtWidthHeight(imageGif, ".gif", 225, 141)
c.Assert(imageGif.Name(), qt.Equals, "sunset.jpg")
c.Assert(imageGif.MediaType().String(), qt.Equals, "image/gif")
-
- assertFileCache(c, fileCache, path.Base(imageGif.RelPermalink()), 225, 141)
}
// https://github.com/gohugoio/hugo/issues/5730
@@ -275,7 +265,7 @@ func TestImagePermalinkPublishOrder(t *testing.T) {
resized, err := original.Resize("100x50")
c.Assert(err, qt.IsNil)
- check1(resized.(images.ImageResource))
+ check1(resized)
if !checkOriginalFirst {
check2(original)
@@ -386,27 +376,6 @@ func TestImageTransformConcurrent(t *testing.T) {
wg.Wait()
}
-func TestImageWithMetadata(t *testing.T) {
- c := qt.New(t)
-
- _, image := fetchSunset(c)
-
- meta := []map[string]any{
- {
- "title": "My Sunset",
- "name": "Sunset #:counter",
- "src": "*.jpg",
- },
- }
-
- c.Assert(resources.AssignMetadata(meta, image), qt.IsNil)
- c.Assert(image.Name(), qt.Equals, "Sunset #1")
-
- resized, err := image.Resize("200x")
- c.Assert(err, qt.IsNil)
- c.Assert(resized.Name(), qt.Equals, "Sunset #1")
-}
-
func TestImageResize8BitPNG(t *testing.T) {
c := qt.New(t)
@@ -424,38 +393,6 @@ func TestImageResize8BitPNG(t *testing.T) {
c.Assert(resized.Width(), qt.Equals, 800)
}
-func TestImageResizeInSubPath(t *testing.T) {
- c := qt.New(t)
-
- spec, image := fetchImage(c, "sub/gohugoio2.png")
-
- c.Assert(image.MediaType(), eq, media.Builtin.PNGType)
- c.Assert(image.RelPermalink(), qt.Equals, "/a/sub/gohugoio2.png")
- c.Assert(image.ResourceType(), qt.Equals, "image")
- c.Assert(image.Exif(), qt.IsNil)
-
- resized, err := image.Resize("101x101")
- c.Assert(err, qt.IsNil)
- c.Assert(resized.MediaType().Type, qt.Equals, "image/png")
- c.Assert(resized.RelPermalink(), qt.Equals, "/a/sub/gohugoio2_hu0e1b9e4a4be4d6f86c7b37b9ccce3fbc_73886_101x101_resize_linear_3.png")
- c.Assert(resized.Width(), qt.Equals, 101)
- c.Assert(resized.Exif(), qt.IsNil)
-
- publishedImageFilename := filepath.Clean(resized.RelPermalink())
-
- assertImageFile(c, spec.BaseFs.PublishFs, publishedImageFilename, 101, 101)
- c.Assert(spec.BaseFs.PublishFs.Remove(publishedImageFilename), qt.IsNil)
-
- // Clear mem cache to simulate reading from the file cache.
- spec.ClearCaches()
-
- resizedAgain, err := image.Resize("101x101")
- c.Assert(err, qt.IsNil)
- c.Assert(resizedAgain.RelPermalink(), qt.Equals, "/a/sub/gohugoio2_hu0e1b9e4a4be4d6f86c7b37b9ccce3fbc_73886_101x101_resize_linear_3.png")
- c.Assert(resizedAgain.Width(), qt.Equals, 101)
- assertImageFile(c, spec.BaseFs.PublishFs, publishedImageFilename, 101, 101)
-}
-
func TestSVGImage(t *testing.T) {
c := qt.New(t)
spec := newTestResourceSpec(specDescriptor{c: c})
@@ -640,7 +577,7 @@ func TestImageOperationsGoldenWebp(t *testing.T) {
return
}
- dir1 := filepath.Join(workDir, "resources/_gen/images")
+ dir1 := filepath.Join(workDir, "resources/_gen/images/a")
dir2 := filepath.FromSlash("testdata/golden_webp")
assetGoldenDirs(c, dir1, dir2)
@@ -694,8 +631,10 @@ func TestImageOperationsGolden(t *testing.T) {
opacity30, err := orig.Filter(f.Opacity(30))
c.Assert(err, qt.IsNil)
overlay, err := sunset.Filter(f.Overlay(opacity30.(images.ImageSource), 20, 20))
+ c.Assert(err, qt.IsNil)
rel := overlay.RelPermalink()
c.Assert(rel, qt.Not(qt.Equals), "")
+
}
// A simple Gif file (no animation).
@@ -782,7 +721,7 @@ func TestImageOperationsGolden(t *testing.T) {
return
}
- dir1 := filepath.Join(workDir, "resources/_gen/images")
+ dir1 := filepath.Join(workDir, "resources/_gen/images/a/")
dir2 := filepath.FromSlash("testdata/golden")
assetGoldenDirs(c, dir1, dir2)
@@ -798,7 +737,7 @@ func assetGoldenDirs(c *qt.C, dir1, dir2 string) {
for i, fi1 := range dirinfos1 {
fi2 := dirinfos2[i]
- c.Assert(fi1.Name(), qt.Equals, fi2.Name())
+ c.Assert(fi1.Name(), qt.Equals, fi2.Name(), qt.Commentf("i=%d", i))
f1, err := os.Open(filepath.Join(dir1, fi1.Name()))
c.Assert(err, qt.IsNil)
diff --git a/resources/images/auto_orient.go b/resources/images/auto_orient.go
index 194efefb5..ed86979e1 100644
--- a/resources/images/auto_orient.go
+++ b/resources/images/auto_orient.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/resources/images/exif/exif.go b/resources/images/exif/exif.go
index 90198eeed..af92366ca 100644
--- a/resources/images/exif/exif.go
+++ b/resources/images/exif/exif.go
@@ -117,7 +117,7 @@ func NewDecoder(options ...func(*Decoder) error) (*Decoder, error) {
func (d *Decoder) Decode(r io.Reader) (ex *ExifInfo, err error) {
defer func() {
if r := recover(); r != nil {
- err = fmt.Errorf("Exif failed: %v", r)
+ err = fmt.Errorf("exif failed: %v", r)
}
}()
diff --git a/resources/images/exif/exif_test.go b/resources/images/exif/exif_test.go
index 821367550..64c5a39e3 100644
--- a/resources/images/exif/exif_test.go
+++ b/resources/images/exif/exif_test.go
@@ -58,6 +58,7 @@ func TestExif(t *testing.T) {
c.Assert(err, qt.IsNil)
x2 := &ExifInfo{}
err = json.Unmarshal(data, x2)
+ c.Assert(err, qt.IsNil)
c.Assert(x2, eq, x)
}
@@ -135,7 +136,6 @@ var eq = qt.CmpEquals(
)
func TestIssue10738(t *testing.T) {
-
c := qt.New(t)
testFunc := func(path, include string) any {
@@ -153,6 +153,7 @@ func TestIssue10738(t *testing.T) {
c.Assert(err, qt.IsNil)
x2 := &ExifInfo{}
err = json.Unmarshal(data, x2)
+ c.Assert(err, qt.IsNil)
c.Assert(x2, eq, x)
@@ -300,15 +301,13 @@ func TestIssue10738(t *testing.T) {
for _, tt := range tests {
c.Run(tt.name, func(c *qt.C) {
got := testFunc(tt.args.path, tt.args.include)
- switch got.(type) {
+ switch v := got.(type) {
case float64:
- eTime, ok := got.(float64)
- c.Assert(ok, qt.Equals, true)
- c.Assert(eTime, qt.Equals, float64(tt.want.vN))
+ c.Assert(v, qt.Equals, float64(tt.want.vN))
case *big.Rat:
- eTime, ok := got.(*big.Rat)
- c.Assert(ok, qt.Equals, true)
- c.Assert(eTime, eq, big.NewRat(tt.want.vN, tt.want.vD))
+ c.Assert(v, eq, big.NewRat(tt.want.vN, tt.want.vD))
+ default:
+ c.Fatalf("unexpected type: %T", got)
}
})
}
diff --git a/resources/images/image_resource.go b/resources/images/image_resource.go
index be40418b1..e6be757c2 100644
--- a/resources/images/image_resource.go
+++ b/resources/images/image_resource.go
@@ -1,4 +1,4 @@
-// Copyright 2022 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/resources/images/opacity.go b/resources/images/opacity.go
index 4b60e30a4..482476c5b 100644
--- a/resources/images/opacity.go
+++ b/resources/images/opacity.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/resources/images/padding.go b/resources/images/padding.go
index 153d0bd82..4399312f8 100644
--- a/resources/images/padding.go
+++ b/resources/images/padding.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/resources/images/process.go b/resources/images/process.go
index 984ac3c8f..fb2e995ce 100644
--- a/resources/images/process.go
+++ b/resources/images/process.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/resources/integration_test.go b/resources/integration_test.go
index 51a003625..9540b0976 100644
--- a/resources/integration_test.go
+++ b/resources/integration_test.go
@@ -1,4 +1,4 @@
-// Copyright 2022 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -69,7 +69,7 @@ anigif: {{ $anigif.RelPermalink }}|{{ $anigif.Width }}|{{ $anigif.Height }}|{{ $
assertImages()
- b.EditFileReplace("content/mybundle/index.md", func(s string) string { return strings.ReplaceAll(s, "Bundle", "BUNDLE") })
+ b.EditFileReplaceFunc("content/mybundle/index.md", func(s string) string { return strings.ReplaceAll(s, "Bundle", "BUNDLE") })
b.Build()
assertImages()
diff --git a/resources/internal/resourcepaths.go b/resources/internal/resourcepaths.go
new file mode 100644
index 000000000..21c65e2ca
--- /dev/null
+++ b/resources/internal/resourcepaths.go
@@ -0,0 +1,107 @@
+// Copyright 2024 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package internal
+
+import (
+ "path"
+ "path/filepath"
+ "strings"
+
+ "github.com/gohugoio/hugo/common/paths"
+)
+
+// ResourcePaths holds path information for a resource.
+// All directories in here have Unix-style slashes, with leading slash, but no trailing slash.
+// Empty directories are represented with an empty string.
+type ResourcePaths struct {
+ // This is the directory component for the target file or link.
+ Dir string
+
+ // Any base directory for the target file. Will be prepended to Dir.
+ BaseDirTarget string
+
+ // This is the directory component for the link; it will be prepended to Dir.
+ BaseDirLink string
+
+ // Set when publishing in a multihost setup.
+ TargetBasePaths []string
+
+ // This is the File component, e.g. "data.json".
+ File string
+}
+
+func (d ResourcePaths) join(p ...string) string {
+ var s string
+ for i, pp := range p {
+ if pp == "" {
+ continue
+ }
+ if i > 0 && !strings.HasPrefix(pp, "/") {
+ pp = "/" + pp
+ }
+ s += pp
+
+ }
+ if !strings.HasPrefix(s, "/") {
+ s = "/" + s
+ }
+ return s
+}
+
+func (d ResourcePaths) TargetLink() string {
+ return d.join(d.BaseDirLink, d.Dir, d.File)
+}
+
+func (d ResourcePaths) TargetPath() string {
+ return d.join(d.BaseDirTarget, d.Dir, d.File)
+}
+
+func (d ResourcePaths) Path() string {
+ return d.join(d.Dir, d.File)
+}
+
+func (d ResourcePaths) TargetPaths() []string {
+ if len(d.TargetBasePaths) == 0 {
+ return []string{d.TargetPath()}
+ }
+
+ var paths []string
+ for _, p := range d.TargetBasePaths {
+ paths = append(paths, p+d.TargetPath())
+ }
+ return paths
+}
+
+func (d ResourcePaths) TargetFilenames() []string {
+ filenames := d.TargetPaths()
+ for i, p := range filenames {
+ filenames[i] = filepath.FromSlash(p)
+ }
+ return filenames
+}
+
+func (d ResourcePaths) FromTargetPath(targetPath string) ResourcePaths {
+ targetPath = filepath.ToSlash(targetPath)
+ dir, file := path.Split(targetPath)
+ dir = paths.ToSlashPreserveLeading(dir)
+ if dir == "/" {
+ dir = ""
+ }
+ d.Dir = dir
+ d.File = file
+ d.BaseDirLink = ""
+ d.BaseDirTarget = ""
+
+ return d
+}
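
A small illustration of the joining rule defined above (restated outside the package, since resources/internal cannot be imported; the example values are hypothetical):

```go
package main

import (
	"fmt"
	"strings"
)

// join restates ResourcePaths.join: skip empty components, give every
// component after the first a leading slash, and force a leading slash
// on the result.
func join(parts ...string) string {
	var s string
	for i, p := range parts {
		if p == "" {
			continue
		}
		if i > 0 && !strings.HasPrefix(p, "/") {
			p = "/" + p
		}
		s += p
	}
	if !strings.HasPrefix(s, "/") {
		s = "/" + s
	}
	return s
}

func main() {
	// TargetPath with BaseDirTarget="/docs", Dir="/blog", File="data.json":
	fmt.Println(join("/docs", "/blog", "data.json")) // /docs/blog/data.json

	// TargetLink with an empty BaseDirLink:
	fmt.Println(join("", "/blog", "data.json")) // /blog/data.json
}
```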
diff --git a/resources/kinds/kinds.go b/resources/kinds/kinds.go
index b035cdd29..2660ec719 100644
--- a/resources/kinds/kinds.go
+++ b/resources/kinds/kinds.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -34,10 +34,11 @@ const (
// The following are (currently) temporary nodes,
// i.e. nodes we create just to render in isolation.
- KindRSS = "rss"
- KindSitemap = "sitemap"
- KindRobotsTXT = "robotstxt"
- Kind404 = "404"
+ KindRSS = "rss"
+ KindSitemap = "sitemap"
+ KindSitemapIndex = "sitemapindex"
+ KindRobotsTXT = "robotstxt"
+ KindStatus404 = "404"
)
var (
@@ -77,7 +78,7 @@ var kindMapTemporary = map[string]string{
KindRSS: KindRSS,
KindSitemap: KindSitemap,
KindRobotsTXT: KindRobotsTXT,
- Kind404: Kind404,
+ KindStatus404: KindStatus404,
}
// GetKindMain gets the page kind given a string, empty if not found.
@@ -94,6 +95,16 @@ func GetKindAny(s string) string {
return kindMapTemporary[strings.ToLower(s)]
}
+// IsBranch returns whether the given kind is a branch node.
+func IsBranch(kind string) bool {
+ switch kind {
+ case KindHome, KindSection, KindTaxonomy, KindTerm:
+ return true
+ default:
+ return false
+ }
+}
+
// IsDeprecatedAndReplacedWith returns the new kind if the given kind is deprecated.
func IsDeprecatedAndReplacedWith(s string) string {
s = strings.ToLower(s)
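
For reference, IsBranch treats the list-like kinds as branch nodes and everything else (regular pages and the temporary kinds) as leaves. A tiny stand-alone sketch, using the well-known kind strings instead of the kinds constants:

```go
package main

import "fmt"

// isBranch mirrors kinds.IsBranch: home, section, taxonomy and term
// pages are branch nodes; regular pages and temporary kinds are not.
func isBranch(kind string) bool {
	switch kind {
	case "home", "section", "taxonomy", "term":
		return true
	default:
		return false
	}
}

func main() {
	for _, k := range []string{"home", "section", "page", "404", "sitemapindex"} {
		fmt.Printf("%-12s branch=%t\n", k, isBranch(k))
	}
}
```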
diff --git a/resources/kinds/kinds_test.go b/resources/kinds/kinds_test.go
index c2868d617..a0fe42ff8 100644
--- a/resources/kinds/kinds_test.go
+++ b/resources/kinds/kinds_test.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/resources/page/page.go b/resources/page/page.go
index b5af489f1..56ba04d74 100644
--- a/resources/page/page.go
+++ b/resources/page/page.go
@@ -19,16 +19,14 @@ import (
"context"
"html/template"
- "github.com/gohugoio/hugo/identity"
"github.com/gohugoio/hugo/markup/converter"
"github.com/gohugoio/hugo/markup/tableofcontents"
"github.com/gohugoio/hugo/config"
- "github.com/gohugoio/hugo/tpl"
"github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/common/paths"
"github.com/gohugoio/hugo/compare"
- "github.com/gohugoio/hugo/hugofs/files"
"github.com/gohugoio/hugo/navigation"
"github.com/gohugoio/hugo/related"
@@ -122,7 +120,7 @@ type ContentRenderer interface {
type FileProvider interface {
// File returns the source file for this Page,
// or a zero File if this Page is not backed by a file.
- File() source.File
+ File() *source.File
}
// GetPageProvider provides the GetPage method.
@@ -133,9 +131,6 @@ type GetPageProvider interface {
// This will return nil when no page could be found, and will return
// an error if the ref is ambiguous.
GetPage(ref string) (Page, error)
-
- // GetPageWithTemplateInfo is for internal use only.
- GetPageWithTemplateInfo(info tpl.Info, ref string) (Page, error)
}
// GitInfoProvider provides Git info.
@@ -166,6 +161,12 @@ type OutputFormatsProvider interface {
OutputFormats() OutputFormats
}
+// PageProvider provides access to a Page.
+// Implemented by shortcodes and others.
+type PageProvider interface {
+ Page() Page
+}
+
// Page is the core interface in Hugo and what you get as the top level data context in your templates.
type Page interface {
ContentProvider
@@ -175,7 +176,7 @@ type Page interface {
type PageFragment interface {
resource.ResourceLinksProvider
- resource.ResourceMetaProvider
+ resource.ResourceNameTitleProvider
}
// PageMetaProvider provides page metadata, typically provided via front matter.
@@ -187,7 +188,7 @@ type PageMetaProvider interface {
Aliases() []string
// BundleType returns the bundle type: `leaf`, `branch` or an empty string.
- BundleType() files.ContentClass
+ BundleType() string
// A configured description.
Description() string
@@ -224,9 +225,8 @@ type PageMetaProvider interface {
// to the source of this Page. It will be relative to any content root.
Path() string
- // This is just a temporary bridge method. Use Path in templates.
- // Pathc is for internal usage only.
- Pathc() string
+ // This is for internal use only.
+ PathInfo() *paths.Path
// The slug, typically defined in front matter.
Slug() string
@@ -240,13 +240,6 @@ type PageMetaProvider interface {
// Section returns the first path element below the content root.
Section() string
- // Returns a slice of sections (directories if it's a file) to this
- // Page.
- SectionsEntries() []string
-
- // SectionsPath is SectionsEntries joined with a /.
- SectionsPath() string
-
// Sitemap returns the sitemap configuration for this page.
// This is for internal use only.
Sitemap() config.SitemapConfig
@@ -332,9 +325,6 @@ type PageWithoutContent interface {
// e.g. GetTerms("categories")
GetTerms(taxonomy string) Pages
- // Used in change/dependency tracking.
- identity.Provider
-
// HeadingsFiltered returns the headings for this page when a filter is set.
// This is currently only triggered with the Related content feature
// and the "fragments" type of index.
@@ -430,7 +420,7 @@ type TranslationsProvider interface {
type TreeProvider interface {
// IsAncestor returns whether the current page is an ancestor of other.
// Note that this method is not relevant for taxonomy lists and taxonomy terms pages.
- IsAncestor(other any) (bool, error)
+ IsAncestor(other any) bool
// CurrentSection returns the page's current section or the page itself if home or a section.
// Note that this will return nil for pages that is not regular, home or section pages.
@@ -438,7 +428,7 @@ type TreeProvider interface {
// IsDescendant returns whether the current page is a descendant of other.
// Note that this method is not relevant for taxonomy lists and taxonomy terms pages.
- IsDescendant(other any) (bool, error)
+ IsDescendant(other any) bool
// FirstSection returns the section on level 1 below home, e.g. "/docs".
// For the home page, this will return itself.
@@ -447,7 +437,7 @@ type TreeProvider interface {
// InSection returns whether other is in the current section.
// Note that this will always return false for pages that are
// not either regular, home or section pages.
- InSection(other any) (bool, error)
+ InSection(other any) bool
// Parent returns a section's parent section or a page's section.
// To get a section's subsections, see Page's Sections method.
@@ -463,6 +453,13 @@ type TreeProvider interface {
// Page returns a reference to the Page itself, kept here mostly
// for legacy reasons.
Page() Page
+
+ // Returns a slice of sections (directories if it's a file) to this
+ // Page.
+ SectionsEntries() []string
+
+ // SectionsPath is SectionsEntries joined with a /.
+ SectionsPath() string
}
// PageWithContext is a Page with a context.Context.
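
The TreeProvider changes above drop the error return from InSection, IsAncestor and IsDescendant. A hedged sketch of what that means for a caller; the fakePage type and treeProvider interface are purely illustrative, not part of Hugo:

```go
package main

import "fmt"

// treeProvider is a trimmed stand-in for the relevant slice of page.TreeProvider.
type treeProvider interface {
	InSection(other any) bool
	IsAncestor(other any) bool
}

type fakePage struct{ section string }

func (p fakePage) InSection(other any) bool {
	o, ok := other.(fakePage)
	return ok && o.section == p.section
}

func (p fakePage) IsAncestor(other any) bool { return false }

func main() {
	a := fakePage{section: "docs"}
	b := fakePage{section: "docs"}
	var tp treeProvider = a
	fmt.Println(tp.InSection(b)) // true, and no error value left to check
}
```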
diff --git a/resources/page/page_generate/generate_page_wrappers.go b/resources/page/page_generate/generate_page_wrappers.go
index 2449cf28d..d720b8a42 100644
--- a/resources/page/page_generate/generate_page_wrappers.go
+++ b/resources/page/page_generate/generate_page_wrappers.go
@@ -14,19 +14,14 @@
package page_generate
import (
- "bytes"
"errors"
"fmt"
"os"
"path/filepath"
"reflect"
- "github.com/gohugoio/hugo/common/maps"
-
"github.com/gohugoio/hugo/codegen"
"github.com/gohugoio/hugo/resources/page"
- "github.com/gohugoio/hugo/resources/resource"
- "github.com/gohugoio/hugo/source"
)
const header = `// Copyright 2019 The Hugo Authors. All rights reserved.
@@ -46,7 +41,7 @@ const header = `// Copyright 2019 The Hugo Authors. All rights reserved.
`
var (
- pageInterface = reflect.TypeOf((*page.Page)(nil)).Elem()
+ pageInterface = reflect.TypeOf((*page.PageMetaProvider)(nil)).Elem()
packageDir = filepath.FromSlash("resources/page")
)
@@ -56,10 +51,6 @@ func Generate(c *codegen.Inspector) error {
return fmt.Errorf("failed to generate JSON marshaler: %w", err)
}
- if err := generateFileIsZeroWrappers(c); err != nil {
- return fmt.Errorf("failed to generate file wrappers: %w", err)
- }
-
return nil
}
@@ -73,25 +64,7 @@ func generateMarshalJSON(c *codegen.Inspector) error {
includes := []reflect.Type{pageInterface}
- // Exclude these methods
- excludes := []reflect.Type{
- // Leave this out for now. We need to revisit the author issue.
- reflect.TypeOf((*page.AuthorProvider)(nil)).Elem(),
-
- reflect.TypeOf((*resource.ErrProvider)(nil)).Elem(),
-
- // navigation.PageMenus
-
- // Prevent loops.
- reflect.TypeOf((*page.SitesProvider)(nil)).Elem(),
- reflect.TypeOf((*page.Positioner)(nil)).Elem(),
-
- reflect.TypeOf((*page.ChildCareProvider)(nil)).Elem(),
- reflect.TypeOf((*page.TreeProvider)(nil)).Elem(),
- reflect.TypeOf((*page.InSectionPositioner)(nil)).Elem(),
- reflect.TypeOf((*page.PaginatorProvider)(nil)).Elem(),
- reflect.TypeOf((*maps.Scratcher)(nil)).Elem(),
- }
+ excludes := []reflect.Type{}
methods := c.MethodsFromTypes(
includes,
@@ -123,71 +96,6 @@ package page
return nil
}
-func generateFileIsZeroWrappers(c *codegen.Inspector) error {
- filename := filepath.Join(c.ProjectRootDir, packageDir, "zero_file.autogen.go")
- f, err := os.Create(filename)
- if err != nil {
- return err
- }
- defer f.Close()
-
- // Generate warnings for zero file access
-
- warning := func(name string, tp reflect.Type) string {
- msg := fmt.Sprintf(".File.%s on zero object. Wrap it in if or with: {{ with .File }}{{ .%s }}{{ end }}", name, name)
-
- // We made this a Warning in 0.92.0.
- // When we remove this construct in 0.93.0, people will get a nil pointer.
- return fmt.Sprintf("z.log.Warnln(%q)", msg)
- }
-
- var buff bytes.Buffer
-
- methods := c.MethodsFromTypes([]reflect.Type{reflect.TypeOf((*source.File)(nil)).Elem()}, nil)
-
- for _, m := range methods {
- if m.Name == "IsZero" || m.Name == "Classifier" {
- continue
- }
- fmt.Fprint(&buff, m.DeclarationNamed("zeroFile"))
- fmt.Fprintln(&buff, " {")
- fmt.Fprintf(&buff, "\t%s\n", warning(m.Name, m.Owner))
- if len(m.Out) > 0 {
- fmt.Fprintln(&buff, "\treturn")
- }
- fmt.Fprintln(&buff, "}")
-
- }
-
- pkgImports := append(methods.Imports(), "github.com/gohugoio/hugo/common/loggers", "github.com/gohugoio/hugo/source")
-
- fmt.Fprintf(f, `%s
-
-package page
-
-%s
-
-// ZeroFile represents a zero value of source.File with warnings if invoked.
-type zeroFile struct {
- log loggers.Logger
-}
-
-func NewZeroFile(log loggers.Logger) source.File {
- return zeroFile{log: log}
-}
-
-func (zeroFile) IsZero() bool {
- return true
-}
-
-
-%s
-
-`, header, importsString(pkgImports), buff.String())
-
- return nil
-}
-
func importsString(imps []string) string {
if len(imps) == 0 {
return ""
diff --git a/resources/page/page_lazy_contentprovider.go b/resources/page/page_lazy_contentprovider.go
index 2d647e90c..665b2d003 100644
--- a/resources/page/page_lazy_contentprovider.go
+++ b/resources/page/page_lazy_contentprovider.go
@@ -77,7 +77,6 @@ func (lcp *LazyContentProvider) Reset() {
func (lcp *LazyContentProvider) TableOfContents(ctx context.Context) template.HTML {
lcp.init.Do(ctx)
return lcp.cp.TableOfContents(ctx)
-
}
func (lcp *LazyContentProvider) Fragments(ctx context.Context) *tableofcontents.Fragments {
@@ -131,7 +130,7 @@ func (lcp *LazyContentProvider) Len(ctx context.Context) int {
}
func (lcp *LazyContentProvider) Render(ctx context.Context, layout ...string) (template.HTML, error) {
- lcp.init.Do(context.TODO())
+ lcp.init.Do(ctx)
return lcp.cp.Render(ctx, layout...)
}
@@ -149,6 +148,7 @@ func (lcp *LazyContentProvider) ParseContent(ctx context.Context, content []byte
lcp.init.Do(ctx)
return lcp.cp.ParseContent(ctx, content)
}
+
func (lcp *LazyContentProvider) RenderContent(ctx context.Context, content []byte, doc any) (converter.ResultRender, bool, error) {
lcp.init.Do(ctx)
return lcp.cp.RenderContent(ctx, content, doc)
diff --git a/resources/page/page_marshaljson.autogen.go b/resources/page/page_marshaljson.autogen.go
index bc9b5cc0f..18ed2a75d 100644
--- a/resources/page/page_marshaljson.autogen.go
+++ b/resources/page/page_marshaljson.autogen.go
@@ -17,27 +17,12 @@ package page
import (
"encoding/json"
- "github.com/gohugoio/hugo/common/maps"
- "github.com/gohugoio/hugo/config"
- "github.com/gohugoio/hugo/hugofs/files"
- "github.com/gohugoio/hugo/identity"
- "github.com/gohugoio/hugo/langs"
- "github.com/gohugoio/hugo/media"
- "github.com/gohugoio/hugo/navigation"
- "github.com/gohugoio/hugo/source"
"time"
+
+ "github.com/gohugoio/hugo/config"
)
func MarshalPageToJSON(p Page) ([]byte, error) {
- rawContent := p.RawContent()
- resourceType := p.ResourceType()
- mediaType := p.MediaType()
- permalink := p.Permalink()
- relPermalink := p.RelPermalink()
- name := p.Name()
- title := p.Title()
- params := p.Params()
- data := p.Data()
date := p.Date()
lastmod := p.Lastmod()
publishDate := p.PublishDate()
@@ -54,128 +39,65 @@ func MarshalPageToJSON(p Page) ([]byte, error) {
isNode := p.IsNode()
isPage := p.IsPage()
path := p.Path()
- pathc := p.Pathc()
+ pathc := p.Path()
slug := p.Slug()
lang := p.Lang()
isSection := p.IsSection()
section := p.Section()
- sectionsEntries := p.SectionsEntries()
- sectionsPath := p.SectionsPath()
sitemap := p.Sitemap()
typ := p.Type()
weight := p.Weight()
- language := p.Language()
- file := p.File()
- gitInfo := p.GitInfo()
- codeOwners := p.CodeOwners()
- outputFormats := p.OutputFormats()
- alternativeOutputFormats := p.AlternativeOutputFormats()
- menus := p.Menus()
- translationKey := p.TranslationKey()
- isTranslated := p.IsTranslated()
- allTranslations := p.AllTranslations()
- translations := p.Translations()
- store := p.Store()
- getIdentity := p.GetIdentity()
s := struct {
- RawContent string
- ResourceType string
- MediaType media.Type
- Permalink string
- RelPermalink string
- Name string
- Title string
- Params maps.Params
- Data interface{}
- Date time.Time
- Lastmod time.Time
- PublishDate time.Time
- ExpiryDate time.Time
- Aliases []string
- BundleType files.ContentClass
- Description string
- Draft bool
- IsHome bool
- Keywords []string
- Kind string
- Layout string
- LinkTitle string
- IsNode bool
- IsPage bool
- Path string
- Pathc string
- Slug string
- Lang string
- IsSection bool
- Section string
- SectionsEntries []string
- SectionsPath string
- Sitemap config.SitemapConfig
- Type string
- Weight int
- Language *langs.Language
- File source.File
- GitInfo source.GitInfo
- CodeOwners []string
- OutputFormats OutputFormats
- AlternativeOutputFormats OutputFormats
- Menus navigation.PageMenus
- TranslationKey string
- IsTranslated bool
- AllTranslations Pages
- Translations Pages
- Store *maps.Scratch
- GetIdentity identity.Identity
+ Date time.Time
+ Lastmod time.Time
+ PublishDate time.Time
+ ExpiryDate time.Time
+ Aliases []string
+ BundleType string
+ Description string
+ Draft bool
+ IsHome bool
+ Keywords []string
+ Kind string
+ Layout string
+ LinkTitle string
+ IsNode bool
+ IsPage bool
+ Path string
+ Pathc string
+ Slug string
+ Lang string
+ IsSection bool
+ Section string
+ Sitemap config.SitemapConfig
+ Type string
+ Weight int
}{
- RawContent: rawContent,
- ResourceType: resourceType,
- MediaType: mediaType,
- Permalink: permalink,
- RelPermalink: relPermalink,
- Name: name,
- Title: title,
- Params: params,
- Data: data,
- Date: date,
- Lastmod: lastmod,
- PublishDate: publishDate,
- ExpiryDate: expiryDate,
- Aliases: aliases,
- BundleType: bundleType,
- Description: description,
- Draft: draft,
- IsHome: isHome,
- Keywords: keywords,
- Kind: kind,
- Layout: layout,
- LinkTitle: linkTitle,
- IsNode: isNode,
- IsPage: isPage,
- Path: path,
- Pathc: pathc,
- Slug: slug,
- Lang: lang,
- IsSection: isSection,
- Section: section,
- SectionsEntries: sectionsEntries,
- SectionsPath: sectionsPath,
- Sitemap: sitemap,
- Type: typ,
- Weight: weight,
- Language: language,
- File: file,
- GitInfo: gitInfo,
- CodeOwners: codeOwners,
- OutputFormats: outputFormats,
- AlternativeOutputFormats: alternativeOutputFormats,
- Menus: menus,
- TranslationKey: translationKey,
- IsTranslated: isTranslated,
- AllTranslations: allTranslations,
- Translations: translations,
- Store: store,
- GetIdentity: getIdentity,
+ Date: date,
+ Lastmod: lastmod,
+ PublishDate: publishDate,
+ ExpiryDate: expiryDate,
+ Aliases: aliases,
+ BundleType: bundleType,
+ Description: description,
+ Draft: draft,
+ IsHome: isHome,
+ Keywords: keywords,
+ Kind: kind,
+ Layout: layout,
+ LinkTitle: linkTitle,
+ IsNode: isNode,
+ IsPage: isPage,
+ Path: path,
+ Pathc: pathc,
+ Slug: slug,
+ Lang: lang,
+ IsSection: isSection,
+ Section: section,
+ Sitemap: sitemap,
+ Type: typ,
+ Weight: weight,
}
return json.Marshal(&s)
diff --git a/resources/page/page_matcher.go b/resources/page/page_matcher.go
index 4c861cbd7..f5e8e2697 100644
--- a/resources/page/page_matcher.go
+++ b/resources/page/page_matcher.go
@@ -63,7 +63,7 @@ func (m PageMatcher) Matches(p Page) bool {
if m.Path != "" {
g, err := glob.GetGlob(m.Path)
// TODO(bep) Path() vs filepath vs leading slash.
- p := strings.ToLower(filepath.ToSlash(p.Pathc()))
+ p := strings.ToLower(filepath.ToSlash(p.Path()))
if !(strings.HasPrefix(p, "/")) {
p = "/" + p
}
@@ -123,7 +123,6 @@ func DecodeCascadeConfig(in any) (*config.ConfigNamespace[[]PageMatcherParamsCon
}
return config.DecodeNamespace[[]PageMatcherParamsConfig](in, buildConfig)
-
}
// DecodeCascade decodes in which could be either a map or a slice of maps.
@@ -161,7 +160,6 @@ func mapToPageMatcherParamsConfig(m map[string]any) (PageMatcherParamsConfig, er
}
}
return pcfg, pcfg.init()
-
}
// decodePageMatcher decodes m into v.
diff --git a/resources/page/page_nop.go b/resources/page/page_nop.go
index 735d6eea8..a8f42e4d3 100644
--- a/resources/page/page_nop.go
+++ b/resources/page/page_nop.go
@@ -21,19 +21,17 @@ import (
"html/template"
"time"
- "github.com/gohugoio/hugo/identity"
+ "github.com/gohugoio/hugo/hugofs/files"
"github.com/gohugoio/hugo/markup/converter"
"github.com/gohugoio/hugo/markup/tableofcontents"
- "github.com/gohugoio/hugo/hugofs/files"
- "github.com/gohugoio/hugo/tpl"
-
"github.com/gohugoio/hugo/hugofs"
"github.com/gohugoio/hugo/navigation"
"github.com/gohugoio/hugo/common/hugo"
"github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/common/paths"
"github.com/gohugoio/hugo/source"
"github.com/gohugoio/hugo/config"
@@ -59,6 +57,8 @@ var (
// PageNop implements Page, but does nothing.
type nopPage int
+var noOpPathInfo = paths.Parse(files.ComponentFolderContent, "no-op.md")
+
func (p *nopPage) Err() resource.ResourceError {
return nil
}
@@ -103,7 +103,7 @@ func (p *nopPage) BaseFileName() string {
return ""
}
-func (p *nopPage) BundleType() files.ContentClass {
+func (p *nopPage) BundleType() string {
return ""
}
@@ -163,10 +163,8 @@ func (p *nopPage) Extension() string {
return ""
}
-var nilFile *source.FileInfo
-
-func (p *nopPage) File() source.File {
- return nilFile
+func (p *nopPage) File() *source.File {
+ return nil
}
func (p *nopPage) FileInfo() hugofs.FileMetaInfo {
@@ -189,10 +187,6 @@ func (p *nopPage) GetPage(ref string) (Page, error) {
return nil, nil
}
-func (p *nopPage) GetPageWithTemplateInfo(info tpl.Info, ref string) (Page, error) {
- return nil, nil
-}
-
func (p *nopPage) GetParam(key string) any {
return nil
}
@@ -221,16 +215,16 @@ func (p *nopPage) Hugo() (h hugo.HugoInfo) {
return
}
-func (p *nopPage) InSection(other any) (bool, error) {
- return false, nil
+func (p *nopPage) InSection(other any) bool {
+ return false
}
-func (p *nopPage) IsAncestor(other any) (bool, error) {
- return false, nil
+func (p *nopPage) IsAncestor(other any) bool {
+ return false
}
-func (p *nopPage) IsDescendant(other any) (bool, error) {
- return false, nil
+func (p *nopPage) IsDescendant(other any) bool {
+ return false
}
func (p *nopPage) IsDraft() bool {
@@ -357,8 +351,8 @@ func (p *nopPage) Path() string {
return ""
}
-func (p *nopPage) Pathc() string {
- return ""
+func (p *nopPage) PathInfo() *paths.Path {
+ return noOpPathInfo
}
func (p *nopPage) Permalink() string {
@@ -529,13 +523,10 @@ func (p *nopPage) WordCount(context.Context) int {
return 0
}
-func (p *nopPage) GetIdentity() identity.Identity {
- return identity.NewPathIdentity("content", "foo/bar.md")
-}
-
func (p *nopPage) Fragments(context.Context) *tableofcontents.Fragments {
return nil
}
+
func (p *nopPage) HeadingsFiltered(context.Context) tableofcontents.Headings {
return nil
}
@@ -550,6 +541,7 @@ func (r *nopContentRenderer) ParseAndRenderContent(ctx context.Context, content
func (r *nopContentRenderer) ParseContent(ctx context.Context, content []byte) (converter.ResultParse, bool, error) {
return nil, false, nil
}
+
func (r *nopContentRenderer) RenderContent(ctx context.Context, content []byte, doc any) (converter.ResultRender, bool, error) {
return nil, false, nil
}
diff --git a/resources/page/page_paths.go b/resources/page/page_paths.go
index 1bc16fe35..8052287c6 100644
--- a/resources/page/page_paths.go
+++ b/resources/page/page_paths.go
@@ -17,7 +17,9 @@ import (
"path"
"path/filepath"
"strings"
+ "sync"
+ "github.com/gohugoio/hugo/common/paths"
"github.com/gohugoio/hugo/common/urls"
"github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/output"
@@ -39,16 +41,14 @@ type TargetPathDescriptor struct {
Type output.Format
Kind string
- Sections []string
+ Path *paths.Path
+ Section *paths.Path
// For regular content pages this is either
// 1) the Slug, if set,
// 2) the file base name (TranslationBaseName).
BaseName string
- // Source directory.
- Dir string
-
// Typically a language prefix added to file paths.
PrefixFilePath string
@@ -74,7 +74,6 @@ type TargetPathDescriptor struct {
// TODO(bep) move this type.
type TargetPaths struct {
-
// Where to store the file on disk relative to the publish dir. OS slashes.
TargetFilename string
@@ -107,237 +106,347 @@ func (p TargetPaths) PermalinkForOutputFormat(s *helpers.PathSpec, f output.Form
return s.PermalinkForBaseURL(p.Link, baseURLstr)
}
-func isHtmlIndex(s string) bool {
- return strings.HasSuffix(s, "/index.html")
-}
-
func CreateTargetPaths(d TargetPathDescriptor) (tp TargetPaths) {
- if d.Type.Name == "" {
- panic("CreateTargetPath: missing type")
- }
-
// Normalize all file Windows paths to simplify what's next.
- if helpers.FilePathSeparator != slash {
- d.Dir = filepath.ToSlash(d.Dir)
+ if helpers.FilePathSeparator != "/" {
d.PrefixFilePath = filepath.ToSlash(d.PrefixFilePath)
-
}
- if d.URL != "" && !strings.HasPrefix(d.URL, "/") {
+ if !d.Type.Root && d.URL != "" && !strings.HasPrefix(d.URL, "/") {
// Treat this as a context relative URL
d.ForcePrefix = true
}
- pagePath := slash
- fullSuffix := d.Type.MediaType.FirstSuffix.FullSuffix
+ if d.URL != "" {
+ d.URL = filepath.ToSlash(d.URL)
+ if strings.Contains(d.URL, "..") {
+ d.URL = path.Join("/", d.URL)
+ }
+ }
+
+ if d.Type.Root && !d.ForcePrefix {
+ d.PrefixFilePath = ""
+ d.PrefixLink = ""
+ }
+
+ pb := getPagePathBuilder(d)
+ defer putPagePathBuilder(pb)
- var (
- pagePathDir string
- link string
- linkDir string
- )
+ pb.fullSuffix = d.Type.MediaType.FirstSuffix.FullSuffix
// The top level index files, i.e. the home page etc., needs
// the index base even when uglyURLs is enabled.
needsBase := true
- isUgly := d.UglyURLs && !d.Type.NoUgly
- baseNameSameAsType := d.BaseName != "" && d.BaseName == d.Type.BaseName
+ pb.isUgly = (d.UglyURLs || d.Type.Ugly) && !d.Type.NoUgly
+ pb.baseNameSameAsType = !d.Path.IsBundle() && d.BaseName != "" && d.BaseName == d.Type.BaseName
- if d.ExpandedPermalink == "" && baseNameSameAsType {
- isUgly = true
+ if d.ExpandedPermalink == "" && pb.baseNameSameAsType {
+ pb.isUgly = true
}
- if d.Kind != kinds.KindPage && d.URL == "" && len(d.Sections) > 0 {
+ if d.Type == output.HTTPStatusHTMLFormat || d.Type == output.SitemapFormat || d.Type == output.RobotsTxtFormat {
+ pb.noSubResources = true
+ } else if d.Kind != kinds.KindPage && d.URL == "" && d.Section.Base() != "/" {
if d.ExpandedPermalink != "" {
- pagePath = pjoin(pagePath, d.ExpandedPermalink)
+ pb.Add(d.ExpandedPermalink)
} else {
- pagePath = pjoin(d.Sections...)
+ pb.Add(d.Section.Base())
}
needsBase = false
}
if d.Type.Path != "" {
- pagePath = pjoin(pagePath, d.Type.Path)
+ pb.Add(d.Type.Path)
}
if d.Kind != kinds.KindHome && d.URL != "" {
- pagePath = pjoin(pagePath, d.URL)
+ pb.Add(paths.FieldsSlash(d.URL)...)
if d.Addends != "" {
- pagePath = pjoin(pagePath, d.Addends)
+ pb.Add(d.Addends)
}
- pagePathDir = pagePath
- link = pagePath
hasDot := strings.Contains(d.URL, ".")
- hasSlash := strings.HasSuffix(d.URL, slash)
+ hasSlash := strings.HasSuffix(d.URL, "/")
if hasSlash || !hasDot {
- pagePath = pjoin(pagePath, d.Type.BaseName+fullSuffix)
+ pb.Add(d.Type.BaseName + pb.fullSuffix)
} else if hasDot {
- pagePathDir = path.Dir(pagePathDir)
+ pb.fullSuffix = paths.Ext(d.URL)
}
- if !isHtmlIndex(pagePath) {
- link = pagePath
- } else if !hasSlash {
- link += slash
+ if pb.IsHtmlIndex() {
+ pb.linkUpperOffset = 1
}
- linkDir = pagePathDir
-
if d.ForcePrefix {
// Prepend language prefix if not already set in URL
- if d.PrefixFilePath != "" && !strings.HasPrefix(d.URL, slash+d.PrefixFilePath) {
- pagePath = pjoin(d.PrefixFilePath, pagePath)
- pagePathDir = pjoin(d.PrefixFilePath, pagePathDir)
+ if d.PrefixFilePath != "" && !strings.HasPrefix(d.URL, "/"+d.PrefixFilePath) {
+ pb.prefixPath = d.PrefixFilePath
}
- if d.PrefixLink != "" && !strings.HasPrefix(d.URL, slash+d.PrefixLink) {
- link = pjoin(d.PrefixLink, link)
- linkDir = pjoin(d.PrefixLink, linkDir)
+ if d.PrefixLink != "" && !strings.HasPrefix(d.URL, "/"+d.PrefixLink) {
+ pb.prefixLink = d.PrefixLink
}
}
-
- } else if d.Kind == kinds.KindPage {
-
+ } else if !kinds.IsBranch(d.Kind) {
if d.ExpandedPermalink != "" {
- pagePath = pjoin(pagePath, d.ExpandedPermalink)
+ pb.Add(d.ExpandedPermalink)
} else {
- if d.Dir != "" {
- pagePath = pjoin(pagePath, d.Dir)
+ if dir := d.Path.ContainerDir(); dir != "" {
+ pb.Add(dir)
}
if d.BaseName != "" {
- pagePath = pjoin(pagePath, d.BaseName)
+ pb.Add(d.BaseName)
+ } else {
+ pb.Add(d.Path.BaseNameNoIdentifier())
}
}
if d.Addends != "" {
- pagePath = pjoin(pagePath, d.Addends)
- }
-
- link = pagePath
-
- // TODO(bep) this should not happen after the fix in https://github.com/gohugoio/hugo/issues/4870
- // but we may need some more testing before we can remove it.
- if baseNameSameAsType {
- link = strings.TrimSuffix(link, d.BaseName)
+ pb.Add(d.Addends)
}
- pagePathDir = link
- link = link + slash
- linkDir = pagePathDir
-
- if isUgly {
- pagePath = addSuffix(pagePath, fullSuffix)
+ if pb.isUgly {
+ pb.ConcatLast(pb.fullSuffix)
} else {
- pagePath = pjoin(pagePath, d.Type.BaseName+fullSuffix)
+ pb.Add(d.Type.BaseName + pb.fullSuffix)
}
- if !isHtmlIndex(pagePath) {
- link = pagePath
+ if pb.IsHtmlIndex() {
+ pb.linkUpperOffset = 1
}
if d.PrefixFilePath != "" {
- pagePath = pjoin(d.PrefixFilePath, pagePath)
- pagePathDir = pjoin(d.PrefixFilePath, pagePathDir)
+ pb.prefixPath = d.PrefixFilePath
}
if d.PrefixLink != "" {
- link = pjoin(d.PrefixLink, link)
- linkDir = pjoin(d.PrefixLink, linkDir)
+ pb.prefixLink = d.PrefixLink
}
-
} else {
if d.Addends != "" {
- pagePath = pjoin(pagePath, d.Addends)
+ pb.Add(d.Addends)
}
needsBase = needsBase && d.Addends == ""
- // No permalink expansion etc. for node type pages (for now)
- base := ""
-
- if needsBase || !isUgly {
- base = d.Type.BaseName
- }
-
- pagePathDir = pagePath
- link = pagePath
- linkDir = pagePathDir
-
- if base != "" {
- pagePath = path.Join(pagePath, addSuffix(base, fullSuffix))
+ if needsBase || !pb.isUgly {
+ pb.Add(d.Type.BaseName + pb.fullSuffix)
} else {
- pagePath = addSuffix(pagePath, fullSuffix)
+ pb.ConcatLast(pb.fullSuffix)
}
- if !isHtmlIndex(pagePath) {
- link = pagePath
- } else {
- link += slash
+ if pb.IsHtmlIndex() {
+ pb.linkUpperOffset = 1
}
if d.PrefixFilePath != "" {
- pagePath = pjoin(d.PrefixFilePath, pagePath)
- pagePathDir = pjoin(d.PrefixFilePath, pagePathDir)
+ pb.prefixPath = d.PrefixFilePath
}
if d.PrefixLink != "" {
- link = pjoin(d.PrefixLink, link)
- linkDir = pjoin(d.PrefixLink, linkDir)
+ pb.prefixLink = d.PrefixLink
}
}
- pagePath = pjoin(slash, pagePath)
- pagePathDir = strings.TrimSuffix(path.Join(slash, pagePathDir), slash)
-
- hadSlash := strings.HasSuffix(link, slash)
- link = strings.Trim(link, slash)
- if hadSlash {
- link += slash
- }
-
- if !strings.HasPrefix(link, slash) {
- link = slash + link
- }
-
- linkDir = strings.TrimSuffix(path.Join(slash, linkDir), slash)
-
// if page URL is explicitly set in frontmatter,
// preserve its value without sanitization
if d.Kind != kinds.KindPage || d.URL == "" {
// Note: MakePathSanitized will lower case the path if
// disablePathToLower isn't set.
- pagePath = d.PathSpec.MakePathSanitized(pagePath)
- pagePathDir = d.PathSpec.MakePathSanitized(pagePathDir)
- link = d.PathSpec.MakePathSanitized(link)
- linkDir = d.PathSpec.MakePathSanitized(linkDir)
+ pb.Sanitize()
}
+ link := pb.Link()
+ pagePath := pb.PathFile()
+
tp.TargetFilename = filepath.FromSlash(pagePath)
- tp.SubResourceBaseTarget = filepath.FromSlash(pagePathDir)
- tp.SubResourceBaseLink = linkDir
- tp.Link = d.PathSpec.URLizeFilename(link)
+ if !pb.noSubResources {
+ tp.SubResourceBaseTarget = pb.PathDir()
+ tp.SubResourceBaseLink = pb.LinkDir()
+ }
+ if d.URL != "" {
+ tp.Link = paths.URLEscape(link)
+ } else {
+ // This is slightly faster when we know the URL has no
+ // query string, scheme, etc.
+ tp.Link = paths.PathEscape(link)
+ }
if tp.Link == "" {
- tp.Link = slash
+ tp.Link = "/"
}
return
}
-func addSuffix(s, suffix string) string {
- return strings.Trim(s, slash) + suffix
+// When adding state here, remember to update putPagePathBuilder.
+type pagePathBuilder struct {
+ els []string
+
+ d TargetPathDescriptor
+
+ // Builder state.
+ isUgly bool
+ baseNameSameAsType bool
+ noSubResources bool
+ fullSuffix string // File suffix including any ".".
+ prefixLink string
+ prefixPath string
+ linkUpperOffset int
+}
+
+func (p *pagePathBuilder) Add(el ...string) {
+ // Filter out empty elements and bare slashes.
+ n := 0
+ for _, e := range el {
+ if e != "" && e != slash {
+ el[n] = e
+ n++
+ }
+ }
+ el = el[:n]
+
+ p.els = append(p.els, el...)
}
-// Like path.Join, but preserves one trailing slash if present.
-func pjoin(elem ...string) string {
- hadSlash := strings.HasSuffix(elem[len(elem)-1], slash)
- joined := path.Join(elem...)
- if hadSlash && !strings.HasSuffix(joined, slash) {
- return joined + slash
+func (p *pagePathBuilder) ConcatLast(s string) {
+ if len(p.els) == 0 {
+ p.Add(s)
+ return
}
- return joined
+ old := p.els[len(p.els)-1]
+ if old == "" {
+ p.els[len(p.els)-1] = s
+ return
+ }
+ if old[len(old)-1] == '/' {
+ old = old[:len(old)-1]
+ }
+ p.els[len(p.els)-1] = old + s
+}
+
+func (p *pagePathBuilder) IsHtmlIndex() bool {
+ return p.Last() == "index.html"
+}
+
+func (p *pagePathBuilder) Last() string {
+ if p.els == nil {
+ return ""
+ }
+ return p.els[len(p.els)-1]
+}
+
+func (p *pagePathBuilder) Link() string {
+ link := p.Path(p.linkUpperOffset)
+
+ if p.baseNameSameAsType {
+ link = strings.TrimSuffix(link, p.d.BaseName)
+ }
+
+ if p.prefixLink != "" {
+ link = "/" + p.prefixLink + link
+ }
+
+ if p.linkUpperOffset > 0 && !strings.HasSuffix(link, "/") {
+ link += "/"
+ }
+
+ return link
+}
+
+func (p *pagePathBuilder) LinkDir() string {
+ if p.noSubResources {
+ return ""
+ }
+
+ pathDir := p.PathDirBase()
+
+ if p.prefixLink != "" {
+ pathDir = "/" + p.prefixLink + pathDir
+ }
+
+ return pathDir
+}
+
+func (p *pagePathBuilder) Path(upperOffset int) string {
+ upper := len(p.els)
+ if upperOffset > 0 {
+ upper -= upperOffset
+ }
+ pth := path.Join(p.els[:upper]...)
+ return paths.AddLeadingSlash(pth)
+}
+
+func (p *pagePathBuilder) PathDir() string {
+ dir := p.PathDirBase()
+ if p.prefixPath != "" {
+ dir = "/" + p.prefixPath + dir
+ }
+ return dir
+}
+
+func (p *pagePathBuilder) PathDirBase() string {
+ if p.noSubResources {
+ return ""
+ }
+
+ dir := p.Path(0)
+ isIndex := strings.HasPrefix(p.Last(), p.d.Type.BaseName+".")
+
+ if isIndex {
+ dir = paths.Dir(dir)
+ } else {
+ dir = strings.TrimSuffix(dir, p.fullSuffix)
+ }
+
+ if dir == "/" {
+ dir = ""
+ }
+
+ return dir
+}
+
+func (p *pagePathBuilder) PathFile() string {
+ dir := p.Path(0)
+ if p.prefixPath != "" {
+ dir = "/" + p.prefixPath + dir
+ }
+ return dir
+}
+
+func (p *pagePathBuilder) Prepend(el ...string) {
+ p.els = append(p.els[:0], append(el, p.els[0:]...)...)
+}
+
+func (p *pagePathBuilder) Sanitize() {
+ for i, el := range p.els {
+ p.els[i] = p.d.PathSpec.MakePathSanitized(el)
+ }
+}
+
+var pagePathBuilderPool = &sync.Pool{
+ New: func() any {
+ return &pagePathBuilder{}
+ },
+}
+
+func getPagePathBuilder(d TargetPathDescriptor) *pagePathBuilder {
+ b := pagePathBuilderPool.Get().(*pagePathBuilder)
+ b.d = d
+ return b
+}
+
+func putPagePathBuilder(b *pagePathBuilder) {
+ b.els = b.els[:0]
+ b.fullSuffix = ""
+ b.baseNameSameAsType = false
+ b.isUgly = false
+ b.noSubResources = false
+ b.prefixLink = ""
+ b.prefixPath = ""
+ b.linkUpperOffset = 0
+ pagePathBuilderPool.Put(b)
}
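
For orientation, this is roughly how a caller in this package would use the pooled builder; the sketch below is illustrative only (the element values are invented) and relies solely on the methods defined above.

	// Sketch: get a builder from the pool, build a path, return it to the pool.
	pb := getPagePathBuilder(d) // d is a prepared TargetPathDescriptor
	defer putPagePathBuilder(pb)

	pb.Add("sect1", "index.html")
	if pb.IsHtmlIndex() {
		pb.linkUpperOffset = 1 // drop "index.html" from the link form
	}
	link := pb.Link()         // "/sect1/"
	filename := pb.PathFile() // "/sect1/index.html"
	_, _ = link, filename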
diff --git a/resources/page/page_paths_test.go b/resources/page/page_paths_test.go
deleted file mode 100644
index dd6457f77..000000000
--- a/resources/page/page_paths_test.go
+++ /dev/null
@@ -1,295 +0,0 @@
-// Copyright 2019 The Hugo Authors. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package page_test
-
-import (
- "fmt"
- "path/filepath"
- "strings"
- "testing"
-
- "github.com/gohugoio/hugo/media"
- "github.com/gohugoio/hugo/resources/kinds"
- "github.com/gohugoio/hugo/resources/page"
-
- "github.com/gohugoio/hugo/output"
-)
-
-func TestPageTargetPath(t *testing.T) {
- pathSpec := newTestPathSpec()
-
- noExtNoDelimMediaType := media.WithDelimiterAndSuffixes(media.Builtin.TextType, "", "")
- noExtNoDelimMediaType.Delimiter = ""
-
- // Netlify style _redirects
- noExtDelimFormat := output.Format{
- Name: "NER",
- MediaType: noExtNoDelimMediaType,
- BaseName: "_redirects",
- }
-
- for _, langPrefixPath := range []string{"", "no"} {
- for _, langPrefixLink := range []string{"", "no"} {
- for _, uglyURLs := range []bool{false, true} {
-
- tests := []struct {
- name string
- d page.TargetPathDescriptor
- expected page.TargetPaths
- }{
- {"JSON home", page.TargetPathDescriptor{Kind: kinds.KindHome, Type: output.JSONFormat}, page.TargetPaths{TargetFilename: "/index.json", SubResourceBaseTarget: "", Link: "/index.json"}},
- {"AMP home", page.TargetPathDescriptor{Kind: kinds.KindHome, Type: output.AMPFormat}, page.TargetPaths{TargetFilename: "/amp/index.html", SubResourceBaseTarget: "/amp", Link: "/amp/"}},
- {"HTML home", page.TargetPathDescriptor{Kind: kinds.KindHome, BaseName: "_index", Type: output.HTMLFormat}, page.TargetPaths{TargetFilename: "/index.html", SubResourceBaseTarget: "", Link: "/"}},
- {"Netlify redirects", page.TargetPathDescriptor{Kind: kinds.KindHome, BaseName: "_index", Type: noExtDelimFormat}, page.TargetPaths{TargetFilename: "/_redirects", SubResourceBaseTarget: "", Link: "/_redirects"}},
- {"HTML section list", page.TargetPathDescriptor{
- Kind: kinds.KindSection,
- Sections: []string{"sect1"},
- BaseName: "_index",
- Type: output.HTMLFormat,
- }, page.TargetPaths{TargetFilename: "/sect1/index.html", SubResourceBaseTarget: "/sect1", Link: "/sect1/"}},
- {"HTML taxonomy term", page.TargetPathDescriptor{
- Kind: kinds.KindTerm,
- Sections: []string{"tags", "hugo"},
- BaseName: "_index",
- Type: output.HTMLFormat,
- }, page.TargetPaths{TargetFilename: "/tags/hugo/index.html", SubResourceBaseTarget: "/tags/hugo", Link: "/tags/hugo/"}},
- {"HTML taxonomy", page.TargetPathDescriptor{
- Kind: kinds.KindTaxonomy,
- Sections: []string{"tags"},
- BaseName: "_index",
- Type: output.HTMLFormat,
- }, page.TargetPaths{TargetFilename: "/tags/index.html", SubResourceBaseTarget: "/tags", Link: "/tags/"}},
- {
- "HTML page", page.TargetPathDescriptor{
- Kind: kinds.KindPage,
- Dir: "/a/b",
- BaseName: "mypage",
- Sections: []string{"a"},
- Type: output.HTMLFormat,
- }, page.TargetPaths{TargetFilename: "/a/b/mypage/index.html", SubResourceBaseTarget: "/a/b/mypage", Link: "/a/b/mypage/"},
- },
-
- {
- "HTML page with index as base", page.TargetPathDescriptor{
- Kind: kinds.KindPage,
- Dir: "/a/b",
- BaseName: "index",
- Sections: []string{"a"},
- Type: output.HTMLFormat,
- }, page.TargetPaths{TargetFilename: "/a/b/index.html", SubResourceBaseTarget: "/a/b", Link: "/a/b/"},
- },
-
- {
- "HTML page with special chars", page.TargetPathDescriptor{
- Kind: kinds.KindPage,
- Dir: "/a/b",
- BaseName: "My Page!",
- Type: output.HTMLFormat,
- }, page.TargetPaths{TargetFilename: "/a/b/my-page/index.html", SubResourceBaseTarget: "/a/b/my-page", Link: "/a/b/my-page/"},
- },
- {"RSS home", page.TargetPathDescriptor{Kind: "rss", Type: output.RSSFormat}, page.TargetPaths{TargetFilename: "/index.xml", SubResourceBaseTarget: "", Link: "/index.xml"}},
- {"RSS section list", page.TargetPathDescriptor{
- Kind: "rss",
- Sections: []string{"sect1"},
- Type: output.RSSFormat,
- }, page.TargetPaths{TargetFilename: "/sect1/index.xml", SubResourceBaseTarget: "/sect1", Link: "/sect1/index.xml"}},
- {
- "AMP page", page.TargetPathDescriptor{
- Kind: kinds.KindPage,
- Dir: "/a/b/c",
- BaseName: "myamp",
- Type: output.AMPFormat,
- }, page.TargetPaths{TargetFilename: "/amp/a/b/c/myamp/index.html", SubResourceBaseTarget: "/amp/a/b/c/myamp", Link: "/amp/a/b/c/myamp/"},
- },
- {
- "AMP page with URL with suffix", page.TargetPathDescriptor{
- Kind: kinds.KindPage,
- Dir: "/sect/",
- BaseName: "mypage",
- URL: "/some/other/url.xhtml",
- Type: output.HTMLFormat,
- }, page.TargetPaths{TargetFilename: "/some/other/url.xhtml", SubResourceBaseTarget: "/some/other", Link: "/some/other/url.xhtml"},
- },
- {
- "JSON page with URL without suffix", page.TargetPathDescriptor{
- Kind: kinds.KindPage,
- Dir: "/sect/",
- BaseName: "mypage",
- URL: "/some/other/path/",
- Type: output.JSONFormat,
- }, page.TargetPaths{TargetFilename: "/some/other/path/index.json", SubResourceBaseTarget: "/some/other/path", Link: "/some/other/path/index.json"},
- },
- {
- "JSON page with URL without suffix and no trailing slash", page.TargetPathDescriptor{
- Kind: kinds.KindPage,
- Dir: "/sect/",
- BaseName: "mypage",
- URL: "/some/other/path",
- Type: output.JSONFormat,
- }, page.TargetPaths{TargetFilename: "/some/other/path/index.json", SubResourceBaseTarget: "/some/other/path", Link: "/some/other/path/index.json"},
- },
- {
- "HTML page with URL without suffix and no trailing slash", page.TargetPathDescriptor{
- Kind: kinds.KindPage,
- Dir: "/sect/",
- BaseName: "mypage",
- URL: "/some/other/path",
- Type: output.HTMLFormat,
- }, page.TargetPaths{TargetFilename: "/some/other/path/index.html", SubResourceBaseTarget: "/some/other/path", Link: "/some/other/path/"},
- },
- {
- "HTML page with URL containing double hyphen", page.TargetPathDescriptor{
- Kind: kinds.KindPage,
- Dir: "/sect/",
- BaseName: "mypage",
- URL: "/some/other--url/",
- Type: output.HTMLFormat,
- }, page.TargetPaths{TargetFilename: "/some/other--url/index.html", SubResourceBaseTarget: "/some/other--url", Link: "/some/other--url/"},
- },
- {
- "HTML page with expanded permalink", page.TargetPathDescriptor{
- Kind: kinds.KindPage,
- Dir: "/a/b",
- BaseName: "mypage",
- ExpandedPermalink: "/2017/10/my-title/",
- Type: output.HTMLFormat,
- }, page.TargetPaths{TargetFilename: "/2017/10/my-title/index.html", SubResourceBaseTarget: "/2017/10/my-title", Link: "/2017/10/my-title/"},
- },
- {
- "Paginated HTML home", page.TargetPathDescriptor{
- Kind: kinds.KindHome,
- BaseName: "_index",
- Type: output.HTMLFormat,
- Addends: "page/3",
- }, page.TargetPaths{TargetFilename: "/page/3/index.html", SubResourceBaseTarget: "/page/3", Link: "/page/3/"},
- },
- {
- "Paginated Taxonomy terms list", page.TargetPathDescriptor{
- Kind: kinds.KindTerm,
- BaseName: "_index",
- Sections: []string{"tags", "hugo"},
- Type: output.HTMLFormat,
- Addends: "page/3",
- }, page.TargetPaths{TargetFilename: "/tags/hugo/page/3/index.html", SubResourceBaseTarget: "/tags/hugo/page/3", Link: "/tags/hugo/page/3/"},
- },
- {
- "Regular page with addend", page.TargetPathDescriptor{
- Kind: kinds.KindPage,
- Dir: "/a/b",
- BaseName: "mypage",
- Addends: "c/d/e",
- Type: output.HTMLFormat,
- }, page.TargetPaths{TargetFilename: "/a/b/mypage/c/d/e/index.html", SubResourceBaseTarget: "/a/b/mypage/c/d/e", Link: "/a/b/mypage/c/d/e/"},
- },
- }
-
- for i, test := range tests {
- t.Run(fmt.Sprintf("langPrefixPath=%s,langPrefixLink=%s,uglyURLs=%t,name=%s", langPrefixPath, langPrefixLink, uglyURLs, test.name),
- func(t *testing.T) {
- test.d.ForcePrefix = true
- test.d.PathSpec = pathSpec
- test.d.UglyURLs = uglyURLs
- test.d.PrefixFilePath = langPrefixPath
- test.d.PrefixLink = langPrefixLink
- test.d.Dir = filepath.FromSlash(test.d.Dir)
- isUgly := uglyURLs && !test.d.Type.NoUgly
-
- expected := test.expected
-
- // TODO(bep) simplify
- if test.d.Kind == kinds.KindPage && test.d.BaseName == test.d.Type.BaseName {
- } else if test.d.Kind == kinds.KindHome && test.d.Type.Path != "" {
- } else if test.d.Type.MediaType.FirstSuffix.Suffix != "" && (!strings.HasPrefix(expected.TargetFilename, "/index") || test.d.Addends != "") && test.d.URL == "" && isUgly {
- expected.TargetFilename = strings.Replace(expected.TargetFilename,
- "/"+test.d.Type.BaseName+"."+test.d.Type.MediaType.FirstSuffix.Suffix,
- "."+test.d.Type.MediaType.FirstSuffix.Suffix, 1)
- expected.Link = strings.TrimSuffix(expected.Link, "/") + "." + test.d.Type.MediaType.FirstSuffix.Suffix
-
- }
-
- if test.d.PrefixFilePath != "" && !strings.HasPrefix(test.d.URL, "/"+test.d.PrefixFilePath) {
- expected.TargetFilename = "/" + test.d.PrefixFilePath + expected.TargetFilename
- expected.SubResourceBaseTarget = "/" + test.d.PrefixFilePath + expected.SubResourceBaseTarget
- }
-
- if test.d.PrefixLink != "" && !strings.HasPrefix(test.d.URL, "/"+test.d.PrefixLink) {
- expected.Link = "/" + test.d.PrefixLink + expected.Link
- }
-
- expected.TargetFilename = filepath.FromSlash(expected.TargetFilename)
- expected.SubResourceBaseTarget = filepath.FromSlash(expected.SubResourceBaseTarget)
-
- pagePath := page.CreateTargetPaths(test.d)
-
- if !eqTargetPaths(pagePath, expected) {
- t.Fatalf("[%d] [%s] targetPath expected\n%#v, got:\n%#v", i, test.name, expected, pagePath)
- }
- })
- }
- }
- }
- }
-}
-
-func TestPageTargetPathPrefix(t *testing.T) {
- pathSpec := newTestPathSpec()
- tests := []struct {
- name string
- d page.TargetPathDescriptor
- expected page.TargetPaths
- }{
- {
- "URL set, prefix both, no force",
- page.TargetPathDescriptor{Kind: kinds.KindPage, Type: output.JSONFormat, URL: "/mydir/my.json", ForcePrefix: false, PrefixFilePath: "pf", PrefixLink: "pl"},
- page.TargetPaths{TargetFilename: "/mydir/my.json", SubResourceBaseTarget: "/mydir", SubResourceBaseLink: "/mydir", Link: "/mydir/my.json"},
- },
- {
- "URL set, prefix both, force",
- page.TargetPathDescriptor{Kind: kinds.KindPage, Type: output.JSONFormat, URL: "/mydir/my.json", ForcePrefix: true, PrefixFilePath: "pf", PrefixLink: "pl"},
- page.TargetPaths{TargetFilename: "/pf/mydir/my.json", SubResourceBaseTarget: "/pf/mydir", SubResourceBaseLink: "/pl/mydir", Link: "/pl/mydir/my.json"},
- },
- }
-
- for i, test := range tests {
- t.Run(fmt.Sprintf(test.name),
- func(t *testing.T) {
- test.d.PathSpec = pathSpec
- expected := test.expected
- expected.TargetFilename = filepath.FromSlash(expected.TargetFilename)
- expected.SubResourceBaseTarget = filepath.FromSlash(expected.SubResourceBaseTarget)
-
- pagePath := page.CreateTargetPaths(test.d)
-
- if pagePath != expected {
- t.Fatalf("[%d] [%s] targetPath expected\n%#v, got:\n%#v", i, test.name, expected, pagePath)
- }
- })
- }
-}
-
-func eqTargetPaths(p1, p2 page.TargetPaths) bool {
- if p1.Link != p2.Link {
- return false
- }
-
- if p1.SubResourceBaseTarget != p2.SubResourceBaseTarget {
- return false
- }
-
- if p1.TargetFilename != p2.TargetFilename {
- return false
- }
-
- return true
-}
diff --git a/resources/page/pagegroup.go b/resources/page/pagegroup.go
index d091c6bef..e691a112e 100644
--- a/resources/page/pagegroup.go
+++ b/resources/page/pagegroup.go
@@ -244,7 +244,7 @@ func (p Pages) groupByDateField(format string, sorter func(p Pages) Pages, getDa
return nil, nil
}
- firstPage := sp[0].(Page)
+ firstPage := sp[0]
date := getDate(firstPage)
// Pages may be a mix of multiple languages, so we need to use the language
@@ -258,7 +258,7 @@ func (p Pages) groupByDateField(format string, sorter func(p Pages) Pages, getDa
i := 0
for _, e := range sp[1:] {
- date = getDate(e.(Page))
+ date = getDate(e)
formatted := formatter.Format(date, format)
if r[i].Key.(string) != formatted {
r = append(r, PageGroup{Key: formatted})
diff --git a/resources/page/pagemeta/page_frontmatter.go b/resources/page/pagemeta/page_frontmatter.go
index 98ab6b222..d804f27a7 100644
--- a/resources/page/pagemeta/page_frontmatter.go
+++ b/resources/page/pagemeta/page_frontmatter.go
@@ -47,9 +47,8 @@ type FrontMatterHandler struct {
// FrontMatterDescriptor describes how to handle front matter for a given Page.
// It has pointers to values in the receiving page which gets updated.
type FrontMatterDescriptor struct {
-
- // This the Page's front matter.
- Frontmatter map[string]any
+ // This is the Page's params.
+ Params map[string]any
// This is the Page's base filename (BaseFilename), e.g. page.md, or
// if page is a leaf bundle, the bundle folder name (ContentBaseName).
@@ -63,9 +62,6 @@ type FrontMatterDescriptor struct {
// The below are pointers to values on Page and will be modified.
- // This is the Page's params.
- Params map[string]any
-
// This is the Page's dates.
Dates *resource.Dates
@@ -365,7 +361,7 @@ type frontmatterFieldHandlers int
func (f *frontmatterFieldHandlers) newDateFieldHandler(key string, setter func(d *FrontMatterDescriptor, t time.Time)) frontMatterFieldHandler {
return func(d *FrontMatterDescriptor) (bool, error) {
- v, found := d.Frontmatter[key]
+ v, found := d.Params[key]
if !found {
return false, nil
@@ -396,7 +392,7 @@ func (f *frontmatterFieldHandlers) newDateFilenameHandler(setter func(d *FrontMa
setter(d, date)
- if _, found := d.Frontmatter["slug"]; !found {
+ if _, found := d.Params["slug"]; !found {
// Use slug from filename
d.PageURLs.Slug = slug
}
diff --git a/resources/page/pagemeta/page_frontmatter_test.go b/resources/page/pagemeta/page_frontmatter_test.go
index f040af163..1aff8b511 100644
--- a/resources/page/pagemeta/page_frontmatter_test.go
+++ b/resources/page/pagemeta/page_frontmatter_test.go
@@ -29,11 +29,10 @@ import (
func newTestFd() *pagemeta.FrontMatterDescriptor {
return &pagemeta.FrontMatterDescriptor{
- Frontmatter: make(map[string]any),
- Params: make(map[string]any),
- Dates: &resource.Dates{},
- PageURLs: &pagemeta.URLPath{},
- Location: time.UTC,
+ Params: make(map[string]any),
+ Dates: &resource.Dates{},
+ PageURLs: &pagemeta.URLPath{},
+ Location: time.UTC,
}
}
@@ -106,13 +105,13 @@ func TestFrontMatterDatesHandlers(t *testing.T) {
case ":git":
d.GitAuthorDate = d1
}
- d.Frontmatter["date"] = d2
+ d.Params["date"] = d2
c.Assert(handler.HandleDates(d), qt.IsNil)
c.Assert(d.Dates.FDate, qt.Equals, d1)
c.Assert(d.Params["date"], qt.Equals, d2)
d = newTestFd()
- d.Frontmatter["date"] = d2
+ d.Params["date"] = d2
c.Assert(handler.HandleDates(d), qt.IsNil)
c.Assert(d.Dates.FDate, qt.Equals, d2)
c.Assert(d.Params["date"], qt.Equals, d2)
@@ -120,54 +119,6 @@ func TestFrontMatterDatesHandlers(t *testing.T) {
}
}
-func TestFrontMatterDatesCustomConfig(t *testing.T) {
- t.Parallel()
-
- c := qt.New(t)
-
- cfg := config.New()
- cfg.Set("frontmatter", map[string]any{
- "date": []string{"mydate"},
- "lastmod": []string{"publishdate"},
- "publishdate": []string{"publishdate"},
- })
-
- conf := testconfig.GetTestConfig(nil, cfg)
- handler, err := pagemeta.NewFrontmatterHandler(nil, conf.GetConfigSection("frontmatter").(pagemeta.FrontmatterConfig))
- c.Assert(err, qt.IsNil)
-
- testDate, err := time.Parse("2006-01-02", "2018-02-01")
- c.Assert(err, qt.IsNil)
-
- d := newTestFd()
- d.Frontmatter["mydate"] = testDate
- testDate = testDate.Add(24 * time.Hour)
- d.Frontmatter["date"] = testDate
- testDate = testDate.Add(24 * time.Hour)
- d.Frontmatter["lastmod"] = testDate
- testDate = testDate.Add(24 * time.Hour)
- d.Frontmatter["publishdate"] = testDate
- testDate = testDate.Add(24 * time.Hour)
- d.Frontmatter["expirydate"] = testDate
-
- c.Assert(handler.HandleDates(d), qt.IsNil)
-
- c.Assert(d.Dates.FDate.Day(), qt.Equals, 1)
- c.Assert(d.Dates.FLastmod.Day(), qt.Equals, 4)
- c.Assert(d.Dates.FPublishDate.Day(), qt.Equals, 4)
- c.Assert(d.Dates.FExpiryDate.Day(), qt.Equals, 5)
-
- c.Assert(d.Params["date"], qt.Equals, d.Dates.FDate)
- c.Assert(d.Params["mydate"], qt.Equals, d.Dates.FDate)
- c.Assert(d.Params["publishdate"], qt.Equals, d.Dates.FPublishDate)
- c.Assert(d.Params["expirydate"], qt.Equals, d.Dates.FExpiryDate)
-
- c.Assert(handler.IsDateKey("date"), qt.Equals, false) // This looks odd, but is configured like this.
- c.Assert(handler.IsDateKey("mydate"), qt.Equals, true)
- c.Assert(handler.IsDateKey("publishdate"), qt.Equals, true)
- c.Assert(handler.IsDateKey("pubdate"), qt.Equals, true)
-}
-
func TestFrontMatterDatesDefaultKeyword(t *testing.T) {
t.Parallel()
@@ -186,10 +137,10 @@ func TestFrontMatterDatesDefaultKeyword(t *testing.T) {
testDate, _ := time.Parse("2006-01-02", "2018-02-01")
d := newTestFd()
- d.Frontmatter["mydate"] = testDate
- d.Frontmatter["date"] = testDate.Add(1 * 24 * time.Hour)
- d.Frontmatter["mypubdate"] = testDate.Add(2 * 24 * time.Hour)
- d.Frontmatter["publishdate"] = testDate.Add(3 * 24 * time.Hour)
+ d.Params["mydate"] = testDate
+ d.Params["date"] = testDate.Add(1 * 24 * time.Hour)
+ d.Params["mypubdate"] = testDate.Add(2 * 24 * time.Hour)
+ d.Params["publishdate"] = testDate.Add(3 * 24 * time.Hour)
c.Assert(handler.HandleDates(d), qt.IsNil)
diff --git a/resources/page/pages.go b/resources/page/pages.go
index 77e56a062..088abb9ac 100644
--- a/resources/page/pages.go
+++ b/resources/page/pages.go
@@ -66,9 +66,7 @@ func ToPages(seq any) (Pages, error) {
return v.Pages, nil
case []Page:
pages := make(Pages, len(v))
- for i, vv := range v {
- pages[i] = vv
- }
+ copy(pages, v)
return pages, nil
case []any:
pages := make(Pages, len(v))
diff --git a/resources/page/pages_related.go b/resources/page/pages_related.go
index 217aced47..3322a4fbf 100644
--- a/resources/page/pages_related.go
+++ b/resources/page/pages_related.go
@@ -35,7 +35,6 @@ var (
// A PageGenealogist finds related pages in a page collection. This interface is implemented
// by Pages and PageGroup, which makes it available as `{{ .RegularRelated . }}` etc.
type PageGenealogist interface {
-
// Template example:
// {{ $related := .RegularPages.Related . }}
Related(ctx context.Context, opts any) (Pages, error)
@@ -76,7 +75,6 @@ func (p Pages) Related(ctx context.Context, optsv any) (Pages, error) {
}
return result, nil
-
}
// RelatedIndices searches the given indices with the search keywords from the
@@ -186,6 +184,7 @@ func (s *RelatedDocsHandler) getIndex(p Pages) *related.InvertedIndex {
}
return nil
}
+
func (s *RelatedDocsHandler) getOrCreateIndex(ctx context.Context, p Pages) (*related.InvertedIndex, error) {
s.mu.RLock()
cachedIndex := s.getIndex(p)
diff --git a/resources/page/pages_sort.go b/resources/page/pages_sort.go
index 32b1b3895..3f4875702 100644
--- a/resources/page/pages_sort.go
+++ b/resources/page/pages_sort.go
@@ -54,6 +54,19 @@ func getOrdinals(p1, p2 Page) (int, int) {
return p1o.Ordinal(), p2o.Ordinal()
}
+func getWeight0s(p1, p2 Page) (int, int) {
+ p1w, ok1 := p1.(resource.Weight0Provider)
+ if !ok1 {
+ return -1, -1
+ }
+ p2w, ok2 := p2.(resource.Weight0Provider)
+ if !ok2 {
+ return -1, -1
+ }
+
+ return p1w.Weight0(), p2w.Weight0()
+}
+
// Sort stable sorts the pages given the receiver's sort order.
func (by pageBy) Sort(pages Pages) {
ps := &pageSorter{
@@ -72,12 +85,17 @@ var (
if o1 != o2 && o1 != -1 && o2 != -1 {
return o1 < o2
}
+ // Weight0, i.e. the weight assigned to the taxonomy entry in the front matter.
+ w01, w02 := getWeight0s(p1, p2)
+ if w01 != w02 && w01 != -1 && w02 != -1 {
+ return w01 < w02
+ }
if p1.Weight() == p2.Weight() {
if p1.Date().Unix() == p2.Date().Unix() {
c := collatorStringCompare(func(p Page) string { return p.LinkTitle() }, p1, p2)
if c == 0 {
- if p1.File().IsZero() || p2.File().IsZero() {
- return p1.File().IsZero()
+ if p1.File() == nil || p2.File() == nil {
+ return p1.File() == nil
}
return compare.LessStrings(p1.File().Filename(), p2.File().Filename())
}
@@ -102,7 +120,7 @@ var (
if p1.Date().Unix() == p2.Date().Unix() {
c := compare.Strings(p1.LinkTitle(), p2.LinkTitle())
if c == 0 {
- if !p1.File().IsZero() && !p2.File().IsZero() {
+ if p1.File() != nil && p2.File() != nil {
return compare.LessStrings(p1.File().Filename(), p2.File().Filename())
}
}
@@ -192,7 +210,6 @@ var collatorStringLess = func(p Page) (less func(s1, s2 string) bool, close func
func() {
coll.Unlock()
}
-
}
// ByWeight sorts the Pages by weight and returns a copy.
@@ -406,7 +423,6 @@ func (p Pages) ByParam(paramsKey any) Pages {
s2 := cast.ToString(v2)
return stringLess(s1, s2)
-
}
pages, _ := spc.get(key, pageBy(paramsKeyComparator).Sort, p)
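
To make the Weight0 branch above concrete, here is a hedged sketch of a type satisfying resource.Weight0Provider; the type itself is hypothetical and only the Weight0() int method is taken from the code above.

	// weightedTerm is an illustrative wrapper that exposes a secondary weight,
	// e.g. the weight given to a taxonomy entry in front matter.
	type weightedTerm struct {
		Page
		weight0 int
	}

	func (w weightedTerm) Weight0() int { return w.weight0 }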
diff --git a/resources/page/pages_sort_test.go b/resources/page/pages_sort_test.go
index 728237230..12fa4a1e1 100644
--- a/resources/page/pages_sort_test.go
+++ b/resources/page/pages_sort_test.go
@@ -109,7 +109,6 @@ func TestSortByN(t *testing.T) {
byLen := func(p Pages) Pages {
return p.ByLength(ctx)
-
}
for i, this := range []struct {
@@ -273,7 +272,7 @@ func createSortTestPages(num int) Pages {
for i := 0; i < num; i++ {
p := newTestPage()
p.path = fmt.Sprintf("/x/y/p%d.md", i)
- p.title = fmt.Sprintf("Title %d", i%(num+1/2))
+ p.title = fmt.Sprintf("Title %d", i%((num+1)/2))
p.params = map[string]any{
"arbitrarily": map[string]any{
"nested": ("xyz" + fmt.Sprintf("%v", 100-i)),
diff --git a/resources/page/permalinks.go b/resources/page/permalinks.go
index 4577f5240..1677d3a90 100644
--- a/resources/page/permalinks.go
+++ b/resources/page/permalinks.go
@@ -120,12 +120,18 @@ func (l PermalinkExpander) Expand(key string, p Page) (string, error) {
return expand(p)
}
+// Allow " " and / to represent the root section.
+var sectionCutSet = " /"
+
+func init() {
+ if string(os.PathSeparator) != "/" {
+ sectionCutSet += string(os.PathSeparator)
+ }
+}
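
A quick illustration of the cut set above (sketch only; the key value is invented): parse() below trims permalink keys with it, so a key written as " /blog/ " normalizes to "blog", and on Windows the OS path separator is trimmed as well.

	key := strings.Trim(" /blog/ ", sectionCutSet) // "blog"
	_ = key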
+
func (l PermalinkExpander) parse(patterns map[string]string) (map[string]func(Page) (string, error), error) {
expanders := make(map[string]func(Page) (string, error))
- // Allow " " and / to represent the root section.
- const sectionCutSet = " /" + string(os.PathSeparator)
-
for k, pattern := range patterns {
k = strings.Trim(k, sectionCutSet)
@@ -295,7 +301,7 @@ func (l PermalinkExpander) pageToPermalinkSections(p Page, _ string) (string, er
}
func (l PermalinkExpander) translationBaseName(p Page) string {
- if p.File().IsZero() {
+ if p.File() == nil {
return ""
}
return p.File().TranslationBaseName()
diff --git a/resources/page/permalinks_integration_test.go b/resources/page/permalinks_integration_test.go
index 6c2411ad7..9a76ac602 100644
--- a/resources/page/permalinks_integration_test.go
+++ b/resources/page/permalinks_integration_test.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -102,7 +102,6 @@ slug: "mytagslug"
"taxonomy": {"tags": "/tagsslug/:slug/"},
"term": {"tags": "/tagsslug/tag/:slug/"},
})
-
}
func TestPermalinksOldSetup(t *testing.T) {
@@ -145,7 +144,6 @@ slug: "p1slugvalue"
"taxonomy": {},
"term": {"withpageslug": "/pageslug/:slug/"},
})
-
}
func TestPermalinksNestedSections(t *testing.T) {
@@ -194,5 +192,4 @@ List.
b.AssertFileContent("public/libros/index.html", "List.")
b.AssertFileContent("public/libros/fiction/index.html", "List.")
b.AssertFileContent("public/libros/fiction/2023/book1/index.html", "Single.")
-
}
diff --git a/resources/page/permalinks_test.go b/resources/page/permalinks_test.go
index 194387d5c..a3a45bb88 100644
--- a/resources/page/permalinks_test.go
+++ b/resources/page/permalinks_test.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -202,7 +202,6 @@ func TestPermalinkExpansionSliceSyntax(t *testing.T) {
c.Assert(fn1("[:last]"), qt.DeepEquals, []string{})
c.Assert(fn1("[1:last]"), qt.DeepEquals, []string{})
c.Assert(fn1("[1]"), qt.DeepEquals, []string{})
-
})
c.Run("Out of bounds", func(c *qt.C) {
@@ -218,9 +217,7 @@ func TestPermalinkExpansionSliceSyntax(t *testing.T) {
c.Assert(fn4("[]"), qt.IsNil)
c.Assert(fn4("[1:}"), qt.IsNil)
c.Assert(fn4("foo"), qt.IsNil)
-
})
-
}
func BenchmarkPermalinkExpand(b *testing.B) {
diff --git a/resources/page/site.go b/resources/page/site.go
index 0480ce674..9ef76505d 100644
--- a/resources/page/site.go
+++ b/resources/page/site.go
@@ -21,7 +21,6 @@ import (
"github.com/gohugoio/hugo/config/privacy"
"github.com/gohugoio/hugo/config/services"
"github.com/gohugoio/hugo/identity"
- "github.com/gohugoio/hugo/tpl"
"github.com/gohugoio/hugo/config"
@@ -88,8 +87,12 @@ type Site interface {
Taxonomies() TaxonomyList
// Returns the last modification date of the content.
+ // Deprecated: Use .Lastmod instead.
LastChange() time.Time
+ // Returns the last modification date of the content.
+ Lastmod() time.Time
+
// Returns the Menus for this site.
Menus() navigation.Menus
@@ -108,10 +111,6 @@ type Site interface {
// Returns the site config.
Config() SiteConfig
- // Returns the identity of this site.
- // This is for internal use only.
- GetIdentity() identity.Identity
-
// Author is deprecated and will be removed in a future release.
Author() map[string]interface{}
@@ -127,9 +126,6 @@ type Site interface {
// Deprecated: Use Config().Privacy.Disqus instead.
DisqusShortname() string
- // For internal use only.
- GetPageWithTemplateInfo(info tpl.Info, ref ...string) (Page, error)
-
// BuildDrafts is deprecated and will be removed in a future release.
BuildDrafts() bool
@@ -154,6 +150,9 @@ func (s Sites) First() Site {
return s[0]
}
+// Additional interfaces implemented by siteWrapper that are not part of Site.
+var _ identity.ForEeachIdentityByNameProvider = (*siteWrapper)(nil)
+
type siteWrapper struct {
s Site
}
@@ -165,6 +164,10 @@ func WrapSite(s Site) Site {
return &siteWrapper{s: s}
}
+func (s *siteWrapper) Key() string {
+ return s.s.Language().Lang
+}
+
func (s *siteWrapper) Social() map[string]string {
return s.s.Social()
}
@@ -260,7 +263,11 @@ func (s *siteWrapper) Taxonomies() TaxonomyList {
}
func (s *siteWrapper) LastChange() time.Time {
- return s.s.LastChange()
+ return s.s.Lastmod()
+}
+
+func (s *siteWrapper) Lastmod() time.Time {
+ return s.s.Lastmod()
}
func (s *siteWrapper) Menus() navigation.Menus {
@@ -283,14 +290,6 @@ func (s *siteWrapper) Data() map[string]any {
return s.s.Data()
}
-func (s *siteWrapper) GetIdentity() identity.Identity {
- return s.s.GetIdentity()
-}
-
-func (s *siteWrapper) GetPageWithTemplateInfo(info tpl.Info, ref ...string) (Page, error) {
- return s.s.GetPageWithTemplateInfo(info, ref...)
-}
-
func (s *siteWrapper) BuildDrafts() bool {
return s.s.BuildDrafts()
}
@@ -312,6 +311,11 @@ func (s *siteWrapper) RSSLink() template.URL {
return s.s.RSSLink()
}
+// For internal use only.
+func (s *siteWrapper) ForEeachIdentityByName(name string, f func(identity.Identity) bool) {
+ s.s.(identity.ForEeachIdentityByNameProvider).ForEeachIdentityByName(name, f)
+}
+
type testSite struct {
h hugo.HugoInfo
l *langs.Language
@@ -341,6 +345,10 @@ func (testSite) LastChange() (t time.Time) {
return
}
+func (testSite) Lastmod() (t time.Time) {
+ return
+}
+
func (t testSite) Title() string {
return "foo"
}
@@ -386,10 +394,6 @@ func (t testSite) MainSections() []string {
return nil
}
-func (t testSite) GetIdentity() identity.Identity {
- return identity.KeyValueIdentity{Key: "site", Value: t.l.Lang}
-}
-
// Deprecated: use hugo.IsServer instead
func (t testSite) IsServer() bool {
return false
@@ -439,10 +443,6 @@ func (s testSite) Config() SiteConfig {
return SiteConfig{}
}
-func (testSite) GetPageWithTemplateInfo(info tpl.Info, ref ...string) (Page, error) {
- return nil, nil
-}
-
// Deprecated: Use .Site.Config.Services.Disqus.Shortname instead
func (testSite) DisqusShortname() string {
return ""
diff --git a/resources/page/siteidentities/identities.go b/resources/page/siteidentities/identities.go
new file mode 100644
index 000000000..8481999cf
--- /dev/null
+++ b/resources/page/siteidentities/identities.go
@@ -0,0 +1,34 @@
+// Copyright 2024 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package siteidentities
+
+import (
+ "github.com/gohugoio/hugo/identity"
+)
+
+const (
+ // Identifies site.Data.
+ // The change detection in /data is currently very coarse-grained.
+ Data = identity.StringIdentity("site.Data")
+)
+
+// FromString returns the identity from the given string,
+// or identity.Anonymous if not found.
+func FromString(name string) (identity.Identity, bool) {
+ switch name {
+ case "Data":
+ return Data, true
+ }
+ return identity.Anonymous, false
+}
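
A small usage sketch for the function above, grounded in the code as added:

	if id, found := siteidentities.FromString("Data"); found {
		// id == siteidentities.Data; usable for dependency/change tracking.
		_ = id
	}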
diff --git a/resources/page/taxonomy.go b/resources/page/taxonomy.go
index 3aa0c7a7b..66c9e6fae 100644
--- a/resources/page/taxonomy.go
+++ b/resources/page/taxonomy.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/resources/page/testhelpers_page_test.go b/resources/page/testhelpers_page_test.go
deleted file mode 100644
index 95124cb58..000000000
--- a/resources/page/testhelpers_page_test.go
+++ /dev/null
@@ -1,38 +0,0 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package page_test
-
-import (
- "github.com/gohugoio/hugo/common/loggers"
- "github.com/gohugoio/hugo/config"
- "github.com/gohugoio/hugo/config/testconfig"
- "github.com/gohugoio/hugo/helpers"
- "github.com/gohugoio/hugo/hugofs"
- "github.com/spf13/afero"
-)
-
-func newTestPathSpec() *helpers.PathSpec {
- return newTestPathSpecFor(config.New())
-}
-
-func newTestPathSpecFor(cfg config.Provider) *helpers.PathSpec {
- mfs := afero.NewMemMapFs()
- conf := testconfig.GetTestConfig(mfs, cfg)
- fs := hugofs.NewFrom(mfs, conf.BaseConfig())
- ps, err := helpers.NewPathSpec(fs, conf, loggers.NewDefault())
- if err != nil {
- panic(err)
- }
- return ps
-}
diff --git a/resources/page/testhelpers_test.go b/resources/page/testhelpers_test.go
index ca2c4ff53..e80ed422d 100644
--- a/resources/page/testhelpers_test.go
+++ b/resources/page/testhelpers_test.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -21,10 +21,7 @@ import (
"path/filepath"
"time"
- "github.com/gohugoio/hugo/hugofs/files"
- "github.com/gohugoio/hugo/identity"
"github.com/gohugoio/hugo/markup/tableofcontents"
- "github.com/gohugoio/hugo/tpl"
"github.com/gohugoio/hugo/resources/resource"
@@ -32,6 +29,7 @@ import (
"github.com/gohugoio/hugo/common/hugo"
"github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/common/paths"
"github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/hugofs"
"github.com/gohugoio/hugo/langs"
@@ -54,7 +52,7 @@ func newTestPage() *testPage {
func newTestPageWithFile(filename string) *testPage {
filename = filepath.FromSlash(filename)
- file := source.NewTestFile(filename)
+ file := source.NewFileInfoFrom(filename, filename)
l, err := langs.NewLanguage(
"en",
@@ -107,7 +105,7 @@ type testPage struct {
params map[string]any
data map[string]any
- file source.File
+ file *source.File
currentSection *testPage
sectionEntries []string
@@ -141,7 +139,7 @@ func (p *testPage) BaseFileName() string {
panic("testpage: not implemented")
}
-func (p *testPage) BundleType() files.ContentClass {
+func (p *testPage) BundleType() string {
panic("testpage: not implemented")
}
@@ -201,7 +199,7 @@ func (p *testPage) Extension() string {
panic("testpage: not implemented")
}
-func (p *testPage) File() source.File {
+func (p *testPage) File() *source.File {
return p.file
}
@@ -225,10 +223,6 @@ func (p *testPage) GetPage(ref string) (Page, error) {
panic("testpage: not implemented")
}
-func (p *testPage) GetPageWithTemplateInfo(info tpl.Info, ref string) (Page, error) {
- panic("testpage: not implemented")
-}
-
func (p *testPage) GetParam(key string) any {
panic("testpage: not implemented")
}
@@ -261,15 +255,15 @@ func (p *testPage) Hugo() hugo.HugoInfo {
panic("testpage: not implemented")
}
-func (p *testPage) InSection(other any) (bool, error) {
+func (p *testPage) InSection(other any) bool {
panic("testpage: not implemented")
}
-func (p *testPage) IsAncestor(other any) (bool, error) {
+func (p *testPage) IsAncestor(other any) bool {
panic("testpage: not implemented")
}
-func (p *testPage) IsDescendant(other any) (bool, error) {
+func (p *testPage) IsDescendant(other any) bool {
panic("testpage: not implemented")
}
@@ -301,6 +295,10 @@ func (p *testPage) IsTranslated() bool {
panic("testpage: not implemented")
}
+func (p *testPage) Ancestors() Pages {
+ panic("testpage: not implemented")
+}
+
func (p *testPage) Keywords() []string {
return nil
}
@@ -415,16 +413,12 @@ func (p *testPage) Parent() Page {
panic("testpage: not implemented")
}
-func (p *testPage) Ancestors() Pages {
- panic("testpage: not implemented")
-}
-
func (p *testPage) Path() string {
return p.path
}
-func (p *testPage) Pathc() string {
- return p.path
+func (p *testPage) PathInfo() *paths.Path {
+ panic("testpage: not implemented")
}
func (p *testPage) Permalink() string {
@@ -604,10 +598,6 @@ func (p *testPage) WordCount(context.Context) int {
panic("testpage: not implemented")
}
-func (p *testPage) GetIdentity() identity.Identity {
- panic("testpage: not implemented")
-}
-
func createTestPages(num int) Pages {
pages := make(Pages, num)
diff --git a/resources/page/zero_file.autogen.go b/resources/page/zero_file.autogen.go
index 72d98998e..4b7c034a1 100644
--- a/resources/page/zero_file.autogen.go
+++ b/resources/page/zero_file.autogen.go
@@ -14,75 +14,3 @@
// This file is autogenerated.
package page
-
-import (
- "github.com/gohugoio/hugo/common/loggers"
- "github.com/gohugoio/hugo/hugofs"
- "github.com/gohugoio/hugo/source"
-)
-
-// ZeroFile represents a zero value of source.File with warnings if invoked.
-type zeroFile struct {
- log loggers.Logger
-}
-
-func NewZeroFile(log loggers.Logger) source.File {
- return zeroFile{log: log}
-}
-
-func (zeroFile) IsZero() bool {
- return true
-}
-
-func (z zeroFile) Path() (o0 string) {
- z.log.Warnln(".File.Path on zero object. Wrap it in if or with: {{ with .File }}{{ .Path }}{{ end }}")
- return
-}
-func (z zeroFile) Section() (o0 string) {
- z.log.Warnln(".File.Section on zero object. Wrap it in if or with: {{ with .File }}{{ .Section }}{{ end }}")
- return
-}
-func (z zeroFile) Lang() (o0 string) {
- z.log.Warnln(".File.Lang on zero object. Wrap it in if or with: {{ with .File }}{{ .Lang }}{{ end }}")
- return
-}
-func (z zeroFile) Filename() (o0 string) {
- z.log.Warnln(".File.Filename on zero object. Wrap it in if or with: {{ with .File }}{{ .Filename }}{{ end }}")
- return
-}
-func (z zeroFile) Dir() (o0 string) {
- z.log.Warnln(".File.Dir on zero object. Wrap it in if or with: {{ with .File }}{{ .Dir }}{{ end }}")
- return
-}
-func (z zeroFile) Extension() (o0 string) {
- z.log.Warnln(".File.Extension on zero object. Wrap it in if or with: {{ with .File }}{{ .Extension }}{{ end }}")
- return
-}
-func (z zeroFile) Ext() (o0 string) {
- z.log.Warnln(".File.Ext on zero object. Wrap it in if or with: {{ with .File }}{{ .Ext }}{{ end }}")
- return
-}
-func (z zeroFile) LogicalName() (o0 string) {
- z.log.Warnln(".File.LogicalName on zero object. Wrap it in if or with: {{ with .File }}{{ .LogicalName }}{{ end }}")
- return
-}
-func (z zeroFile) BaseFileName() (o0 string) {
- z.log.Warnln(".File.BaseFileName on zero object. Wrap it in if or with: {{ with .File }}{{ .BaseFileName }}{{ end }}")
- return
-}
-func (z zeroFile) TranslationBaseName() (o0 string) {
- z.log.Warnln(".File.TranslationBaseName on zero object. Wrap it in if or with: {{ with .File }}{{ .TranslationBaseName }}{{ end }}")
- return
-}
-func (z zeroFile) ContentBaseName() (o0 string) {
- z.log.Warnln(".File.ContentBaseName on zero object. Wrap it in if or with: {{ with .File }}{{ .ContentBaseName }}{{ end }}")
- return
-}
-func (z zeroFile) UniqueID() (o0 string) {
- z.log.Warnln(".File.UniqueID on zero object. Wrap it in if or with: {{ with .File }}{{ .UniqueID }}{{ end }}")
- return
-}
-func (z zeroFile) FileInfo() (o0 hugofs.FileMetaInfo) {
- z.log.Warnln(".File.FileInfo on zero object. Wrap it in if or with: {{ with .File }}{{ .FileInfo }}{{ end }}")
- return
-}
diff --git a/resources/postpub/postpub.go b/resources/postpub/postpub.go
index 93b5c2638..65e32145c 100644
--- a/resources/postpub/postpub.go
+++ b/resources/postpub/postpub.go
@@ -31,7 +31,7 @@ import (
type PostPublishedResource interface {
resource.ResourceTypeProvider
resource.ResourceLinksProvider
- resource.ResourceMetaProvider
+ resource.ResourceNameTitleProvider
resource.ResourceParamsProvider
resource.ResourceDataProvider
resource.OriginProvider
diff --git a/resources/resource.go b/resources/resource.go
index b7e6b65a8..e78dd12cb 100644
--- a/resources/resource.go
+++ b/resources/resource.go
@@ -1,4 +1,4 @@
-// Copyright 2022 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -15,68 +15,55 @@ package resources
import (
"context"
+ "errors"
"fmt"
"io"
- "os"
- "path"
- "path/filepath"
+ "mime"
"strings"
"sync"
+ "sync/atomic"
+ "github.com/gohugoio/hugo/identity"
"github.com/gohugoio/hugo/resources/internal"
"github.com/gohugoio/hugo/common/herrors"
-
- "github.com/gohugoio/hugo/hugofs"
+ "github.com/gohugoio/hugo/common/paths"
"github.com/gohugoio/hugo/media"
- "github.com/gohugoio/hugo/source"
-
- "errors"
"github.com/gohugoio/hugo/common/hugio"
"github.com/gohugoio/hugo/common/maps"
- "github.com/gohugoio/hugo/resources/page"
"github.com/gohugoio/hugo/resources/resource"
- "github.com/spf13/afero"
"github.com/gohugoio/hugo/helpers"
)
var (
- _ resource.ContentResource = (*genericResource)(nil)
- _ resource.ReadSeekCloserResource = (*genericResource)(nil)
- _ resource.Resource = (*genericResource)(nil)
- _ resource.Source = (*genericResource)(nil)
- _ resource.Cloner = (*genericResource)(nil)
- _ resource.ResourcesLanguageMerger = (*resource.Resources)(nil)
- _ permalinker = (*genericResource)(nil)
- _ resource.Identifier = (*genericResource)(nil)
- _ fileInfo = (*genericResource)(nil)
+ _ resource.ContentResource = (*genericResource)(nil)
+ _ resource.ReadSeekCloserResource = (*genericResource)(nil)
+ _ resource.Resource = (*genericResource)(nil)
+ _ resource.Source = (*genericResource)(nil)
+ _ resource.Cloner = (*genericResource)(nil)
+ _ resource.ResourcesLanguageMerger = (*resource.Resources)(nil)
+ _ resource.Identifier = (*genericResource)(nil)
+ _ identity.IdentityGroupProvider = (*genericResource)(nil)
+ _ identity.DependencyManagerProvider = (*genericResource)(nil)
+ _ identity.Identity = (*genericResource)(nil)
+ _ fileInfo = (*genericResource)(nil)
)
type ResourceSourceDescriptor struct {
- // TargetPaths is a callback to fetch paths's relative to its owner.
- TargetPaths func() page.TargetPaths
-
- // Need one of these to load the resource content.
- SourceFile source.File
- OpenReadSeekCloser resource.OpenReadSeekCloser
-
- FileInfo os.FileInfo
-
- // If OpenReadSeekerCloser is not set, we use this to open the file.
- SourceFilename string
+ // The source content.
+ OpenReadSeekCloser hugio.OpenReadSeekCloser
- Fs afero.Fs
+ // The canonical source path.
+ Path *paths.Path
- Data map[string]any
-
- // Set when its known up front, else it's resolved from the target filename.
- MediaType media.Type
+ // The name of the resource.
+ Name string
- // The relative target filename without any language code.
- RelTargetFilename string
+ // The name of the resource as it was read from the source.
+ NameOriginal string
// Any base paths prepended to the target path. This will also typically be the
// language code, but setting it here means that it should not have any effect on
@@ -85,15 +72,109 @@ type ResourceSourceDescriptor struct {
// multiple targets.
TargetBasePaths []string
+ TargetPath string
+ BasePathRelPermalink string
+ BasePathTargetPath string
+
+ // The Data to associate with this resource.
+ Data map[string]any
+
// Delay publishing until either Permalink or RelPermalink is called. Maybe never.
LazyPublish bool
+
+ // Set when it's known up front; otherwise it's resolved from the target filename.
+ MediaType media.Type
+
+ // Used to track dependencies (e.g. imports). May be nil if that's of no concern.
+ DependencyManager identity.Manager
+
+ // A shared identity for this resource and all its clones.
+ // If this is not set, an Identity is created.
+ GroupIdentity identity.Identity
}
-func (r ResourceSourceDescriptor) Filename() string {
- if r.SourceFile != nil {
- return r.SourceFile.Filename()
+func (fd *ResourceSourceDescriptor) init(r *Spec) error {
+ if len(fd.TargetBasePaths) == 0 {
+ // If not set, we publish the same resource to all hosts.
+ fd.TargetBasePaths = r.MultihostTargetBasePaths
+ }
+
+ if fd.OpenReadSeekCloser == nil {
+ panic(errors.New("OpenReadSeekCloser is nil"))
+ }
+
+ if fd.TargetPath == "" {
+ panic(errors.New("RelPath is empty"))
+ }
+
+ if fd.Path == nil {
+ fd.Path = paths.Parse("", fd.TargetPath)
+ }
+
+ if fd.TargetPath == "" {
+ fd.TargetPath = fd.Path.Path()
+ } else {
+ fd.TargetPath = paths.ToSlashPreserveLeading(fd.TargetPath)
}
- return r.SourceFilename
+
+ fd.BasePathRelPermalink = paths.ToSlashPreserveLeading(fd.BasePathRelPermalink)
+ if fd.BasePathRelPermalink == "/" {
+ fd.BasePathRelPermalink = ""
+ }
+ fd.BasePathTargetPath = paths.ToSlashPreserveLeading(fd.BasePathTargetPath)
+ if fd.BasePathTargetPath == "/" {
+ fd.BasePathTargetPath = ""
+ }
+
+ fd.TargetPath = paths.ToSlashPreserveLeading(fd.TargetPath)
+ for i, base := range fd.TargetBasePaths {
+ dir := paths.ToSlashPreserveLeading(base)
+ if dir == "/" {
+ dir = ""
+ }
+ fd.TargetBasePaths[i] = dir
+ }
+
+ if fd.Name == "" {
+ fd.Name = fd.TargetPath
+ }
+
+ if fd.NameOriginal == "" {
+ fd.NameOriginal = fd.Name
+ }
+
+ mediaType := fd.MediaType
+ if mediaType.IsZero() {
+ ext := fd.Path.Ext()
+ var (
+ found bool
+ suffixInfo media.SuffixInfo
+ )
+ mediaType, suffixInfo, found = r.MediaTypes().GetFirstBySuffix(ext)
+ // TODO(bep) we need to handle these ambiguous types better, but in this context
+ // we most likely want the application/xml type.
+ if suffixInfo.Suffix == "xml" && mediaType.SubType == "rss" {
+ mediaType, found = r.MediaTypes().GetByType("application/xml")
+ }
+
+ if !found {
+ // A fallback. Note that mime.TypeByExtension is slow by Hugo standards,
+ // so we should configure media types to avoid this lookup for most
+ // situations.
+ mimeStr := mime.TypeByExtension("." + ext)
+ if mimeStr != "" {
+ mediaType, _ = media.FromStringAndExt(mimeStr, ext)
+ }
+ }
+ }
+
+ fd.MediaType = mediaType
+
+ if fd.DependencyManager == nil {
+ fd.DependencyManager = identity.NopManager
+ }
+
+ return nil
}
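
For context, a minimal sketch of a descriptor that passes init() above; the target path and content are invented for illustration. Only OpenReadSeekCloser and TargetPath are required up front — Name, NameOriginal, Path and MediaType are derived in init().

	sd := ResourceSourceDescriptor{
		TargetPath: "css/main.css",
		OpenReadSeekCloser: func() (hugio.ReadSeekCloser, error) {
			return hugio.NewReadSeekerNoOpCloserFromString("body{}"), nil
		},
	}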
type ResourceTransformer interface {
@@ -147,23 +228,25 @@ type baseResourceResource interface {
type baseResourceInternal interface {
resource.Source
+ resource.NameOriginalProvider
fileInfo
- metaAssigner
+ mediaTypeAssigner
targetPather
ReadSeekCloser() (hugio.ReadSeekCloser, error)
+ identity.IdentityGroupProvider
+ identity.DependencyManagerProvider
+
// For internal use.
cloneWithUpdates(*transformationUpdate) (baseResource, error)
tryTransformedFileCache(key string, u *transformationUpdate) io.ReadCloser
+ getResourcePaths() internal.ResourcePaths
+
specProvider
- getResourcePaths() *resourcePathDescriptor
- getTargetFilenames() []string
openPublishFileForWriting(relTargetPath string) (io.WriteCloser, error)
-
- relTargetPathForRel(rel string, addBaseTargetPath, isAbs, isURL bool) string
}
type specProvider interface {
@@ -173,10 +256,10 @@ type specProvider interface {
type baseResource interface {
baseResourceResource
baseResourceInternal
+ resource.Staler
}
-type commonResource struct {
-}
+type commonResource struct{}
// Slice is for internal use.
// for the template functions. See collections.Slice.
@@ -201,60 +284,131 @@ func (commonResource) Slice(in any) (any, error) {
}
}
-type dirFile struct {
- // This is the directory component with Unix-style slashes.
- dir string
- // This is the file component.
- file string
+type fileInfo interface {
+ setOpenSource(hugio.OpenReadSeekCloser)
+ setSourceFilenameIsHash(bool)
+ setTargetPath(internal.ResourcePaths)
+ size() int64
+ hashProvider
+}
+
+type hashProvider interface {
+ hash() string
}
-func (d dirFile) path() string {
- return path.Join(d.dir, d.file)
+type StaleValue[V any] struct {
+ // The value.
+ Value V
+
+ // IsStaleFunc reports whether the value is stale.
+ IsStaleFunc func() bool
}
-type fileInfo interface {
- getSourceFilename() string
- setSourceFilename(string)
- setSourceFilenameIsHash(bool)
- setSourceFs(afero.Fs)
- getFileInfo() hugofs.FileMetaInfo
- hash() (string, error)
- size() int
+func (s *StaleValue[V]) IsStale() bool {
+ return s.IsStaleFunc()
+}
+
+type AtomicStaler struct {
+ stale uint32
+}
+
+func (s *AtomicStaler) MarkStale() {
+ atomic.StoreUint32(&s.stale, 1)
+}
+
+func (s *AtomicStaler) IsStale() bool {
+ return atomic.LoadUint32(&(s.stale)) > 0
+}
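
A brief usage sketch for the two helpers above (illustrative values only):

	var staler AtomicStaler
	v := &StaleValue[string]{
		Value:       "cached",
		IsStaleFunc: staler.IsStale, // delegate staleness to the atomic flag
	}
	staler.MarkStale()
	_ = v.IsStale() // now true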
+
+// For internal use.
+type GenericResourceTestInfo struct {
+ Paths internal.ResourcePaths
+}
+
+// For internal use.
+func GetTestInfoForResource(r resource.Resource) GenericResourceTestInfo {
+ var gr *genericResource
+ switch v := r.(type) {
+ case *genericResource:
+ gr = v
+ case *resourceAdapter:
+ gr = v.target.(*genericResource)
+ default:
+ panic(fmt.Sprintf("unknown resource type: %T", r))
+ }
+ return GenericResourceTestInfo{
+ Paths: gr.paths,
+ }
}
// genericResource represents a generic linkable resource.
type genericResource struct {
- *resourcePathDescriptor
- *resourceFileInfo
*resourceContent
- spec *Spec
+ sd ResourceSourceDescriptor
+ paths internal.ResourcePaths
+
+ sourceFilenameIsHash bool
+
+ h *resourceHash // A hash of the source content. Is only calculated in caching situations.
+
+ resource.Staler
title string
name string
params map[string]any
- data map[string]any
- resourceType string
- mediaType media.Type
+ spec *Spec
+}
+
+func (l *genericResource) IdentifierBase() string {
+ return l.sd.Path.IdentifierBase()
+}
+
+func (l *genericResource) GetIdentityGroup() identity.Identity {
+ return l.sd.GroupIdentity
+}
+
+func (l *genericResource) GetDependencyManager() identity.Manager {
+ return l.sd.DependencyManager
+}
+
+func (l *genericResource) ReadSeekCloser() (hugio.ReadSeekCloser, error) {
+ return l.sd.OpenReadSeekCloser()
}
func (l *genericResource) Clone() resource.Resource {
return l.clone()
}
-func (l *genericResource) cloneTo(targetPath string) resource.Resource {
- c := l.clone()
-
- targetPath = helpers.ToSlashTrimLeading(targetPath)
- dir, file := path.Split(targetPath)
+func (l *genericResource) size() int64 {
+ l.hash()
+ return l.h.size
+}
- c.resourcePathDescriptor = &resourcePathDescriptor{
- relTargetDirFile: dirFile{dir: dir, file: file},
+func (l *genericResource) hash() string {
+ if err := l.h.init(l); err != nil {
+ panic(err)
}
+ return l.h.value
+}
- return c
+func (l *genericResource) setOpenSource(openSource hugio.OpenReadSeekCloser) {
+ l.sd.OpenReadSeekCloser = openSource
+}
+func (l *genericResource) setSourceFilenameIsHash(b bool) {
+ l.sourceFilenameIsHash = b
+}
+
+func (l *genericResource) setTargetPath(d internal.ResourcePaths) {
+ l.paths = d
+}
+
+func (l *genericResource) cloneTo(targetPath string) resource.Resource {
+ c := l.clone()
+ c.paths = c.paths.FromTargetPath(targetPath)
+ return c
}
func (l *genericResource) Content(context.Context) (any, error) {
@@ -270,41 +424,50 @@ func (r *genericResource) Err() resource.ResourceError {
}
func (l *genericResource) Data() any {
- return l.data
+ return l.sd.Data
}
func (l *genericResource) Key() string {
- basePath := l.spec.Cfg.BaseURL().BasePath
+ basePath := l.spec.Cfg.BaseURL().BasePathNoTrailingSlash
+ var key string
if basePath == "" {
- return l.RelPermalink()
+ key = l.RelPermalink()
+ } else {
+ key = strings.TrimPrefix(l.RelPermalink(), basePath)
}
- return strings.TrimPrefix(l.RelPermalink(), basePath)
+
+ if l.spec.Cfg.IsMultihost() {
+ key = l.spec.Lang() + key
+ }
+
+ return key
}
func (l *genericResource) MediaType() media.Type {
- return l.mediaType
+ return l.sd.MediaType
}
func (l *genericResource) setMediaType(mediaType media.Type) {
- l.mediaType = mediaType
+ l.sd.MediaType = mediaType
}
func (l *genericResource) Name() string {
return l.name
}
-func (l *genericResource) Params() maps.Params {
- return l.params
+func (l *genericResource) NameOriginal() string {
+ return l.sd.NameOriginal
}
-func (l *genericResource) Permalink() string {
- return l.spec.PermalinkForBaseURL(l.relPermalinkForRel(l.relTargetDirFile.path(), true), l.spec.Cfg.BaseURL().HostURL())
+func (l *genericResource) Params() maps.Params {
+ return l.params
}
func (l *genericResource) Publish() error {
var err error
l.publishInit.Do(func() {
- targetFilenames := l.getTargetFilenames()
+ targetFilenames := l.getResourcePaths().TargetFilenames()
+
if l.sourceFilenameIsHash {
// This is a processed image. We want to avoid copying it if it hasn't changed.
var changedFilenames []string
@@ -340,40 +503,30 @@ func (l *genericResource) Publish() error {
}
func (l *genericResource) RelPermalink() string {
- return l.relPermalinkFor(l.relTargetDirFile.path())
+ return l.spec.PathSpec.GetBasePath(false) + paths.PathEscape(l.paths.TargetLink())
+}
+
+func (l *genericResource) Permalink() string {
+ return l.spec.Cfg.BaseURL().WithPathNoTrailingSlash + paths.PathEscape(l.paths.TargetPath())
}
func (l *genericResource) ResourceType() string {
- return l.resourceType
+ return l.MediaType().MainType
}
func (l *genericResource) String() string {
- return fmt.Sprintf("Resource(%s: %s)", l.resourceType, l.name)
+ return fmt.Sprintf("Resource(%s: %s)", l.ResourceType(), l.name)
}
// Path is stored with Unix style slashes.
func (l *genericResource) TargetPath() string {
- return l.relTargetDirFile.path()
+ return l.paths.TargetPath()
}
func (l *genericResource) Title() string {
return l.title
}
-func (l *genericResource) createBasePath(rel string, isURL bool) string {
- if l.targetPathBuilder == nil {
- return rel
- }
- tp := l.targetPathBuilder()
-
- if isURL {
- return path.Join(tp.SubResourceBaseLink, rel)
- }
-
- // TODO(bep) path
- return path.Join(filepath.ToSlash(tp.SubResourceBaseTarget), rel)
-}
-
func (l *genericResource) initContent() error {
var err error
l.contentInit.Do(func() {
@@ -396,28 +549,12 @@ func (l *genericResource) initContent() error {
return err
}
-func (l *genericResource) setName(name string) {
- l.name = name
-}
-
-func (l *genericResource) getResourcePaths() *resourcePathDescriptor {
- return l.resourcePathDescriptor
-}
-
func (l *genericResource) getSpec() *Spec {
return l.spec
}
-func (l *genericResource) getTargetFilenames() []string {
- paths := l.relTargetPaths()
- for i, p := range paths {
- paths[i] = filepath.Clean(p)
- }
- return paths
-}
-
-func (l *genericResource) setTitle(title string) {
- l.title = title
+func (l *genericResource) getResourcePaths() internal.ResourcePaths {
+ return l.paths
}
func (r *genericResource) tryTransformedFileCache(key string, u *transformationUpdate) io.ReadCloser {
@@ -437,12 +574,12 @@ func (r *genericResource) mergeData(in map[string]any) {
if len(in) == 0 {
return
}
- if r.data == nil {
- r.data = make(map[string]any)
+ if r.sd.Data == nil {
+ r.sd.Data = make(map[string]any)
}
for k, v := range in {
- if _, found := r.data[k]; !found {
- r.data[k] = v
+ if _, found := r.sd.Data[k]; !found {
+ r.sd.Data[k] = v
}
}
}
@@ -453,142 +590,49 @@ func (rc *genericResource) cloneWithUpdates(u *transformationUpdate) (baseResour
if u.content != nil {
r.contentInit.Do(func() {
r.content = *u.content
- r.openReadSeekerCloser = func() (hugio.ReadSeekCloser, error) {
+ r.sd.OpenReadSeekCloser = func() (hugio.ReadSeekCloser, error) {
return hugio.NewReadSeekerNoOpCloserFromString(r.content), nil
}
})
}
- r.mediaType = u.mediaType
+ r.sd.MediaType = u.mediaType
if u.sourceFilename != nil {
- r.setSourceFilename(*u.sourceFilename)
- }
-
- if u.sourceFs != nil {
- r.setSourceFs(u.sourceFs)
+ if u.sourceFs == nil {
+ return nil, errors.New("sourceFs is nil")
+ }
+ r.setOpenSource(func() (hugio.ReadSeekCloser, error) {
+ return u.sourceFs.Open(*u.sourceFilename)
+ })
+ } else if u.sourceFs != nil {
+ return nil, errors.New("sourceFs is set without sourceFilename")
}
if u.targetPath == "" {
return nil, errors.New("missing targetPath")
}
- fpath, fname := path.Split(u.targetPath)
- r.resourcePathDescriptor.relTargetDirFile = dirFile{dir: fpath, file: fname}
-
+ r.setTargetPath(r.paths.FromTargetPath(u.targetPath))
r.mergeData(u.data)
return r, nil
}
func (l genericResource) clone() *genericResource {
- gi := *l.resourceFileInfo
- rp := *l.resourcePathDescriptor
- l.resourceFileInfo = &gi
- l.resourcePathDescriptor = &rp
l.resourceContent = &resourceContent{}
return &l
}
func (r *genericResource) openPublishFileForWriting(relTargetPath string) (io.WriteCloser, error) {
- return helpers.OpenFilesForWriting(r.spec.BaseFs.PublishFs, r.relTargetPathsFor(relTargetPath)...)
-}
-
-func (l *genericResource) permalinkFor(target string) string {
- return l.spec.PermalinkForBaseURL(l.relPermalinkForRel(target, true), l.spec.Cfg.BaseURL().HostURL())
-}
-
-func (l *genericResource) relPermalinkFor(target string) string {
- return l.relPermalinkForRel(target, false)
-}
-
-func (l *genericResource) relPermalinkForRel(rel string, isAbs bool) string {
- return l.spec.PathSpec.URLizeFilename(l.relTargetPathForRel(rel, false, isAbs, true))
-}
-
-func (l *genericResource) relTargetPathForRel(rel string, addBaseTargetPath, isAbs, isURL bool) string {
- if addBaseTargetPath && len(l.baseTargetPathDirs) > 1 {
- panic("multiple baseTargetPathDirs")
- }
- var basePath string
- if addBaseTargetPath && len(l.baseTargetPathDirs) > 0 {
- basePath = l.baseTargetPathDirs[0]
- }
-
- return l.relTargetPathForRelAndBasePath(rel, basePath, isAbs, isURL)
-}
-
-func (l *genericResource) relTargetPathForRelAndBasePath(rel, basePath string, isAbs, isURL bool) string {
- rel = l.createBasePath(rel, isURL)
-
- if basePath != "" {
- rel = path.Join(basePath, rel)
- }
-
- if l.baseOffset != "" {
- rel = path.Join(l.baseOffset, rel)
- }
-
- if isURL {
- bp := l.spec.PathSpec.GetBasePath(!isAbs)
- if bp != "" {
- rel = path.Join(bp, rel)
- }
- }
-
- if len(rel) == 0 || rel[0] != '/' {
- rel = "/" + rel
- }
-
- return rel
-}
-
-func (l *genericResource) relTargetPaths() []string {
- return l.relTargetPathsForRel(l.TargetPath())
-}
-
-func (l *genericResource) relTargetPathsFor(target string) []string {
- return l.relTargetPathsForRel(target)
-}
-
-func (l *genericResource) relTargetPathsForRel(rel string) []string {
- if len(l.baseTargetPathDirs) == 0 {
- return []string{l.relTargetPathForRelAndBasePath(rel, "", false, false)}
- }
-
- targetPaths := make([]string, len(l.baseTargetPathDirs))
- for i, dir := range l.baseTargetPathDirs {
- targetPaths[i] = l.relTargetPathForRelAndBasePath(rel, dir, false, false)
- }
- return targetPaths
-}
-
-func (l *genericResource) updateParams(params map[string]any) {
- if l.params == nil {
- l.params = params
- return
- }
-
- // Sets the params not already set
- for k, v := range params {
- if _, found := l.params[k]; !found {
- l.params[k] = v
- }
- }
+ filenames := r.paths.FromTargetPath(relTargetPath).TargetFilenames()
+ return helpers.OpenFilesForWriting(r.spec.BaseFs.PublishFs, filenames...)
}
type targetPather interface {
TargetPath() string
}
-type permalinker interface {
- targetPather
- permalinkFor(target string) string
- relPermalinkFor(target string) string
- relTargetPaths() []string
- relTargetPathsFor(target string) []string
-}
-
type resourceContent struct {
content string
contentInit sync.Once
@@ -596,113 +640,31 @@ type resourceContent struct {
publishInit sync.Once
}
-type resourceFileInfo struct {
- // Will be set if this resource is backed by something other than a file.
- openReadSeekerCloser resource.OpenReadSeekCloser
-
- // This may be set to tell us to look in another filesystem for this resource.
- // We, by default, use the sourceFs filesystem in the spec below.
- sourceFs afero.Fs
-
- // Absolute filename to the source, including any content folder path.
- // Note that this is absolute in relation to the filesystem it is stored in.
- // It can be a base path filesystem, and then this filename will not match
- // the path to the file on the real filesystem.
- sourceFilename string
-
- // For performance. This means that whenever the content changes, the filename changes.
- sourceFilenameIsHash bool
-
- fi hugofs.FileMetaInfo
-
- // A hash of the source content. Is only calculated in caching situations.
- h *resourceHash
-}
-
-func (fi *resourceFileInfo) ReadSeekCloser() (hugio.ReadSeekCloser, error) {
- if fi.openReadSeekerCloser != nil {
- return fi.openReadSeekerCloser()
- }
-
- f, err := fi.getSourceFs().Open(fi.getSourceFilename())
- if err != nil {
- return nil, err
- }
- return f, nil
-}
-
-func (fi *resourceFileInfo) getFileInfo() hugofs.FileMetaInfo {
- return fi.fi
-}
-
-func (fi *resourceFileInfo) getSourceFilename() string {
- return fi.sourceFilename
-}
-
-func (fi *resourceFileInfo) setSourceFilename(s string) {
- // Make sure it's always loaded by sourceFilename.
- fi.openReadSeekerCloser = nil
- fi.sourceFilename = s
-}
-
-func (fi *resourceFileInfo) setSourceFilenameIsHash(b bool) {
- fi.sourceFilenameIsHash = b
-}
-
-func (fi *resourceFileInfo) getSourceFs() afero.Fs {
- return fi.sourceFs
-}
-
-func (fi *resourceFileInfo) setSourceFs(fs afero.Fs) {
- fi.sourceFs = fs
+type resourceHash struct {
+ value string
+ size int64
+ initOnce sync.Once
}
-func (fi *resourceFileInfo) hash() (string, error) {
- var err error
- fi.h.init.Do(func() {
+func (r *resourceHash) init(l hugio.ReadSeekCloserProvider) error {
+ var initErr error
+ r.initOnce.Do(func() {
var hash string
- var f hugio.ReadSeekCloser
- f, err = fi.ReadSeekCloser()
+ var size int64
+ f, err := l.ReadSeekCloser()
if err != nil {
- err = fmt.Errorf("failed to open source file: %w", err)
+ initErr = fmt.Errorf("failed to open source: %w", err)
return
}
defer f.Close()
-
- hash, err = helpers.MD5FromFileFast(f)
+ hash, size, err = helpers.MD5FromReaderFast(f)
if err != nil {
+ initErr = fmt.Errorf("failed to calculate hash: %w", err)
return
}
- fi.h.value = hash
+ r.value = hash
+ r.size = size
})
- return fi.h.value, err
-}
-
-func (fi *resourceFileInfo) size() int {
- if fi.fi == nil {
- return 0
- }
-
- return int(fi.fi.Size())
-}
-
-type resourceHash struct {
- value string
- init sync.Once
-}
-
-type resourcePathDescriptor struct {
- // The relative target directory and filename.
- relTargetDirFile dirFile
-
- // Callback used to construct a target path relative to its owner.
- targetPathBuilder func() page.TargetPaths
-
- // This will normally be the same as above, but this will only apply to publishing
- // of resources. It may be multiple values when in multihost mode.
- baseTargetPathDirs []string
-
- // baseOffset is set when the output format's path has a offset, e.g. for AMP.
- baseOffset string
+ return initErr
}
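The once-guarded hash init above is a standard lazy-initialization pattern. A minimal sketch of the same idea with a plain reader follows; helpers.MD5FromReaderFast is assumed to return (hash, size, err) as used above, so the sketch substitutes the standard library:

package main

import (
	"crypto/md5"
	"encoding/hex"
	"fmt"
	"io"
	"strings"
	"sync"
)

// lazyHash computes an MD5 and byte size at most once, mirroring resourceHash.init.
type lazyHash struct {
	value string
	size  int64
	once  sync.Once
}

func (h *lazyHash) init(open func() (io.ReadCloser, error)) error {
	var initErr error
	h.once.Do(func() {
		r, err := open()
		if err != nil {
			initErr = fmt.Errorf("failed to open source: %w", err)
			return
		}
		defer r.Close()
		m := md5.New()
		n, err := io.Copy(m, r)
		if err != nil {
			initErr = fmt.Errorf("failed to calculate hash: %w", err)
			return
		}
		h.value = hex.EncodeToString(m.Sum(nil))
		h.size = n
	})
	return initErr
}

func main() {
	h := &lazyHash{}
	open := func() (io.ReadCloser, error) { return io.NopCloser(strings.NewReader("abc")), nil }
	_ = h.init(open)
	fmt.Println(h.value, h.size) // 900150983cd24fb0d6963f7d28e17f72 3
}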
diff --git a/resources/resource/dates.go b/resources/resource/dates.go
index 6d19ca7b9..88968750d 100644
--- a/resources/resource/dates.go
+++ b/resources/resource/dates.go
@@ -45,6 +45,10 @@ type Dates struct {
FExpiryDate time.Time
}
+func (d *Dates) IsDateOrLastModAfter(in Dated) bool {
+ return d.Date().After(in.Date()) || d.Lastmod().After(in.Lastmod())
+}
+
func (d *Dates) UpdateDateAndLastmodIfAfter(in Dated) {
if in.Date().After(d.Date()) {
d.FDate = in.Date()
diff --git a/resources/resource/resources.go b/resources/resource/resources.go
index 795fe1934..9f298b7a6 100644
--- a/resources/resource/resources.go
+++ b/resources/resource/resources.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -18,6 +18,7 @@ import (
"fmt"
"strings"
+ "github.com/gohugoio/hugo/common/paths"
"github.com/gohugoio/hugo/hugofs/glob"
"github.com/spf13/cast"
)
@@ -54,16 +55,33 @@ func (r Resources) ByType(typ any) Resources {
// Get locates the name given in Resources.
// The search is case insensitive.
func (r Resources) Get(name any) Resource {
+ if r == nil {
+ return nil
+ }
namestr, err := cast.ToStringE(name)
if err != nil {
panic(err)
}
namestr = strings.ToLower(namestr)
+
+ // First check the Name.
+ // Note that this can be modified by the user in the front matter,
+ // also, it does not contain any language code.
for _, resource := range r {
if strings.EqualFold(namestr, resource.Name()) {
return resource
}
}
+
+ // Finally, check the original name.
+ for _, resource := range r {
+ if nop, ok := resource.(NameOriginalProvider); ok {
+ if strings.EqualFold(namestr, nop.NameOriginal()) {
+ return resource
+ }
+ }
+ }
+
return nil
}
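The lookup order above — the user-facing Name first, then the original (language-coded) name of bundled resources — can be sketched standalone. The types below are illustrative, not Hugo's:

package main

import (
	"fmt"
	"strings"
)

type res struct{ name, nameOriginal string }

// get mirrors Resources.Get above: a case-insensitive match on Name first,
// falling back to the original name.
func get(rs []res, name string) *res {
	name = strings.ToLower(name)
	for i := range rs {
		if strings.EqualFold(name, rs[i].name) {
			return &rs[i]
		}
	}
	for i := range rs {
		if strings.EqualFold(name, rs[i].nameOriginal) {
			return &rs[i]
		}
	}
	return nil
}

func main() {
	rs := []res{{name: "sunset.jpg", nameOriginal: "sunset.en.jpg"}}
	fmt.Println(get(rs, "sunset.en.jpg") != nil) // true, via the original name
}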
@@ -75,13 +93,15 @@ func (r Resources) GetMatch(pattern any) Resource {
panic(err)
}
+ patternstr = paths.NormalizePathStringBasic(patternstr)
+
g, err := glob.GetGlob(patternstr)
if err != nil {
panic(err)
}
for _, resource := range r {
- if g.Match(strings.ToLower(resource.Name())) {
+ if g.Match(paths.NormalizePathStringBasic(resource.Name())) {
return resource
}
}
@@ -163,7 +183,6 @@ type Source interface {
// Note that GetRemote (as found in resources.GetRemote) is
// not covered by this interface, as this is only available as a global template function.
type ResourceFinder interface {
-
// Get locates the Resource with the given name in the current context (e.g. in .Page.Resources).
//
 // It returns nil if no Resource could be found; it panics if the name is invalid.
diff --git a/resources/resource/resourcetypes.go b/resources/resource/resourcetypes.go
index 9e550e252..43d0aa786 100644
--- a/resources/resource/resourcetypes.go
+++ b/resources/resource/resourcetypes.go
@@ -76,7 +76,7 @@ type Resource interface {
ResourceTypeProvider
MediaTypeProvider
ResourceLinksProvider
- ResourceMetaProvider
+ ResourceNameTitleProvider
ResourceParamsProvider
ResourceDataProvider
ErrProvider
@@ -107,19 +107,41 @@ type ResourceLinksProvider interface {
RelPermalink() string
}
+// ResourceMetaProvider provides metadata about a resource.
type ResourceMetaProvider interface {
+ ResourceNameTitleProvider
+ ResourceParamsProvider
+}
+
+type WithResourceMetaProvider interface {
+ // WithResourceMeta creates a new Resource with the given metadata.
+ // For internal use.
+ WithResourceMeta(ResourceMetaProvider) Resource
+}
+
+type ResourceNameTitleProvider interface {
// Name is the logical name of this resource. This can be set in the front matter
// metadata for this resource. If not set, Hugo will assign a value.
// This will in most cases be the base filename.
// So, for the image "/some/path/sunset.jpg" this will be "sunset.jpg".
// The value returned by this method will be used in the GetByPrefix and ByPrefix methods
// on Resources.
+ // Note that for bundled content resources with language code in the filename, this will
+ // be the name without the language code.
Name() string
// Title returns the title if set in front matter. For content pages, this will be the expected value.
Title() string
}
+type NameOriginalProvider interface {
+ // NameOriginal is the original name of this resource.
+ // Note that for bundled content resources with language code in the filename, this will
+ // be the name with the language code.
+ // For internal use (for now).
+ NameOriginal() string
+}
+
type ResourceParamsProvider interface {
// Params set in front matter for this resource.
Params() maps.Params
@@ -146,6 +168,17 @@ type Identifier interface {
Key() string
}
+// WeightProvider provides a weight.
+type WeightProvider interface {
+ Weight() int
+}
+
+// Weight0Provider provides a weight that's considered before the WeightProvider in sorting.
+// This allows the weight set on a given term to win.
+type Weight0Provider interface {
+ Weight0() int
+}
+
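A minimal sketch of the documented precedence, where a Weight0 value, when set, is consulted before the regular weight in sorting. The types and the "0 means unset" convention below are assumptions for illustration only:

package main

import (
	"fmt"
	"sort"
)

type weighted struct {
	weight0 int // considered first; 0 means "unset" in this sketch
	weight  int
}

// less sorts by Weight0 when set, falling back to Weight.
func less(a, b weighted) bool {
	wa, wb := a.weight0, b.weight0
	if wa == 0 {
		wa = a.weight
	}
	if wb == 0 {
		wb = b.weight
	}
	return wa < wb
}

func main() {
	ws := []weighted{{weight: 10}, {weight0: 1, weight: 100}}
	sort.Slice(ws, func(i, j int) bool { return less(ws[i], ws[j]) })
	fmt.Println(ws[0].weight) // 100: its Weight0 of 1 wins over the other's weight of 10
}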
// ContentResource represents a Resource that provides a way to get to its content.
// Most Resource types in Hugo implements this interface, including Page.
type ContentResource interface {
@@ -166,10 +199,6 @@ type ContentProvider interface {
Content(context.Context) (any, error)
}
-// OpenReadSeekCloser allows setting some other way (than reading from a filesystem)
-// to open or create a ReadSeekCloser.
-type OpenReadSeekCloser func() (hugio.ReadSeekCloser, error)
-
// ReadSeekCloserResource is a Resource that supports loading its content.
type ReadSeekCloserResource interface {
MediaType() media.Type
@@ -192,6 +221,41 @@ type TranslationKeyProvider interface {
TranslationKey() string
}
+// Staler controls stale state of a Resource. A stale resource should be discarded.
+type Staler interface {
+ StaleMarker
+ StaleInfo
+}
+
+// StaleMarker marks a Resource as stale.
+type StaleMarker interface {
+ MarkStale()
+}
+
+// StaleInfo tells if a resource is marked as stale.
+type StaleInfo interface {
+ IsStale() bool
+}
+
+// IsStaleAny reports whether any of the given values is marked as stale.
+func IsStaleAny(os ...any) bool {
+ for _, o := range os {
+ if s, ok := o.(StaleInfo); ok && s.IsStale() {
+ return true
+ }
+ }
+ return false
+}
+
+// MarkStale marks any of the given values as stale, if possible.
+func MarkStale(os ...any) {
+ for _, o := range os {
+ if s, ok := o.(Staler); ok {
+ s.MarkStale()
+ }
+ }
+}
+
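A brief usage sketch for the stale helpers above, with a toy Staler implementation; only the package helpers' shapes are taken from the diff:

package main

import "fmt"

// flag is a toy Staler: it can be marked and queried.
type flag struct{ marked bool }

func (s *flag) MarkStale()    { s.marked = true }
func (s *flag) IsStale() bool { return s.marked }

func isStaleAny(os ...any) bool {
	for _, o := range os {
		if s, ok := o.(interface{ IsStale() bool }); ok && s.IsStale() {
			return true
		}
	}
	return false
}

func markStale(os ...any) {
	for _, o := range os {
		if s, ok := o.(interface{ MarkStale() }); ok {
			s.MarkStale()
		}
	}
}

func main() {
	a, b := &flag{}, &flag{}
	fmt.Println(isStaleAny(a, b, "not a staler")) // false
	markStale(a)
	fmt.Println(isStaleAny(a, b)) // true
}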
// UnmarshableResource represents a Resource that can be unmarshaled to some other format.
type UnmarshableResource interface {
ReadSeekCloserResource
diff --git a/resources/resource_cache.go b/resources/resource_cache.go
index 388e293e8..a76a51b1c 100644
--- a/resources/resource_cache.go
+++ b/resources/resource_cache.go
@@ -14,182 +14,69 @@
package resources
import (
+ "context"
"encoding/json"
"io"
"path"
"path/filepath"
- "regexp"
"strings"
"sync"
- "github.com/gohugoio/hugo/helpers"
-
- hglob "github.com/gohugoio/hugo/hugofs/glob"
-
"github.com/gohugoio/hugo/resources/resource"
+ "github.com/gohugoio/hugo/cache/dynacache"
"github.com/gohugoio/hugo/cache/filecache"
-
- "github.com/BurntSushi/locker"
)
-const (
- CACHE_CLEAR_ALL = "clear_all"
- CACHE_OTHER = "other"
-)
+func newResourceCache(rs *Spec, memCache *dynacache.Cache) *ResourceCache {
+ return &ResourceCache{
+ fileCache: rs.FileCaches.AssetsCache(),
+ cacheResource: dynacache.GetOrCreatePartition[string, resource.Resource](
+ memCache,
+ "/res1",
+ dynacache.OptionsPartition{ClearWhen: dynacache.ClearOnChange, Weight: 40},
+ ),
+ cacheResources: dynacache.GetOrCreatePartition[string, resource.Resources](
+ memCache,
+ "/ress",
+ dynacache.OptionsPartition{ClearWhen: dynacache.ClearOnChange, Weight: 40},
+ ),
+ cacheResourceTransformation: dynacache.GetOrCreatePartition[string, *resourceAdapterInner](
+ memCache,
+ "/res1/tra",
+ dynacache.OptionsPartition{ClearWhen: dynacache.ClearOnChange, Weight: 40},
+ ),
+ }
+}
type ResourceCache struct {
sync.RWMutex
- // Either resource.Resource or resource.Resources.
- cache map[string]any
+ cacheResource *dynacache.Partition[string, resource.Resource]
+ cacheResources *dynacache.Partition[string, resource.Resources]
+ cacheResourceTransformation *dynacache.Partition[string, *resourceAdapterInner]
fileCache *filecache.Cache
-
- // Provides named resource locks.
- nlocker *locker.Locker
-}
-
-// ResourceCacheKey converts the filename into the format used in the resource
-// cache.
-func ResourceCacheKey(filename string) string {
- filename = filepath.ToSlash(filename)
- return path.Join(resourceKeyPartition(filename), filename)
-}
-
-func resourceKeyPartition(filename string) string {
- ext := strings.TrimPrefix(path.Ext(filepath.ToSlash(filename)), ".")
- if ext == "" {
- ext = CACHE_OTHER
- }
- return ext
-}
-
-// Commonly used aliases and directory names used for some types.
-var extAliasKeywords = map[string][]string{
- "sass": {"scss"},
- "scss": {"sass"},
-}
-
-// ResourceKeyPartitions resolves a ordered slice of partitions that is
-// used to do resource cache invalidations.
-//
-// We use the first directory path element and the extension, so:
-//
-// a/b.json => "a", "json"
-// b.json => "json"
-//
-// For some of the extensions we will also map to closely related types,
-// e.g. "scss" will also return "sass".
-func ResourceKeyPartitions(filename string) []string {
- var partitions []string
- filename = hglob.NormalizePath(filename)
- dir, name := path.Split(filename)
- ext := strings.TrimPrefix(path.Ext(filepath.ToSlash(name)), ".")
-
- if dir != "" {
- partitions = append(partitions, strings.Split(dir, "/")[0])
- }
-
- if ext != "" {
- partitions = append(partitions, ext)
- }
-
- if aliases, found := extAliasKeywords[ext]; found {
- partitions = append(partitions, aliases...)
- }
-
- if len(partitions) == 0 {
- partitions = []string{CACHE_OTHER}
- }
-
- return helpers.UniqueStringsSorted(partitions)
-}
-
-// ResourceKeyContainsAny returns whether the key is a member of any of the
-// given partitions.
-//
-// This is used for resource cache invalidation.
-func ResourceKeyContainsAny(key string, partitions []string) bool {
- parts := strings.Split(key, "/")
- for _, p1 := range partitions {
- for _, p2 := range parts {
- if p1 == p2 {
- return true
- }
- }
- }
- return false
-}
-
-func (c *ResourceCache) clear() {
- c.Lock()
- defer c.Unlock()
-
- c.cache = make(map[string]any)
- c.nlocker = locker.NewLocker()
-}
-
-func (c *ResourceCache) Contains(key string) bool {
- key = c.cleanKey(filepath.ToSlash(key))
- _, found := c.get(key)
- return found
}
func (c *ResourceCache) cleanKey(key string) string {
- return strings.TrimPrefix(path.Clean(strings.ToLower(key)), "/")
+ return strings.TrimPrefix(path.Clean(strings.ToLower(filepath.ToSlash(key))), "/")
}
-func (c *ResourceCache) get(key string) (any, bool) {
- c.RLock()
- defer c.RUnlock()
- r, found := c.cache[key]
- return r, found
+func (c *ResourceCache) Get(ctx context.Context, key string) (resource.Resource, bool) {
+ return c.cacheResource.Get(ctx, key)
}
func (c *ResourceCache) GetOrCreate(key string, f func() (resource.Resource, error)) (resource.Resource, error) {
- r, err := c.getOrCreate(key, func() (any, error) { return f() })
- if r == nil || err != nil {
- return nil, err
- }
- return r.(resource.Resource), nil
+ return c.cacheResource.GetOrCreate(key, func(key string) (resource.Resource, error) {
+ return f()
+ })
}
func (c *ResourceCache) GetOrCreateResources(key string, f func() (resource.Resources, error)) (resource.Resources, error) {
- r, err := c.getOrCreate(key, func() (any, error) { return f() })
- if r == nil || err != nil {
- return nil, err
- }
- return r.(resource.Resources), nil
-}
-
-func (c *ResourceCache) getOrCreate(key string, f func() (any, error)) (any, error) {
- key = c.cleanKey(key)
- // First check in-memory cache.
- r, found := c.get(key)
- if found {
- return r, nil
- }
- // This is a potentially long running operation, so get a named lock.
- c.nlocker.Lock(key)
-
- // Double check in-memory cache.
- r, found = c.get(key)
- if found {
- c.nlocker.Unlock(key)
- return r, nil
- }
-
- defer c.nlocker.Unlock(key)
-
- r, err := f()
- if err != nil {
- return nil, err
- }
-
- c.set(key, r)
-
- return r, nil
+ return c.cacheResources.GetOrCreate(key, func(key string) (resource.Resources, error) {
+ return f()
+ })
}
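Callers use these accessors in a get-or-build style: the create function runs only on a cache miss. A generic sketch of that pattern under a plain mutex follows; dynacache itself does considerably more (weights, eviction, clear-on-change), so this is only the caller-facing shape:

package main

import (
	"fmt"
	"sync"
)

// memo is a tiny get-or-create cache in the spirit of the partitions above.
type memo[K comparable, V any] struct {
	mu sync.Mutex
	m  map[K]V
}

func (c *memo[K, V]) getOrCreate(key K, create func(K) (V, error)) (V, error) {
	c.mu.Lock()
	defer c.mu.Unlock()
	if c.m == nil {
		c.m = map[K]V{}
	}
	if v, ok := c.m[key]; ok {
		return v, nil
	}
	v, err := create(key)
	if err != nil {
		var zero V
		return zero, err
	}
	c.m[key] = v
	return v, nil
}

func main() {
	c := &memo[string, string]{}
	v, _ := c.getOrCreate("/res1/a.css", func(string) (string, error) { return "built once", nil })
	fmt.Println(v)
}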
func (c *ResourceCache) getFilenames(key string) (string, string) {
@@ -242,64 +129,3 @@ func (c *ResourceCache) writeMeta(key string, meta transformedResourceMetadata)
return fi, fc, err
}
-
-func (c *ResourceCache) set(key string, r any) {
- c.Lock()
- defer c.Unlock()
- c.cache[key] = r
-}
-
-func (c *ResourceCache) DeletePartitions(partitions ...string) {
- partitionsSet := map[string]bool{
- // Always clear out the resources not matching any partition.
- "other": true,
- }
- for _, p := range partitions {
- partitionsSet[p] = true
- }
-
- if partitionsSet[CACHE_CLEAR_ALL] {
- c.clear()
- return
- }
-
- c.Lock()
- defer c.Unlock()
-
- for k := range c.cache {
- clear := false
- for p := range partitionsSet {
- if strings.Contains(k, p) {
- // There will be some false positive, but that's fine.
- clear = true
- break
- }
- }
-
- if clear {
- delete(c.cache, k)
- }
- }
-}
-
-func (c *ResourceCache) DeleteMatchesRe(re *regexp.Regexp) {
- c.Lock()
- defer c.Unlock()
-
- for k := range c.cache {
- if re.MatchString(k) {
- delete(c.cache, k)
- }
- }
-}
-
-func (c *ResourceCache) DeleteMatches(match func(string) bool) {
- c.Lock()
- defer c.Unlock()
-
- for k := range c.cache {
- if match(k) {
- delete(c.cache, k)
- }
- }
-}
diff --git a/resources/resource_cache_test.go b/resources/resource_cache_test.go
deleted file mode 100644
index bcb241025..000000000
--- a/resources/resource_cache_test.go
+++ /dev/null
@@ -1,58 +0,0 @@
-// Copyright 2019 The Hugo Authors. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package resources
-
-import (
- "path/filepath"
- "testing"
-
- qt "github.com/frankban/quicktest"
-)
-
-func TestResourceKeyPartitions(t *testing.T) {
- c := qt.New(t)
-
- for _, test := range []struct {
- input string
- expected []string
- }{
- {"a.js", []string{"js"}},
- {"a.scss", []string{"sass", "scss"}},
- {"a.sass", []string{"sass", "scss"}},
- {"d/a.js", []string{"d", "js"}},
- {"js/a.js", []string{"js"}},
- {"D/a.JS", []string{"d", "js"}},
- {"d/a", []string{"d"}},
- {filepath.FromSlash("/d/a.js"), []string{"d", "js"}},
- {filepath.FromSlash("/d/e/a.js"), []string{"d", "js"}},
- } {
- c.Assert(ResourceKeyPartitions(test.input), qt.DeepEquals, test.expected, qt.Commentf(test.input))
- }
-}
-
-func TestResourceKeyContainsAny(t *testing.T) {
- c := qt.New(t)
-
- for _, test := range []struct {
- key string
- filename string
- expected bool
- }{
- {"styles/css", "asdf.css", true},
- {"styles/css", "styles/asdf.scss", true},
- {"js/foo.bar", "asdf.css", false},
- } {
- c.Assert(ResourceKeyContainsAny(test.key, ResourceKeyPartitions(test.filename)), qt.Equals, test.expected)
- }
-}
diff --git a/resources/resource_factories/bundler/bundler.go b/resources/resource_factories/bundler/bundler.go
index 67f1f90fa..c255da601 100644
--- a/resources/resource_factories/bundler/bundler.go
+++ b/resources/resource_factories/bundler/bundler.go
@@ -18,7 +18,6 @@ import (
"fmt"
"io"
"path"
- "path/filepath"
"github.com/gohugoio/hugo/common/hugio"
"github.com/gohugoio/hugo/media"
@@ -81,8 +80,8 @@ func (r *multiReadSeekCloser) Close() error {
// Concat concatenates the list of Resource objects.
func (c *Client) Concat(targetPath string, r resource.Resources) (resource.Resource, error) {
- // The CACHE_OTHER will make sure this will be re-created and published on rebuilds.
- return c.rs.ResourceCache.GetOrCreate(path.Join(resources.CACHE_OTHER, targetPath), func() (resource.Resource, error) {
+ targetPath = path.Clean(targetPath)
+ return c.rs.ResourceCache.GetOrCreate(targetPath, func() (resource.Resource, error) {
var resolvedm media.Type
// The given set of resources must be of the same Media Type.
@@ -132,12 +131,11 @@ func (c *Client) Concat(targetPath string, r resource.Resources) (resource.Resou
return newMultiReadSeekCloser(rcsources...), nil
}
- composite, err := c.rs.New(
+ composite, err := c.rs.NewResource(
resources.ResourceSourceDescriptor{
- Fs: c.rs.FileCaches.AssetsCache().Fs,
LazyPublish: true,
OpenReadSeekCloser: concatr,
- RelTargetFilename: filepath.Clean(targetPath),
+ TargetPath: targetPath,
})
if err != nil {
return nil, err
diff --git a/resources/resource_factories/create/create.go b/resources/resource_factories/create/create.go
index 2e4721299..e98eb7425 100644
--- a/resources/resource_factories/create/create.go
+++ b/resources/resource_factories/create/create.go
@@ -17,15 +17,19 @@ package create
import (
"net/http"
+ "os"
"path"
"path/filepath"
"strings"
"time"
+ "github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/hugofs/glob"
+ "github.com/gohugoio/hugo/identity"
"github.com/gohugoio/hugo/hugofs"
+ "github.com/gohugoio/hugo/cache/dynacache"
"github.com/gohugoio/hugo/cache/filecache"
"github.com/gohugoio/hugo/common/hugio"
"github.com/gohugoio/hugo/resources"
@@ -53,19 +57,44 @@ func New(rs *resources.Spec) *Client {
// Copy copies r to the new targetPath.
func (c *Client) Copy(r resource.Resource, targetPath string) (resource.Resource, error) {
- return c.rs.ResourceCache.GetOrCreate(resources.ResourceCacheKey(targetPath), func() (resource.Resource, error) {
+ key := dynacache.CleanKey(targetPath)
+ return c.rs.ResourceCache.GetOrCreate(key, func() (resource.Resource, error) {
return resources.Copy(r, targetPath), nil
})
}
-// Get creates a new Resource by opening the given filename in the assets filesystem.
-func (c *Client) Get(filename string) (resource.Resource, error) {
- filename = filepath.Clean(filename)
- return c.rs.ResourceCache.GetOrCreate(resources.ResourceCacheKey(filename), func() (resource.Resource, error) {
- return c.rs.New(resources.ResourceSourceDescriptor{
- Fs: c.rs.BaseFs.Assets.Fs,
- LazyPublish: true,
- SourceFilename: filename,
+func (c *Client) newDependencyManager() identity.Manager {
+ if c.rs.Cfg.Running() {
+ return identity.NewManager("resources")
+ }
+ return identity.NopManager
+}
+
+// Get creates a new Resource by opening the given pathname in the assets filesystem.
+func (c *Client) Get(pathname string) (resource.Resource, error) {
+ pathname = path.Clean(pathname)
+ key := dynacache.CleanKey(pathname)
+
+ return c.rs.ResourceCache.GetOrCreate(key, func() (resource.Resource, error) {
+ // The resource file will not be read before it gets used (e.g. in .Content),
+ // so we need to check that the file exists here.
+ filename := filepath.FromSlash(pathname)
+ if _, err := c.rs.BaseFs.Assets.Fs.Stat(filename); err != nil {
+ if os.IsNotExist(err) {
+ return nil, nil
+ }
+ // A real error.
+ return nil, err
+ }
+
+ return c.rs.NewResource(resources.ResourceSourceDescriptor{
+ LazyPublish: true,
+ OpenReadSeekCloser: func() (hugio.ReadSeekCloser, error) {
+ return c.rs.BaseFs.Assets.Fs.Open(filename)
+ },
+ GroupIdentity: identity.StringIdentity(key),
+ DependencyManager: c.newDependencyManager(),
+ TargetPath: pathname,
})
})
}
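The stat-before-use guard above exists because the open callback is deferred until the resource is actually read. A minimal sketch of that shape against a plain OS filesystem (the names are illustrative, not Hugo API):

package main

import (
	"fmt"
	"io"
	"os"
)

// getAsset returns a deferred opener for pathname, or nil if the file does not
// exist, mirroring the existence check in Client.Get above.
func getAsset(pathname string) (func() (io.ReadCloser, error), error) {
	if _, err := os.Stat(pathname); err != nil {
		if os.IsNotExist(err) {
			return nil, nil // a missing asset is not an error
		}
		return nil, err
	}
	return func() (io.ReadCloser, error) { return os.Open(pathname) }, nil
}

func main() {
	open, err := getAsset("does-not-exist.css")
	fmt.Println(open == nil, err == nil) // true true
}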
@@ -95,9 +124,6 @@ func (c *Client) GetMatch(pattern string) (resource.Resource, error) {
func (c *Client) match(name, pattern string, matchFunc func(r resource.Resource) bool, firstOnly bool) (resource.Resources, error) {
pattern = glob.NormalizePath(pattern)
partitions := glob.FilterGlobParts(strings.Split(pattern, "/"))
- if len(partitions) == 0 {
- partitions = []string{resources.CACHE_OTHER}
- }
key := path.Join(name, path.Join(partitions...))
key = path.Join(key, pattern)
@@ -106,13 +132,13 @@ func (c *Client) match(name, pattern string, matchFunc func(r resource.Resource)
handle := func(info hugofs.FileMetaInfo) (bool, error) {
meta := info.Meta()
- r, err := c.rs.New(resources.ResourceSourceDescriptor{
+ r, err := c.rs.NewResource(resources.ResourceSourceDescriptor{
LazyPublish: true,
- FileInfo: info,
OpenReadSeekCloser: func() (hugio.ReadSeekCloser, error) {
return meta.Open()
},
- RelTargetFilename: meta.Path,
+ GroupIdentity: meta.PathInfo,
+ TargetPath: meta.PathInfo.PathNoLang(),
})
if err != nil {
return true, err
@@ -138,15 +164,19 @@ func (c *Client) match(name, pattern string, matchFunc func(r resource.Resource)
// FromString creates a new Resource from a string with the given relative target path.
// TODO(bep) see #10912; we currently emit a warning for this config scenario.
func (c *Client) FromString(targetPath, content string) (resource.Resource, error) {
- return c.rs.ResourceCache.GetOrCreate(path.Join(resources.CACHE_OTHER, targetPath), func() (resource.Resource, error) {
- return c.rs.New(
+ targetPath = path.Clean(targetPath)
+ key := dynacache.CleanKey(targetPath) + helpers.MD5String(content)
+ r, err := c.rs.ResourceCache.GetOrCreate(key, func() (resource.Resource, error) {
+ return c.rs.NewResource(
resources.ResourceSourceDescriptor{
- Fs: c.rs.FileCaches.AssetsCache().Fs,
- LazyPublish: true,
+ LazyPublish: true,
+			GroupIdentity: identity.Anonymous, // All usage of this resource is tracked via its string content.
OpenReadSeekCloser: func() (hugio.ReadSeekCloser, error) {
return hugio.NewReadSeekerNoOpCloserFromString(content), nil
},
- RelTargetFilename: filepath.Clean(targetPath),
+ TargetPath: targetPath,
})
})
+
+ return r, err
}
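The cache key above mixes the cleaned target path with an MD5 of the content, so two FromString calls with the same path but different content do not collide. A sketch of that key construction with the standard library; helpers.MD5String and dynacache.CleanKey are assumed to be a plain MD5 hex helper and a path cleaner, respectively:

package main

import (
	"crypto/md5"
	"encoding/hex"
	"fmt"
	"path"
)

// fromStringKey sketches the key used above: cleaned target path + content hash.
func fromStringKey(targetPath, content string) string {
	sum := md5.Sum([]byte(content))
	return path.Clean("/"+targetPath) + hex.EncodeToString(sum[:])
}

func main() {
	fmt.Println(fromStringKey("a/b.txt", "v1") != fromStringKey("a/b.txt", "v2")) // true
}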
diff --git a/resources/resource_factories/create/integration_test.go b/resources/resource_factories/create/integration_test.go
index 140c5d091..61bc17adb 100644
--- a/resources/resource_factories/create/integration_test.go
+++ b/resources/resource_factories/create/integration_test.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -25,7 +25,6 @@ import (
)
func TestGetRemoteHead(t *testing.T) {
-
files := `
-- config.toml --
[security]
@@ -60,7 +59,6 @@ func TestGetRemoteHead(t *testing.T) {
"Head Content: .",
"Head Data: map[ContentLength:18210 ContentType:image/png Status:200 OK StatusCode:200 TransferEncoding:[]]",
)
-
}
func TestGetRemoteRetry(t *testing.T) {
@@ -133,14 +131,11 @@ mediaTypes = ['text/plain']
TxtarString: files,
},
).BuildE()
-
// This is hard to get stable on GitHub Actions, it sometimes succeeds due to timing issues.
if err != nil {
b.AssertLogContains("Got Err")
b.AssertLogContains("Retry timeout")
b.AssertLogContains("ContentLength:0")
}
-
})
-
}
diff --git a/resources/resource_factories/create/remote.go b/resources/resource_factories/create/remote.go
index d1fd2481d..c2d17e7a5 100644
--- a/resources/resource_factories/create/remote.go
+++ b/resources/resource_factories/create/remote.go
@@ -24,7 +24,6 @@ import (
"net/http/httputil"
"net/url"
"path"
- "path/filepath"
"strings"
"time"
@@ -253,15 +252,16 @@ func (c *Client) FromRemote(uri string, optionsm map[string]any) (resource.Resou
resourceID = filename[:len(filename)-len(path.Ext(filename))] + "_" + resourceID + mediaType.FirstSuffix.FullSuffix
data := responseToData(res, false)
- return c.rs.New(
+ return c.rs.NewResource(
resources.ResourceSourceDescriptor{
- MediaType: mediaType,
- Data: data,
- LazyPublish: true,
+ MediaType: mediaType,
+ Data: data,
+ GroupIdentity: identity.StringIdentity(resourceID),
+ LazyPublish: true,
OpenReadSeekCloser: func() (hugio.ReadSeekCloser, error) {
return hugio.NewReadSeekerNoOpCloser(bytes.NewReader(body)), nil
},
- RelTargetFilename: filepath.Clean(resourceID),
+ TargetPath: resourceID,
})
}
diff --git a/resources/resource_metadata.go b/resources/resource_metadata.go
index 8954a5109..869fc11bf 100644
--- a/resources/resource_metadata.go
+++ b/resources/resource_metadata.go
@@ -28,111 +28,161 @@ import (
)
var (
- _ metaAssigner = (*genericResource)(nil)
- _ metaAssigner = (*imageResource)(nil)
- _ metaAssignerProvider = (*resourceAdapter)(nil)
+ _ mediaTypeAssigner = (*genericResource)(nil)
+ _ mediaTypeAssigner = (*imageResource)(nil)
+ _ resource.Staler = (*genericResource)(nil)
+ _ resource.NameOriginalProvider = (*genericResource)(nil)
)
-type metaAssignerProvider interface {
- getMetaAssigner() metaAssigner
-}
-
 // metaAssigner allows updating metadata in resources that support it.
type metaAssigner interface {
setTitle(title string)
setName(name string)
- setMediaType(mediaType media.Type)
updateParams(params map[string]any)
}
+// mediaTypeAssigner allows updating the media type in resources that support it.
+type mediaTypeAssigner interface {
+ setMediaType(mediaType media.Type)
+}
+
const counterPlaceHolder = ":counter"
+var _ metaAssigner = (*metaResource)(nil)
+
+// metaResource is a resource with metadata that can be updated.
+type metaResource struct {
+ changed bool
+ title string
+ name string
+ params maps.Params
+}
+
+func (r *metaResource) Name() string {
+ return r.name
+}
+
+func (r *metaResource) Title() string {
+ return r.title
+}
+
+func (r *metaResource) Params() maps.Params {
+ return r.params
+}
+
+func (r *metaResource) setTitle(title string) {
+ r.title = title
+ r.changed = true
+}
+
+func (r *metaResource) setName(name string) {
+ r.name = name
+ r.changed = true
+}
+
+func (r *metaResource) updateParams(params map[string]any) {
+ if r.params == nil {
+ r.params = make(map[string]interface{})
+ }
+ for k, v := range params {
+ r.params[k] = v
+ }
+ r.changed = true
+}
+
+func CloneWithMetadataIfNeeded(m []map[string]any, r resource.Resource) resource.Resource {
+ wmp, ok := r.(resource.WithResourceMetaProvider)
+ if !ok {
+ return r
+ }
+
+ wrapped := &metaResource{
+ name: r.Name(),
+ title: r.Title(),
+ params: r.Params(),
+ }
+
+ assignMetadata(m, wrapped)
+ if !wrapped.changed {
+ return r
+ }
+
+ return wmp.WithResourceMeta(wrapped)
+}
+
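The wrapper above only clones when a metadata rule actually changed something, otherwise the original resource is returned untouched. A compact sketch of that copy-on-write decision with stand-in types:

package main

import "fmt"

type meta struct {
	name, title string
	changed     bool
}

func (m *meta) setTitle(t string) { m.title = t; m.changed = true }

// withMetaIfNeeded returns the original metadata unless a rule changed it,
// mirroring CloneWithMetadataIfNeeded above.
func withMetaIfNeeded(orig meta, apply func(*meta)) meta {
	wrapped := orig
	apply(&wrapped)
	if !wrapped.changed {
		return orig
	}
	return wrapped
}

func main() {
	orig := meta{name: "sunset.jpg", title: "sunset.jpg"}
	out := withMetaIfNeeded(orig, func(m *meta) { m.setTitle("Sunset") })
	fmt.Println(out.title) // Sunset
}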
 // AssignMetadata assigns the given metadata to those resources that support updates
// and matching by wildcard given in `src` using `filepath.Match` with lower cased values.
// This assignment is additive, but the most specific match needs to be first.
 // The `name` and `title` metadata fields support a `:counter` placeholder that is scoped to the shell-matched collection the resource got a match in.
// See https://golang.org/pkg/path/#Match
-func AssignMetadata(metadata []map[string]any, resources ...resource.Resource) error {
+func assignMetadata(metadata []map[string]any, ma *metaResource) error {
counters := make(map[string]int)
- for _, r := range resources {
- var ma metaAssigner
- mp, ok := r.(metaAssignerProvider)
- if ok {
- ma = mp.getMetaAssigner()
- } else {
- ma, ok = r.(metaAssigner)
- if !ok {
- continue
- }
+ var (
+ nameSet, titleSet bool
+ nameCounter, titleCounter = 0, 0
+ nameCounterFound, titleCounterFound bool
+ resourceSrcKey = strings.ToLower(ma.Name())
+ )
+
+ for _, meta := range metadata {
+ src, found := meta["src"]
+ if !found {
+ return fmt.Errorf("missing 'src' in metadata for resource")
}
- var (
- nameSet, titleSet bool
- nameCounter, titleCounter = 0, 0
- nameCounterFound, titleCounterFound bool
- resourceSrcKey = strings.ToLower(r.Name())
- )
-
- for _, meta := range metadata {
- src, found := meta["src"]
- if !found {
- return fmt.Errorf("missing 'src' in metadata for resource")
- }
+ srcKey := strings.ToLower(cast.ToString(src))
- srcKey := strings.ToLower(cast.ToString(src))
+ glob, err := glob.GetGlob(srcKey)
+ if err != nil {
+ return fmt.Errorf("failed to match resource with metadata: %w", err)
+ }
- glob, err := glob.GetGlob(srcKey)
- if err != nil {
- return fmt.Errorf("failed to match resource with metadata: %w", err)
- }
+ match := glob.Match(resourceSrcKey)
- match := glob.Match(resourceSrcKey)
-
- if match {
- if !nameSet {
- name, found := meta["name"]
- if found {
- name := cast.ToString(name)
- if !nameCounterFound {
- nameCounterFound = strings.Contains(name, counterPlaceHolder)
- }
- if nameCounterFound && nameCounter == 0 {
- counterKey := "name_" + srcKey
- nameCounter = counters[counterKey] + 1
- counters[counterKey] = nameCounter
- }
-
- ma.setName(replaceResourcePlaceholders(name, nameCounter))
- nameSet = true
+ if match {
+ if !nameSet {
+ name, found := meta["name"]
+ if found {
+ name := cast.ToString(name)
+ if !nameCounterFound {
+ nameCounterFound = strings.Contains(name, counterPlaceHolder)
}
- }
-
- if !titleSet {
- title, found := meta["title"]
- if found {
- title := cast.ToString(title)
- if !titleCounterFound {
- titleCounterFound = strings.Contains(title, counterPlaceHolder)
- }
- if titleCounterFound && titleCounter == 0 {
- counterKey := "title_" + srcKey
- titleCounter = counters[counterKey] + 1
- counters[counterKey] = titleCounter
- }
- ma.setTitle((replaceResourcePlaceholders(title, titleCounter)))
- titleSet = true
+ if nameCounterFound && nameCounter == 0 {
+ counterKey := "name_" + srcKey
+ nameCounter = counters[counterKey] + 1
+ counters[counterKey] = nameCounter
}
+
+ ma.setName(replaceResourcePlaceholders(name, nameCounter))
+ nameSet = true
}
+ }
- params, found := meta["params"]
+ if !titleSet {
+ title, found := meta["title"]
if found {
- m := maps.ToStringMap(params)
- // Needed for case insensitive fetching of params values
- maps.PrepareParams(m)
- ma.updateParams(m)
+ title := cast.ToString(title)
+ if !titleCounterFound {
+ titleCounterFound = strings.Contains(title, counterPlaceHolder)
+ }
+ if titleCounterFound && titleCounter == 0 {
+ counterKey := "title_" + srcKey
+ titleCounter = counters[counterKey] + 1
+ counters[counterKey] = titleCounter
+ }
+ ma.setTitle((replaceResourcePlaceholders(title, titleCounter)))
+ titleSet = true
}
}
+
+ params, found := meta["params"]
+ if found {
+ m := maps.ToStringMap(params)
+ // Needed for case insensitive fetching of params values
+ maps.PrepareParams(m)
+ ma.updateParams(m)
+ }
}
}
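The per-src counters above drive the :counter placeholder. A minimal sketch of that bookkeeping — a counter keyed per metadata src pattern, substituted into the configured name — assuming a simple string replacement for the placeholder:

package main

import (
	"fmt"
	"strconv"
	"strings"
)

// nextCounter increments and returns the counter for a given metadata src key,
// mirroring the counters map keyed as "name_"+srcKey in assignMetadata above.
func nextCounter(counters map[string]int, prefix, srcKey string) int {
	key := prefix + "_" + srcKey
	counters[key]++
	return counters[key]
}

func main() {
	counters := map[string]int{}
	for range []string{"a.jpg", "b.jpg"} {
		n := nextCounter(counters, "name", "*.jpg")
		fmt.Println(strings.ReplaceAll("image-:counter", ":counter", strconv.Itoa(n)))
	}
	// Prints image-1, then image-2.
}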
diff --git a/resources/resource_spec.go b/resources/resource_spec.go
index 3e1b53205..66f56d147 100644
--- a/resources/resource_spec.go
+++ b/resources/resource_spec.go
@@ -14,54 +14,44 @@
package resources
import (
- "errors"
- "fmt"
- "mime"
- "os"
"path"
- "path/filepath"
- "strings"
"sync"
- "github.com/BurntSushi/locker"
"github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/config/allconfig"
"github.com/gohugoio/hugo/output"
+ "github.com/gohugoio/hugo/resources/internal"
"github.com/gohugoio/hugo/resources/jsconfig"
"github.com/gohugoio/hugo/common/herrors"
"github.com/gohugoio/hugo/common/hexec"
"github.com/gohugoio/hugo/common/loggers"
+ "github.com/gohugoio/hugo/common/paths"
"github.com/gohugoio/hugo/identity"
"github.com/gohugoio/hugo/helpers"
- "github.com/gohugoio/hugo/hugofs"
"github.com/gohugoio/hugo/resources/postpub"
+ "github.com/gohugoio/hugo/cache/dynacache"
"github.com/gohugoio/hugo/cache/filecache"
"github.com/gohugoio/hugo/media"
"github.com/gohugoio/hugo/resources/images"
"github.com/gohugoio/hugo/resources/page"
"github.com/gohugoio/hugo/resources/resource"
"github.com/gohugoio/hugo/tpl"
- "github.com/spf13/afero"
)
func NewSpec(
s *helpers.PathSpec,
common *SpecCommon, // may be nil
- imageCache *ImageCache, // may be nil
+ fileCaches filecache.Caches,
+ memCache *dynacache.Cache,
incr identity.Incrementer,
logger loggers.Logger,
errorHandler herrors.ErrorSender,
- execHelper *hexec.Exec) (*Spec, error) {
-
- fileCaches, err := filecache.NewCaches(s)
- if err != nil {
- return nil, fmt.Errorf("failed to create file caches from configuration: %w", err)
- }
-
+ execHelper *hexec.Exec,
+) (*Spec, error) {
conf := s.Cfg.GetConfig().(*allconfig.Config)
imgConfig := conf.Imaging
@@ -91,37 +81,28 @@ func NewSpec(
PostProcessResources: make(map[string]postpub.PostPublishedResource),
JSConfigBuilder: jsconfig.NewBuilder(),
},
- ResourceCache: &ResourceCache{
- fileCache: fileCaches.AssetsCache(),
- cache: make(map[string]any),
- nlocker: locker.NewLocker(),
- },
}
}
- if imageCache == nil {
- imageCache = newImageCache(
- fileCaches.ImageCache(),
- s,
- )
- } else {
- imageCache = imageCache.WithPathSpec(s)
-
- }
-
rs := &Spec{
PathSpec: s,
Logger: logger,
ErrorSender: errorHandler,
imaging: imaging,
- ImageCache: imageCache,
- ExecHelper: execHelper,
+ ImageCache: newImageCache(
+ fileCaches.ImageCache(),
+ memCache,
+ s,
+ ),
+ ExecHelper: execHelper,
Permalinks: permalinks,
SpecCommon: common,
}
+ rs.ResourceCache = newResourceCache(rs, memCache)
+
return rs, nil
}
@@ -162,221 +143,65 @@ type PostBuildAssets struct {
JSConfigBuilder *jsconfig.Builder
}
-func (r *Spec) New(fd ResourceSourceDescriptor) (resource.Resource, error) {
- return r.newResourceFor(fd)
-}
-
-func (r *Spec) MediaTypes() media.Types {
- return r.Cfg.GetConfigSection("mediaTypes").(media.Types)
-}
-
-func (r *Spec) OutputFormats() output.Formats {
- return r.Cfg.GetConfigSection("outputFormats").(output.Formats)
-}
-
-func (r *Spec) BuildConfig() config.BuildConfig {
- return r.Cfg.GetConfigSection("build").(config.BuildConfig)
-}
-
-func (r *Spec) CacheStats() string {
- r.ImageCache.mu.RLock()
- defer r.ImageCache.mu.RUnlock()
-
- s := fmt.Sprintf("Cache entries: %d", len(r.ImageCache.store))
-
- count := 0
- for k := range r.ImageCache.store {
- if count > 5 {
- break
- }
- s += "\n" + k
- count++
- }
-
- return s
-}
-
-func (r *Spec) ClearCaches() {
- r.ImageCache.clear()
- r.ResourceCache.clear()
-}
-
-func (r *Spec) DeleteBySubstring(s string) {
- r.ImageCache.deleteIfContains(s)
-}
-
-func (s *Spec) String() string {
- return "spec"
-}
-
-// TODO(bep) clean up below
-func (r *Spec) newGenericResource(sourceFs afero.Fs,
- targetPathBuilder func() page.TargetPaths,
- osFileInfo os.FileInfo,
- sourceFilename,
- baseFilename string,
- mediaType media.Type) *genericResource {
- return r.newGenericResourceWithBase(
- sourceFs,
- nil,
- nil,
- targetPathBuilder,
- osFileInfo,
- sourceFilename,
- baseFilename,
- mediaType,
- nil,
- )
-}
-
-func (r *Spec) newGenericResourceWithBase(
- sourceFs afero.Fs,
- openReadSeekerCloser resource.OpenReadSeekCloser,
- targetPathBaseDirs []string,
- targetPathBuilder func() page.TargetPaths,
- osFileInfo os.FileInfo,
- sourceFilename,
- baseFilename string,
- mediaType media.Type,
- data map[string]any,
-) *genericResource {
- if osFileInfo != nil && osFileInfo.IsDir() {
- panic(fmt.Sprintf("dirs not supported resource types: %v", osFileInfo))
- }
-
- // This value is used both to construct URLs and file paths, but start
- // with a Unix-styled path.
- baseFilename = helpers.ToSlashTrimLeading(baseFilename)
- fpath, fname := path.Split(baseFilename)
-
- resourceType := mediaType.MainType
-
- pathDescriptor := &resourcePathDescriptor{
- baseTargetPathDirs: helpers.UniqueStringsReuse(targetPathBaseDirs),
- targetPathBuilder: targetPathBuilder,
- relTargetDirFile: dirFile{dir: fpath, file: fname},
- }
-
- var fim hugofs.FileMetaInfo
- if osFileInfo != nil {
- fim = osFileInfo.(hugofs.FileMetaInfo)
- }
-
- gfi := &resourceFileInfo{
- fi: fim,
- openReadSeekerCloser: openReadSeekerCloser,
- sourceFs: sourceFs,
- sourceFilename: sourceFilename,
- h: &resourceHash{},
- }
-
- g := &genericResource{
- resourceFileInfo: gfi,
- resourcePathDescriptor: pathDescriptor,
- mediaType: mediaType,
- resourceType: resourceType,
- spec: r,
- params: make(map[string]any),
- name: baseFilename,
- title: baseFilename,
- resourceContent: &resourceContent{},
- data: data,
+// NewResource creates a new Resource from the given ResourceSourceDescriptor.
+func (r *Spec) NewResource(rd ResourceSourceDescriptor) (resource.Resource, error) {
+ if err := rd.init(r); err != nil {
+ return nil, err
}
- return g
-}
-
-func (r *Spec) newResource(sourceFs afero.Fs, fd ResourceSourceDescriptor) (resource.Resource, error) {
- fi := fd.FileInfo
- var sourceFilename string
-
- if fd.OpenReadSeekCloser != nil {
- } else if fd.SourceFilename != "" {
- var err error
- fi, err = sourceFs.Stat(fd.SourceFilename)
- if err != nil {
- if herrors.IsNotExist(err) {
- return nil, nil
- }
- return nil, err
- }
- sourceFilename = fd.SourceFilename
- } else {
- sourceFilename = fd.SourceFile.Filename()
+ dir, name := path.Split(rd.TargetPath)
+ dir = paths.ToSlashPreserveLeading(dir)
+ if dir == "/" {
+ dir = ""
}
-
- if fd.RelTargetFilename == "" {
- fd.RelTargetFilename = sourceFilename
+ rp := internal.ResourcePaths{
+ File: name,
+ Dir: dir,
+ BaseDirTarget: rd.BasePathTargetPath,
+ BaseDirLink: rd.BasePathRelPermalink,
+ TargetBasePaths: rd.TargetBasePaths,
}
- mimeType := fd.MediaType
- if mimeType.IsZero() {
- ext := strings.ToLower(filepath.Ext(fd.RelTargetFilename))
- var (
- found bool
- suffixInfo media.SuffixInfo
- )
- mimeType, suffixInfo, found = r.MediaTypes().GetFirstBySuffix(strings.TrimPrefix(ext, "."))
- // TODO(bep) we need to handle these ambiguous types better, but in this context
- // we most likely want the application/xml type.
- if suffixInfo.Suffix == "xml" && mimeType.SubType == "rss" {
- mimeType, found = r.MediaTypes().GetByType("application/xml")
- }
-
- if !found {
- // A fallback. Note that mime.TypeByExtension is slow by Hugo standards,
- // so we should configure media types to avoid this lookup for most
- // situations.
- mimeStr := mime.TypeByExtension(ext)
- if mimeStr != "" {
- mimeType, _ = media.FromStringAndExt(mimeStr, ext)
- }
- }
+ gr := &genericResource{
+ Staler: &AtomicStaler{},
+ h: &resourceHash{},
+ paths: rp,
+ spec: r,
+ sd: rd,
+ params: make(map[string]any),
+ name: rd.Name,
+ title: rd.Name,
+ resourceContent: &resourceContent{},
}
- gr := r.newGenericResourceWithBase(
- sourceFs,
- fd.OpenReadSeekCloser,
- fd.TargetBasePaths,
- fd.TargetPaths,
- fi,
- sourceFilename,
- fd.RelTargetFilename,
- mimeType,
- fd.Data)
-
- if mimeType.MainType == "image" {
- imgFormat, ok := images.ImageFormatFromMediaSubType(mimeType.SubType)
+ if rd.MediaType.MainType == "image" {
+ imgFormat, ok := images.ImageFormatFromMediaSubType(rd.MediaType.SubType)
if ok {
ir := &imageResource{
Image: images.NewImage(imgFormat, r.imaging, nil, gr),
baseResource: gr,
}
ir.root = ir
- return newResourceAdapter(gr.spec, fd.LazyPublish, ir), nil
+ return newResourceAdapter(gr.spec, rd.LazyPublish, ir), nil
}
+
}
- return newResourceAdapter(gr.spec, fd.LazyPublish, gr), nil
+ return newResourceAdapter(gr.spec, rd.LazyPublish, gr), nil
}
-func (r *Spec) newResourceFor(fd ResourceSourceDescriptor) (resource.Resource, error) {
- if fd.OpenReadSeekCloser == nil {
- if fd.SourceFile != nil && fd.SourceFilename != "" {
- return nil, errors.New("both SourceFile and AbsSourceFilename provided")
- } else if fd.SourceFile == nil && fd.SourceFilename == "" {
- return nil, errors.New("either SourceFile or AbsSourceFilename must be provided")
- }
- }
+func (r *Spec) MediaTypes() media.Types {
+ return r.Cfg.GetConfigSection("mediaTypes").(media.Types)
+}
- if fd.RelTargetFilename == "" {
- fd.RelTargetFilename = fd.Filename()
- }
+func (r *Spec) OutputFormats() output.Formats {
+ return r.Cfg.GetConfigSection("outputFormats").(output.Formats)
+}
- if len(fd.TargetBasePaths) == 0 {
- // If not set, we publish the same resource to all hosts.
- fd.TargetBasePaths = r.MultihostTargetBasePaths
- }
+func (r *Spec) BuildConfig() config.BuildConfig {
+ return r.Cfg.GetConfigSection("build").(config.BuildConfig)
+}
- return r.newResource(fd.Fs, fd)
+func (s *Spec) String() string {
+ return "spec"
}
diff --git a/resources/resource_spec_test.go b/resources/resource_spec_test.go
new file mode 100644
index 000000000..67fe09992
--- /dev/null
+++ b/resources/resource_spec_test.go
@@ -0,0 +1,48 @@
+// Copyright 2024 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package resources_test
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/common/hugio"
+ "github.com/gohugoio/hugo/identity"
+ "github.com/gohugoio/hugo/resources"
+)
+
+func TestNewResource(t *testing.T) {
+ c := qt.New(t)
+
+ spec := newTestResourceSpec(specDescriptor{c: c})
+
+ open := hugio.NewOpenReadSeekCloser(hugio.NewReadSeekerNoOpCloserFromString("content"))
+
+ rd := resources.ResourceSourceDescriptor{
+ OpenReadSeekCloser: open,
+ TargetPath: "a/b.txt",
+ BasePathRelPermalink: "c/d",
+ BasePathTargetPath: "e/f",
+ GroupIdentity: identity.Anonymous,
+ }
+
+ r, err := spec.NewResource(rd)
+ c.Assert(err, qt.IsNil)
+ c.Assert(r, qt.Not(qt.IsNil))
+ c.Assert(r.RelPermalink(), qt.Equals, "/c/d/a/b.txt")
+
+ info := resources.GetTestInfoForResource(r)
+ c.Assert(info.Paths.TargetLink(), qt.Equals, "/c/d/a/b.txt")
+ c.Assert(info.Paths.TargetPath(), qt.Equals, "/e/f/a/b.txt")
+}
diff --git a/resources/resource_test.go b/resources/resource_test.go
deleted file mode 100644
index d6065c248..000000000
--- a/resources/resource_test.go
+++ /dev/null
@@ -1,48 +0,0 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package resources_test
-
-import (
- "testing"
-
- "github.com/gohugoio/hugo/resources"
-
- "github.com/gohugoio/hugo/media"
-
- qt "github.com/frankban/quicktest"
-)
-
-func TestNewResourceFromFilename(t *testing.T) {
- c := qt.New(t)
- spec := newTestResourceSpec(specDescriptor{c: c})
-
- writeSource(t, spec.Fs, "assets/a/b/logo.png", "image")
- writeSource(t, spec.Fs, "assets/a/b/data.json", "json")
-
- r, err := spec.New(resources.ResourceSourceDescriptor{Fs: spec.BaseFs.Assets.Fs, SourceFilename: "a/b/logo.png"})
-
- c.Assert(err, qt.IsNil)
- c.Assert(r, qt.Not(qt.IsNil))
- c.Assert(r.ResourceType(), qt.Equals, "image")
- c.Assert(r.RelPermalink(), qt.Equals, "/a/b/logo.png")
- c.Assert(r.Permalink(), qt.Equals, "https://example.com/a/b/logo.png")
-
- r, err = spec.New(resources.ResourceSourceDescriptor{Fs: spec.BaseFs.Assets.Fs, SourceFilename: "a/b/data.json"})
-
- c.Assert(err, qt.IsNil)
- c.Assert(r, qt.Not(qt.IsNil))
- c.Assert(r.ResourceType(), qt.Equals, "application")
-}
-
-var pngType, _ = media.FromStringAndExt("image/png", "png")
diff --git a/resources/resource_transformers/babel/babel.go b/resources/resource_transformers/babel/babel.go
index 2999d73cb..212331d8e 100644
--- a/resources/resource_transformers/babel/babel.go
+++ b/resources/resource_transformers/babel/babel.go
@@ -140,7 +140,7 @@ func (t *babelTransformation) Transform(ctx *resources.ResourceTransformationCtx
configFile = t.rs.BaseFs.ResolveJSConfigFile(configFile)
if configFile == "" && t.options.Config != "" {
// Only fail if the user specified config file is not found.
- return fmt.Errorf("babel config %q not found:", configFile)
+ return fmt.Errorf("babel config %q not found", configFile)
}
}
@@ -177,7 +177,6 @@ func (t *babelTransformation) Transform(ctx *resources.ResourceTransformationCtx
// ARGA [--no-install babel --config-file /private/var/folders/_g/j3j21hts4fn7__h04w2x8gb40000gn/T/hugo-test-babel812882892/babel.config.js --source-maps --filename=js/main2.js --out-file=/var/folders/_g/j3j21hts4fn7__h04w2x8gb40000gn/T/compileOut-2237820197.js]
// [--no-install babel --config-file /private/var/folders/_g/j3j21hts4fn7__h04w2x8gb40000gn/T/hugo-test-babel332846848/babel.config.js --filename=js/main.js --out-file=/var/folders/_g/j3j21hts4fn7__h04w2x8gb40000gn/T/compileOut-1451390834.js 0x10304ee60 0x10304ed60 0x10304f060]
cmd, err := ex.Npx(binaryName, cmdArgs...)
-
if err != nil {
if hexec.IsNotFound(err) {
// This may be on a CI server etc. Will fall back to pre-built assets.
@@ -187,7 +186,6 @@ func (t *babelTransformation) Transform(ctx *resources.ResourceTransformationCtx
}
stdin, err := cmd.StdinPipe()
-
if err != nil {
return err
}
diff --git a/resources/resource_transformers/htesting/testhelpers.go b/resources/resource_transformers/htesting/testhelpers.go
index b1feccc5f..c9382b828 100644
--- a/resources/resource_transformers/htesting/testhelpers.go
+++ b/resources/resource_transformers/htesting/testhelpers.go
@@ -16,54 +16,25 @@ package htesting
import (
"path/filepath"
- "github.com/gohugoio/hugo/config"
- "github.com/gohugoio/hugo/config/testconfig"
- "github.com/gohugoio/hugo/helpers"
- "github.com/gohugoio/hugo/hugofs"
+ "github.com/gohugoio/hugo/common/hugio"
+ "github.com/gohugoio/hugo/identity"
"github.com/gohugoio/hugo/resources"
"github.com/spf13/afero"
)
-func NewTestResourceSpec() (*resources.Spec, error) {
- cfg := config.New()
-
- imagingCfg := map[string]any{
- "resampleFilter": "linear",
- "quality": 68,
- "anchor": "left",
- }
-
- cfg.Set("imaging", imagingCfg)
- afs := afero.NewMemMapFs()
-
- conf := testconfig.GetTestConfig(afs, cfg)
- fs := hugofs.NewFrom(hugofs.NewBaseFileDecorator(afs), conf.BaseConfig())
- s, err := helpers.NewPathSpec(fs, conf, nil)
- if err != nil {
- return nil, err
- }
-
- spec, err := resources.NewSpec(s, nil, nil, nil, nil, nil, nil)
- return spec, err
-}
-
-func NewResourceTransformer(filename, content string) (resources.ResourceTransformer, error) {
- spec, err := NewTestResourceSpec()
- if err != nil {
- return nil, err
- }
- return NewResourceTransformerForSpec(spec, filename, content)
-}
-
func NewResourceTransformerForSpec(spec *resources.Spec, filename, content string) (resources.ResourceTransformer, error) {
filename = filepath.FromSlash(filename)
fs := spec.Fs.Source
- if err := afero.WriteFile(fs, filename, []byte(content), 0777); err != nil {
+ if err := afero.WriteFile(fs, filename, []byte(content), 0o777); err != nil {
return nil, err
}
- r, err := spec.New(resources.ResourceSourceDescriptor{Fs: fs, SourceFilename: filename})
+ var open hugio.OpenReadSeekCloser = func() (hugio.ReadSeekCloser, error) {
+ return fs.Open(filename)
+ }
+
+ r, err := spec.NewResource(resources.ResourceSourceDescriptor{TargetPath: filepath.FromSlash(filename), OpenReadSeekCloser: open, GroupIdentity: identity.Anonymous})
if err != nil {
return nil, err
}
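
The helper above no longer writes a SourceFilename-backed file; it hands the content to the spec through an OpenReadSeekCloser plus a TargetPath and GroupIdentity. A minimal sketch of that pattern, assuming spec.NewResource returns (resource.Resource, error) as used in the hunk and using only the descriptor fields shown there:

    package htestingsketch // hypothetical package, for illustration only

    import (
        "github.com/gohugoio/hugo/common/hugio"
        "github.com/gohugoio/hugo/identity"
        "github.com/gohugoio/hugo/resources"
        "github.com/gohugoio/hugo/resources/resource"
    )

    // newInMemoryResource builds a resource from a string without touching the
    // source filesystem, mirroring the rewritten test helper above.
    func newInMemoryResource(spec *resources.Spec, targetPath, content string) (resource.Resource, error) {
        var open hugio.OpenReadSeekCloser = func() (hugio.ReadSeekCloser, error) {
            return hugio.NewReadSeekerNoOpCloserFromString(content), nil
        }
        return spec.NewResource(resources.ResourceSourceDescriptor{
            TargetPath:         targetPath,
            OpenReadSeekCloser: open,
            GroupIdentity:      identity.Anonymous,
        })
    }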
diff --git a/resources/resource_transformers/integrity/integrity.go b/resources/resource_transformers/integrity/integrity.go
index 63f4f4c76..aef744443 100644
--- a/resources/resource_transformers/integrity/integrity.go
+++ b/resources/resource_transformers/integrity/integrity.go
@@ -23,6 +23,7 @@ import (
"hash"
"io"
+ "github.com/gohugoio/hugo/common/constants"
"github.com/gohugoio/hugo/resources/internal"
"github.com/gohugoio/hugo/resources"
@@ -47,7 +48,7 @@ type fingerprintTransformation struct {
}
func (t *fingerprintTransformation) Key() internal.ResourceTransformationKey {
- return internal.NewResourceTransformationKey("fingerprint", t.algo)
+ return internal.NewResourceTransformationKey(constants.ResourceTransformationFingerprint, t.algo)
}
// Transform creates a MD5 hash of the Resource content and inserts that hash before
diff --git a/resources/resource_transformers/integrity/integrity_test.go b/resources/resource_transformers/integrity/integrity_test.go
index 27e193618..e0af68ae9 100644
--- a/resources/resource_transformers/integrity/integrity_test.go
+++ b/resources/resource_transformers/integrity/integrity_test.go
@@ -17,6 +17,7 @@ import (
"context"
"testing"
+ "github.com/gohugoio/hugo/config/testconfig"
"github.com/gohugoio/hugo/resources/resource"
qt "github.com/frankban/quicktest"
@@ -51,11 +52,12 @@ func TestHashFromAlgo(t *testing.T) {
func TestTransform(t *testing.T) {
c := qt.New(t)
- spec, err := htesting.NewTestResourceSpec()
- c.Assert(err, qt.IsNil)
- client := New(spec)
+ d := testconfig.GetTestDeps(nil, nil)
+ t.Cleanup(func() { c.Assert(d.Close(), qt.IsNil) })
+
+ client := New(d.ResourceSpec)
- r, err := htesting.NewResourceTransformerForSpec(spec, "hugo.txt", "Hugo Rocks!")
+ r, err := htesting.NewResourceTransformerForSpec(d.ResourceSpec, "hugo.txt", "Hugo Rocks!")
c.Assert(err, qt.IsNil)
transformed, err := client.Fingerprint(r, "")
diff --git a/resources/resource_transformers/js/build.go b/resources/resource_transformers/js/build.go
index aa802d81e..cc68d2253 100644
--- a/resources/resource_transformers/js/build.go
+++ b/resources/resource_transformers/js/build.go
@@ -14,6 +14,7 @@
package js
import (
+ "errors"
"fmt"
"io"
"os"
@@ -22,8 +23,6 @@ import (
"regexp"
"strings"
- "errors"
-
"github.com/spf13/afero"
"github.com/gohugoio/hugo/hugofs"
@@ -93,7 +92,7 @@ func (t *buildTransformation) Transform(ctx *resources.ResourceTransformationCtx
return err
}
- buildOptions.Plugins, err = createBuildPlugins(t.c, opts)
+ buildOptions.Plugins, err = createBuildPlugins(ctx.DependencyManager, t.c, opts)
if err != nil {
return err
}
diff --git a/resources/resource_transformers/js/integration_test.go b/resources/resource_transformers/js/integration_test.go
index 0e311107b..304c51d33 100644
--- a/resources/resource_transformers/js/integration_test.go
+++ b/resources/resource_transformers/js/integration_test.go
@@ -29,6 +29,7 @@ func TestBuildVariants(t *testing.T) {
mainWithImport := `
-- config.toml --
disableKinds=["page", "section", "taxonomy", "term", "sitemap", "robotsTXT"]
+disableLiveReload = true
-- assets/js/main.js --
import { hello1, hello2 } from './util1';
hello1();
@@ -61,7 +62,7 @@ JS Content:{{ $js.Content }}:End:
b := hugolib.NewIntegrationTestBuilder(hugolib.IntegrationTestConfig{T: c, Running: true, NeedsOsFS: true, TxtarString: mainWithImport}).Build()
b.AssertFileContent("public/index.html", `abcd`)
- b.EditFileReplace("assets/js/util1.js", func(s string) string { return strings.ReplaceAll(s, "abcd", "1234") }).Build()
+ b.EditFileReplaceFunc("assets/js/util1.js", func(s string) string { return strings.ReplaceAll(s, "abcd", "1234") }).Build()
b.AssertFileContent("public/index.html", `1234`)
})
@@ -69,7 +70,7 @@ JS Content:{{ $js.Content }}:End:
b := hugolib.NewIntegrationTestBuilder(hugolib.IntegrationTestConfig{T: c, Running: true, NeedsOsFS: true, TxtarString: mainWithImport}).Build()
b.AssertFileContent("public/index.html", `efgh`)
- b.EditFileReplace("assets/js/util2.js", func(s string) string { return strings.ReplaceAll(s, "efgh", "1234") }).Build()
+ b.EditFileReplaceFunc("assets/js/util2.js", func(s string) string { return strings.ReplaceAll(s, "efgh", "1234") }).Build()
b.AssertFileContent("public/index.html", `1234`)
})
}
@@ -257,7 +258,6 @@ JS Content:{{ $js.Content }}:End:
b.Assert(err, qt.IsNotNil)
b.Assert(err.Error(), qt.Contains, `util1.js:4:17": No matching export in`)
})
-
}
// See issue 10527.
@@ -301,7 +301,6 @@ IMPORT_SRC_DIR:imp3/foo.ts
b.AssertFileContent("public/js/main.js", expected)
})
}
-
}
// See https://github.com/evanw/esbuild/issues/2745
@@ -342,7 +341,6 @@ License util2
Main license
`)
-
}
// Issue #11232
diff --git a/resources/resource_transformers/js/options.go b/resources/resource_transformers/js/options.go
index e9ffbabe4..df32e7012 100644
--- a/resources/resource_transformers/js/options.go
+++ b/resources/resource_transformers/js/options.go
@@ -21,11 +21,12 @@ import (
"strings"
"github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/common/paths"
+ "github.com/gohugoio/hugo/identity"
"github.com/spf13/afero"
"github.com/evanw/esbuild/pkg/api"
- "github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/hugofs"
"github.com/gohugoio/hugo/media"
"github.com/mitchellh/mapstructure"
@@ -113,7 +114,7 @@ func decodeOptions(m map[string]any) (Options, error) {
}
if opts.TargetPath != "" {
- opts.TargetPath = helpers.ToSlashTrimLeading(opts.TargetPath)
+ opts.TargetPath = paths.ToSlashTrimLeading(opts.TargetPath)
}
opts.Target = strings.ToLower(opts.Target)
@@ -203,7 +204,7 @@ func resolveComponentInAssets(fs afero.Fs, impPath string) *hugofs.FileMeta {
return m
}
-func createBuildPlugins(c *Client, opts Options) ([]api.Plugin, error) {
+func createBuildPlugins(depsManager identity.Manager, c *Client, opts Options) ([]api.Plugin, error) {
fs := c.rs.Assets
resolveImport := func(args api.OnResolveArgs) (api.OnResolveResult, error) {
@@ -224,6 +225,7 @@ func createBuildPlugins(c *Client, opts Options) ([]api.Plugin, error) {
// ESBuild resolve this.
return api.OnResolveResult{}, nil
}
+
relDir = filepath.Dir(rel)
} else {
relDir = opts.sourceDir
@@ -238,6 +240,8 @@ func createBuildPlugins(c *Client, opts Options) ([]api.Plugin, error) {
m := resolveComponentInAssets(fs.Fs, impPath)
if m != nil {
+ depsManager.AddIdentity(m.PathInfo)
+
// Store the source root so we can create a jsconfig.json
// to help IntelliSense when the build is done.
// This should be a small number of elements, and when
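
The options.go change threads an identity.Manager into the esbuild build plugins so every import resolved out of /assets is registered as a dependency (depsManager.AddIdentity(m.PathInfo) above). A simplified, hypothetical plugin showing the shape of that idea; the real code resolves against Hugo's assets filesystem and records the resolved file's PathInfo, while this sketch only records the raw import path:

    package jssketch // hypothetical package, for illustration only

    import (
        "github.com/evanw/esbuild/pkg/api"
        "github.com/gohugoio/hugo/identity"
    )

    // newDependencyRecordingPlugin records every import path with the dependency
    // manager and then lets esbuild do the actual resolution.
    func newDependencyRecordingPlugin(depsManager identity.Manager) api.Plugin {
        return api.Plugin{
            Name: "hugo-deps-sketch",
            Setup: func(build api.PluginBuild) {
                build.OnResolve(api.OnResolveOptions{Filter: `.*`},
                    func(args api.OnResolveArgs) (api.OnResolveResult, error) {
                        depsManager.AddIdentity(identity.StringIdentity(args.Path))
                        // An empty result tells esbuild to resolve the import itself,
                        // matching the "ESBuild resolve this" branch in the hunk above.
                        return api.OnResolveResult{}, nil
                    })
            },
        }
    }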
diff --git a/resources/resource_transformers/js/options_test.go b/resources/resource_transformers/js/options_test.go
index a76a24caa..b8b031b81 100644
--- a/resources/resource_transformers/js/options_test.go
+++ b/resources/resource_transformers/js/options_test.go
@@ -14,10 +14,15 @@
package js
import (
+ "path"
"path/filepath"
"testing"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/config/testconfig"
"github.com/gohugoio/hugo/hugofs"
+ "github.com/gohugoio/hugo/hugolib/filesystems"
+ "github.com/gohugoio/hugo/hugolib/paths"
"github.com/gohugoio/hugo/media"
"github.com/spf13/afero"
@@ -164,20 +169,27 @@ func TestResolveComponentInAssets(t *testing.T) {
mfs := afero.NewMemMapFs()
for _, filename := range test.files {
- c.Assert(afero.WriteFile(mfs, filepath.Join(baseDir, filename), []byte("let foo='bar';"), 0777), qt.IsNil)
+ c.Assert(afero.WriteFile(mfs, filepath.Join(baseDir, filename), []byte("let foo='bar';"), 0o777), qt.IsNil)
}
- bfs := hugofs.DecorateBasePathFs(afero.NewBasePathFs(mfs, baseDir).(*afero.BasePathFs))
+ conf := testconfig.GetTestConfig(mfs, config.New())
+ fs := hugofs.NewFrom(mfs, conf.BaseConfig())
- got := resolveComponentInAssets(bfs, test.impPath)
+ p, err := paths.New(fs, conf)
+ c.Assert(err, qt.IsNil)
+ bfs, err := filesystems.NewBase(p, nil)
+ c.Assert(err, qt.IsNil)
+
+ got := resolveComponentInAssets(bfs.Assets.Fs, test.impPath)
gotPath := ""
+ expect := test.expect
if got != nil {
- gotPath = filepath.ToSlash(got.Path)
+ gotPath = filepath.ToSlash(got.Filename)
+ expect = path.Join(baseDir, test.expect)
}
- c.Assert(gotPath, qt.Equals, test.expect)
+ c.Assert(gotPath, qt.Equals, expect)
})
-
}
}
diff --git a/resources/resource_transformers/minifier/minify_test.go b/resources/resource_transformers/minifier/minify_test.go
index b2d8ed734..030abf426 100644
--- a/resources/resource_transformers/minifier/minify_test.go
+++ b/resources/resource_transformers/minifier/minify_test.go
@@ -17,6 +17,7 @@ import (
"context"
"testing"
+ "github.com/gohugoio/hugo/config/testconfig"
"github.com/gohugoio/hugo/resources/resource"
qt "github.com/frankban/quicktest"
@@ -26,11 +27,11 @@ import (
func TestTransform(t *testing.T) {
c := qt.New(t)
- spec, err := htesting.NewTestResourceSpec()
- c.Assert(err, qt.IsNil)
- client, _ := New(spec)
+ d := testconfig.GetTestDeps(nil, nil)
+ t.Cleanup(func() { c.Assert(d.Close(), qt.IsNil) })
- r, err := htesting.NewResourceTransformerForSpec(spec, "hugo.html", "<h1> Hugo Rocks! </h1>")
+ client, _ := New(d.ResourceSpec)
+ r, err := htesting.NewResourceTransformerForSpec(d.ResourceSpec, "hugo.html", "<h1> Hugo Rocks! </h1>")
c.Assert(err, qt.IsNil)
transformed, err := client.Minify(r)
diff --git a/resources/resource_transformers/postcss/integration_test.go b/resources/resource_transformers/postcss/integration_test.go
index 74aaa2661..957e69403 100644
--- a/resources/resource_transformers/postcss/integration_test.go
+++ b/resources/resource_transformers/postcss/integration_test.go
@@ -139,7 +139,6 @@ Styles Content: Len: 770917|
b.AssertLogContains("Hugo PublishDir: " + filepath.Join(tempDir, "public"))
}
}
-
}
// 9880
@@ -149,7 +148,7 @@ func TestTransformPostCSSError(t *testing.T) {
}
if runtime.GOOS == "windows" {
- //TODO(bep) This has started to fail on Windows with Go 1.19 on GitHub Actions for some mysterious reason.
+ // TODO(bep) This has started to fail on Windows with Go 1.19 on GitHub Actions for some mysterious reason.
t.Skip("Skip on Windows")
}
@@ -165,7 +164,6 @@ func TestTransformPostCSSError(t *testing.T) {
s.AssertIsFileError(err)
c.Assert(err.Error(), qt.Contains, "a.css:4:2")
-
}
func TestTransformPostCSSNotInstalledError(t *testing.T) {
@@ -184,7 +182,6 @@ func TestTransformPostCSSNotInstalledError(t *testing.T) {
s.AssertIsFileError(err)
c.Assert(err.Error(), qt.Contains, `binary with name "npx" not found`)
-
}
// #9895
@@ -206,8 +203,7 @@ func TestTransformPostCSSImportError(t *testing.T) {
s.AssertIsFileError(err)
c.Assert(err.Error(), qt.Contains, "styles.css:4:3")
- c.Assert(err.Error(), qt.Contains, filepath.FromSlash(`failed to resolve CSS @import "css/components/doesnotexist.css"`))
-
+ c.Assert(err.Error(), qt.Contains, filepath.FromSlash(`failed to resolve CSS @import "/css/components/doesnotexist.css"`))
}
func TestTransformPostCSSImporSkipInlineImportsNotFound(t *testing.T) {
@@ -230,7 +226,6 @@ func TestTransformPostCSSImporSkipInlineImportsNotFound(t *testing.T) {
}).Build()
s.AssertFileContent("public/css/styles.css", `@import "components/doesnotexist.css";`)
-
}
// Issue 9787
@@ -267,5 +262,4 @@ Styles Content: Len: 770917
`)
}
-
}
diff --git a/resources/resource_transformers/postcss/postcss.go b/resources/resource_transformers/postcss/postcss.go
index a65fa3783..9015e120d 100644
--- a/resources/resource_transformers/postcss/postcss.go
+++ b/resources/resource_transformers/postcss/postcss.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -17,6 +17,7 @@ import (
"bytes"
"crypto/sha256"
"encoding/hex"
+ "errors"
"fmt"
"io"
"path"
@@ -30,6 +31,7 @@ import (
"github.com/gohugoio/hugo/common/loggers"
"github.com/gohugoio/hugo/common/text"
"github.com/gohugoio/hugo/hugofs"
+ "github.com/gohugoio/hugo/identity"
"github.com/gohugoio/hugo/common/hugo"
@@ -37,8 +39,6 @@ import (
"github.com/spf13/afero"
"github.com/spf13/cast"
- "errors"
-
"github.com/mitchellh/mapstructure"
"github.com/gohugoio/hugo/common/herrors"
@@ -86,7 +86,6 @@ func (c *Client) Process(res resources.ResourceTransformer, options map[string]a
// Some of the options from https://github.com/postcss/postcss-cli
type Options struct {
-
// Set a custom path to look for a config file.
Config string
@@ -151,7 +150,7 @@ func (t *postcssTransformation) Transform(ctx *resources.ResourceTransformationC
const binaryName = "postcss"
infol := t.rs.Logger.InfoCommand(binaryName)
- infoW := loggers.LevelLoggerToWriter(infol)
+ infow := loggers.LevelLoggerToWriter(infol)
ex := t.rs.ExecHelper
@@ -179,7 +178,7 @@ func (t *postcssTransformation) Transform(ctx *resources.ResourceTransformationC
configFile = t.rs.BaseFs.ResolveJSConfigFile(configFile)
if configFile == "" && options.Config != "" {
// Only fail if the user specified config file is not found.
- return fmt.Errorf("postcss config %q not found:", options.Config)
+ return fmt.Errorf("postcss config %q not found", options.Config)
}
}
@@ -196,7 +195,7 @@ func (t *postcssTransformation) Transform(ctx *resources.ResourceTransformationC
var errBuf bytes.Buffer
- stderr := io.MultiWriter(infoW, &errBuf)
+ stderr := io.MultiWriter(infow, &errBuf)
cmdArgs = append(cmdArgs, hexec.WithStderr(stderr))
cmdArgs = append(cmdArgs, hexec.WithStdout(ctx.To))
cmdArgs = append(cmdArgs, hexec.WithEnviron(hugo.GetExecEnviron(t.rs.Cfg.BaseConfig().WorkingDir, t.rs.Cfg, t.rs.BaseFs.Assets.Fs)))
@@ -221,7 +220,7 @@ func (t *postcssTransformation) Transform(ctx *resources.ResourceTransformationC
ctx.From,
ctx.InPath,
options,
- t.rs.Assets.Fs, t.rs.Logger,
+ t.rs.Assets.Fs, t.rs.Logger, ctx.DependencyManager,
)
if options.InlineImports {
@@ -260,17 +259,19 @@ type importResolver struct {
inPath string
opts Options
- contentSeen map[string]bool
- linemap map[int]fileOffset
- fs afero.Fs
- logger loggers.Logger
+ contentSeen map[string]bool
+ dependencyManager identity.Manager
+ linemap map[int]fileOffset
+ fs afero.Fs
+ logger loggers.Logger
}
-func newImportResolver(r io.Reader, inPath string, opts Options, fs afero.Fs, logger loggers.Logger) *importResolver {
+func newImportResolver(r io.Reader, inPath string, opts Options, fs afero.Fs, logger loggers.Logger, dependencyManager identity.Manager) *importResolver {
return &importResolver{
- r: r,
- inPath: inPath,
- fs: fs, logger: logger,
+ r: r,
+ dependencyManager: dependencyManager,
+ inPath: inPath,
+ fs: fs, logger: logger,
linemap: make(map[int]fileOffset), contentSeen: make(map[string]bool),
opts: opts,
}
@@ -289,7 +290,8 @@ func (imp *importResolver) contentHash(filename string) ([]byte, string) {
func (imp *importResolver) importRecursive(
lineNum int,
content string,
- inPath string) (int, string, error) {
+ inPath string,
+) (int, string, error) {
basePath := path.Dir(inPath)
var replacements []string
@@ -312,6 +314,7 @@ func (imp *importResolver) importRecursive(
} else {
path := strings.Trim(strings.TrimPrefix(line, importIdentifier), " \"';")
filename := filepath.Join(basePath, path)
+ imp.dependencyManager.AddIdentity(identity.CleanStringIdentity(filename))
importContent, hash := imp.contentHash(filename)
if importContent == nil {
@@ -364,8 +367,6 @@ func (imp *importResolver) importRecursive(
}
func (imp *importResolver) resolve() (io.Reader, error) {
- const importIdentifier = "@import"
-
content, err := io.ReadAll(imp.r)
if err != nil {
return nil, err
@@ -438,6 +439,5 @@ func (imp *importResolver) toFileError(output string) error {
pos.LineNumber = file.Offset + 1
return ferr.UpdatePosition(pos).UpdateContent(f, nil)
- //return herrors.NewFileErrorFromFile(inErr, file.Filename, realFilename, hugofs.Os, herrors.SimpleLineMatcher)
-
+ // return herrors.NewFileErrorFromFile(inErr, file.Filename, realFilename, hugofs.Os, herrors.SimpleLineMatcher)
}
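
postcss.go now threads ctx.DependencyManager into the import resolver, and importRecursive registers every @import target it visits. A small, hypothetical stand-alone sketch of that per-line bookkeeping, using the same trimming and path join as the hunk above:

    package postcsssketch // hypothetical package, for illustration only

    import (
        "path"
        "path/filepath"
        "strings"

        "github.com/gohugoio/hugo/identity"
    )

    // recordImports registers each `@import "x.css";` target, resolved relative
    // to the importing file, as a dependency identity.
    func recordImports(depsManager identity.Manager, inPath string, lines []string) {
        const importIdentifier = "@import"
        basePath := path.Dir(inPath)
        for _, line := range lines {
            line = strings.TrimSpace(line)
            if !strings.HasPrefix(line, importIdentifier) {
                continue
            }
            p := strings.Trim(strings.TrimPrefix(line, importIdentifier), " \"';")
            filename := filepath.Join(basePath, p)
            depsManager.AddIdentity(identity.CleanStringIdentity(filename))
        }
    }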
diff --git a/resources/resource_transformers/postcss/postcss_test.go b/resources/resource_transformers/postcss/postcss_test.go
index dd0695cd1..1edaaaaf5 100644
--- a/resources/resource_transformers/postcss/postcss_test.go
+++ b/resources/resource_transformers/postcss/postcss_test.go
@@ -20,6 +20,7 @@ import (
"github.com/gohugoio/hugo/common/loggers"
"github.com/gohugoio/hugo/htesting/hqt"
+ "github.com/gohugoio/hugo/identity"
"github.com/gohugoio/hugo/helpers"
@@ -71,7 +72,7 @@ func TestImportResolver(t *testing.T) {
fs := afero.NewMemMapFs()
writeFile := func(name, content string) {
- c.Assert(afero.WriteFile(fs, name, []byte(content), 0777), qt.IsNil)
+ c.Assert(afero.WriteFile(fs, name, []byte(content), 0o777), qt.IsNil)
}
writeFile("a.css", `@import "b.css";
@@ -96,6 +97,7 @@ LOCAL_STYLE
"styles.css",
Options{},
fs, loggers.NewDefault(),
+ identity.NopManager,
)
r, err := imp.resolve()
@@ -123,7 +125,7 @@ func BenchmarkImportResolver(b *testing.B) {
fs := afero.NewMemMapFs()
writeFile := func(name, content string) {
- c.Assert(afero.WriteFile(fs, name, []byte(content), 0777), qt.IsNil)
+ c.Assert(afero.WriteFile(fs, name, []byte(content), 0o777), qt.IsNil)
}
writeFile("a.css", `@import "b.css";
@@ -153,6 +155,7 @@ LOCAL_STYLE
"styles.css",
Options{},
fs, logger,
+ identity.NopManager,
)
b.StartTimer()
diff --git a/resources/resource_transformers/templates/execute_as_template.go b/resources/resource_transformers/templates/execute_as_template.go
index efe3e4c57..79d249bd6 100644
--- a/resources/resource_transformers/templates/execute_as_template.go
+++ b/resources/resource_transformers/templates/execute_as_template.go
@@ -18,6 +18,7 @@ import (
"context"
"fmt"
+ "github.com/gohugoio/hugo/common/paths"
"github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/resources"
"github.com/gohugoio/hugo/resources/internal"
@@ -68,7 +69,7 @@ func (t *executeAsTemplateTransform) Transform(ctx *resources.ResourceTransforma
func (c *Client) ExecuteAsTemplate(ctx context.Context, res resources.ResourceTransformer, targetPath string, data any) (resource.Resource, error) {
return res.TransformWithContext(ctx, &executeAsTemplateTransform{
rs: c.rs,
- targetPath: helpers.ToSlashTrimLeading(targetPath),
+ targetPath: paths.ToSlashTrimLeading(targetPath),
t: c.t,
data: data,
})
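
This hunk, and the matching ones in the Sass clients below, swaps helpers.ToSlashTrimLeading for paths.ToSlashTrimLeading; the behavior is assumed to be unchanged, i.e. normalize separators and drop a leading slash. A tiny sketch of what such a normalization amounts to, offered as an assumption rather than the actual implementation:

    package pathssketch // hypothetical package, for illustration only

    import (
        "path/filepath"
        "strings"
    )

    // toSlashTrimLeading is assumed behavior: `\styles\main.css`,
    // `/styles/main.css` and `styles/main.css` all end up as `styles/main.css`.
    func toSlashTrimLeading(s string) string {
        return strings.TrimPrefix(filepath.ToSlash(s), "/")
    }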
diff --git a/resources/resource_transformers/tocss/dartsass/client.go b/resources/resource_transformers/tocss/dartsass/client.go
index 929900ca8..4b8ca97eb 100644
--- a/resources/resource_transformers/tocss/dartsass/client.go
+++ b/resources/resource_transformers/tocss/dartsass/client.go
@@ -25,6 +25,7 @@ import (
"github.com/bep/logg"
"github.com/gohugoio/hugo/common/herrors"
"github.com/gohugoio/hugo/common/hugo"
+ "github.com/gohugoio/hugo/common/paths"
"github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/hugofs"
"github.com/gohugoio/hugo/hugolib/filesystems"
@@ -78,7 +79,6 @@ func New(fs *filesystems.SourceFilesystem, rs *resources.Spec) (*Client, error)
}
},
})
-
} else {
transpilerv1, err = godartsassv1.Start(godartsassv1.Options{
DartSassEmbeddedFilename: hugo.DartSassBinaryName,
@@ -153,11 +153,11 @@ func (c *Client) toCSS(args godartsass.Args, src io.Reader) (godartsass.Result,
}
} else {
res, err = c.transpiler.Execute(args)
-
}
if err != nil {
if err.Error() == "unexpected EOF" {
+ //lint:ignore ST1005 end user message.
return res, fmt.Errorf("got unexpected EOF when executing %q. The user running hugo must have read and execute permissions on this program. With execute permissions only, this error is thrown.", hugo.DartSassBinaryName)
}
return res, herrors.NewFileErrorFromFileInErr(err, hugofs.Os, herrors.OffsetMatcher)
@@ -167,7 +167,6 @@ func (c *Client) toCSS(args godartsass.Args, src io.Reader) (godartsass.Result,
}
type Options struct {
-
// Hugo, will by default, just replace the extension of the source
// to .css, e.g. "scss/main.scss" becomes "scss/main.css". You can
// control this by setting this, e.g. "styles/main.css" will create
@@ -204,7 +203,7 @@ func decodeOptions(m map[string]any) (opts Options, err error) {
err = mapstructure.WeakDecode(m, &opts)
if opts.TargetPath != "" {
- opts.TargetPath = helpers.ToSlashTrimLeading(opts.TargetPath)
+ opts.TargetPath = paths.ToSlashTrimLeading(opts.TargetPath)
}
return
diff --git a/resources/resource_transformers/tocss/dartsass/transform.go b/resources/resource_transformers/tocss/dartsass/transform.go
index 32855e1c5..73eca6a53 100644
--- a/resources/resource_transformers/tocss/dartsass/transform.go
+++ b/resources/resource_transformers/tocss/dartsass/transform.go
@@ -1,4 +1,4 @@
-// Copyright 2022 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -23,6 +23,7 @@ import (
"github.com/gohugoio/hugo/common/hugo"
"github.com/gohugoio/hugo/common/paths"
"github.com/gohugoio/hugo/htesting"
+ "github.com/gohugoio/hugo/identity"
"github.com/gohugoio/hugo/media"
"github.com/gohugoio/hugo/resources"
@@ -80,8 +81,9 @@ func (t *transform) Transform(ctx *resources.ResourceTransformationCtx) error {
URL: filename,
IncludePaths: t.c.sfs.RealDirs(baseDir),
ImportResolver: importResolver{
- baseDir: baseDir,
- c: t.c,
+ baseDir: baseDir,
+ c: t.c,
+ dependencyManager: ctx.DependencyManager,
varsStylesheet: godartsass.Import{Content: sass.CreateVarsStyleSheet(opts.Vars)},
},
@@ -126,10 +128,10 @@ func (t *transform) Transform(ctx *resources.ResourceTransformationCtx) error {
}
type importResolver struct {
- baseDir string
- c *Client
-
- varsStylesheet godartsass.Import
+ baseDir string
+ c *Client
+ dependencyManager identity.Manager
+ varsStylesheet godartsass.Import
}
func (t importResolver) CanonicalizeURL(url string) (string, error) {
@@ -172,6 +174,7 @@ func (t importResolver) CanonicalizeURL(url string) (string, error) {
fi, err := t.c.sfs.Fs.Stat(filenameToCheck)
if err == nil {
if fim, ok := fi.(hugofs.FileMetaInfo); ok {
+ t.dependencyManager.AddIdentity(identity.CleanStringIdentity(filenameToCheck))
return "file://" + filepath.ToSlash(fim.Meta().Filename), nil
}
}
@@ -196,7 +199,6 @@ func (t importResolver) Load(url string) (godartsass.Import, error) {
}
return godartsass.Import{Content: string(b), SourceSyntax: sourceSyntax}, err
-
}
type importResolverV1 struct {
diff --git a/resources/resource_transformers/tocss/internal/sass/helpers.go b/resources/resource_transformers/tocss/internal/sass/helpers.go
index acd6d86d5..c1cef141e 100644
--- a/resources/resource_transformers/tocss/internal/sass/helpers.go
+++ b/resources/resource_transformers/tocss/internal/sass/helpers.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -56,7 +56,6 @@ func CreateVarsStyleSheet(vars map[string]any) string {
sort.Strings(varsSlice)
varsStylesheet = strings.Join(varsSlice, "\n")
return varsStylesheet
-
}
var (
diff --git a/resources/resource_transformers/tocss/internal/sass/helpers_test.go b/resources/resource_transformers/tocss/internal/sass/helpers_test.go
index 56e73736e..ef31fdd8f 100644
--- a/resources/resource_transformers/tocss/internal/sass/helpers_test.go
+++ b/resources/resource_transformers/tocss/internal/sass/helpers_test.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -40,5 +40,4 @@ func TestIsUnquotedCSSValue(t *testing.T) {
} {
c.Assert(isTypedCSSValue(test.in), qt.Equals, test.out)
}
-
}
diff --git a/resources/resource_transformers/tocss/scss/client.go b/resources/resource_transformers/tocss/scss/client.go
index 2028163ff..aead6279b 100644
--- a/resources/resource_transformers/tocss/scss/client.go
+++ b/resources/resource_transformers/tocss/scss/client.go
@@ -16,7 +16,7 @@ package scss
import (
"regexp"
- "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/common/paths"
"github.com/gohugoio/hugo/hugolib/filesystems"
"github.com/gohugoio/hugo/resources"
"github.com/spf13/afero"
@@ -37,7 +37,6 @@ func New(fs *filesystems.SourceFilesystem, rs *resources.Spec) (*Client, error)
}
type Options struct {
-
// Hugo, will by default, just replace the extension of the source
// to .css, e.g. "scss/main.scss" becomes "scss/main.css". You can
// control this by setting this, e.g. "styles/main.css" will create
@@ -73,7 +72,7 @@ func DecodeOptions(m map[string]any) (opts Options, err error) {
err = mapstructure.WeakDecode(m, &opts)
if opts.TargetPath != "" {
- opts.TargetPath = helpers.ToSlashTrimLeading(opts.TargetPath)
+ opts.TargetPath = paths.ToSlashTrimLeading(opts.TargetPath)
}
return
diff --git a/resources/resource_transformers/tocss/scss/tocss.go b/resources/resource_transformers/tocss/scss/tocss.go
index 1018ea02e..a4c4e6d8e 100644
--- a/resources/resource_transformers/tocss/scss/tocss.go
+++ b/resources/resource_transformers/tocss/scss/tocss.go
@@ -20,7 +20,6 @@ import (
"fmt"
"io"
"path"
-
"path/filepath"
"strings"
@@ -29,6 +28,7 @@ import (
"github.com/gohugoio/hugo/common/herrors"
"github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/hugofs"
+ "github.com/gohugoio/hugo/identity"
"github.com/gohugoio/hugo/media"
"github.com/gohugoio/hugo/resources"
"github.com/gohugoio/hugo/resources/resource_transformers/tocss/internal/sass"
@@ -115,6 +115,7 @@ func (t *toCSSTransformation) Transform(ctx *resources.ResourceTransformationCtx
fi, err := t.c.sfs.Fs.Stat(filenameToCheck)
if err == nil {
if fim, ok := fi.(hugofs.FileMetaInfo); ok {
+ ctx.DependencyManager.AddIdentity(identity.CleanStringIdentity(filenameToCheck))
return fim.Meta().Filename, "", true
}
}
diff --git a/resources/testhelpers_test.go b/resources/testhelpers_test.go
index 1de2f54f6..028524619 100644
--- a/resources/testhelpers_test.go
+++ b/resources/testhelpers_test.go
@@ -2,23 +2,21 @@ package resources_test
import (
"image"
- "io"
"os"
"path/filepath"
"runtime"
"strings"
- "testing"
+ "github.com/gohugoio/hugo/common/hugio"
"github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/config/testconfig"
"github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/identity"
"github.com/gohugoio/hugo/resources"
qt "github.com/frankban/quicktest"
- "github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/hugofs"
"github.com/gohugoio/hugo/resources/images"
- "github.com/gohugoio/hugo/resources/page"
"github.com/gohugoio/hugo/resources/resource"
"github.com/spf13/afero"
)
@@ -44,7 +42,7 @@ func newTestResourceSpec(desc specDescriptor) *resources.Spec {
panic("osFs not supported for this test")
}
- if err := afs.MkdirAll("assets", 0755); err != nil {
+ if err := afs.MkdirAll("assets", 0o755); err != nil {
panic(err)
}
@@ -64,16 +62,13 @@ func newTestResourceSpec(desc specDescriptor) *resources.Spec {
func(d *deps.Deps) { d.Fs.PublishDir = hugofs.NewCreateCountingFs(d.Fs.PublishDir) },
)
- return d.ResourceSpec
-}
-
-func newTargetPaths(link string) func() page.TargetPaths {
- return func() page.TargetPaths {
- return page.TargetPaths{
- SubResourceBaseTarget: filepath.FromSlash(link),
- SubResourceBaseLink: link,
+ desc.c.Cleanup(func() {
+ if err := d.Close(); err != nil {
+ panic(err)
}
- }
+ })
+
+ return d.ResourceSpec
}
func newTestResourceOsFs(c *qt.C) (*resources.Spec, string) {
@@ -92,7 +87,7 @@ func newTestResourceOsFs(c *qt.C) (*resources.Spec, string) {
cfg.Set("workingDir", workDir)
- os.MkdirAll(filepath.Join(workDir, "assets"), 0755)
+ os.MkdirAll(filepath.Join(workDir, "assets"), 0o755)
d := testconfig.GetTestDeps(hugofs.Os, cfg)
@@ -116,22 +111,16 @@ func fetchImageForSpec(spec *resources.Spec, c *qt.C, name string) images.ImageR
}
func fetchResourceForSpec(spec *resources.Spec, c *qt.C, name string, targetPathAddends ...string) resource.ContentResource {
- src, err := os.Open(filepath.FromSlash("testdata/" + name))
- c.Assert(err, qt.IsNil)
- if len(targetPathAddends) > 0 {
- addends := strings.Join(targetPathAddends, "_")
- name = addends + "_" + name
- }
- out, err := helpers.OpenFileForWriting(spec.Fs.WorkingDirWritable, filepath.Join(filepath.Join("assets", name)))
- c.Assert(err, qt.IsNil)
- _, err = io.Copy(out, src)
- out.Close()
- src.Close()
+ b, err := os.ReadFile(filepath.FromSlash("testdata/" + name))
c.Assert(err, qt.IsNil)
-
- factory := newTargetPaths("/a")
-
- r, err := spec.New(resources.ResourceSourceDescriptor{Fs: spec.BaseFs.Assets.Fs, TargetPaths: factory, LazyPublish: true, RelTargetFilename: name, SourceFilename: name})
+ open := hugio.NewOpenReadSeekCloser(hugio.NewReadSeekerNoOpCloserFromBytes(b))
+ targetPath := name
+ base := "/a/"
+ r, err := spec.NewResource(resources.ResourceSourceDescriptor{
+ LazyPublish: true,
+ Name: name, TargetPath: targetPath, BasePathRelPermalink: base, BasePathTargetPath: base, OpenReadSeekCloser: open,
+ GroupIdentity: identity.Anonymous,
+ })
c.Assert(err, qt.IsNil)
c.Assert(r, qt.Not(qt.IsNil))
@@ -150,17 +139,3 @@ func assertImageFile(c *qt.C, fs afero.Fs, filename string, width, height int) {
c.Assert(config.Width, qt.Equals, width)
c.Assert(config.Height, qt.Equals, height)
}
-
-func assertFileCache(c *qt.C, fs afero.Fs, filename string, width, height int) {
- assertImageFile(c, fs, filepath.Clean(filename), width, height)
-}
-
-func writeSource(t testing.TB, fs *hugofs.Fs, filename, content string) {
- writeToFs(t, fs.Source, filename, content)
-}
-
-func writeToFs(t testing.TB, fs afero.Fs, filename, content string) {
- if err := afero.WriteFile(fs, filepath.FromSlash(filename), []byte(content), 0755); err != nil {
- t.Fatalf("Failed to write file: %s", err)
- }
-}
diff --git a/resources/transform.go b/resources/transform.go
index 0c38345ad..408decbb8 100644
--- a/resources/transform.go
+++ b/resources/transform.go
@@ -23,7 +23,9 @@ import (
"strings"
"sync"
+ "github.com/gohugoio/hugo/common/constants"
"github.com/gohugoio/hugo/common/paths"
+ "github.com/gohugoio/hugo/identity"
"github.com/gohugoio/hugo/resources/images"
"github.com/gohugoio/hugo/resources/images/exif"
@@ -42,13 +44,18 @@ import (
)
var (
- _ resource.ContentResource = (*resourceAdapter)(nil)
- _ resourceCopier = (*resourceAdapter)(nil)
- _ resource.ReadSeekCloserResource = (*resourceAdapter)(nil)
- _ resource.Resource = (*resourceAdapter)(nil)
- _ resource.Source = (*resourceAdapter)(nil)
- _ resource.Identifier = (*resourceAdapter)(nil)
- _ resource.ResourceMetaProvider = (*resourceAdapter)(nil)
+ _ resource.ContentResource = (*resourceAdapter)(nil)
+ _ resourceCopier = (*resourceAdapter)(nil)
+ _ resource.ReadSeekCloserResource = (*resourceAdapter)(nil)
+ _ resource.Resource = (*resourceAdapter)(nil)
+ _ resource.Staler = (*resourceAdapterInner)(nil)
+ _ resource.Source = (*resourceAdapter)(nil)
+ _ resource.Identifier = (*resourceAdapter)(nil)
+ _ resource.ResourceNameTitleProvider = (*resourceAdapter)(nil)
+ _ resource.WithResourceMetaProvider = (*resourceAdapter)(nil)
+ _ identity.DependencyManagerProvider = (*resourceAdapter)(nil)
+ _ identity.IdentityGroupProvider = (*resourceAdapter)(nil)
+ _ resource.NameOriginalProvider = (*resourceAdapter)(nil)
)
// These are transformations that need special support in Hugo that may not
@@ -68,11 +75,13 @@ func newResourceAdapter(spec *Spec, lazyPublish bool, target transformableResour
}
return &resourceAdapter{
resourceTransformations: &resourceTransformations{},
+ metaProvider: target,
resourceAdapterInner: &resourceAdapterInner{
- ctx: context.TODO(),
+ ctx: context.Background(),
spec: spec,
publishOnce: po,
target: target,
+ Staler: &AtomicStaler{},
},
}
}
@@ -88,6 +97,9 @@ type ResourceTransformationCtx struct {
// The context that started the transformation.
Ctx context.Context
+ // The dependency manager to use for dependency tracking.
+ DependencyManager identity.Manager
+
// The content to transform.
From io.Reader
@@ -162,8 +174,11 @@ type resourceAdapter struct {
commonResource
*resourceTransformations
*resourceAdapterInner
+ metaProvider resource.ResourceMetaProvider
}
+var _ identity.ForEeachIdentityByNameProvider = (*resourceAdapter)(nil)
+
func (r *resourceAdapter) Content(ctx context.Context) (any, error) {
r.init(false, true)
if r.transformationsErr != nil {
@@ -176,16 +191,41 @@ func (r *resourceAdapter) Err() resource.ResourceError {
return nil
}
+func (r *resourceAdapter) GetIdentity() identity.Identity {
+ return identity.FirstIdentity(r.target)
+}
+
func (r *resourceAdapter) Data() any {
r.init(false, false)
return r.target.Data()
}
+func (r *resourceAdapter) ForEeachIdentityByName(name string, f func(identity.Identity) bool) {
+ if constants.IsFieldRelOrPermalink(name) && !r.resourceTransformations.hasTransformationPermalinkHash() {
+ // Special case for links without any content hash in the URL.
+ // We don't need to rebuild all pages that use this resource,
+ // but we want to make sure that the resource is accessed at least once.
+ f(identity.NewFindFirstManagerIdentityProvider(r.target.GetDependencyManager(), r.target.GetIdentityGroup()))
+ return
+ }
+ f(r.target.GetIdentityGroup())
+ f(r.target.GetDependencyManager())
+}
+
+func (r *resourceAdapter) GetIdentityGroup() identity.Identity {
+ return r.target.GetIdentityGroup()
+}
+
+func (r *resourceAdapter) GetDependencyManager() identity.Manager {
+ return r.target.GetDependencyManager()
+}
+
func (r resourceAdapter) cloneTo(targetPath string) resource.Resource {
newtTarget := r.target.cloneTo(targetPath)
newInner := &resourceAdapterInner{
ctx: r.ctx,
spec: r.spec,
+ Staler: r.Staler,
target: newtTarget.(transformableResource),
}
if r.resourceAdapterInner.publishOnce != nil {
@@ -239,12 +279,17 @@ func (r *resourceAdapter) MediaType() media.Type {
func (r *resourceAdapter) Name() string {
r.init(false, false)
- return r.target.Name()
+ return r.metaProvider.Name()
+}
+
+func (r *resourceAdapter) NameOriginal() string {
+ r.init(false, false)
+ return r.target.(resource.NameOriginalProvider).NameOriginal()
}
func (r *resourceAdapter) Params() maps.Params {
r.init(false, false)
- return r.target.Params()
+ return r.metaProvider.Params()
}
func (r *resourceAdapter) Permalink() string {
@@ -283,7 +328,7 @@ func (r *resourceAdapter) String() string {
func (r *resourceAdapter) Title() string {
r.init(false, false)
- return r.target.Title()
+ return r.metaProvider.Title()
}
func (r resourceAdapter) Transform(t ...ResourceTransformation) (ResourceTransformer, error) {
@@ -298,6 +343,7 @@ func (r resourceAdapter) TransformWithContext(ctx context.Context, t ...Resource
r.resourceAdapterInner = &resourceAdapterInner{
ctx: ctx,
spec: r.spec,
+ Staler: r.Staler,
publishOnce: &publishOnce{},
target: r.target,
}
@@ -313,6 +359,11 @@ func (r *resourceAdapter) DecodeImage() (image.Image, error) {
return r.getImageOps().DecodeImage()
}
+func (r resourceAdapter) WithResourceMeta(mp resource.ResourceMetaProvider) resource.Resource {
+ r.metaProvider = mp
+ return &r
+}
+
func (r *resourceAdapter) getImageOps() images.ImageResourceOps {
img, ok := r.target.(images.ImageResourceOps)
if !ok {
@@ -326,14 +377,6 @@ func (r *resourceAdapter) getImageOps() images.ImageResourceOps {
return img
}
-func (r *resourceAdapter) getMetaAssigner() metaAssigner {
- return r.target
-}
-
-func (r *resourceAdapter) getSpec() *Spec {
- return r.spec
-}
-
func (r *resourceAdapter) publish() {
if r.publishOnce == nil {
return
@@ -349,41 +392,28 @@ func (r *resourceAdapter) publish() {
}
func (r *resourceAdapter) TransformationKey() string {
- // Files with a suffix will be stored in cache (both on disk and in memory)
- // partitioned by their suffix.
var key string
for _, tr := range r.transformations {
key = key + "_" + tr.Key().Value()
}
-
- base := ResourceCacheKey(r.target.Key())
- return r.spec.ResourceCache.cleanKey(base) + "_" + helpers.MD5String(key)
+ return r.spec.ResourceCache.cleanKey(r.target.Key()) + "_" + helpers.MD5String(key)
}
-func (r *resourceAdapter) transform(publish, setContent bool) error {
- cache := r.spec.ResourceCache
-
+func (r *resourceAdapter) getOrTransform(publish, setContent bool) error {
key := r.TransformationKey()
-
- cached, found := cache.get(key)
-
- if found {
- r.resourceAdapterInner = cached.(*resourceAdapterInner)
- return nil
+ res, err := r.spec.ResourceCache.cacheResourceTransformation.GetOrCreate(key, func(string) (*resourceAdapterInner, error) {
+ return r.transform(key, publish, setContent)
+ })
+ if err != nil {
+ return err
}
- // Acquire a write lock for the named transformation.
- cache.nlocker.Lock(key)
- // Check the cache again.
- cached, found = cache.get(key)
- if found {
- r.resourceAdapterInner = cached.(*resourceAdapterInner)
- cache.nlocker.Unlock(key)
- return nil
- }
+ r.resourceAdapterInner = res
+ return nil
+}
- defer cache.nlocker.Unlock(key)
- defer cache.set(key, r.resourceAdapterInner)
+func (r *resourceAdapter) transform(key string, publish, setContent bool) (*resourceAdapterInner, error) {
+ cache := r.spec.ResourceCache
b1 := bp.GetBuffer()
b2 := bp.GetBuffer()
@@ -394,6 +424,7 @@ func (r *resourceAdapter) transform(publish, setContent bool) error {
Ctx: r.ctx,
Data: make(map[string]any),
OpenResourcePublisher: r.target.openPublishFileForWriting,
+ DependencyManager: r.target.GetDependencyManager(),
}
tctx.InMediaType = r.target.MediaType()
@@ -406,7 +437,7 @@ func (r *resourceAdapter) transform(publish, setContent bool) error {
contentrc, err := contentReadSeekerCloser(r.target)
if err != nil {
- return err
+ return nil, err
}
defer contentrc.Close()
@@ -479,14 +510,14 @@ func (r *resourceAdapter) transform(publish, setContent bool) error {
} else {
err = tr.Transform(tctx)
if err != nil && err != herrors.ErrFeatureNotAvailable {
- return newErr(err)
+ return nil, newErr(err)
}
if mayBeCachedOnDisk {
tryFileCache = bcfg.UseResourceCache(err)
}
if err != nil && !tryFileCache {
- return newErr(err)
+ return nil, newErr(err)
}
}
@@ -494,9 +525,9 @@ func (r *resourceAdapter) transform(publish, setContent bool) error {
f := r.target.tryTransformedFileCache(key, updates)
if f == nil {
if err != nil {
- return newErr(err)
+ return nil, newErr(err)
}
- return newErr(fmt.Errorf("resource %q not found in file cache", key))
+ return nil, newErr(fmt.Errorf("resource %q not found in file cache", key))
}
transformedContentr = f
updates.sourceFs = cache.fileCache.Fs
@@ -521,7 +552,7 @@ func (r *resourceAdapter) transform(publish, setContent bool) error {
if publish {
publicw, err := r.target.openPublishFileForWriting(updates.targetPath)
if err != nil {
- return err
+ return nil, err
}
publishwriters = append(publishwriters, publicw)
}
@@ -531,7 +562,7 @@ func (r *resourceAdapter) transform(publish, setContent bool) error {
// Also write it to the cache
fi, metaw, err := cache.writeMeta(key, updates.toTransformedResourceMetadata())
if err != nil {
- return err
+ return nil, err
}
updates.sourceFilename = &fi.Name
updates.sourceFs = cache.fileCache.Fs
@@ -562,7 +593,7 @@ func (r *resourceAdapter) transform(publish, setContent bool) error {
publishw := hugio.NewMultiWriteCloser(publishwriters...)
_, err = io.Copy(publishw, transformedContentr)
if err != nil {
- return err
+ return nil, err
}
publishw.Close()
@@ -573,11 +604,11 @@ func (r *resourceAdapter) transform(publish, setContent bool) error {
newTarget, err := r.target.cloneWithUpdates(updates)
if err != nil {
- return err
+ return nil, err
}
r.target = newTarget
- return nil
+ return r.resourceAdapterInner, nil
}
func (r *resourceAdapter) init(publish, setContent bool) {
@@ -597,7 +628,7 @@ func (r *resourceAdapter) initTransform(publish, setContent bool) {
r.publishOnce = nil
}
- r.transformationsErr = r.transform(publish, setContent)
+ r.transformationsErr = r.getOrTransform(publish, setContent)
if r.transformationsErr != nil {
if r.spec.ErrorSender != nil {
r.spec.ErrorSender.SendError(r.transformationsErr)
@@ -618,24 +649,42 @@ type resourceAdapterInner struct {
target transformableResource
+ resource.Staler
+
spec *Spec
// Handles publishing (to /public) if needed.
*publishOnce
}
+func (r *resourceAdapterInner) IsStale() bool {
+ return r.Staler.IsStale() || r.target.IsStale()
+}
+
type resourceTransformations struct {
transformationsInit sync.Once
transformationsErr error
transformations []ResourceTransformation
}
+// hasTransformationPermalinkHash reports whether any of the transformations
+// in the chain creates a permalink that's based on the content, e.g. fingerprint.
+func (r *resourceTransformations) hasTransformationPermalinkHash() bool {
+ for _, t := range r.transformations {
+ if constants.IsResourceTransformationPermalinkHash(t.Key().Name) {
+ return true
+ }
+ }
+ return false
+}
+
type transformableResource interface {
baseResourceInternal
resource.ContentProvider
resource.Resource
resource.Identifier
+ resource.Staler
resourceCopier
}
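
The biggest behavioral change in transform.go is that the transformation no longer hand-rolls its cache lookup with nlocker.Lock/Unlock and cache.set: getOrTransform delegates to a GetOrCreate-style cache, so the transformation runs at most once per key even under concurrency. A generic illustration of that pattern follows; it is not Hugo's actual cache type (the hunk only references it as cacheResourceTransformation), just the guarantee it provides:

    package transformsketch // hypothetical package, for illustration only

    import "sync"

    // onceCache runs create at most once per key; concurrent callers for the
    // same key block on the same sync.Once and share the result, which is the
    // guarantee the old lock/check/set code provided by hand.
    type onceCache[K comparable, V any] struct {
        mu sync.Mutex
        m  map[K]*entry[V]
    }

    type entry[V any] struct {
        once sync.Once
        v    V
        err  error
    }

    func (c *onceCache[K, V]) GetOrCreate(key K, create func(K) (V, error)) (V, error) {
        c.mu.Lock()
        if c.m == nil {
            c.m = make(map[K]*entry[V])
        }
        e, ok := c.m[key]
        if !ok {
            e = &entry[V]{}
            c.m[key] = e
        }
        c.mu.Unlock()

        e.once.Do(func() { e.v, e.err = create(key) })
        return e.v, e.err
    }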
diff --git a/resources/transform_integration_test.go b/resources/transform_integration_test.go
new file mode 100644
index 000000000..4404f1642
--- /dev/null
+++ b/resources/transform_integration_test.go
@@ -0,0 +1,50 @@
+// Copyright 2024 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package resources_test
+
+import (
+ "testing"
+
+ "github.com/gohugoio/hugo/hugolib"
+)
+
+func TestTransformCached(t *testing.T) {
+ files := `
+-- hugo.toml --
+disableKinds = ["taxonomy", "term"]
+-- assets/css/main.css --
+body {
+ background: #fff;
+}
+-- content/p1.md --
+---
+title: "P1"
+---
+P1.
+-- content/p2.md --
+---
+title: "P2"
+---
+P2.
+-- layouts/_default/list.html --
+List.
+-- layouts/_default/single.html --
+{{ $css := resources.Get "css/main.css" | resources.Minify }}
+CSS: {{ $css.Content }}
+`
+
+ b := hugolib.Test(t, files)
+
+ b.AssertFileContent("public/p1/index.html", "CSS: body{background:#fff}")
+}
diff --git a/resources/transform_test.go b/resources/transform_test.go
index d430bfb6c..fd152a47c 100644
--- a/resources/transform_test.go
+++ b/resources/transform_test.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -25,10 +25,12 @@ import (
"testing"
"github.com/gohugoio/hugo/htesting"
+ "github.com/gohugoio/hugo/identity"
"github.com/gohugoio/hugo/media"
"github.com/gohugoio/hugo/resources"
"github.com/gohugoio/hugo/common/herrors"
+ "github.com/gohugoio/hugo/common/hugio"
"github.com/gohugoio/hugo/hugofs"
"github.com/gohugoio/hugo/resources/images"
@@ -47,12 +49,13 @@ const gopher = `iVBORw0KGgoAAAANSUhEUgAAAEsAAAA8CAAAAAALAhhPAAAFfUlEQVRYw62XeWwU
func gopherPNG() io.Reader { return base64.NewDecoder(base64.StdEncoding, strings.NewReader(gopher)) }
func TestTransform(t *testing.T) {
-
createTransformer := func(c *qt.C, spec *resources.Spec, filename, content string) resources.Transformer {
- filename = filepath.FromSlash(filename)
- err := afero.WriteFile(spec.Fs.Source, filepath.Join("assets", filename), []byte(content), 0777)
- c.Assert(err, qt.IsNil)
- r, err := spec.New(resources.ResourceSourceDescriptor{Fs: spec.BaseFs.Assets.Fs, SourceFilename: filename})
+ targetPath := identity.CleanString(filename)
+ r, err := spec.NewResource(resources.ResourceSourceDescriptor{
+ TargetPath: targetPath,
+ OpenReadSeekCloser: hugio.NewOpenReadSeekCloser(hugio.NewReadSeekerNoOpCloserFromString(content)),
+ GroupIdentity: identity.StringIdentity(targetPath),
+ })
c.Assert(err, qt.IsNil)
c.Assert(r, qt.Not(qt.IsNil), qt.Commentf(filename))
return r.(resources.Transformer)
@@ -310,8 +313,10 @@ func TestTransform(t *testing.T) {
r := createTransformer(c, spec, "f1.txt", "color is blue")
- tr1, _ := r.Transform(t1)
- tr2, _ := tr1.Transform(t2)
+ tr1, err := r.Transform(t1)
+ c.Assert(err, qt.IsNil)
+ tr2, err := tr1.Transform(t2)
+ c.Assert(err, qt.IsNil)
content1, err := tr1.(resource.ContentProvider).Content(context.Background())
c.Assert(err, qt.IsNil)