summaryrefslogtreecommitdiffstats
path: root/hugolib/content_map.go
diff options
context:
space:
mode:
authorBjørn Erik Pedersen <bjorn.erik.pedersen@gmail.com>2023-12-24 19:11:05 +0100
committerBjørn Erik Pedersen <bjorn.erik.pedersen@gmail.com>2024-01-27 16:28:14 +0100
commit7285e74090852b5d52f25e577850fa75f4aa8573 (patch)
tree54d07cb4a7de2db5c89f2590266595f0aca6cbd6 /hugolib/content_map.go
parent5fd1e7490305570872d3899f5edda950903c5213 (diff)
downloadhugo-develop2024.tar.gz
hugo-develop2024.zip
all: Rework page store, add a dynacache, improve partial rebuilds, and some general spring cleaningdevelop2024
There are some breaking changes in this commit, see #11455. Closes #11455 Closes #11549 This fixes a set of bugs (see issue list) and it is also paying some technical debt accumulated over the years. We now build with Staticcheck enabled in the CI build. The performance should be about the same as before for regular sized Hugo sites, but it should perform and scale much better to larger data sets, as objects that use lots of memory (e.g. rendered Markdown, big JSON files read into maps with transform.Unmarshal etc.) will now get automatically garbage collected if needed. Performance on partial rebuilds when running the server in fast render mode should be the same, but the change detection should be much more accurate. A list of the notable new features: * A new dependency tracker that covers (almost) all of Hugo's API and is used to do fine grained partial rebuilds when running the server. * A new and simpler tree document store which allows fast lookups and prefix-walking in all dimensions (e.g. language) concurrently. * You can now configure an upper memory limit allowing for much larger data sets and/or running on lower specced PCs. We have lifted the "no resources in sub folders" restriction for branch bundles (e.g. sections). Memory Limit * Hugo will, by default, set aside a quarter of the total system memory, but you can set this via the OS environment variable HUGO_MEMORYLIMIT (in gigabytes). This is backed by a partitioned LRU cache used throughout Hugo. A cache that gets dynamically resized in low memory situations, allowing Go's Garbage Collector to free the memory. New Dependency Tracker: Hugo has had a rule based coarse grained approach to server rebuilds that has worked mostly pretty well, but there have been some surprises (e.g. stale content). This is now revamped with a new dependency tracker that can quickly calculate the delta given a changed resource (e.g. a content file, template, JS file etc.). This handles transitive relations, e.g. 
$page -> js.Build -> JS import, or $page1.Content -> render hook -> site.GetPage -> $page2.Title, or $page1.Content -> shortcode -> partial -> site.RegularPages -> $page2.Content -> shortcode ..., and should also handle changes to aggregated values (e.g. site.Lastmod) effectively. This covers all of Hugo's API with 2 known exceptions (a list that may not be fully exhaustive): Changes to files loaded with template func os.ReadFile may not be handled correctly. We recommend loading resources with resources.Get. Changes to Hugo objects (e.g. Page) passed in the template context to lang.Translate may not be detected correctly. We recommend having simple i18n templates without too much data context passed in other than simple types such as strings and numbers. Note that the cachebuster configuration (when A changes then rebuild B) works well with the above, but we recommend that you revise that configuration, as it in most situations should not be needed. One example where it is still needed is with TailwindCSS and using changes to hugo_stats.json to trigger new CSS rebuilds. Document Store: Previously, a little simplified, we split the document store (where we store pages and resources) in a tree per language. This worked pretty well, but the structure made some operations harder than they needed to be. We have now restructured it into one Radix tree for all languages. Internally the language is considered to be a dimension of that tree, and the tree can be viewed in all dimensions concurrently. This makes some operations re. language simpler (e.g. finding translations is just a slice range), but the idea is that it should also be relatively inexpensive to add more dimensions if needed (e.g. role). 
Fixes #10169 Fixes #10364 Fixes #10482 Fixes #10630 Fixes #10656 Fixes #10694 Fixes #10918 Fixes #11262 Fixes #11439 Fixes #11453 Fixes #11457 Fixes #11466 Fixes #11540 Fixes #11551 Fixes #11556 Fixes #11654 Fixes #11661 Fixes #11663 Fixes #11664 Fixes #11669 Fixes #11671 Fixes #11807 Fixes #11808 Fixes #11809 Fixes #11815 Fixes #11840 Fixes #11853 Fixes #11860 Fixes #11883 Fixes #11904 Fixes #7388 Fixes #7425 Fixes #7436 Fixes #7544 Fixes #7882 Fixes #7960 Fixes #8255 Fixes #8307 Fixes #8863 Fixes #8927 Fixes #9192 Fixes #9324
Diffstat (limited to 'hugolib/content_map.go')
-rw-r--r--hugolib/content_map.go1107
1 files changed, 154 insertions, 953 deletions
diff --git a/hugolib/content_map.go b/hugolib/content_map.go
index 8cb307691..fefa90bf1 100644
--- a/hugolib/content_map.go
+++ b/hugolib/content_map.go
@@ -18,1052 +18,253 @@ import (
"path"
"path/filepath"
"strings"
- "sync"
+ "unicode"
- "github.com/gohugoio/hugo/helpers"
+ "github.com/bep/logg"
+ "github.com/gohugoio/hugo/common/hugio"
+ "github.com/gohugoio/hugo/common/paths"
+ "github.com/gohugoio/hugo/identity"
+ "github.com/gohugoio/hugo/source"
- "github.com/gohugoio/hugo/resources/kinds"
"github.com/gohugoio/hugo/resources/page"
-
- "github.com/gohugoio/hugo/hugofs/files"
+ "github.com/gohugoio/hugo/resources/resource"
"github.com/gohugoio/hugo/hugofs"
-
- radix "github.com/armon/go-radix"
-)
-
-// We store the branch nodes in either the `sections` or `taxonomies` tree
-// with their path as a key; Unix style slashes, a leading and trailing slash.
-//
-// E.g. "/blog/" or "/categories/funny/"
-//
-// Pages that belongs to a section are stored in the `pages` tree below
-// the section name and a branch separator, e.g. "/blog/__hb_". A page is
-// given a key using the path below the section and the base filename with no extension
-// with a leaf separator added.
-//
-// For bundled pages (/mybundle/index.md), we use the folder name.
-//
-// An example of a full page key would be "/blog/__hb_page1__hl_"
-//
-// Bundled resources are stored in the `resources` having their path prefixed
-// with the bundle they belong to, e.g.
-// "/blog/__hb_bundle__hl_data.json".
-//
-// The weighted taxonomy entries extracted from page front matter are stored in
-// the `taxonomyEntries` tree below /plural/term/page-key, e.g.
-// "/categories/funny/blog/__hb_bundle__hl_".
-const (
- cmBranchSeparator = "__hb_"
- cmLeafSeparator = "__hl_"
)
// Used to mark ambiguous keys in reverse index lookups.
-var ambiguousContentNode = &contentNode{}
-
-func newContentMap(cfg contentMapConfig) *contentMap {
- m := &contentMap{
- cfg: &cfg,
- pages: &contentTree{Name: "pages", Tree: radix.New()},
- sections: &contentTree{Name: "sections", Tree: radix.New()},
- taxonomies: &contentTree{Name: "taxonomies", Tree: radix.New()},
- taxonomyEntries: &contentTree{Name: "taxonomyEntries", Tree: radix.New()},
- resources: &contentTree{Name: "resources", Tree: radix.New()},
- }
-
- m.pageTrees = []*contentTree{
- m.pages, m.sections, m.taxonomies,
- }
-
- m.bundleTrees = []*contentTree{
- m.pages, m.sections, m.taxonomies, m.resources,
- }
-
- m.branchTrees = []*contentTree{
- m.sections, m.taxonomies,
- }
-
- addToReverseMap := func(k string, n *contentNode, m map[any]*contentNode) {
- k = strings.ToLower(k)
- existing, found := m[k]
- if found && existing != ambiguousContentNode {
- m[k] = ambiguousContentNode
- } else if !found {
- m[k] = n
- }
- }
+var ambiguousContentNode = &pageState{}
- m.pageReverseIndex = &contentTreeReverseIndex{
- t: []*contentTree{m.pages, m.sections, m.taxonomies},
- contentTreeReverseIndexMap: &contentTreeReverseIndexMap{
- initFn: func(t *contentTree, m map[any]*contentNode) {
- t.Walk(func(s string, v any) bool {
- n := v.(*contentNode)
- if n.p != nil && !n.p.File().IsZero() {
- meta := n.p.File().FileInfo().Meta()
- if meta.Path != meta.PathFile() {
- // Keep track of the original mount source.
- mountKey := filepath.ToSlash(filepath.Join(meta.Module, meta.PathFile()))
- addToReverseMap(mountKey, n, m)
- }
- }
- k := strings.TrimPrefix(strings.TrimSuffix(path.Base(s), cmLeafSeparator), cmBranchSeparator)
- addToReverseMap(k, n, m)
- return false
- })
- },
- },
- }
-
- return m
+var trimCutsetDotSlashSpace = func(r rune) bool {
+ return r == '.' || r == '/' || unicode.IsSpace(r)
}
-type cmInsertKeyBuilder struct {
- m *contentMap
-
- err error
-
- // Builder state
- tree *contentTree
- baseKey string // Section or page key
- key string
+type contentMapConfig struct {
+ lang string
+ taxonomyConfig taxonomiesConfigValues
+ taxonomyDisabled bool
+ taxonomyTermDisabled bool
+ pageDisabled bool
+ isRebuild bool
}
-func (b cmInsertKeyBuilder) ForPage(s string) *cmInsertKeyBuilder {
- //fmt.Println("ForPage:", s, "baseKey:", b.baseKey, "key:", b.key, "tree:", b.tree.Name)
- baseKey := b.baseKey
- b.baseKey = s
-
- if baseKey != "/" {
- // Don't repeat the section path in the key.
- s = strings.TrimPrefix(s, baseKey)
- }
- s = strings.TrimPrefix(s, "/")
+var _ contentNodeI = (*resourceSource)(nil)
- switch b.tree {
- case b.m.sections:
- b.tree = b.m.pages
- b.key = baseKey + cmBranchSeparator + s + cmLeafSeparator
- case b.m.taxonomies:
- b.key = path.Join(baseKey, s)
- default:
- panic("invalid state")
- }
+type resourceSource struct {
+ path *paths.Path
+ opener hugio.OpenReadSeekCloser
+ fi hugofs.FileMetaInfo
- return &b
+ r resource.Resource
}
-func (b cmInsertKeyBuilder) ForResource(s string) *cmInsertKeyBuilder {
- // fmt.Println("ForResource:", s, "baseKey:", b.baseKey, "key:", b.key)
-
- baseKey := helpers.AddTrailingSlash(b.baseKey)
- s = strings.TrimPrefix(s, baseKey)
-
- switch b.tree {
- case b.m.pages:
- b.key = b.key + s
- case b.m.sections, b.m.taxonomies:
- b.key = b.key + cmLeafSeparator + s
- default:
- panic(fmt.Sprintf("invalid state: %#v", b.tree))
- }
- b.tree = b.m.resources
- return &b
+func (r resourceSource) clone() *resourceSource {
+ r.r = nil
+ return &r
}
-func (b *cmInsertKeyBuilder) Insert(n *contentNode) *cmInsertKeyBuilder {
- if b.err == nil {
- b.tree.Insert(b.Key(), n)
+func (r *resourceSource) LangIndex() int {
+ if r.r != nil && r.isPage() {
+ return r.r.(*pageState).s.languagei
}
- return b
-}
-func (b *cmInsertKeyBuilder) Key() string {
- switch b.tree {
- case b.m.sections, b.m.taxonomies:
- return cleanSectionTreeKey(b.key)
- default:
- return cleanTreeKey(b.key)
- }
+ return r.fi.Meta().LangIndex
}
-func (b *cmInsertKeyBuilder) DeleteAll() *cmInsertKeyBuilder {
- if b.err == nil {
- b.tree.DeletePrefix(b.Key())
- }
- return b
+func (r *resourceSource) MarkStale() {
+ resource.MarkStale(r.r)
}
-func (b *cmInsertKeyBuilder) WithFile(fi hugofs.FileMetaInfo) *cmInsertKeyBuilder {
- b.newTopLevel()
- m := b.m
- meta := fi.Meta()
- p := cleanTreeKey(meta.Path)
- bundlePath := m.getBundleDir(meta)
- isBundle := meta.Classifier.IsBundle()
- if isBundle {
- panic("not implemented")
- }
-
- p, k := b.getBundle(p)
- if k == "" {
- b.err = fmt.Errorf("no bundle header found for %q", bundlePath)
- return b
+func (r *resourceSource) resetBuildState() {
+ if rr, ok := r.r.(buildStateReseter); ok {
+ rr.resetBuildState()
}
-
- id := k + m.reduceKeyPart(p, fi.Meta().Path)
- b.tree = b.m.resources
- b.key = id
- b.baseKey = p
-
- return b
-}
-
-func (b *cmInsertKeyBuilder) WithSection(s string) *cmInsertKeyBuilder {
- s = cleanSectionTreeKey(s)
- b.newTopLevel()
- b.tree = b.m.sections
- b.baseKey = s
- b.key = s
- return b
}
-func (b *cmInsertKeyBuilder) WithTaxonomy(s string) *cmInsertKeyBuilder {
- s = cleanSectionTreeKey(s)
- b.newTopLevel()
- b.tree = b.m.taxonomies
- b.baseKey = s
- b.key = s
- return b
+func (r *resourceSource) isPage() bool {
+ _, ok := r.r.(page.Page)
+ return ok
}
-// getBundle gets both the key to the section and the prefix to where to store
-// this page bundle and its resources.
-func (b *cmInsertKeyBuilder) getBundle(s string) (string, string) {
- m := b.m
- section, _ := m.getSection(s)
-
- p := strings.TrimPrefix(s, section)
-
- bundlePathParts := strings.Split(p, "/")
- basePath := section + cmBranchSeparator
-
- // Put it into an existing bundle if found.
- for i := len(bundlePathParts) - 2; i >= 0; i-- {
- bundlePath := path.Join(bundlePathParts[:i]...)
- searchKey := basePath + bundlePath + cmLeafSeparator
- if _, found := m.pages.Get(searchKey); found {
- return section + bundlePath, searchKey
- }
+func (r *resourceSource) GetIdentity() identity.Identity {
+ if r.r != nil {
+ return r.r.(identity.IdentityProvider).GetIdentity()
}
-
- // Put it into the section bundle.
- return section, section + cmLeafSeparator
+ return r.path
}
-func (b *cmInsertKeyBuilder) newTopLevel() {
- b.key = ""
-}
-
-type contentBundleViewInfo struct {
- ordinal int
- name viewName
- termKey string
- termOrigin string
- weight int
- ref *contentNode
-}
-
-func (c *contentBundleViewInfo) kind() string {
- if c.termKey != "" {
- return kinds.KindTerm
- }
- return kinds.KindTaxonomy
+func (r *resourceSource) ForEeachIdentity(f func(identity.Identity) bool) {
+ f(r.GetIdentity())
}
-func (c *contentBundleViewInfo) sections() []string {
- if c.kind() == kinds.KindTaxonomy {
- return []string{c.name.plural}
- }
-
- return []string{c.name.plural, c.termKey}
+func (r *resourceSource) Path() string {
+ return r.path.Path()
}
-func (c *contentBundleViewInfo) term() string {
- if c.termOrigin != "" {
- return c.termOrigin
- }
-
- return c.termKey
+func (r *resourceSource) isContentNodeBranch() bool {
+ return false
}
-type contentMap struct {
- cfg *contentMapConfig
-
- // View of regular pages, sections, and taxonomies.
- pageTrees contentTrees
-
- // View of pages, sections, taxonomies, and resources.
- bundleTrees contentTrees
-
- // View of sections and taxonomies.
- branchTrees contentTrees
-
- // Stores page bundles keyed by its path's directory or the base filename,
- // e.g. "blog/post.md" => "/blog/post", "blog/post/index.md" => "/blog/post"
- // These are the "regular pages" and all of them are bundles.
- pages *contentTree
-
- // A reverse index used as a fallback in GetPage.
- // There are currently two cases where this is used:
- // 1. Short name lookups in ref/relRef, e.g. using only "mypage.md" without a path.
- // 2. Links resolved from a remounted content directory. These are restricted to the same module.
- // Both of the above cases can result in ambiguous lookup errors.
- pageReverseIndex *contentTreeReverseIndex
-
- // Section nodes.
- sections *contentTree
+var _ contentNodeI = (*resourceSources)(nil)
- // Taxonomy nodes.
- taxonomies *contentTree
+type resourceSources []*resourceSource
- // Pages in a taxonomy.
- taxonomyEntries *contentTree
-
- // Resources stored per bundle below a common prefix, e.g. "/blog/post__hb_".
- resources *contentTree
-}
-
-func (m *contentMap) AddFiles(fis ...hugofs.FileMetaInfo) error {
- for _, fi := range fis {
- if err := m.addFile(fi); err != nil {
- return err
+func (n resourceSources) MarkStale() {
+ for _, r := range n {
+ if r != nil {
+ r.MarkStale()
}
}
-
- return nil
}
-func (m *contentMap) AddFilesBundle(header hugofs.FileMetaInfo, resources ...hugofs.FileMetaInfo) error {
- var (
- meta = header.Meta()
- classifier = meta.Classifier
- isBranch = classifier == files.ContentClassBranch
- bundlePath = m.getBundleDir(meta)
-
- n = m.newContentNodeFromFi(header)
- b = m.newKeyBuilder()
-
- section string
- )
-
- if isBranch {
- // Either a section or a taxonomy node.
- section = bundlePath
- if tc := m.cfg.getTaxonomyConfig(section); !tc.IsZero() {
- term := strings.TrimPrefix(strings.TrimPrefix(section, "/"+tc.plural), "/")
-
- n.viewInfo = &contentBundleViewInfo{
- name: tc,
- termKey: term,
- termOrigin: term,
- }
-
- n.viewInfo.ref = n
- b.WithTaxonomy(section).Insert(n)
- } else {
- b.WithSection(section).Insert(n)
- }
- } else {
- // A regular page. Attach it to its section.
- section, _ = m.getOrCreateSection(n, bundlePath)
- b = b.WithSection(section).ForPage(bundlePath).Insert(n)
- }
-
- if m.cfg.isRebuild {
- // The resource owner will be either deleted or overwritten on rebuilds,
- // but make sure we handle deletion of resources (images etc.) as well.
- b.ForResource("").DeleteAll()
- }
-
- for _, r := range resources {
- rb := b.ForResource(cleanTreeKey(r.Meta().Path))
- rb.Insert(&contentNode{fi: r})
- }
-
- return nil
-}
-
-func (m *contentMap) CreateMissingNodes() error {
- // Create missing home and root sections
- rootSections := make(map[string]any)
- trackRootSection := func(s string, b *contentNode) {
- parts := strings.Split(s, "/")
- if len(parts) > 2 {
- root := strings.TrimSuffix(parts[1], cmBranchSeparator)
- if root != "" {
- if _, found := rootSections[root]; !found {
- rootSections[root] = b
- }
- }
- }
- }
-
- m.sections.Walk(func(s string, v any) bool {
- n := v.(*contentNode)
-
- if s == "/" {
- return false
- }
-
- trackRootSection(s, n)
- return false
- })
-
- m.pages.Walk(func(s string, v any) bool {
- trackRootSection(s, v.(*contentNode))
- return false
- })
-
- if _, found := rootSections["/"]; !found {
- rootSections["/"] = true
- }
-
- for sect, v := range rootSections {
- var sectionPath string
- if n, ok := v.(*contentNode); ok && n.path != "" {
- sectionPath = n.path
- firstSlash := strings.Index(sectionPath, "/")
- if firstSlash != -1 {
- sectionPath = sectionPath[:firstSlash]
- }
- }
- sect = cleanSectionTreeKey(sect)
- _, found := m.sections.Get(sect)
- if !found {
- m.sections.Insert(sect, &contentNode{path: sectionPath})
- }
- }
-
- for _, view := range m.cfg.taxonomyConfig {
- s := cleanSectionTreeKey(view.plural)
- _, found := m.taxonomies.Get(s)
- if !found {
- b := &contentNode{
- viewInfo: &contentBundleViewInfo{
- name: view,
- },
- }
- b.viewInfo.ref = b
- m.taxonomies.Insert(s, b)
- }
- }
-
- return nil
-}
-
-func (m *contentMap) getBundleDir(meta *hugofs.FileMeta) string {
- dir := cleanTreeKey(filepath.Dir(meta.Path))
-
- switch meta.Classifier {
- case files.ContentClassContent:
- return path.Join(dir, meta.TranslationBaseName)
- default:
- return dir
- }
+func (n resourceSources) Path() string {
+ panic("not supported")
}
-func (m *contentMap) newContentNodeFromFi(fi hugofs.FileMetaInfo) *contentNode {
- return &contentNode{
- fi: fi,
- path: strings.TrimPrefix(filepath.ToSlash(fi.Meta().Path), "/"),
- }
+func (n resourceSources) isContentNodeBranch() bool {
+ return false
}
-func (m *contentMap) getFirstSection(s string) (string, *contentNode) {
- s = helpers.AddTrailingSlash(s)
- for {
- k, v, found := m.sections.LongestPrefix(s)
-
- if !found {
- return "", nil
+func (n resourceSources) resetBuildState() {
+ for _, r := range n {
+ if r != nil {
+ r.resetBuildState()
}
-
- if strings.Count(k, "/") <= 2 {
- return k, v.(*contentNode)
- }
-
- s = helpers.AddTrailingSlash(path.Dir(strings.TrimSuffix(s, "/")))
-
}
}
-func (m *contentMap) newKeyBuilder() *cmInsertKeyBuilder {
- return &cmInsertKeyBuilder{m: m}
-}
-
-func (m *contentMap) getOrCreateSection(n *contentNode, s string) (string, *contentNode) {
- level := strings.Count(s, "/")
- k, b := m.getSection(s)
-
- mustCreate := false
-
- if k == "" {
- mustCreate = true
- } else if level > 1 && k == "/" {
- // We found the home section, but this page needs to be placed in
- // the root, e.g. "/blog", section.
- mustCreate = true
- }
-
- if mustCreate {
- k = cleanSectionTreeKey(s[:strings.Index(s[1:], "/")+1])
-
- b = &contentNode{
- path: n.rootSection(),
+func (n resourceSources) GetIdentity() identity.Identity {
+ for _, r := range n {
+ if r != nil {
+ return r.GetIdentity()
}
-
- m.sections.Insert(k, b)
- }
-
- return k, b
-}
-
-func (m *contentMap) getPage(section, name string) *contentNode {
- section = helpers.AddTrailingSlash(section)
- key := section + cmBranchSeparator + name + cmLeafSeparator
-
- v, found := m.pages.Get(key)
- if found {
- return v.(*contentNode)
}
return nil
}
-func (m *contentMap) getSection(s string) (string, *contentNode) {
- s = helpers.AddTrailingSlash(path.Dir(strings.TrimSuffix(s, "/")))
-
- k, v, found := m.sections.LongestPrefix(s)
-
- if found {
- return k, v.(*contentNode)
- }
- return "", nil
-}
-
-func (m *contentMap) getTaxonomyParent(s string) (string, *contentNode) {
- s = helpers.AddTrailingSlash(path.Dir(strings.TrimSuffix(s, "/")))
- k, v, found := m.taxonomies.LongestPrefix(s)
-
- if found {
- return k, v.(*contentNode)
- }
-
- v, found = m.sections.Get("/")
- if found {
- return s, v.(*contentNode)
- }
-
- return "", nil
-}
-
-func (m *contentMap) addFile(fi hugofs.FileMetaInfo) error {
- b := m.newKeyBuilder()
- return b.WithFile(fi).Insert(m.newContentNodeFromFi(fi)).err
-}
-
-func cleanTreeKey(k string) string {
- k = "/" + strings.ToLower(strings.Trim(path.Clean(filepath.ToSlash(k)), "./"))
- return k
-}
-
-func cleanSectionTreeKey(k string) string {
- k = cleanTreeKey(k)
- if k != "/" {
- k += "/"
- }
-
- return k
-}
-
-func (m *contentMap) onSameLevel(s1, s2 string) bool {
- return strings.Count(s1, "/") == strings.Count(s2, "/")
-}
-
-func (m *contentMap) deleteBundleMatching(matches func(b *contentNode) bool) {
- // Check sections first
- s := m.sections.getMatch(matches)
- if s != "" {
- m.deleteSectionByPath(s)
- return
- }
-
- s = m.pages.getMatch(matches)
- if s != "" {
- m.deletePage(s)
- return
- }
-
- s = m.resources.getMatch(matches)
- if s != "" {
- m.resources.Delete(s)
- }
-}
-
-// Deletes any empty root section that's not backed by a content file.
-func (m *contentMap) deleteOrphanSections() {
- var sectionsToDelete []string
-
- m.sections.Walk(func(s string, v any) bool {
- n := v.(*contentNode)
-
- if n.fi != nil {
- // Section may be empty, but is backed by a content file.
- return false
- }
-
- if s == "/" || strings.Count(s, "/") > 2 {
- return false
- }
-
- prefixBundle := s + cmBranchSeparator
-
- if !(m.sections.hasBelow(s) || m.pages.hasBelow(prefixBundle) || m.resources.hasBelow(prefixBundle)) {
- sectionsToDelete = append(sectionsToDelete, s)
- }
-
- return false
- })
-
- for _, s := range sectionsToDelete {
- m.sections.Delete(s)
- }
-}
-
-func (m *contentMap) deletePage(s string) {
- m.pages.DeletePrefix(s)
- m.resources.DeletePrefix(s)
-}
-
-func (m *contentMap) deleteSectionByPath(s string) {
- if !strings.HasSuffix(s, "/") {
- panic("section must end with a slash")
- }
- if !strings.HasPrefix(s, "/") {
- panic("section must start with a slash")
- }
- m.sections.DeletePrefix(s)
- m.pages.DeletePrefix(s)
- m.resources.DeletePrefix(s)
-}
-
-func (m *contentMap) deletePageByPath(s string) {
- m.pages.Walk(func(s string, v any) bool {
- fmt.Println("S", s)
-
- return false
- })
-}
-
-func (m *contentMap) deleteTaxonomy(s string) {
- m.taxonomies.DeletePrefix(s)
-}
-
-func (m *contentMap) reduceKeyPart(dir, filename string) string {
- dir, filename = filepath.ToSlash(dir), filepath.ToSlash(filename)
- dir, filename = strings.TrimPrefix(dir, "/"), strings.TrimPrefix(filename, "/")
-
- return strings.TrimPrefix(strings.TrimPrefix(filename, dir), "/")
-}
-
-func (m *contentMap) splitKey(k string) []string {
- if k == "" || k == "/" {
- return nil
- }
-
- parts := strings.Split(k, "/")[1:]
- if len(parts) == 0 {
- return nil
- }
- if parts[len(parts)-1] == "" {
- parts = parts[:len(parts)-1]
- }
- return parts
-}
-
-func (m *contentMap) testDump() string {
- var sb strings.Builder
-
- for i, r := range []*contentTree{m.pages, m.sections, m.resources} {
- sb.WriteString(fmt.Sprintf("Tree %d:\n", i))
- r.Walk(func(s string, v any) bool {
- sb.WriteString("\t" + s + "\n")
- return false
- })
- }
-
- for i, r := range []*contentTree{m.pages, m.sections} {
- r.Walk(func(s string, v any) bool {
- c := v.(*contentNode)
- cpToString := func(c *contentNode) string {
- var sb strings.Builder
- if c.p != nil {
- sb.WriteString("|p:" + c.p.Title())
- }
- if c.fi != nil {
- sb.WriteString("|f:" + filepath.ToSlash(c.fi.Meta().Path))
- }
- return sb.String()
+func (n resourceSources) ForEeachIdentity(f func(identity.Identity) bool) {
+ for _, r := range n {
+ if r != nil {
+ if f(r.GetIdentity()) {
+ return
}
- sb.WriteString(path.Join(m.cfg.lang, r.Name) + s + cpToString(c) + "\n")
-
- resourcesPrefix := s
-
- if i == 1 {
- resourcesPrefix += cmLeafSeparator
-
- m.pages.WalkPrefix(s+cmBranchSeparator, func(s string, v any) bool {
- sb.WriteString("\t - P: " + filepath.ToSlash((v.(*contentNode).fi.(hugofs.FileMetaInfo)).Meta().Filename) + "\n")
- return false
- })
- }
-
- m.resources.WalkPrefix(resourcesPrefix, func(s string, v any) bool {
- sb.WriteString("\t - R: " + filepath.ToSlash((v.(*contentNode).fi.(hugofs.FileMetaInfo)).Meta().Filename) + "\n")
- return false
- })
-
- return false
- })
+ }
}
-
- return sb.String()
-}
-
-type contentMapConfig struct {
- lang string
- taxonomyConfig []viewName
- taxonomyDisabled bool
- taxonomyTermDisabled bool
- pageDisabled bool
- isRebuild bool
}
func (cfg contentMapConfig) getTaxonomyConfig(s string) (v viewName) {
- s = strings.TrimPrefix(s, "/")
- if s == "" {
- return
- }
- for _, n := range cfg.taxonomyConfig {
- if strings.HasPrefix(s, n.plural) {
+ for _, n := range cfg.taxonomyConfig.views {
+ if strings.HasPrefix(s, n.pluralTreeKey) {
return n
}
}
-
return
}
-type contentNode struct {
- p *pageState
-
- // Set for taxonomy nodes.
- viewInfo *contentBundleViewInfo
-
- // Set if source is a file.
- // We will soon get other sources.
- fi hugofs.FileMetaInfo
-
- // The source path. Unix slashes. No leading slash.
- path string
-}
-
-func (b *contentNode) rootSection() string {
- if b.path == "" {
- return ""
- }
- firstSlash := strings.Index(b.path, "/")
- if firstSlash == -1 {
- return b.path
- }
- return b.path[:firstSlash]
-}
-
-type contentTree struct {
- Name string
- *radix.Tree
-}
-
-type contentTrees []*contentTree
-
-func (t contentTrees) DeletePrefix(prefix string) int {
- var count int
- for _, tree := range t {
- tree.Walk(func(s string, v any) bool {
- return false
- })
- count += tree.DeletePrefix(prefix)
+func (m *pageMap) AddFi(fi hugofs.FileMetaInfo) error {
+ if fi.IsDir() {
+ return nil
}
- return count
-}
-type contentTreeNodeCallback func(s string, n *contentNode) bool
-
-func newContentTreeFilter(fn func(n *contentNode) bool) contentTreeNodeCallback {
- return func(s string, n *contentNode) bool {
- return fn(n)
+ meta := fi.Meta()
+ if m.s.conf.IsLangDisabled(meta.Lang) {
+ return nil
}
-}
-var (
- contentTreeNoListAlwaysFilter = func(s string, n *contentNode) bool {
- if n.p == nil {
- return true
- }
- return n.p.m.noListAlways()
- }
+ insertResource := func(fim hugofs.FileMetaInfo) error {
+ pi := fi.Meta().PathInfo
+ key := pi.Base()
+ tree := m.treeResources
- contentTreeNoRenderFilter = func(s string, n *contentNode) bool {
- if n.p == nil {
- return true
- }
- return n.p.m.noRender()
- }
+ commit := tree.Lock(true)
+ defer commit()
- contentTreeNoLinkFilter = func(s string, n *contentNode) bool {
- if n.p == nil {
- return true
+ r := func() (hugio.ReadSeekCloser, error) {
+ return fim.Meta().Open()
}
- return n.p.m.noLink()
- }
-)
-func (c *contentTree) WalkQuery(query pageMapQuery, walkFn contentTreeNodeCallback) {
- filter := query.Filter
- if filter == nil {
- filter = contentTreeNoListAlwaysFilter
- }
- if query.Prefix != "" {
- c.WalkBelow(query.Prefix, func(s string, v any) bool {
- n := v.(*contentNode)
- if filter != nil && filter(s, n) {
- return false
+ var rs *resourceSource
+ if pi.IsContent() {
+ // Create the page now as we need it at assemembly time.
+ // The other resources are created if needed.
+ pageResource, err := m.s.h.newPage(
+ &pageMeta{
+ f: source.NewFileInfo(fim),
+ pathInfo: pi,
+ bundled: true,
+ },
+ )
+ if err != nil {
+ return err
}
- return walkFn(s, n)
- })
-
- return
- }
-
- c.Walk(func(s string, v any) bool {
- n := v.(*contentNode)
- if filter != nil && filter(s, n) {
- return false
+ rs = &resourceSource{r: pageResource}
+ } else {
+ rs = &resourceSource{path: pi, opener: r, fi: fim}
}
- return walkFn(s, n)
- })
-}
-func (c contentTrees) WalkRenderable(fn contentTreeNodeCallback) {
- query := pageMapQuery{Filter: contentTreeNoRenderFilter}
- for _, tree := range c {
- tree.WalkQuery(query, fn)
- }
-}
-
-func (c contentTrees) WalkLinkable(fn contentTreeNodeCallback) {
- query := pageMapQuery{Filter: contentTreeNoLinkFilter}
- for _, tree := range c {
- tree.WalkQuery(query, fn)
- }
-}
+ tree.InsertIntoValuesDimension(key, rs)
-func (c contentTrees) Walk(fn contentTreeNodeCallback) {
- for _, tree := range c {
- tree.Walk(func(s string, v any) bool {
- n := v.(*contentNode)
- return fn(s, n)
- })
+ return nil
}
-}
-func (c contentTrees) WalkPrefix(prefix string, fn contentTreeNodeCallback) {
- for _, tree := range c {
- tree.WalkPrefix(prefix, func(s string, v any) bool {
- n := v.(*contentNode)
- return fn(s, n)
- })
- }
-}
+ pi := meta.PathInfo
-// WalkBelow walks the tree below the given prefix, i.e. it skips the
-// node with the given prefix as key.
-func (c *contentTree) WalkBelow(prefix string, fn radix.WalkFn) {
- c.Tree.WalkPrefix(prefix, func(s string, v any) bool {
- if s == prefix {
- return false
+ switch pi.BundleType() {
+ case paths.PathTypeFile, paths.PathTypeContentResource:
+ m.s.Log.Trace(logg.StringFunc(
+ func() string {
+ return fmt.Sprintf("insert resource: %q", fi.Meta().Filename)
+ },
+ ))
+ if err := insertResource(fi); err != nil {
+ return err
}
- return fn(s, v)
- })
-}
-
-func (c *contentTree) getMatch(matches func(b *contentNode) bool) string {
- var match string
- c.Walk(func(s string, v any) bool {
- n, ok := v.(*contentNode)
- if !ok {
- return false
+ default:
+ m.s.Log.Trace(logg.StringFunc(
+ func() string {
+ return fmt.Sprintf("insert bundle: %q", fi.Meta().Filename)
+ },
+ ))
+ // A content file.
+ p, err := m.s.h.newPage(
+ &pageMeta{
+ f: source.NewFileInfo(fi),
+ pathInfo: pi,
+ bundled: false,
+ },
+ )
+ if err != nil {
+ return err
}
-
- if matches(n) {
- match = s
- return true
+ if p == nil {
+ // Disabled page.
+ return nil
}
- return false
- })
-
- return match
-}
-
-func (c *contentTree) hasBelow(s1 string) bool {
- var t bool
- c.WalkBelow(s1, func(s2 string, v any) bool {
- t = true
- return true
- })
- return t
-}
-
-func (c *contentTree) printKeys() {
- c.Walk(func(s string, v any) bool {
- fmt.Println(s)
- return false
- })
-}
-
-func (c *contentTree) printKeysPrefix(prefix string) {
- c.WalkPrefix(prefix, func(s string, v any) bool {
- fmt.Println(s)
- return false
- })
-}
-
-// contentTreeRef points to a node in the given tree.
-type contentTreeRef struct {
- m *pageMap
- t *contentTree
- n *contentNode
- key string
-}
-
-func (c *contentTreeRef) getCurrentSection() (string, *contentNode) {
- if c.isSection() {
- return c.key, c.n
- }
- return c.getSection()
-}
-
-func (c *contentTreeRef) isSection() bool {
- return c.t == c.m.sections
-}
-
-func (c *contentTreeRef) getSection() (string, *contentNode) {
- if c.t == c.m.taxonomies {
- return c.m.getTaxonomyParent(c.key)
- }
- return c.m.getSection(c.key)
-}
-
-func (c *contentTreeRef) getPages() page.Pages {
- var pas page.Pages
- c.m.collectPages(
- pageMapQuery{
- Prefix: c.key + cmBranchSeparator,
- Filter: c.n.p.m.getListFilter(true),
- },
- func(c *contentNode) {
- pas = append(pas, c.p)
- },
- )
- page.SortByDefault(pas)
-
- return pas
-}
-
-func (c *contentTreeRef) getPagesRecursive() page.Pages {
- var pas page.Pages
+ m.treePages.InsertWithLock(pi.Base(), p)
- query := pageMapQuery{
- Filter: c.n.p.m.getListFilter(true),
}
-
- query.Prefix = c.key
- c.m.collectPages(query, func(c *contentNode) {
- pas = append(pas, c.p)
- })
-
- page.SortByDefault(pas)
-
- return pas
+ return nil
}
-func (c *contentTreeRef) getPagesAndSections() page.Pages {
- var pas page.Pages
-
- query := pageMapQuery{
- Filter: c.n.p.m.getListFilter(true),
- Prefix: c.key,
+// The home page is represented with the zero string.
+// All other keys starts with a leading slash. No trailing slash.
+// Slashes are Unix-style.
+func cleanTreeKey(elem ...string) string {
+ var s string
+ if len(elem) > 0 {
+ s = elem[0]
+ if len(elem) > 1 {
+ s = path.Join(elem...)
+ }
}
-
- c.m.collectPagesAndSections(query, func(c *contentNode) {
- pas = append(pas, c.p)
- })
-
- page.SortByDefault(pas)
-
- return pas
-}
-
-func (c *contentTreeRef) getSections() page.Pages {
- var pas page.Pages
-
- query := pageMapQuery{
- Filter: c.n.p.m.getListFilter(true),
- Prefix: c.key,
+ s = strings.TrimFunc(s, trimCutsetDotSlashSpace)
+ s = filepath.ToSlash(strings.ToLower(paths.Sanitize(s)))
+ if s == "" || s == "/" {
+ return ""
}
-
- c.m.collectSections(query, func(c *contentNode) {
- pas = append(pas, c.p)
- })
-
- page.SortByDefault(pas)
-
- return pas
-}
-
-type contentTreeReverseIndex struct {
- t []*contentTree
- *contentTreeReverseIndexMap
-}
-
-type contentTreeReverseIndexMap struct {
- m map[any]*contentNode
- init sync.Once
- initFn func(*contentTree, map[any]*contentNode)
-}
-
-func (c *contentTreeReverseIndex) Reset() {
- c.contentTreeReverseIndexMap = &contentTreeReverseIndexMap{
- initFn: c.initFn,
+ if s[0] != '/' {
+ s = "/" + s
}
-}
-
-func (c *contentTreeReverseIndex) Get(key any) *contentNode {
- c.init.Do(func() {
- c.m = make(map[any]*contentNode)
- for _, tree := range c.t {
- c.initFn(tree, c.m)
- }
- })
- return c.m[key]
+ return s
}