ref: a985efcecf44afe1d252690ec0a00cf077974f44
parent: 6c3c6686f5d3c7155e2d455b07ac8ab70f42cb88
author: Bjørn Erik Pedersen <bjorn.erik.pedersen@gmail.com>
date: Thu May 21 07:25:00 EDT 2020
Fix GetPage on section/bundle name overlaps

In the internal Radix tree we stored the directory-based nodes without a trailing slash, e.g. `/blog`. The original motivation was probably to make prefix searching easy: give me all ancestors. This has, however, led to ambiguity when directory names overlap, and that particular problem could not be worked around in any easy way, so from now on we store these keys as `/blog/`.

Fixes #7301
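To illustrate the ambiguity this removes, below is a minimal sketch (not part of the patch) against the github.com/armon/go-radix package that backs the content trees; the "/blog-bundle" path and the variable names are made up for the example. With section keys stored without a trailing slash, a longest-prefix lookup for a bundle whose directory name merely starts with a section name wrongly matches that section; with the trailing slash it does not:

    package main

    import (
        "fmt"

        radix "github.com/armon/go-radix"
    )

    func main() {
        // Old key format: section keys stored without a trailing slash.
        before := radix.New()
        before.Insert("/blog", true)

        // New key format: section keys stored with a trailing slash.
        after := radix.New()
        after.Insert("/blog/", true)

        // A leaf bundle whose directory name overlaps the "blog" section name.
        k, _, found := before.LongestPrefix("/blog-bundle")
        fmt.Println(k, found) // "/blog" true -> bundle wrongly resolved as part of the section

        k, _, found = after.LongestPrefix("/blog-bundle/")
        fmt.Println(k, found) // "" false     -> no false section match
    }

The rest of the change follows from that key format: page and resource keys drop the now-redundant slash (e.g. "/blog/__hb_a__hl_" instead of "/blog__hb_/a__hl_"), and section lookups normalize their input via cleanSectionTreeKey/AddTrailingSlash.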
--- a/helpers/path.go
+++ b/helpers/path.go
@@ -665,3 +665,12 @@
func Exists(path string, fs afero.Fs) (bool, error) {
return afero.Exists(fs, path)
}
+
+// AddTrailingSlash adds a trailing Unix-style slash (/) if not already
+// there.
+func AddTrailingSlash(path string) string {
+ if !strings.HasSuffix(path, "/") {
+ path += "/"
+ }
+ return path
+}
--- a/htesting/hqt/checkers.go
+++ b/htesting/hqt/checkers.go
@@ -77,7 +77,7 @@
return nil
}
- return fmt.Errorf("values are not the same text: %s", htesting.DiffStrings(s1, s2))
+ return fmt.Errorf("values are not the same text: %s", strings.Join(htesting.DiffStrings(s1, s2), " | "))
}
func normalizeString(s string) string {
--- a/hugolib/content_map.go
+++ b/hugolib/content_map.go
@@ -20,6 +20,8 @@
"strings"
"sync"
+ "github.com/gohugoio/hugo/helpers"
+
"github.com/gohugoio/hugo/resources/page"
"github.com/pkg/errors"
@@ -31,27 +33,26 @@
)
// We store the branch nodes in either the `sections` or `taxonomies` tree
-// with their path as a key; Unix style slashes, a leading slash but no
-// trailing slash.
+// with their path as a key; Unix style slashes, a leading and trailing slash.
//
-// E.g. "/blog" or "/categories/funny"
+// E.g. "/blog/" or "/categories/funny/"
//
// Pages that belongs to a section are stored in the `pages` tree below
-// the section name and a branch separator, e.g. "/blog__hb_". A page is
+// the section name and a branch separator, e.g. "/blog/__hb_". A page is
// given a key using the path below the section and the base filename with no extension
// with a leaf separator added.
//
// For bundled pages (/mybundle/index.md), we use the folder name.
//
-// An exmple of a full page key would be "/blog__hb_/page1__hl_"
+// An example of a full page key would be "/blog/__hb_page1__hl_"
//
// Bundled resources are stored in the `resources` having their path prefixed
// with the bundle they belong to, e.g.
-// "/blog__hb_/bundle__hl_data.json".
+// "/blog/__hb_bundle__hl_data.json".
//
// The weighted taxonomy entries extracted from page front matter are stored in
// the `taxonomyEntries` tree below /plural/term/page-key, e.g.
-// "/categories/funny/blog__hb_/bundle__hl_".
+// "/categories/funny/blog/__hb_bundle__hl_".
const (
cmBranchSeparator = "__hb_"
cmLeafSeparator = "__hl_"
@@ -105,7 +106,7 @@
addToReverseMap(mountKey, n, m)
}
}
- k := strings.TrimSuffix(path.Base(s), cmLeafSeparator)
+ k := strings.TrimPrefix(strings.TrimSuffix(path.Base(s), cmLeafSeparator), cmBranchSeparator)
addToReverseMap(k, n, m)
return false
})
@@ -127,18 +128,15 @@
}
func (b cmInsertKeyBuilder) ForPage(s string) *cmInsertKeyBuilder {
- // TODO2 fmt.Println("ForPage:", s, "baseKey:", b.baseKey, "key:", b.key)
+ //fmt.Println("ForPage:", s, "baseKey:", b.baseKey, "key:", b.key)
baseKey := b.baseKey
b.baseKey = s
- if !strings.HasPrefix(s, "/") {
- s = "/" + s
- }
-
if baseKey != "/" {
// Don't repeat the section path in the key.
s = strings.TrimPrefix(s, baseKey)
}
+ s = strings.TrimPrefix(s, "/")
switch b.tree {
case b.m.sections:
@@ -154,10 +152,10 @@
}
func (b cmInsertKeyBuilder) ForResource(s string) *cmInsertKeyBuilder {
- // TODO2 fmt.Println("ForResource:", s, "baseKey:", b.baseKey, "key:", b.key)
+ //fmt.Println("ForResource:", s, "baseKey:", b.baseKey, "key:", b.key)
- s = strings.TrimPrefix(s, "/")
- s = strings.TrimPrefix(s, strings.TrimPrefix(b.baseKey, "/")+"/")
+ baseKey := helpers.AddTrailingSlash(b.baseKey)
+ s = strings.TrimPrefix(s, baseKey)
switch b.tree {
case b.m.pages:
@@ -173,14 +171,23 @@
func (b *cmInsertKeyBuilder) Insert(n *contentNode) *cmInsertKeyBuilder {
if b.err == nil {
- b.tree.Insert(cleanTreeKey(b.key), n)
+ b.tree.Insert(b.Key(), n)
}
return b
}
+func (b *cmInsertKeyBuilder) Key() string {
+ switch b.tree {
+ case b.m.sections, b.m.taxonomies:
+ return cleanSectionTreeKey(b.key)
+ default:
+ return cleanTreeKey(b.key)
+ }
+}
+
func (b *cmInsertKeyBuilder) DeleteAll() *cmInsertKeyBuilder {
if b.err == nil {
- b.tree.DeletePrefix(cleanTreeKey(b.key))
+ b.tree.DeletePrefix(b.Key())
}
return b
}
@@ -211,15 +218,16 @@
}
func (b *cmInsertKeyBuilder) WithSection(s string) *cmInsertKeyBuilder {
+ s = cleanSectionTreeKey(s)
b.newTopLevel()
b.tree = b.m.sections
b.baseKey = s
b.key = s
- // TODO2 fmt.Println("WithSection:", s, "baseKey:", b.baseKey, "key:", b.key)
return b
}
func (b *cmInsertKeyBuilder) WithTaxonomy(s string) *cmInsertKeyBuilder {
+ s = cleanSectionTreeKey(s)
b.newTopLevel()
b.tree = b.m.taxonomies
b.baseKey = s
@@ -233,20 +241,17 @@
m := b.m
section, _ := m.getSection(s)
- p := s
- if section != "/" {
- p = strings.TrimPrefix(s, section)
- }
+ p := strings.TrimPrefix(s, section)
- bundlePathParts := strings.Split(p, "/")[1:]
+ bundlePathParts := strings.Split(p, "/")
basePath := section + cmBranchSeparator
// Put it into an existing bundle if found.
for i := len(bundlePathParts) - 2; i >= 0; i-- {
bundlePath := path.Join(bundlePathParts[:i]...)
- searchKey := basePath + "/" + bundlePath + cmLeafSeparator
+ searchKey := basePath + bundlePath + cmLeafSeparator
if _, found := m.pages.Get(searchKey); found {
- return section + "/" + bundlePath, searchKey
+ return section + bundlePath, searchKey
}
}
@@ -432,7 +437,7 @@
sectionPath = sectionPath[:firstSlash]
}
}
- sect = cleanTreeKey(sect)
+ sect = cleanSectionTreeKey(sect)
_, found := m.sections.Get(sect)
if !found {
m.sections.Insert(sect, &contentNode{path: sectionPath})
@@ -440,7 +445,7 @@
}
for _, view := range m.cfg.taxonomyConfig {
- s := cleanTreeKey(view.plural)
+ s := cleanSectionTreeKey(view.plural)
_, found := m.taxonomies.Get(s)
if !found {
b := &contentNode{
@@ -476,15 +481,20 @@
}
func (m *contentMap) getFirstSection(s string) (string, *contentNode) {
+ s = helpers.AddTrailingSlash(s)
for {
k, v, found := m.sections.LongestPrefix(s)
+
if !found {
return "", nil
}
- if strings.Count(k, "/") == 1 {
+
+ if strings.Count(k, "/") <= 2 {
return k, v.(*contentNode)
}
- s = path.Dir(s)
+
+ s = helpers.AddTrailingSlash(path.Dir(strings.TrimSuffix(s, "/")))
+
}
}
@@ -507,10 +517,7 @@
}
if mustCreate {
- k = s[:strings.Index(s[1:], "/")+1]
- if k == "" {
- k = "/"
- }
+ k = cleanSectionTreeKey(s[:strings.Index(s[1:], "/")+1])
b = &contentNode{
path: n.rootSection(),
@@ -523,7 +530,9 @@
}
func (m *contentMap) getPage(section, name string) *contentNode {
- key := section + cmBranchSeparator + "/" + name + cmLeafSeparator
+ section = helpers.AddTrailingSlash(section)
+ key := section + cmBranchSeparator + name + cmLeafSeparator
+
v, found := m.pages.Get(key)
if found {
return v.(*contentNode)
@@ -532,8 +541,10 @@
}
func (m *contentMap) getSection(s string) (string, *contentNode) {
- k, v, found := m.sections.LongestPrefix(path.Dir(s))
+ s = helpers.AddTrailingSlash(path.Dir(strings.TrimSuffix(s, "/")))
+ k, v, found := m.sections.LongestPrefix(s)
+
if found {
return k, v.(*contentNode)
}
@@ -541,21 +552,18 @@
}
func (m *contentMap) getTaxonomyParent(s string) (string, *contentNode) {
- s = path.Dir(s)
- if s == "/" {
- v, found := m.sections.Get(s)
- if found {
- return s, v.(*contentNode)
- }
- return "", nil
+ s = helpers.AddTrailingSlash(path.Dir(strings.TrimSuffix(s, "/")))
+ k, v, found := m.taxonomies.LongestPrefix(s)
+
+ if found {
+ return k, v.(*contentNode)
}
- for _, tree := range []*contentTree{m.taxonomies, m.sections} {
- k, v, found := tree.LongestPrefix(s)
- if found {
- return k, v.(*contentNode)
- }
+ v, found = m.sections.Get("/")
+ if found {
+ return s, v.(*contentNode)
}
+
return "", nil
}
@@ -569,6 +577,15 @@
return k
}
+func cleanSectionTreeKey(k string) string {
+ k = cleanTreeKey(k)
+ if k != "/" {
+ k += "/"
+ }
+
+ return k
+}
+
func (m *contentMap) onSameLevel(s1, s2 string) bool {
return strings.Count(s1, "/") == strings.Count(s2, "/")
}
@@ -606,13 +623,13 @@
return false
}
- if s == "/" || strings.Count(s, "/") > 1 {
+ if s == "/" || strings.Count(s, "/") > 2 {
return false
}
prefixBundle := s + cmBranchSeparator
- if !(m.sections.hasPrefix(s+"/") || m.pages.hasPrefix(prefixBundle) || m.resources.hasPrefix(prefixBundle)) {
+ if !(m.sections.hasBelow(s) || m.pages.hasBelow(prefixBundle) || m.resources.hasBelow(prefixBundle)) {
sectionsToDelete = append(sectionsToDelete, s)
}
@@ -630,13 +647,15 @@
}
func (m *contentMap) deleteSectionByPath(s string) {
- m.sections.Delete(s)
- m.sections.DeletePrefix(s + "/")
- m.pages.DeletePrefix(s + cmBranchSeparator)
- m.pages.DeletePrefix(s + "/")
- m.resources.DeletePrefix(s + cmBranchSeparator)
- m.resources.DeletePrefix(s + cmLeafSeparator)
- m.resources.DeletePrefix(s + "/")
+ if !strings.HasSuffix(s, "/") {
+ panic("section must end with a slash")
+ }
+ if !strings.HasPrefix(s, "/") {
+ panic("section must start with a slash")
+ }
+ m.sections.DeletePrefix(s)
+ m.pages.DeletePrefix(s)
+ m.resources.DeletePrefix(s)
}
func (m *contentMap) deletePageByPath(s string) {
@@ -648,8 +667,7 @@
}
func (m *contentMap) deleteTaxonomy(s string) {
- m.taxonomies.Delete(s)
- m.taxonomies.DeletePrefix(s + "/")
+ m.taxonomies.DeletePrefix(s)
}
func (m *contentMap) reduceKeyPart(dir, filename string) string {
@@ -817,7 +835,7 @@
filter = contentTreeNoListAlwaysFilter
}
if query.Prefix != "" {
- c.WalkPrefix(query.Prefix, func(s string, v interface{}) bool {
+ c.WalkBelow(query.Prefix, func(s string, v interface{}) bool {
n := v.(*contentNode)
if filter != nil && filter(s, n) {
return false
@@ -862,6 +880,18 @@
}
}
+// WalkBelow walks the tree below the given prefix, i.e. it skips the
+// node with the given prefix as key.
+func (c *contentTree) WalkBelow(prefix string, fn radix.WalkFn) {
+ c.Tree.WalkPrefix(prefix, func(s string, v interface{}) bool {
+ if s == prefix {
+ return false
+ }
+ return fn(s, v)
+ })
+
+}
+
func (c *contentTree) getMatch(matches func(b *contentNode) bool) string {
var match string
c.Walk(func(s string, v interface{}) bool {
@@ -881,9 +911,9 @@
return match
}
-func (c *contentTree) hasPrefix(s string) bool {
+func (c *contentTree) hasBelow(s1 string) bool {
var t bool
- c.Tree.WalkPrefix(s, func(s string, v interface{}) bool {
+ c.WalkBelow(s1, func(s2 string, v interface{}) bool {
t = true
return true
})
@@ -953,12 +983,7 @@
Filter: c.n.p.m.getListFilter(true),
}
- query.Prefix = c.key + cmBranchSeparator
- c.m.collectPages(query, func(c *contentNode) {
- pas = append(pas, c.p)
- })
-
- query.Prefix = c.key + "/"
+ query.Prefix = c.key
c.m.collectPages(query, func(c *contentNode) {
pas = append(pas, c.p)
})
--- a/hugolib/content_map_page.go
+++ b/hugolib/content_map_page.go
@@ -70,7 +70,7 @@
m.taxonomyEntries.Walk(func(s string, v interface{}) bool {
n := v.(*contentNode)
vi := n.viewInfo
- k := cleanTreeKey(vi.name.plural + "/" + vi.termKey)
+ k := cleanSectionTreeKey(vi.name.plural + "/" + vi.termKey)
if _, found := m.taxonomies.Get(k); !found {
vic := &contentBundleViewInfo{
@@ -266,6 +266,7 @@
func (m *pageMap) createSiteTaxonomies() error {
m.s.taxonomies = make(TaxonomyList)
+ var walkErr error
m.taxonomies.Walk(func(s string, v interface{}) bool {
n := v.(*contentNode)
t := n.viewInfo
@@ -276,7 +277,11 @@
m.s.taxonomies[viewName.plural] = make(Taxonomy)
} else {
taxonomy := m.s.taxonomies[viewName.plural]
- m.taxonomyEntries.WalkPrefix(s+"/", func(ss string, v interface{}) bool {
+ if taxonomy == nil {
+ walkErr = errors.Errorf("missing taxonomy: %s", viewName.plural)
+ return true
+ }
+ m.taxonomyEntries.WalkPrefix(s, func(ss string, v interface{}) bool {
b2 := v.(*contentNode)
info := b2.viewInfo
taxonomy.add(info.termKey, page.NewWeightedPage(info.weight, info.ref.p, n.p))
@@ -294,7 +299,7 @@
}
}
- return nil
+ return walkErr
}
func (m *pageMap) createListAllPages() page.Pages {
@@ -426,7 +431,6 @@
m.sections.Walk(func(s string, v interface{}) bool {
n := v.(*contentNode)
-
var shouldBuild bool
defer func() {
@@ -596,11 +600,12 @@
},
}
- if s == "/" {
- // To avoid getting an empty key.
- s = page.KindHome
+ var key string
+ if strings.HasSuffix(s, "/") {
+ key = cleanSectionTreeKey(path.Join(viewName.plural, termKey, s))
+ } else {
+ key = cleanTreeKey(path.Join(viewName.plural, termKey, s))
}
- key := cleanTreeKey(path.Join(viewName.plural, termKey, s))
m.taxonomyEntries.Insert(key, bv)
}
}
@@ -638,22 +643,13 @@
}
func (m *pageMap) collectSections(query pageMapQuery, fn func(c *contentNode)) error {
- var level int
- isHome := query.Prefix == "/"
+ level := strings.Count(query.Prefix, "/")
- if !isHome {
- level = strings.Count(query.Prefix, "/")
- }
-
return m.collectSectionsFn(query, func(s string, c *contentNode) bool {
- if s == query.Prefix {
+ if strings.Count(s, "/") != level+1 {
return false
}
- if (strings.Count(s, "/") - level) != 1 {
- return false
- }
-
fn(c)
return false
@@ -745,10 +741,11 @@
return err
}
- a := (&sectionWalker{m: pm.contentMap}).applyAggregates()
+ sw := &sectionWalker{m: pm.contentMap}
+ a := sw.applyAggregates()
_, mainSectionsSet := pm.s.s.Info.Params()["mainsections"]
if !mainSectionsSet && a.mainSection != "" {
- mainSections := []string{a.mainSection}
+ mainSections := []string{strings.TrimRight(a.mainSection, "/")}
pm.s.s.Info.Params()["mainSections"] = mainSections
pm.s.s.Info.Params()["mainsections"] = mainSections
}
@@ -847,7 +844,7 @@
b.sectionsInit.Do(func() {
var pas page.Pages
ref := b.owner.treeRef
- ref.m.collectTaxonomies(ref.key+"/", func(c *contentNode) {
+ ref.m.collectTaxonomies(ref.key, func(c *contentNode) {
pas = append(pas, c.p)
})
page.SortByDefault(pas)
@@ -888,8 +885,12 @@
s string
}
+func (h *sectionAggregateHandler) String() string {
+ return fmt.Sprintf("%s/%s - %d - %s", h.sectionAggregate.datesAll, h.sectionAggregate.datesSection, h.sectionPageCount, h.s)
+}
+
func (h *sectionAggregateHandler) isRootSection() bool {
- return h.s != "/" && strings.Count(h.s, "/") == 1
+ return h.s != "/" && strings.Count(h.s, "/") == 2
}
func (h *sectionAggregateHandler) handleNested(v sectionWalkHandler) error {
@@ -963,11 +964,13 @@
func (w *sectionWalker) walkLevel(prefix string, createVisitor func() sectionWalkHandler) sectionWalkHandler {
level := strings.Count(prefix, "/")
+
visitor := createVisitor()
- w.m.taxonomies.WalkPrefix(prefix, func(s string, v interface{}) bool {
+ w.m.taxonomies.WalkBelow(prefix, func(s string, v interface{}) bool {
currentLevel := strings.Count(s, "/")
- if currentLevel > level {
+
+ if currentLevel > level+1 {
return false
}
@@ -977,8 +980,8 @@
return true
}
- if currentLevel == 1 {
- nested := w.walkLevel(s+"/", createVisitor)
+ if currentLevel == 2 {
+ nested := w.walkLevel(s, createVisitor)
if w.err = visitor.handleNested(nested); w.err != nil {
return true
}
@@ -995,9 +998,9 @@
return w.err != nil
})
- w.m.sections.WalkPrefix(prefix, func(s string, v interface{}) bool {
+ w.m.sections.WalkBelow(prefix, func(s string, v interface{}) bool {
currentLevel := strings.Count(s, "/")
- if currentLevel > level {
+ if currentLevel > level+1 {
return false
}
@@ -1016,11 +1019,9 @@
return true
}
- if s != "/" {
- nested := w.walkLevel(s+"/", createVisitor)
- if w.err = visitor.handleNested(nested); w.err != nil {
- return true
- }
+ nested := w.walkLevel(s, createVisitor)
+ if w.err = visitor.handleNested(nested); w.err != nil {
+ return true
}
w.err = visitor.handleSectionPost()
--- a/hugolib/content_map_test.go
+++ b/hugolib/content_map_test.go
@@ -155,19 +155,19 @@
expect := `
Tree 0:
- /blog__hb_/a__hl_
- /blog__hb_/b/c__hl_
+ /blog/__hb_a__hl_
+ /blog/__hb_b/c__hl_
Tree 1:
- /blog
+ /blog/
Tree 2:
- /blog__hb_/a__hl_b/data.json
- /blog__hb_/a__hl_logo.png
- /blog__hl_sectiondata.json
- en/pages/blog__hb_/a__hl_|f:blog/a/index.md
+ /blog/__hb_a__hl_b/data.json
+ /blog/__hb_a__hl_logo.png
+ /blog/__hl_sectiondata.json
+ en/pages/blog/__hb_a__hl_|f:blog/a/index.md
- R: blog/a/b/data.json
- R: blog/a/logo.png
- en/pages/blog__hb_/b/c__hl_|f:blog/b/c/index.md
- en/sections/blog|f:blog/_index.md
+ en/pages/blog/__hb_b/c__hl_|f:blog/b/c/index.md
+ en/sections/blog/|f:blog/_index.md
- P: blog/a/index.md
- P: blog/b/c/index.md
- R: blog/sectiondata.json
@@ -194,24 +194,24 @@
expect = `
Tree 0:
- /blog__hb_/a__hl_
- /blog__hb_/b/c__hl_
+ /blog/__hb_a__hl_
+ /blog/__hb_b/c__hl_
Tree 1:
- /blog
+ /blog/
Tree 2:
- /blog__hb_/a__hl_b/data.json
- /blog__hb_/a__hl_b/data2.json
- /blog__hb_/a__hl_logo.png
- /blog__hb_/b/c__hl_d/data3.json
- /blog__hl_sectiondata.json
- /blog__hl_sectiondata2.json
- en/pages/blog__hb_/a__hl_|f:blog/a/index.md
+ /blog/__hb_a__hl_b/data.json
+ /blog/__hb_a__hl_b/data2.json
+ /blog/__hb_a__hl_logo.png
+ /blog/__hb_b/c__hl_d/data3.json
+ /blog/__hl_sectiondata.json
+ /blog/__hl_sectiondata2.json
+ en/pages/blog/__hb_a__hl_|f:blog/a/index.md
- R: blog/a/b/data.json
- R: blog/a/b/data2.json
- R: blog/a/logo.png
- en/pages/blog__hb_/b/c__hl_|f:blog/b/c/index.md
+ en/pages/blog/__hb_b/c__hl_|f:blog/b/c/index.md
- R: blog/b/c/d/data3.json
- en/sections/blog|f:blog/_index.md
+ en/sections/blog/|f:blog/_index.md
- P: blog/a/index.md
- P: blog/b/c/index.md
- R: blog/sectiondata.json
@@ -226,26 +226,26 @@
c.Assert(m.testDump(), hqt.IsSameString, `
Tree 0:
- /blog__hb_/a__hl_
- /blog__hb_/b/c__hl_
- /blog__hb_/b__hl_
+ /blog/__hb_a__hl_
+ /blog/__hb_b/c__hl_
+ /blog/__hb_b__hl_
Tree 1:
- /blog
+ /blog/
Tree 2:
- /blog__hb_/a__hl_b/data.json
- /blog__hb_/a__hl_b/data2.json
- /blog__hb_/a__hl_logo.png
- /blog__hb_/b/c__hl_d/data3.json
- /blog__hl_sectiondata.json
- /blog__hl_sectiondata2.json
- en/pages/blog__hb_/a__hl_|f:blog/a/index.md
+ /blog/__hb_a__hl_b/data.json
+ /blog/__hb_a__hl_b/data2.json
+ /blog/__hb_a__hl_logo.png
+ /blog/__hb_b/c__hl_d/data3.json
+ /blog/__hl_sectiondata.json
+ /blog/__hl_sectiondata2.json
+ en/pages/blog/__hb_a__hl_|f:blog/a/index.md
- R: blog/a/b/data.json
- R: blog/a/b/data2.json
- R: blog/a/logo.png
- en/pages/blog__hb_/b/c__hl_|f:blog/b/c/index.md
+ en/pages/blog/__hb_b/c__hl_|f:blog/b/c/index.md
- R: blog/b/c/d/data3.json
- en/pages/blog__hb_/b__hl_|f:blog/b.md
- en/sections/blog|f:blog/_index.md
+ en/pages/blog/__hb_b__hl_|f:blog/b.md
+ en/sections/blog/|f:blog/_index.md
- P: blog/a/index.md
- P: blog/b/c/index.md
- P: blog/b.md
@@ -280,19 +280,19 @@
c.Assert(got, hqt.IsSameString, `
Tree 0:
- /__hb_/bundle__hl_
- /blog__hb_/a__hl_
- /blog__hb_/page__hl_
+ /__hb_bundle__hl_
+ /blog/__hb_a__hl_
+ /blog/__hb_page__hl_
Tree 1:
/
- /blog
+ /blog/
Tree 2:
- en/pages/__hb_/bundle__hl_|f:bundle/index.md
- en/pages/blog__hb_/a__hl_|f:blog/a/index.md
- en/pages/blog__hb_/page__hl_|f:blog/page.md
+ en/pages/__hb_bundle__hl_|f:bundle/index.md
+ en/pages/blog/__hb_a__hl_|f:blog/a/index.md
+ en/pages/blog/__hb_page__hl_|f:blog/page.md
en/sections/
- P: bundle/index.md
- en/sections/blog
+ en/sections/blog/
- P: blog/a/index.md
- P: blog/page.md
--- a/hugolib/page.go
+++ b/hugolib/page.go
@@ -133,22 +133,21 @@
// GetTerms gets the terms defined on this page in the given taxonomy.
func (p *pageState) GetTerms(taxonomy string) page.Pages {
- taxonomy = strings.ToLower(taxonomy)
+ if p.treeRef == nil {
+ return nil
+ }
+
m := p.s.pageMap
- prefix := cleanTreeKey(taxonomy)
- var self string
- if p.IsHome() {
- // TODO(bep) make this less magical, see taxonomyEntries.Insert.
- self = "/" + page.KindHome
- } else if p.treeRef != nil {
- self = p.treeRef.key
- }
+ taxonomy = strings.ToLower(taxonomy)
+ prefix := cleanSectionTreeKey(taxonomy)
+ self := strings.TrimPrefix(p.treeRef.key, "/")
var pas page.Pages
m.taxonomies.WalkQuery(pageMapQuery{Prefix: prefix}, func(s string, n *contentNode) bool {
- if _, found := m.taxonomyEntries.Get(s + self); found {
+ key := s + self
+ if _, found := m.taxonomyEntries.Get(key); found {
pas = append(pas, n.p)
}
return false
--- a/hugolib/page__tree.go
+++ b/hugolib/page__tree.go
@@ -58,7 +58,7 @@
return true, nil
}
- if strings.HasPrefix(ref2.key, ref1.key+"/") {
+ if strings.HasPrefix(ref2.key, ref1.key) {
return true, nil
}
@@ -109,7 +109,7 @@
return true, nil
}
- if strings.HasPrefix(ref1.key, ref2.key+"/") {
+ if strings.HasPrefix(ref1.key, ref2.key) {
return true, nil
}
@@ -123,9 +123,11 @@
return pt.p.s.home
}
key := ref.key
+
if !ref.isSection() {
key = path.Dir(key)
}
+
_, b := ref.m.getFirstSection(key)
if b == nil {
return nil
--- a/hugolib/pagecollections.go
+++ b/hugolib/pagecollections.go
@@ -169,18 +169,20 @@
func (c *PageCollections) getSectionOrPage(ref string) (*contentNode, string) {
var n *contentNode
- s, v, found := c.pageMap.sections.LongestPrefix(ref)
+ pref := helpers.AddTrailingSlash(ref)
+ s, v, found := c.pageMap.sections.LongestPrefix(pref)
if found {
n = v.(*contentNode)
}
- if found && s == ref {
+ if found && s == pref {
// A section
return n, ""
}
m := c.pageMap
+
filename := strings.TrimPrefix(strings.TrimPrefix(ref, s), "/")
langSuffix := "." + m.s.Lang()
@@ -224,9 +226,11 @@
func (c *PageCollections) getContentNode(context page.Page, isReflink bool, ref string) (*contentNode, error) {
ref = filepath.ToSlash(strings.ToLower(strings.TrimSpace(ref)))
+
if ref == "" {
ref = "/"
}
+
inRef := ref
navUp := strings.HasPrefix(ref, "..")
var doSimpleLookup bool
@@ -275,9 +279,11 @@
}
// Check if it's a taxonomy node
- s, v, found := m.taxonomies.LongestPrefix(ref)
+ pref := helpers.AddTrailingSlash(ref)
+ s, v, found := m.taxonomies.LongestPrefix(pref)
+
if found {
- if !m.onSameLevel(ref, s) {
+ if !m.onSameLevel(pref, s) {
return nil, nil
}
return v.(*contentNode), nil
--- a/hugolib/pagecollections_test.go
+++ b/hugolib/pagecollections_test.go
@@ -211,6 +211,9 @@
writeSource(t, fs, filepath.Join("content", "sect3", "b1", "index.md"), pc("b1 bundle"))
writeSource(t, fs, filepath.Join("content", "sect3", "index", "index.md"), pc("index bundle"))
+ writeSource(t, fs, filepath.Join("content", "section_bundle_overlap", "_index.md"), pc("index overlap section"))
+ writeSource(t, fs, filepath.Join("content", "section_bundle_overlap_bundle", "index.md"), pc("index overlap bundle"))
+
s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
sec3, err := s.getPageNew(nil, "/sect3")
@@ -282,6 +285,9 @@
// Bundle variants
{"Bundle regular", page.KindPage, nil, []string{"sect3/b1", "sect3/b1/index.md", "sect3/b1/index.en.md"}, "b1 bundle"},
{"Bundle index name", page.KindPage, nil, []string{"sect3/index/index.md", "sect3/index"}, "index bundle"},
+
+ // https://github.com/gohugoio/hugo/issues/7301
+ {"Section and bundle overlap", page.KindPage, nil, []string{"section_bundle_overlap_bundle"}, "index overlap bundle"},
}
for _, test := range tests {
--- a/hugolib/taxonomy_test.go
+++ b/hugolib/taxonomy_test.go
@@ -637,6 +637,7 @@
b.Assert(funny, qt.Not(qt.IsNil))
b.Assert(cat.Parent().IsHome(), qt.Equals, true)
+ b.Assert(funny.Kind(), qt.Equals, "taxonomy")
b.Assert(funny.Parent(), qt.Equals, cat)
b.AssertFileContent("public/index.html", `