ref: 3737c9bcb39527298744ca287d1bd5b1dd530f52
parent: c2f3cb2d7aef86597e9fb61aaa872308a591f8c2
author: Bjørn Erik Pedersen <bjorn.erik.pedersen@gmail.com>
date: Mon Oct 31 15:53:33 EDT 2016
node to page: Handle taxonomy lists

Updates #2297
--- a/hugolib/hugo_sites.go
+++ b/hugolib/hugo_sites.go
@@ -464,6 +464,8 @@
continue
}
+ p.setNodeTypeVars(s)
+
// If we got this far it means that this is either a new Page pointer
// or a template or similar has changed so we need to do a rerendering
// of the shortcodes etc.
@@ -572,7 +574,7 @@
}
}
-func (h *HugoSites) findPagesByNodeType(n NodeType) Pages {
+func (h *HugoSites) findAllPagesByNodeType(n NodeType) Pages {
var pages Pages
for _, p := range h.Sites[0].AllPages {
if p.NodeType == n {
--- a/hugolib/node.go
+++ b/hugolib/node.go
@@ -32,9 +32,13 @@
const (
NodePage NodeType = iota
+ // Temporary state.
+ NodeUnknown
+
// The rest are node types; home page, sections etc.
NodeHome
NodeSection
+ NodeTaxonomy
)
func (p NodeType) IsNode() bool {
@@ -337,8 +341,14 @@
return helpers.FilePathSeparator + filepath.Join(n.Lang(), outfile)
}
+func sectionsFromFilename(filename string) []string {
+ dir, _ := filepath.Split(filename)
+ return strings.Split(dir, helpers.FilePathSeparator)
+}
+
+// TODO(bep) np node identificator
func nodeTypeFromFilename(filename string) NodeType {
- // TODO(bep) np
+
if !strings.Contains(filename, "_node") {
return NodePage
}
@@ -347,5 +357,32 @@
return NodeHome
}
- return NodeSection
+ // We don't know enough yet to determine the type.
+ return NodeUnknown
+}
+
+func (p *Page) setNodeTypeVars(s *Site) {
+ // TODO(bep) np taxonomies etc.
+ if p.NodeType == NodeUnknown {
+ // This is either a taxonomy or a section
+ if s.isTaxonomy(p.Section()) {
+ p.NodeType = NodeTaxonomy
+ } else {
+ p.NodeType = NodeSection
+ }
+
+ }
+ // TODO(bep) np node URL
+ // Set Node URL
+ switch p.NodeType {
+ case NodeHome:
+ p.URLPath.URL = ""
+ case NodeSection:
+ p.URLPath.URL = p.Section()
+ case NodeTaxonomy:
+ p.URLPath.URL = path.Join(p.sections...)
+ }
+
+ p.site = s
+
}
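
Taken together, nodeTypeFromFilename, sectionsFromFilename and setNodeTypeVars classify a _node.md content file once the site's taxonomies are known. Below is a minimal sketch of that flow, assuming sections comes from sectionsFromFilename and with isTaxonomy standing in for the Site method added in site.go further down; it is illustrative only, not part of the patch.

// Illustrative sketch only, not part of the patch.
// sectionsFromFilename examples (split on the path separator):
//   "_node.md"                 -> [""]                       -> NodeHome
//   "sect1/_node.md"           -> ["sect1", ""]              -> NodeSection
//   "categories/hugo/_node.md" -> ["categories", "hugo", ""] -> NodeTaxonomy
func classifyNode(sections []string, isTaxonomy func(string) bool) NodeType {
	if len(sections) == 0 || sections[0] == "" {
		return NodeHome // _node.md in the content root
	}
	if isTaxonomy(sections[0]) {
		return NodeTaxonomy // e.g. a term page under content/categories/
	}
	return NodeSection // any other non-root _node.md
}
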
--- a/hugolib/node_as_page_test.go
+++ b/hugolib/node_as_page_test.go
@@ -31,8 +31,8 @@
*/
func TestNodesAsPage(t *testing.T) {
- //jww.SetStdoutThreshold(jww.LevelDebug)
- jww.SetStdoutThreshold(jww.LevelFatal)
+ jww.SetStdoutThreshold(jww.LevelDebug)
+ //jww.SetStdoutThreshold(jww.LevelFatal)
nodePageFeatureFlag = true
defer toggleNodePageFeatureFlag()
@@ -67,6 +67,12 @@
Section2 **Content!**
`)
+ writeSource(t, filepath.Join("content", "categories", "hugo", "_node.md"), `---+title: Taxonomy Hugo
+---
+Taxonomy Hugo **Content!**
+`)
+
writeSource(t, filepath.Join("layouts", "index.html"), ` Index Title: {{ .Title }} Index Content: {{ .Content }}@@ -90,6 +96,15 @@
{{ end }}
`)
+ writeSource(t, filepath.Join("layouts", "_default", "taxonomy.html"), `+Taxonomy Title: {{ .Title }}+Taxonomy Content: {{ .Content }}+# Pages: {{ len .Data.Pages }}+{{ range .Paginator.Pages }}+ Pag: {{ .Title }}+{{ end }}+`)
+
// Add some regular pages
for i := 1; i <= 4; i++ {
sect := "sect1"
@@ -120,7 +135,7 @@
assertFileContent(t, filepath.Join("public", "sect1", "regular1", "index.html"), false, "Single Title: Page 01", "Content Page 01")h := s.owner
- nodes := h.findPagesByNodeType(NodeHome)
+ nodes := h.findAllPagesByNodeType(NodeHome)
require.Len(t, nodes, 1)
home := nodes[0]
@@ -129,7 +144,7 @@
require.True(t, home.IsNode())
require.False(t, home.IsPage())
- pages := h.findPagesByNodeType(NodePage)
+ pages := h.findAllPagesByNodeType(NodePage)
require.Len(t, pages, 4)
first := pages[0]
@@ -151,7 +166,16 @@
assertFileContent(t, filepath.Join("public", "sect1", "page", "2", "index.html"), false,"Pag: Page 02")
- sections := h.findPagesByNodeType(NodeSection)
+ sections := h.findAllPagesByNodeType(NodeSection)
require.Len(t, sections, 2)
+
+ // Check taxonomy list
+ assertFileContent(t, filepath.Join("public", "categories", "hugo", "index.html"), false,+ "Taxonomy Title: Taxonomy Hugo", "Taxonomy Hugo <strong>Content!</strong>")
+
+ // Check taxonomy list paginator
+ assertFileContent(t, filepath.Join("public", "categories", "hugo", "page", "2", "index.html"), false,+ "Taxonomy Title: Taxonomy Hugo",
+ "Pag: Page 02")
}
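
The new taxonomy assertions depend on the layout lookup added to (*Page).layouts() in page.go below: for the term page /categories/hugo, and assuming the common category = "categories" taxonomy mapping so that the singular form is "category", the candidates are tried in this order and the test's _default/taxonomy.html is the first one that exists. A sketch of that candidate list (illustration only, not part of the patch):

// Layout candidates for the NodeTaxonomy page /categories/hugo,
// assuming singular == "category".
candidates := []string{
	"taxonomy/category.html",
	"indexes/category.html",
	"_default/taxonomy.html", // the layout written by this test
	"_default/list.html",
}
_ = candidates
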
--- a/hugolib/page.go
+++ b/hugolib/page.go
@@ -98,6 +98,21 @@
Node
GitInfo *gitmap.GitInfo
+
+ // This was added as part of getting the Nodes (taxonomies etc.) to work as
+ // Pages in Hugo 0.18.
+ // It is deliberately named similar to Section, but not exported (for now).
+ // We currently have only one level of section in Hugo, but the page can live
+ // any number of levels down the file path.
+ // To support taxonomies like /categories/hugo etc. we will need to keep track
+ // of that information in a general way.
+ // So, sections represents the path to the content, i.e. a content file or a
+ // virtual content file in the situations where a taxonomy or a section etc.
+ // isn't accompanied by one.
+ sections []string
+
+ // TODO(bep) np Site added to page, keep?
+ site *Site
}
type Source struct {
@@ -418,6 +433,7 @@
Node: Node{NodeType: nodeTypeFromFilename(filename), Keywords: []string{}, Sitemap: Sitemap{Priority: -1}},
Params: make(map[string]interface{}),
translations: make(Pages, 0),
+ sections: sectionsFromFilename(filename),
}
jww.DEBUG.Println("Reading from", page.File.Path())
@@ -449,7 +465,7 @@
return p.layoutsCalculated
}
- // TODO(bep) np
+ // TODO(bep) np taxonomy etc.
switch p.NodeType {
case NodeHome:
return []string{"index.html", "_default/list.html"}
@@ -456,8 +472,13 @@
case NodeSection:
section := p.Section()
return []string{"section/" + section + ".html", "_default/section.html", "_default/list.html", "indexes/" + section + ".html", "_default/indexes.html"}+ case NodeTaxonomy:
+ singular := p.site.taxonomiesPluralSingular[p.sections[0]]
+ return []string{"taxonomy/" + singular + ".html", "indexes/" + singular + ".html", "_default/taxonomy.html", "_default/list.html"}}
+ // Regular Page handled below
+
if p.Layout != "" {return layouts(p.Type(), p.Layout)
}
@@ -862,15 +883,6 @@
}
}
- // TODO(bep) np node URL
- // Set Node URL
- switch p.NodeType {
- case NodeHome:
- p.URLPath.URL = ""
- case NodeSection:
- p.URLPath.URL = p.Section()
- }
-
return nil
}
@@ -1153,6 +1165,8 @@
return "index.html"
case NodeSection:
return filepath.Join(p.Section(), "index.html")
+ case NodeTaxonomy:
+ return filepath.Join(append(p.sections, "index.html")...)
}
// Always use URL if it's specified
@@ -1214,6 +1228,7 @@
return nil
}
+// TODO(bep) np naming, move some
func (p *Page) prepareData(s *Site) error {
p.Data = make(map[string]interface{})
switch p.NodeType {
@@ -1220,8 +1235,7 @@
case NodePage:
case NodeHome:
// TODO(bep) np cache the below
- // TODO(bep) np
- p.Data["Pages"] = s.owner.findPagesByNodeType(NodePage)
+ p.Data["Pages"] = s.owner.findAllPagesByNodeType(NodePage)
case NodeSection:
sectionData, ok := s.Sections[p.Section()]
if !ok {
@@ -1228,50 +1242,20 @@
return fmt.Errorf("Data for section %s not found", p.Section())
}
p.Data["Pages"] = sectionData
- }
+ case NodeTaxonomy:
+ plural := p.sections[0]
+ term := p.sections[1]
- return nil
-}
+ singular := s.taxonomiesPluralSingular[plural]
+ taxonomy := s.Taxonomies[plural].Get(term)
-// renderPaginator must be run after the owning Page has been rendered.
-// TODO(bep) np
-func (p *Page) renderPaginator(s *Site) error {
- if p.paginator != nil {
- paginatePath := helpers.Config().GetString("paginatePath")
+ p.Data[singular] = taxonomy
+ p.Data["Singular"] = singular
+ p.Data["Plural"] = plural
+ p.Data["Pages"] = taxonomy.Pages()
- // write alias for page 1
- // TODO(bep) ml all of these n.addLang ... fix.
- //permaLink, _ := p.Permalink()
- // TODO(bep) np fix
- //s.writeDestAlias(p.addLangPathPrefix(helpers.PaginateAliasPath("", 1)), permaLink, nil)
-
- pagers := p.paginator.Pagers()
-
- for i, pager := range pagers {
- if i == 0 {
- // already created
- continue
- }
-
- pagerNode := p.copy()
-
- pagerNode.paginator = pager
- if pager.TotalPages() > 0 {
- first, _ := pager.page(0)
- pagerNode.Date = first.Date
- pagerNode.Lastmod = first.Lastmod
- }
-
- pageNumber := i + 1
- htmlBase := path.Join(p.URLPath.URL, fmt.Sprintf("/%s/%d", paginatePath, pageNumber))
- htmlBase = p.addLangPathPrefix(htmlBase)
- if err := s.renderAndWritePage(pagerNode.Title,
- filepath.FromSlash(htmlBase), pagerNode, p.layouts()...); err != nil {
- return err
- }
-
- }
}
+
return nil
}
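
For the same /categories/hugo example, the NodeTaxonomy branch of prepareData leaves the term page with data shaped roughly as follows. This is a sketch that restates the assignments above, assuming a *Site s and *Page p in scope and the "categories" -> "category" mapping; it is not part of the patch.

// Illustration only, not part of the patch.
terms := s.Taxonomies["categories"].Get("hugo") // weighted pages for the term

p.Data["category"] = terms      // keyed by the singular form
p.Data["Singular"] = "category"
p.Data["Plural"] = "categories"
p.Data["Pages"] = terms.Pages() // what .Data.Pages and .Paginator see
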
--- a/hugolib/site.go
+++ b/hugolib/site.go
@@ -91,11 +91,16 @@
nodeCache *nodeCache
nodeCacheInit sync.Once
- Pages Pages
- AllPages Pages
- rawAllPages Pages
- Files []*source.File
- Taxonomies TaxonomyList
+ Pages Pages
+ AllPages Pages
+ rawAllPages Pages
+ Files []*source.File
+ Taxonomies TaxonomyList
+
+ // Plural is what we get in the folder, so keep track of this mapping
+ // to get the singular form from that value.
+ taxonomiesPluralSingular map[string]string
+
Source source.Input
Sections Taxonomy
Info SiteInfo
@@ -1514,12 +1519,14 @@
func (s *Site) assembleTaxonomies() {
s.Taxonomies = make(TaxonomyList)
+ s.taxonomiesPluralSingular = make(map[string]string)
taxonomies := s.Language.GetStringMapString("Taxonomies")
jww.INFO.Printf("found taxonomies: %#v\n", taxonomies)
- for _, plural := range taxonomies {
+ for singular, plural := range taxonomies {
s.Taxonomies[plural] = make(Taxonomy)
+ s.taxonomiesPluralSingular[plural] = singular
for _, p := range s.Pages {
vals := p.getParam(plural, !s.Info.preserveTaxonomyNames)
weight := p.GetParam(plural + "_weight")
@@ -1569,9 +1576,13 @@
func (s *Site) assembleSections() {
s.Sections = make(Taxonomy)
s.Info.Sections = s.Sections
-
- for i, p := range s.Pages {
- s.Sections.add(p.Section(), WeightedPage{s.Pages[i].Weight, s.Pages[i]}, s.Info.preserveTaxonomyNames)
+ regularPages := s.findPagesByNodeType(NodePage)
+ for i, p := range regularPages {
+ section := p.Section()
+ if s.isTaxonomy(section) {
+ continue
+ }
+ s.Sections.add(section, WeightedPage{s.Pages[i].Weight, regularPages[i]}, s.Info.preserveTaxonomyNames)
}
for k := range s.Sections {
@@ -1588,6 +1599,23 @@
}
}
+func (s *Site) isTaxonomy(section string) bool {
+ if _, isTaxonomy := s.Taxonomies[section]; isTaxonomy {
+ return true
+ }
+ return false
+}
+
+func (s *Site) findPagesByNodeType(n NodeType) Pages {
+ var pages Pages
+ for _, p := range s.Pages {
+ if p.NodeType == n {
+ pages = append(pages, p)
+ }
+ }
+ return pages
+}
+
// renderAliases renders shell pages that simply have a redirect in the header.
func (s *Site) renderAliases() error {
for _, p := range s.Pages {
@@ -1645,59 +1673,6 @@
return nil
}
-// renderPages renders pages each corresponding to a markdown file.
-func (s *Site) renderPages() error {
-
- results := make(chan error)
- pages := make(chan *Page)
- errs := make(chan error)
-
- go errorCollator(results, errs)
-
- procs := getGoMaxProcs()
-
- wg := &sync.WaitGroup{}
-
- for i := 0; i < procs*4; i++ {
- wg.Add(1)
- go pageRenderer(s, pages, results, wg)
- }
-
- for _, page := range s.Pages {
- pages <- page
- }
-
- close(pages)
-
- wg.Wait()
-
- close(results)
-
- err := <-errs
- if err != nil {
- return fmt.Errorf("Error(s) rendering pages: %s", err)
- }
- return nil
-}
-
-func pageRenderer(s *Site, pages <-chan *Page, results chan<- error, wg *sync.WaitGroup) {
- defer wg.Done()
- for p := range pages {
- targetPath := p.TargetPath()
- layouts := p.layouts()
- jww.DEBUG.Printf("Render Page to %q with layouts %q", targetPath, layouts)
- if err := s.renderAndWritePage("page "+p.FullFilePath(), targetPath, p, s.appendThemeTemplates(layouts)...); err != nil {
- results <- err
- }
-
- if p.NodeType.IsNode() {
- if err := p.renderPaginator(s); err != nil {
- results <- err
- }
- }
- }
-}
-
func errorCollator(results <-chan error, errs chan<- error) {
errMsgs := []string{}
for err := range results {
@@ -1753,6 +1728,9 @@
// renderTaxonomiesLists renders the listing pages based on the meta data
// each unique term within a taxonomy will have a page created
func (s *Site) renderTaxonomiesLists(prepare bool) error {
+ if nodePageFeatureFlag {
+ return nil
+ }
wg := &sync.WaitGroup{}
taxes := make(chan taxRenderInfo)
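
assembleTaxonomies now records the plural-to-singular mapping next to the taxonomies themselves, which is what layouts() and prepareData look up. With a typical site configuration (assumed here, not shown in the patch) the result is:

// Assumed config:
//   [taxonomies]
//     category = "categories"
//     tag = "tags"
//
// After assembleTaxonomies, the reverse lookup is:
taxonomiesPluralSingular := map[string]string{
	"categories": "category",
	"tags":       "tag",
}
_ = taxonomiesPluralSingular
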
--- /dev/null
+++ b/hugolib/site_render.go
@@ -0,0 +1,122 @@
+// Copyright 2016 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+ "path"
+ "path/filepath"
+ "sync"
+
+ "github.com/spf13/hugo/helpers"
+
+ jww "github.com/spf13/jwalterweatherman"
+)
+
+// renderPages renders pages each corresponding to a markdown file.
+// TODO(bep) np doc
+func (s *Site) renderPages() error {
+
+ results := make(chan error)
+ pages := make(chan *Page)
+ errs := make(chan error)
+
+ go errorCollator(results, errs)
+
+ procs := getGoMaxProcs()
+
+ wg := &sync.WaitGroup{}
+
+ for i := 0; i < procs*4; i++ {
+ wg.Add(1)
+ go pageRenderer(s, pages, results, wg)
+ }
+
+ for _, page := range s.Pages {
+ pages <- page
+ }
+
+ close(pages)
+
+ wg.Wait()
+
+ close(results)
+
+ err := <-errs
+ if err != nil {
+ return fmt.Errorf("Error(s) rendering pages: %s", err)
+ }
+ return nil
+}
+
+func pageRenderer(s *Site, pages <-chan *Page, results chan<- error, wg *sync.WaitGroup) {
+ defer wg.Done()
+ for p := range pages {
+ targetPath := p.TargetPath()
+ layouts := p.layouts()
+ jww.DEBUG.Printf("Render Page to %q with layouts %q", targetPath, layouts)
+ if err := s.renderAndWritePage("page "+p.FullFilePath(), targetPath, p, s.appendThemeTemplates(layouts)...); err != nil {
+ results <- err
+ }
+
+ if p.NodeType.IsNode() {
+ if err := s.renderPaginator(p); err != nil {
+ results <- err
+ }
+ }
+ }
+}
+
+// renderPaginator must be run after the owning Page has been rendered.
+// TODO(bep) np
+func (s *Site) renderPaginator(p *Page) error {
+ if p.paginator != nil {
+ jww.DEBUG.Printf("Render paginator for page %q", p.Path())
+ paginatePath := helpers.Config().GetString("paginatePath")
+
+ // write alias for page 1
+ // TODO(bep) ml all of these n.addLang ... fix.
+ //permaLink, _ := p.Permalink()
+ // TODO(bep) np fix
+ //s.writeDestAlias(p.addLangPathPrefix(helpers.PaginateAliasPath("", 1)), permaLink, nil)
+
+ pagers := p.paginator.Pagers()
+
+ for i, pager := range pagers {
+ if i == 0 {
+ // already created
+ continue
+ }
+
+ pagerNode := p.copy()
+
+ pagerNode.paginator = pager
+ if pager.TotalPages() > 0 {
+ first, _ := pager.page(0)
+ pagerNode.Date = first.Date
+ pagerNode.Lastmod = first.Lastmod
+ }
+
+ pageNumber := i + 1
+ htmlBase := path.Join(p.URLPath.URL, fmt.Sprintf("/%s/%d", paginatePath, pageNumber))
+ htmlBase = p.addLangPathPrefix(htmlBase)
+ if err := s.renderAndWritePage(pagerNode.Title,
+ filepath.FromSlash(htmlBase), pagerNode, p.layouts()...); err != nil {
+ return err
+ }
+
+ }
+ }
+ return nil
+}
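
renderPaginator writes every pager after the first under paginatePath, which is how the test's categories/hugo/page/2/index.html is produced. A runnable sketch of the htmlBase computation, assuming the defaults URLPath.URL == "categories/hugo" and paginatePath == "page":

package main

import (
	"fmt"
	"path"
)

func main() {
	urlPath := "categories/hugo" // p.URLPath.URL as set by setNodeTypeVars
	paginatePath := "page"       // Hugo's default paginatePath
	pageNumber := 2              // pager index 1, i.e. the second pager

	htmlBase := path.Join(urlPath, fmt.Sprintf("/%s/%d", paginatePath, pageNumber))
	fmt.Println(htmlBase) // categories/hugo/page/2
}
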
--