ref: 0f6b334b6715253b030c4e783b88e911b6e53e56
parent: b78f13b0414fd5006237c0e7cdaa0f8cc8034bff
author: Sven Dowideit <SvenDowideit@home.org.au>
date: Wed Sep 9 06:03:38 EDT 2015
Source-file-based relative linking, à la GitHub repository markdown, for both md files and non-md files

Signed-off-by: Sven Dowideit <SvenDowideit@home.org.au>
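For context, a minimal self-contained sketch of how the new resolver hooks behave (illustrative only, not part of this patch; `resolveOrFallback` and the example resolver are hypothetical names standing in for `HugoHtmlRenderer.Link` and `SiteInfo.githubLink`): when no resolver is configured the link passes through untouched, and a resolver error falls back to the raw link, as the renderer changes below do.

    // Illustrative sketch only - mirrors the wiring this patch introduces.
    package main

    import "fmt"

    // Same signature as the LinkResolverFunc type added in helpers/content_renderer.go.
    type LinkResolverFunc func(ref string) (string, error)

    // resolveOrFallback reproduces the renderer-side behaviour: no resolver means
    // the link is left untouched; a resolver error is reported (printed here,
    // logged via jww.ERROR in the patch) and the original link is kept.
    func resolveOrFallback(link string, resolve LinkResolverFunc) string {
        if resolve == nil {
            return link
        }
        newLink, err := resolve(link)
        if err != nil {
            fmt.Printf("LinkResolver: %s\n", err)
            return link
        }
        return newLink
    }

    func main() {
        // Hypothetical resolver standing in for SiteInfo.githubLink.
        resolve := LinkResolverFunc(func(ref string) (string, error) {
            if ref == "level2/index.md" {
                return "/level2/", nil
            }
            return "", fmt.Errorf("no page found for %q", ref)
        })
        fmt.Println(resolveOrFallback("level2/index.md", resolve)) // /level2/
        fmt.Println(resolveOrFallback("missing.md", resolve))      // missing.md (fallback)
    }
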
--- a/helpers/content.go
+++ b/helpers/content.go
@@ -43,27 +43,29 @@
// Blackfriday holds configuration values for Blackfriday rendering.
type Blackfriday struct {
- Smartypants bool
- AngledQuotes bool
- Fractions bool
- HrefTargetBlank bool
- SmartDashes bool
- LatexDashes bool
- PlainIDAnchors bool
- Extensions []string
- ExtensionsMask []string
+ Smartypants bool
+ AngledQuotes bool
+ Fractions bool
+ HrefTargetBlank bool
+ SmartDashes bool
+ LatexDashes bool
+ PlainIDAnchors bool
+ SourceRelativeLinksEval bool
+ Extensions []string
+ ExtensionsMask []string
}
// NewBlackfriday creates a new Blackfriday filled with site config or some sane defaults
func NewBlackfriday() *Blackfriday {
combinedParam := map[string]interface{}{
- "smartypants": true,
- "angledQuotes": false,
- "fractions": true,
- "hrefTargetBlank": false,
- "smartDashes": true,
- "latexDashes": true,
- "plainIDAnchors": false,
+ "smartypants": true,
+ "angledQuotes": false,
+ "fractions": true,
+ "hrefTargetBlank": false,
+ "smartDashes": true,
+ "latexDashes": true,
+ "plainIDAnchors": false,
+ "sourceRelativeLinks": false,
}
siteParam := viper.GetStringMap("blackfriday")
@@ -198,7 +200,9 @@
}
return &HugoHtmlRenderer{
- blackfriday.HtmlRendererWithParameters(htmlFlags, "", "", renderParameters),
+ FileResolver: ctx.FileResolver,
+ LinkResolver: ctx.LinkResolver,
+ Renderer: blackfriday.HtmlRendererWithParameters(htmlFlags, "", "", renderParameters),
}
}
@@ -329,11 +333,13 @@
// RenderingContext holds contextual information, like content and configuration,
// for a given content rendering.
type RenderingContext struct {
- Content []byte
- PageFmt string
- DocumentID string
- Config *Blackfriday
- configInit sync.Once
+ Content []byte
+ PageFmt string
+ DocumentID string
+ Config *Blackfriday
+ FileResolver FileResolverFunc
+ LinkResolver LinkResolverFunc
+ configInit sync.Once
}
func (c *RenderingContext) getConfig() *Blackfriday {
--- a/helpers/content_renderer.go
+++ b/helpers/content_renderer.go
@@ -19,12 +19,18 @@
"github.com/miekg/mmark"
"github.com/russross/blackfriday"
+ jww "github.com/spf13/jwalterweatherman"
"github.com/spf13/viper"
)
+type LinkResolverFunc func(ref string) (string, error)
+type FileResolverFunc func(ref string) (string, error)
+
// Wraps a blackfriday.Renderer, typically a blackfriday.Html
// Enabling Hugo to customise the rendering experience
type HugoHtmlRenderer struct {
+ FileResolver FileResolverFunc
+ LinkResolver LinkResolverFunc
blackfriday.Renderer
}
@@ -35,6 +41,33 @@
out.WriteString(Highlight(str, lang, opts))
} else {
renderer.Renderer.BlockCode(out, text, lang)
+ }
+}
+
+func (renderer *HugoHtmlRenderer) Link(out *bytes.Buffer, link []byte, title []byte, content []byte) {
+ if renderer.LinkResolver == nil || bytes.HasPrefix(link, []byte("{@{@HUGOSHORTCODE")) {
+ // Use the blackfriday built in Link handler
+ renderer.Renderer.Link(out, link, title, content)
+ } else {
+ newLink, err := renderer.LinkResolver(string(link))
+ if err != nil {
+ newLink = string(link)
+ jww.ERROR.Printf("LinkResolver: %s", err)+ }
+ renderer.Renderer.Link(out, []byte(newLink), title, content)
+ }
+}
+func (renderer *HugoHtmlRenderer) Image(out *bytes.Buffer, link []byte, title []byte, alt []byte) {
+ if renderer.FileResolver == nil || bytes.HasPrefix(link, []byte("{@{@HUGOSHORTCODE")) {
+ // Use the blackfriday built in Image handler
+ renderer.Renderer.Image(out, link, title, alt)
+ } else {
+ newLink, err := renderer.FileResolver(string(link))
+ if err != nil {
+ newLink = string(link)
+ jww.ERROR.Printf("FileResolver: %s", err)+ }
+ renderer.Renderer.Image(out, []byte(newLink), title, alt)
}
}
--- a/hugolib/page.go
+++ b/hugolib/page.go
@@ -234,14 +234,34 @@
}
func (p *Page) renderBytes(content []byte) []byte {
+ var fn helpers.LinkResolverFunc
+ var fileFn helpers.FileResolverFunc
+ if p.getRenderingConfig().SourceRelativeLinksEval {
+ fn = func(ref string) (string, error) {
+ return p.Node.Site.GitHub(ref, p)
+ }
+ fileFn = func(ref string) (string, error) {
+ return p.Node.Site.GitHubFileLink(ref, p)
+ }
+ }
return helpers.RenderBytes(
&helpers.RenderingContext{Content: content, PageFmt: p.guessMarkupType(),
- DocumentID: p.UniqueID(), Config: p.getRenderingConfig()})
+ DocumentID: p.UniqueID(), Config: p.getRenderingConfig(), LinkResolver: fn, FileResolver: fileFn})
}
func (p *Page) renderContent(content []byte) []byte {
+ var fn helpers.LinkResolverFunc
+ var fileFn helpers.FileResolverFunc
+ if p.getRenderingConfig().SourceRelativeLinksEval {
+ fn = func(ref string) (string, error) {
+ return p.Node.Site.GitHub(ref, p)
+ }
+ fileFn = func(ref string) (string, error) {
+ return p.Node.Site.GitHubFileLink(ref, p)
+ }
+ }
return helpers.RenderBytesWithTOC(&helpers.RenderingContext{Content: content, PageFmt: p.guessMarkupType(),
- DocumentID: p.UniqueID(), Config: p.getRenderingConfig()})
+ DocumentID: p.UniqueID(), Config: p.getRenderingConfig(), LinkResolver: fn, FileResolver: fileFn})
}
func (p *Page) getRenderingConfig() *helpers.Blackfriday {
--- a/hugolib/site.go
+++ b/hugolib/site.go
@@ -100,7 +100,7 @@
Social SiteSocial
Sections Taxonomy
Pages *Pages
- Files []*source.File
+ Files *[]*source.File
Menus *Menus
Hugo *HugoInfo
Title string
@@ -217,6 +217,166 @@
return s.refLink(ref, page, true)
}
+func (s *SiteInfo) GitHub(ref string, page *Page) (string, error) {
+ return s.githubLink(ref, page, true)
+}
+
+func (s *SiteInfo) githubLink(ref string, currentPage *Page, relative bool) (string, error) {
+ var refURL *url.URL
+ var err error
+
+ // TODO can I make this a param to `hugo --use-github-links=/docs`?
+ // SVEN: add more tests - the prefix might be a real dir inside tho - add some pages that have it as a legitimate path
+ repositoryPathPrefix := "/docs"
+
+ refURL, err = url.Parse(strings.TrimPrefix(ref, repositoryPathPrefix))
+ if err != nil {
+ return "", err
+ }
+
+ if refURL.Scheme != "" {+ // TODO: consider looking for http(s?)://github.com/user/project/prefix and replacing it - tho this may be intentional, so idk
+ //return "", fmt.Errorf("Not a plain filepath link (%s)", ref)+ // Treat this as not an error, as the link is used as-is
+ return ref, nil
+ }
+
+ var target *Page
+ var link string
+
+ if refURL.Path != "" {+ refPath := filepath.Clean(filepath.FromSlash(refURL.Path))
+
+ if strings.IndexRune(refPath, os.PathSeparator) == 0 { // filepath.IsAbs fails to me.
+ refPath = refPath[1:]
+ } else {
+ if currentPage != nil {
+ refPath = filepath.Join(currentPage.Source.Dir(), refURL.Path)
+ }
+ }
+
+ for _, page := range []*Page(*s.Pages) {
+ if page.Source.Path() == refPath {
+ target = page
+ break
+ }
+ }
+ // need to exhaust the test, then try with the others :/
+ // if the refPath doesn't end in a filename with extension `.md`, then try with `.md` , and then `/index.md`
+ mdPath := strings.TrimSuffix(refPath, string(os.PathSeparator)) + ".md"
+ for _, page := range []*Page(*s.Pages) {
+ if page.Source.Path() == mdPath {
+ target = page
+ break
+ }
+ }
+ indexPath := filepath.Join(refPath, "index.md")
+ for _, page := range []*Page(*s.Pages) {
+ if page.Source.Path() == indexPath {
+ target = page
+ break
+ }
+ }
+
+ if target == nil {+ return "", fmt.Errorf("No page found for \"%s\" on page \"%s\".\n", ref, currentPage.Source.Path())+ }
+
+ // SVEN: look at filepath.Rel() it might help, got the rel/non-rel url's (dangerous tho)
+ if relative {
+ link, err = target.RelPermalink()
+ } else {
+ link, err = target.Permalink()
+ }
+
+ if err != nil {
+ return "", err
+ }
+ }
+
+ // SVEN: add tests for github style relative fragments
+ if refURL.Fragment != "" {+ link = link + "#" + refURL.Fragment
+
+ if refURL.Path != "" && target != nil && !target.getRenderingConfig().PlainIDAnchors {+ link = link + ":" + target.UniqueID()
+ } else if currentPage != nil && !currentPage.getRenderingConfig().PlainIDAnchors {
+ link = link + ":" + currentPage.UniqueID()
+ }
+ }
+
+ return link, nil
+}
+
+func (s *SiteInfo) GitHubFileLink(ref string, page *Page) (string, error) {
+ return s.githubFileLink(ref, page, false)
+}
+
+// for non-pages in the site tree
+func (s *SiteInfo) githubFileLink(ref string, currentPage *Page, relative bool) (string, error) {
+ var refURL *url.URL
+ var err error
+
+ // TODO can I make this a param to `hugo --use-github-links=/docs`?
+ // SVEN: add more tests - the prefix might be a real dir inside tho - add some pages that have it as a legitimate path
+ repositoryPathPrefix := "/docs"
+
+ refURL, err = url.Parse(strings.TrimPrefix(ref, repositoryPathPrefix))
+ if err != nil {
+ return "", err
+ }
+
+ if refURL.Scheme != "" {+ // TODO: consider looking for http(s?)://github.com/user/project/prefix and replacing it - tho this may be intentional, so idk
+ //return "", fmt.Errorf("Not a plain filepath link (%s)", ref)+ // Treat this as not an error, as the link is used as-is
+ return ref, nil
+ }
+
+ var target *source.File
+ var link string
+
+ if refURL.Path != "" {+ refPath := filepath.Clean(filepath.FromSlash(refURL.Path))
+
+ if strings.IndexRune(refPath, os.PathSeparator) == 0 { // filepath.IsAbs fails to me.
+ refPath = refPath[1:]
+ } else {
+ if currentPage != nil {
+ refPath = filepath.Join(currentPage.Source.Dir(), refURL.Path)
+ }
+ }
+
+ for _, file := range []*source.File(*s.Files) {
+ if file.Path() == refPath {
+ target = file
+ break
+ }
+ }
+
+ if target == nil {+ return "", fmt.Errorf("No file found for \"%s\" on page \"%s\".\n", ref, currentPage.Source.Path())+ }
+
+ link = target.Path()
+ // SVEN: look at filepath.Rel() it might help, got the rel/non-rel url's (dangerous tho)
+ // SVEN: reconsider the fact I hardcoded the `relative` bool in both github resolvers
+ if relative {+ return "./" + filepath.ToSlash(link), nil
+ } else {+ return "/" + filepath.ToSlash(link), nil
+ }
+
+ if err != nil {
+ return "", err
+ }
+
+ return link, nil
+ }
+
+ return "", fmt.Errorf("failed to find a file to match \"%s\" on page \"%s\"", ref, currentPage.Source.Path())+}
+
func (s *SiteInfo) addToPaginationPageCount(cnt uint64) {
atomic.AddUint64(&s.paginationPageCount, cnt)
}
@@ -479,6 +639,7 @@
canonifyURLs: viper.GetBool("CanonifyURLs"),
preserveTaxonomyNames: viper.GetBool("PreserveTaxonomyNames"),
Pages: &s.Pages,
+ Files: &s.Files,
Menus: &s.Menus,
Params: params,
Permalinks: permalinks,
@@ -1395,6 +1556,7 @@
jww.FEEDBACK.Println(s.draftStats())
jww.FEEDBACK.Println(s.futureStats())
jww.FEEDBACK.Printf("%d pages created\n", len(s.Pages))+ jww.FEEDBACK.Printf("%d non-page files copied\n", len(s.Files)) jww.FEEDBACK.Printf("%d paginator pages created\n", s.Info.paginationPageCount) taxonomies := viper.GetStringMapString("Taxonomies")--- a/hugolib/site_test.go
+++ b/hugolib/site_test.go
@@ -14,7 +14,6 @@
package hugolib
import (
- "bitbucket.org/pkg/inflect"
"bytes"
"fmt"
"html/template"
@@ -23,6 +22,8 @@
"strings"
"testing"
+ "bitbucket.org/pkg/inflect"
+
"github.com/spf13/afero"
"github.com/spf13/hugo/helpers"
"github.com/spf13/hugo/hugofs"
@@ -1026,26 +1027,30 @@
return nil
}
-func TestRefLinking(t *testing.T) {
- viper.Reset()
- defer viper.Reset()
-
+func setupLinkingMockSite(t *testing.T) *Site {
hugofs.DestinationFS = new(afero.MemMapFs)
sources := []source.ByteSource{
{filepath.FromSlash("index.md"), []byte("")},
{filepath.FromSlash("rootfile.md"), []byte("")},
+ {filepath.FromSlash("root-image.png"), []byte("")},
{filepath.FromSlash("level2/2-root.md"), []byte("")},
{filepath.FromSlash("level2/index.md"), []byte("")},
{filepath.FromSlash("level2/common.md"), []byte("")},
- {filepath.FromSlash("level2b/2b-root.md"), []byte("")},
- {filepath.FromSlash("level2b/index.md"), []byte("")},
- {filepath.FromSlash("level2b/common.md"), []byte("")},
+ // {filepath.FromSlash("level2b/2b-root.md"), []byte("")},
+ // {filepath.FromSlash("level2b/index.md"), []byte("")},
+ // {filepath.FromSlash("level2b/common.md"), []byte("")},
+
+ {filepath.FromSlash("level2/2-image.png"), []byte("")},
+ {filepath.FromSlash("level2/common.png"), []byte("")},
+
{filepath.FromSlash("level2/level3/3-root.md"), []byte("")},
{filepath.FromSlash("level2/level3/index.md"), []byte("")},
{filepath.FromSlash("level2/level3/common.md"), []byte("")},
+ {filepath.FromSlash("level2/level3/3-image.png"), []byte("")},
+ {filepath.FromSlash("level2/level3/common.png"), []byte("")},
}
site := &Site{
@@ -1064,8 +1069,14 @@
viper.Set("PluralizeListTitles", false) viper.Set("CanonifyURLs", false)- // END init mock site
+ return site
+}
+func TestRefLinking(t *testing.T) {
+ viper.Reset()
+ defer viper.Reset()
+ site := setupLinkingMockSite(t)
+
currentPage := findPage(site, "level2/level3/index.md")
if currentPage == nil {
t.Fatalf("failed to find current page in site")
@@ -1083,4 +1094,190 @@
}
}
// TODO: and then the failure cases.
+}
+
+func TestSourceRelativeLinksing(t *testing.T) {
+ viper.Reset()
+ defer viper.Reset()
+ site := setupLinkingMockSite(t)
+
+ type resultMap map[string]string
+
+ okresults := map[string]resultMap{+ "index.md": map[string]string{+ "/docs/rootfile.md": "/rootfile/",
+ "/docs/index.md": "/",
+ "rootfile.md": "/rootfile/",
+ "index.md": "/",
+ "level2/2-root.md": "/level2/2-root/",
+ "level2/index.md": "/level2/",
+ "/docs/level2/2-root.md": "/level2/2-root/",
+ "/docs/level2/index.md": "/level2/",
+ "level2/level3/3-root.md": "/level2/level3/3-root/",
+ "level2/level3/index.md": "/level2/level3/",
+ "/docs/level2/level3/3-root.md": "/level2/level3/3-root/",
+ "/docs/level2/level3/index.md": "/level2/level3/",
+ "/docs/level2/2-root/": "/level2/2-root/",
+ "/docs/level2/": "/level2/",
+ "/docs/level2/2-root": "/level2/2-root/",
+ "/docs/level2": "/level2/",
+ "/level2/2-root/": "/level2/2-root/",
+ "/level2/": "/level2/",
+ "/level2/2-root": "/level2/2-root/",
+ "/level2": "/level2/",
+ }, "rootfile.md": map[string]string{+ "/docs/rootfile.md": "/rootfile/",
+ "/docs/index.md": "/",
+ "rootfile.md": "/rootfile/",
+ "index.md": "/",
+ "level2/2-root.md": "/level2/2-root/",
+ "level2/index.md": "/level2/",
+ "/docs/level2/2-root.md": "/level2/2-root/",
+ "/docs/level2/index.md": "/level2/",
+ "level2/level3/3-root.md": "/level2/level3/3-root/",
+ "level2/level3/index.md": "/level2/level3/",
+ "/docs/level2/level3/3-root.md": "/level2/level3/3-root/",
+ "/docs/level2/level3/index.md": "/level2/level3/",
+ }, "level2/2-root.md": map[string]string{+ "../rootfile.md": "/rootfile/",
+ "../index.md": "/",
+ "/docs/rootfile.md": "/rootfile/",
+ "/docs/index.md": "/",
+ "2-root.md": "/level2/2-root/",
+ "index.md": "/level2/",
+ "../level2/2-root.md": "/level2/2-root/",
+ "../level2/index.md": "/level2/",
+ "./2-root.md": "/level2/2-root/",
+ "./index.md": "/level2/",
+ "/docs/level2/index.md": "/level2/",
+ "/docs/level2/2-root.md": "/level2/2-root/",
+ "level3/3-root.md": "/level2/level3/3-root/",
+ "level3/index.md": "/level2/level3/",
+ "../level2/level3/index.md": "/level2/level3/",
+ "../level2/level3/3-root.md": "/level2/level3/3-root/",
+ "/docs/level2/level3/index.md": "/level2/level3/",
+ "/docs/level2/level3/3-root.md": "/level2/level3/3-root/",
+ }, "level2/index.md": map[string]string{+ "../rootfile.md": "/rootfile/",
+ "../index.md": "/",
+ "/docs/rootfile.md": "/rootfile/",
+ "/docs/index.md": "/",
+ "2-root.md": "/level2/2-root/",
+ "index.md": "/level2/",
+ "../level2/2-root.md": "/level2/2-root/",
+ "../level2/index.md": "/level2/",
+ "./2-root.md": "/level2/2-root/",
+ "./index.md": "/level2/",
+ "/docs/level2/index.md": "/level2/",
+ "/docs/level2/2-root.md": "/level2/2-root/",
+ "level3/3-root.md": "/level2/level3/3-root/",
+ "level3/index.md": "/level2/level3/",
+ "../level2/level3/index.md": "/level2/level3/",
+ "../level2/level3/3-root.md": "/level2/level3/3-root/",
+ "/docs/level2/level3/index.md": "/level2/level3/",
+ "/docs/level2/level3/3-root.md": "/level2/level3/3-root/",
+ }, "level2/level3/3-root.md": map[string]string{+ "../../rootfile.md": "/rootfile/",
+ "../../index.md": "/",
+ "/docs/rootfile.md": "/rootfile/",
+ "/docs/index.md": "/",
+ "../2-root.md": "/level2/2-root/",
+ "../index.md": "/level2/",
+ "/docs/level2/2-root.md": "/level2/2-root/",
+ "/docs/level2/index.md": "/level2/",
+ "3-root.md": "/level2/level3/3-root/",
+ "index.md": "/level2/level3/",
+ "./3-root.md": "/level2/level3/3-root/",
+ "./index.md": "/level2/level3/",
+ // "../level2/level3/3-root.md": "/level2/level3/3-root/",
+ // "../level2/level3/index.md": "/level2/level3/",
+ "/docs/level2/level3/3-root.md": "/level2/level3/3-root/",
+ "/docs/level2/level3/index.md": "/level2/level3/",
+ }, "level2/level3/index.md": map[string]string{+ "../../rootfile.md": "/rootfile/",
+ "../../index.md": "/",
+ "/docs/rootfile.md": "/rootfile/",
+ "/docs/index.md": "/",
+ "../2-root.md": "/level2/2-root/",
+ "../index.md": "/level2/",
+ "/docs/level2/2-root.md": "/level2/2-root/",
+ "/docs/level2/index.md": "/level2/",
+ "3-root.md": "/level2/level3/3-root/",
+ "index.md": "/level2/level3/",
+ "./3-root.md": "/level2/level3/3-root/",
+ "./index.md": "/level2/level3/",
+ // "../level2/level3/3-root.md": "/level2/level3/3-root/",
+ // "../level2/level3/index.md": "/level2/level3/",
+ "/docs/level2/level3/3-root.md": "/level2/level3/3-root/",
+ "/docs/level2/level3/index.md": "/level2/level3/",
+ },
+ }
+
+ for currentFile, results := range okresults {
+ currentPage := findPage(site, currentFile)
+ if currentPage == nil {
+ t.Fatalf("failed to find current page in site")
+ }
+ for link, url := range results {
+ if out, err := site.Info.githubLink(link, currentPage, true); err != nil || out != url {
+ t.Errorf("Expected %s to resolve to (%s), got (%s) - error: %s", link, url, out, err)
+ } else {
+ //t.Logf("tested ok %s maps to %s", link, out)
+ }
+ }
+ }
+ // TODO: and then the failure cases.
+ // "https://docker.com": "",
+ // site_test.go:1094: Expected https://docker.com to resolve to (), got () - error: Not a plain filepath link (https://docker.com)
+
+}
+
+func TestGitHubFileLinking(t *testing.T) {
+ viper.Reset()
+ defer viper.Reset()
+ site := setupLinkingMockSite(t)
+
+ type resultMap map[string]string
+
+ okresults := map[string]resultMap{+ "index.md": map[string]string{+ "/root-image.png": "/root-image.png",
+ "root-image.png": "/root-image.png",
+ }, "rootfile.md": map[string]string{+ "/root-image.png": "/root-image.png",
+ }, "level2/2-root.md": map[string]string{+ "/root-image.png": "/root-image.png",
+ "common.png": "/level2/common.png",
+ }, "level2/index.md": map[string]string{+ "/root-image.png": "/root-image.png",
+ "common.png": "/level2/common.png",
+ "./common.png": "/level2/common.png",
+ }, "level2/level3/3-root.md": map[string]string{+ "/root-image.png": "/root-image.png",
+ "common.png": "/level2/level3/common.png",
+ "../common.png": "/level2/common.png",
+ }, "level2/level3/index.md": map[string]string{+ "/root-image.png": "/root-image.png",
+ "common.png": "/level2/level3/common.png",
+ "../common.png": "/level2/common.png",
+ },
+ }
+
+ for currentFile, results := range okresults {
+ currentPage := findPage(site, currentFile)
+ if currentPage == nil {
+ t.Fatalf("failed to find current page in site")
+ }
+ for link, url := range results {
+ if out, err := site.Info.githubFileLink(link, currentPage, false); err != nil || out != url {
+ t.Errorf("Expected %s to resolve to (%s), got (%s) - error: %s", link, url, out, err)
+ } else {
+ //t.Logf("tested ok %s maps to %s", link, out)
+ }
+ }
+ }
+ // TODO: and then the failure cases.
+ // "https://docker.com": "",
+ // site_test.go:1094: Expected https://docker.com to resolve to (), got () - error: Not a plain filepath link (https://docker.com)
+
}
--