author    | Anthony Fok <[email protected]> | 2015-03-17 22:16:54 -0700
committer | bep <[email protected]> | 2015-03-18 11:30:37 +0100
commit    | 8b8fb417ae065954a056ea018bd092748cc9127c
tree      | a9160d1f6acc218bcb8728d729deaf1b532b56b3
parent    | ca69cad8aa5d8eefbce13abd97b2b979339a458c
More initialism corrections (golint)
Thanks to @bep's new, brilliant helpers.Deprecated() function,
the following functions and variables have been transitioned to
their new names. The old names remain available as deprecated
aliases in v0.14 and warn the user that they will be removed in v0.15:
* .Url → .URL (for node, menu and paginator)
* .Site.BaseUrl → .Site.BaseURL
* .Site.Indexes → .Site.Taxonomies
* .Site.Recent → .Site.Pages
* getJson → getJSON
* getCsv → getCSV
* safeHtml → safeHTML
* safeCss → safeCSS
* safeUrl → safeURL
Also fix related initialisms in strings and comments.
Continued effort toward fixing #959.
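
Every rename in this commit follows the same backward-compatibility pattern: the canonical identifier gets the golint-preferred initialism (URL, JSON, CSS, ...), and the old spelling survives as a thin alias that logs a warning through helpers.Deprecated() before forwarding to the new name. The sketch below is a minimal, self-contained illustration of that pattern, not code lifted from the diff; the local deprecated() function and its warning text are stand-ins for helpers.Deprecated(), whose implementation lives outside this changeset.

```go
package main

import (
	"fmt"
	"os"
)

// deprecated is a stand-in for Hugo's helpers.Deprecated(object, item, alternative).
// The real helper is not part of this diff; assume it logs a warning of roughly
// this shape and otherwise does nothing.
func deprecated(object, item, alternative string) {
	fmt.Fprintf(os.Stderr, "%s's %s is deprecated and will be removed in Hugo 0.15. Use %s instead.\n",
		object, item, alternative)
}

// Node mirrors the relevant shape of hugolib.Node after the rename:
// URL is now the canonical field.
type Node struct {
	Title string
	URL   string
}

// Url is the backward-compatible alias kept for v0.14: it warns and then
// forwards to the new field, so templates that still reference .Url keep working.
func (n *Node) Url() string {
	deprecated("Node", ".Url", ".URL")
	return n.URL
}

func main() {
	n := &Node{Title: "Hello", URL: "/post/hello-world/"}
	fmt.Println(n.URL)   // new, preferred access
	fmt.Println(n.Url()) // old access still works, but logs a deprecation warning
}
```

Because Go templates resolve fields and zero-argument methods with the same syntax, {{ .Url }} in an existing v0.14 theme binds to the alias method while {{ .URL }} reads the field directly, so sites keep building while the warning nudges authors toward the new spelling. The renamed template functions (getJson, getCsv, safeHtml, safeCss, safeUrl) get the same treatment via wrapper closures registered under the old names in tpl/template.go.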
-rw-r--r-- | commands/server.go             |  4
-rw-r--r-- | helpers/path.go                |  2
-rw-r--r-- | helpers/url.go                 | 14
-rw-r--r-- | helpers/url_test.go            |  6
-rw-r--r-- | hugolib/menu.go                | 21
-rw-r--r-- | hugolib/menu_test.go           | 20
-rw-r--r-- | hugolib/node.go                | 17
-rw-r--r-- | hugolib/page.go                | 24
-rw-r--r-- | hugolib/page_permalink_test.go |  8
-rw-r--r-- | hugolib/pagination.go          | 14
-rw-r--r-- | hugolib/pagination_test.go     | 12
-rw-r--r-- | hugolib/permalinks.go          |  4
-rw-r--r-- | hugolib/site.go                | 62
-rw-r--r-- | target/page_test.go            |  2
-rw-r--r-- | tpl/template.go                | 33
-rw-r--r-- | tpl/template_embedded.go       | 24
-rw-r--r-- | tpl/template_resources.go      | 16
-rw-r--r-- | tpl/template_test.go           | 16
-rw-r--r-- | transform/absurl.go            | 10
-rw-r--r-- | transform/absurlreplacer.go    |  4
20 files changed, 184 insertions, 129 deletions
diff --git a/commands/server.go b/commands/server.go index 29900f9ce..372a67c6b 100644 --- a/commands/server.go +++ b/commands/server.go @@ -135,7 +135,7 @@ func serve(port int) { } } -// fixUrl massages the BaseUrl into a form needed for serving +// fixURL massages the BaseURL into a form needed for serving // all pages correctly. func fixURL(s string) (string, error) { useLocalhost := false @@ -164,7 +164,7 @@ func fixURL(s string) (string, error) { if strings.Contains(host, ":") { host, _, err = net.SplitHostPort(u.Host) if err != nil { - return "", fmt.Errorf("Failed to split BaseUrl hostpost: %s", err) + return "", fmt.Errorf("Failed to split BaseURL hostpost: %s", err) } } u.Host = fmt.Sprintf("%s:%d", host, serverPort) diff --git a/helpers/path.go b/helpers/path.go index 973554664..91419e0c9 100644 --- a/helpers/path.go +++ b/helpers/path.go @@ -330,7 +330,7 @@ func PathPrep(ugly bool, in string) string { return PrettifyPath(in) } -// Same as PrettifyUrlPath() but for file paths. +// Same as PrettifyURLPath() but for file paths. // /section/name.html becomes /section/name/index.html // /section/name/ becomes /section/name/index.html // /section/name/index.html becomes /section/name/index.html diff --git a/helpers/url.go b/helpers/url.go index b9b58d050..13421632f 100644 --- a/helpers/url.go +++ b/helpers/url.go @@ -69,7 +69,7 @@ func sanitizeURLWithFlags(in string, f purell.NormalizationFlags) string { // in issues #157, #622, etc., without forcing // relative URLs to begin with '/'. // Once the fixes are in, let's remove this kludge - // and restore SanitizeUrl() to the way it was. + // and restore SanitizeURL() to the way it was. // -- @anthonyfok, 2015-02-16 // // Begin temporary kludge @@ -87,12 +87,12 @@ func sanitizeURLWithFlags(in string, f purell.NormalizationFlags) string { } -// SanitizeUrl sanitizes the input URL string. +// SanitizeURL sanitizes the input URL string. func SanitizeURL(in string) string { return sanitizeURLWithFlags(in, purell.FlagsSafe|purell.FlagRemoveTrailingSlash|purell.FlagRemoveDotSegments|purell.FlagRemoveDuplicateSlashes|purell.FlagRemoveUnnecessaryHostDots|purell.FlagRemoveEmptyPortSeparator) } -// SanitizeUrlKeepTrailingSlash is the same as SanitizeUrl, but will keep any trailing slash. +// SanitizeURLKeepTrailingSlash is the same as SanitizeURL, but will keep any trailing slash. func SanitizeURLKeepTrailingSlash(in string) string { return sanitizeURLWithFlags(in, purell.FlagsSafe|purell.FlagRemoveDotSegments|purell.FlagRemoveDuplicateSlashes|purell.FlagRemoveUnnecessaryHostDots|purell.FlagRemoveEmptyPortSeparator) } @@ -147,7 +147,7 @@ func MakePermalink(host, plink string) *url.URL { // AddContextRoot adds the context root to an URL if it's not already set. // For relative URL entries on sites with a base url with a context root set (i.e. http://example.com/mysite), -// relative URLs must not include the context root if canonifyUrls is enabled. But if it's disabled, it must be set. +// relative URLs must not include the context root if canonifyURLs is enabled. But if it's disabled, it must be set. func AddContextRoot(baseURL, relativePath string) string { url, err := url.Parse(baseURL) @@ -185,7 +185,7 @@ func URLPrep(ugly bool, in string) string { return url } -// PrettifyUrl takes a URL string and returns a semantic, clean URL. +// PrettifyURL takes a URL string and returns a semantic, clean URL. 
func PrettifyURL(in string) string { x := PrettifyURLPath(in) @@ -200,7 +200,7 @@ func PrettifyURL(in string) string { return x } -// PrettifyUrlPath takes a URL path to a content and converts it +// PrettifyURLPath takes a URL path to a content and converts it // to enable pretty URLs. // /section/name.html becomes /section/name/index.html // /section/name/ becomes /section/name/index.html @@ -209,7 +209,7 @@ func PrettifyURLPath(in string) string { return PrettiyPath(in, pathBridge) } -// Uglify does the opposite of PrettifyUrlPath(). +// Uglify does the opposite of PrettifyURLPath(). // /section/name/index.html becomes /section/name.html // /section/name/ becomes /section/name.html // /section/name.html becomes /section/name.html diff --git a/helpers/url_test.go b/helpers/url_test.go index 35ade94e5..b11be3521 100644 --- a/helpers/url_test.go +++ b/helpers/url_test.go @@ -6,7 +6,7 @@ import ( "testing" ) -func TestUrlize(t *testing.T) { +func TestURLize(t *testing.T) { tests := []struct { input string expected string @@ -26,7 +26,7 @@ func TestUrlize(t *testing.T) { } } -func TestSanitizeUrl(t *testing.T) { +func TestSanitizeURL(t *testing.T) { tests := []struct { input string expected string @@ -76,7 +76,7 @@ func TestMakePermalink(t *testing.T) { } } -func TestUrlPrep(t *testing.T) { +func TestURLPrep(t *testing.T) { type test struct { ugly bool input string diff --git a/hugolib/menu.go b/hugolib/menu.go index 4fdf315f0..95370062b 100644 --- a/hugolib/menu.go +++ b/hugolib/menu.go @@ -19,10 +19,11 @@ import ( "strings" "github.com/spf13/cast" + "github.com/spf13/hugo/helpers" ) type MenuEntry struct { - Url string + URL string Name string Menu string Identifier string @@ -37,6 +38,12 @@ type Menu []*MenuEntry type Menus map[string]*Menu type PageMenus map[string]*MenuEntry +// Url is deprecated. Will be removed in 0.15. 
+func (me *MenuEntry) Url() string { + helpers.Deprecated("MenuEntry", ".Url", ".URL") + return me.URL +} + func (me *MenuEntry) AddChild(child *MenuEntry) { me.Children = append(me.Children, child) me.Children.Sort() @@ -53,22 +60,22 @@ func (me *MenuEntry) KeyName() string { return me.Name } -func (me *MenuEntry) hopefullyUniqueId() string { +func (me *MenuEntry) hopefullyUniqueID() string { if me.Identifier != "" { return me.Identifier - } else if me.Url != "" { - return me.Url + } else if me.URL != "" { + return me.URL } else { return me.Name } } func (me *MenuEntry) IsEqual(inme *MenuEntry) bool { - return me.hopefullyUniqueId() == inme.hopefullyUniqueId() && me.Parent == inme.Parent + return me.hopefullyUniqueID() == inme.hopefullyUniqueID() && me.Parent == inme.Parent } func (me *MenuEntry) IsSameResource(inme *MenuEntry) bool { - return me.Url != "" && inme.Url != "" && me.Url == inme.Url + return me.URL != "" && inme.URL != "" && me.URL == inme.URL } func (me *MenuEntry) MarshallMap(ime map[string]interface{}) { @@ -76,7 +83,7 @@ func (me *MenuEntry) MarshallMap(ime map[string]interface{}) { loki := strings.ToLower(k) switch loki { case "url": - me.Url = cast.ToString(v) + me.URL = cast.ToString(v) case "weight": me.Weight = cast.ToInt(v) case "name": diff --git a/hugolib/menu_test.go b/hugolib/menu_test.go index bea5a84e4..b829dd2ea 100644 --- a/hugolib/menu_test.go +++ b/hugolib/menu_test.go @@ -180,8 +180,8 @@ func doTestPageMenuWithIdentifier(t *testing.T, menuPageSources []source.ByteSou assert.NotNil(t, me1) assert.NotNil(t, me2) - assert.True(t, strings.Contains(me1.Url, "doc1")) - assert.True(t, strings.Contains(me2.Url, "doc2")) + assert.True(t, strings.Contains(me1.URL, "doc1")) + assert.True(t, strings.Contains(me2.URL, "doc2")) } @@ -216,8 +216,8 @@ func doTestPageMenuWithDuplicateName(t *testing.T, menuPageSources []source.Byte assert.NotNil(t, me1) assert.NotNil(t, me2) - assert.True(t, strings.Contains(me1.Url, "doc1")) - assert.True(t, strings.Contains(me2.Url, "doc2")) + assert.True(t, strings.Contains(me1.URL, "doc1")) + assert.True(t, strings.Contains(me2.URL, "doc2")) } @@ -275,7 +275,7 @@ func TestMenuWithHashInURL(t *testing.T) { assert.NotNil(t, me) - assert.Equal(t, "/Zoo/resource/#anchor", me.Url) + assert.Equal(t, "/Zoo/resource/#anchor", me.URL) } // issue #719 @@ -309,7 +309,7 @@ func doTestMenuWithUnicodeURLs(t *testing.T, canonifyURLs, uglyURLs bool) { expected = expectedBase + "/" } - assert.Equal(t, expected, unicodeRussian.Url, "uglyURLs[%t]", uglyURLs) + assert.Equal(t, expected, unicodeRussian.URL, "uglyURLs[%t]", uglyURLs) } func TestTaxonomyNodeMenu(t *testing.T) { @@ -329,7 +329,7 @@ func TestTaxonomyNodeMenu(t *testing.T) { {"tax", taxRenderInfo{key: "key", singular: "one", plural: "two"}, ts.findTestMenuEntryByID("tax", "2"), true, false}, {"tax", taxRenderInfo{key: "key", singular: "one", plural: "two"}, - &MenuEntry{Name: "Somewhere else", Url: "/somewhereelse"}, false, false}, + &MenuEntry{Name: "Somewhere else", URL: "/somewhereelse"}, false, false}, } { n, _ := ts.site.newTaxonomyNode(this.taxInfo) @@ -349,7 +349,7 @@ func TestTaxonomyNodeMenu(t *testing.T) { menuEntryXML := ts.findTestMenuEntryByID("tax", "xml") - if strings.HasSuffix(menuEntryXML.Url, "/") { + if strings.HasSuffix(menuEntryXML.URL, "/") { t.Error("RSS menu item should not be padded with trailing slash") } } @@ -359,7 +359,7 @@ func TestHomeNodeMenu(t *testing.T) { defer resetMenuTestState(ts) home := ts.site.newHomeNode() - homeMenuEntry := &MenuEntry{Name: 
home.Title, Url: home.Url} + homeMenuEntry := &MenuEntry{Name: home.Title, URL: home.URL} for i, this := range []struct { menu string @@ -369,7 +369,7 @@ func TestHomeNodeMenu(t *testing.T) { }{ {"main", homeMenuEntry, true, false}, {"doesnotexist", homeMenuEntry, false, false}, - {"main", &MenuEntry{Name: "Somewhere else", Url: "/somewhereelse"}, false, false}, + {"main", &MenuEntry{Name: "Somewhere else", URL: "/somewhereelse"}, false, false}, {"grandparent", ts.findTestMenuEntryByID("grandparent", "grandparentId"), false, false}, {"grandparent", ts.findTestMenuEntryByID("grandparent", "parentId"), false, true}, {"grandparent", ts.findTestMenuEntryByID("grandparent", "grandchildId"), true, false}, diff --git a/hugolib/node.go b/hugolib/node.go index aa9a46d99..d13a2626e 100644 --- a/hugolib/node.go +++ b/hugolib/node.go @@ -17,6 +17,7 @@ import ( "html/template" "sync" "time" + "github.com/spf13/hugo/helpers" ) type Node struct { @@ -30,7 +31,7 @@ type Node struct { Params map[string]interface{} Date time.Time Sitemap Sitemap - UrlPath + URLPath paginator *pager paginatorInit sync.Once scratch *Scratch @@ -42,7 +43,7 @@ func (n *Node) Now() time.Time { func (n *Node) HasMenuCurrent(menuID string, inme *MenuEntry) bool { if inme.HasChildren() { - me := MenuEntry{Name: n.Title, Url: n.Url} + me := MenuEntry{Name: n.Title, URL: n.URL} for _, child := range inme.Children { if me.IsSameResource(child) { @@ -56,7 +57,7 @@ func (n *Node) HasMenuCurrent(menuID string, inme *MenuEntry) bool { func (n *Node) IsMenuCurrent(menuID string, inme *MenuEntry) bool { - me := MenuEntry{Name: n.Title, Url: n.Url} + me := MenuEntry{Name: n.Title, URL: n.URL} if !me.IsSameResource(inme) { return false } @@ -119,13 +120,19 @@ func (n *Node) RelRef(ref string) (string, error) { return n.Site.RelRef(ref, nil) } -type UrlPath struct { - Url string +type URLPath struct { + URL string Permalink template.HTML Slug string Section string } +// Url is deprecated. Will be removed in 0.15. +func (n *Node) Url() string { + helpers.Deprecated("Node", ".Url", ".URL") + return n.URL +} + // Scratch returns the writable context associated with this Node. 
func (n *Node) Scratch() *Scratch { if n.scratch == nil { diff --git a/hugolib/page.go b/hugolib/page.go index 9aa301037..76ac4f51b 100644 --- a/hugolib/page.go +++ b/hugolib/page.go @@ -341,10 +341,10 @@ func (p *Page) analyzePage() { } func (p *Page) permalink() (*url.URL, error) { - baseURL := string(p.Site.BaseUrl) + baseURL := string(p.Site.BaseURL) dir := strings.TrimSpace(filepath.ToSlash(p.Source.Dir())) pSlug := strings.TrimSpace(p.Slug) - pURL := strings.TrimSpace(p.Url) + pURL := strings.TrimSpace(p.URL) var permalink string var err error @@ -420,9 +420,9 @@ func (p *Page) RelPermalink() (string, error) { } if viper.GetBool("CanonifyURLs") { - // replacements for relpermalink with baseUrl on the form http://myhost.com/sub/ will fail later on - // have to return the Url relative from baseUrl - relpath, err := helpers.GetRelativePath(link.String(), string(p.Site.BaseUrl)) + // replacements for relpermalink with baseURL on the form http://myhost.com/sub/ will fail later on + // have to return the URL relative from baseURL + relpath, err := helpers.GetRelativePath(link.String(), string(p.Site.BaseURL)) if err != nil { return "", err } @@ -455,9 +455,9 @@ func (p *Page) update(f interface{}) error { p.Slug = helpers.URLize(cast.ToString(v)) case "url": if url := cast.ToString(v); strings.HasPrefix(url, "http://") || strings.HasPrefix(url, "https://") { - return fmt.Errorf("Only relative urls are supported, %v provided", url) + return fmt.Errorf("Only relative URLs are supported, %v provided", url) } - p.Url = helpers.URLize(cast.ToString(v)) + p.URL = helpers.URLize(cast.ToString(v)) case "type": p.contentType = cast.ToString(v) case "extension", "ext": @@ -588,7 +588,7 @@ func (p *Page) Menus() PageMenus { if ms, ok := p.Params["menu"]; ok { link, _ := p.RelPermalink() - me := MenuEntry{Name: p.LinkTitle(), Weight: p.Weight, Url: link} + me := MenuEntry{Name: p.LinkTitle(), Weight: p.Weight, URL: link} // Could be the name of the menu to attach it to mname, err := cast.ToStringE(ms) @@ -618,7 +618,7 @@ func (p *Page) Menus() PageMenus { } for name, menu := range menus { - menuEntry := MenuEntry{Name: p.LinkTitle(), Url: link, Weight: p.Weight, Menu: name} + menuEntry := MenuEntry{Name: p.LinkTitle(), URL: link, Weight: p.Weight, Menu: name} jww.DEBUG.Printf("found menu: %q, in %q\n", name, p.Title) ime, err := cast.ToStringMapE(menu) @@ -785,9 +785,9 @@ func (p *Page) FullFilePath() string { func (p *Page) TargetPath() (outfile string) { - // Always use Url if it's specified - if len(strings.TrimSpace(p.Url)) > 2 { - outfile = strings.TrimSpace(p.Url) + // Always use URL if it's specified + if len(strings.TrimSpace(p.URL)) > 2 { + outfile = strings.TrimSpace(p.URL) if strings.HasSuffix(outfile, "/") { outfile = outfile + "index.html" diff --git a/hugolib/page_permalink_test.go b/hugolib/page_permalink_test.go index dc4dc8371..1ed232348 100644 --- a/hugolib/page_permalink_test.go +++ b/hugolib/page_permalink_test.go @@ -35,7 +35,7 @@ func TestPermalink(t *testing.T) { {"x/y/z/boofar.md", "x/y/z/", "http://barnew/boo/", "boofar", "", true, true, "http://barnew/boo/x/y/z/boofar.html", "/x/y/z/boofar.html"}, {"x/y/z/boofar.md", "x/y/z/", "http://barnew/boo", "boofar", "", true, true, "http://barnew/boo/x/y/z/boofar.html", "/x/y/z/boofar.html"}, - // test url overrides + // test URL overrides {"x/y/z/boofar.md", "x/y/z", "", "", "/z/y/q/", false, false, "/z/y/q/", "/z/y/q/"}, } @@ -46,12 +46,12 @@ func TestPermalink(t *testing.T) { viper.Set("canonifyurls", test.canonifyURLs) p := &Page{ 
Node: Node{ - UrlPath: UrlPath{ + URLPath: URLPath{ Section: "z", - Url: test.url, + URL: test.url, }, Site: &SiteInfo{ - BaseUrl: test.base, + BaseURL: test.base, }, }, Source: Source{File: *source.NewFile(filepath.FromSlash(test.file))}, diff --git a/hugolib/pagination.go b/hugolib/pagination.go index b56e64f7f..5401aaa66 100644 --- a/hugolib/pagination.go +++ b/hugolib/pagination.go @@ -47,11 +47,17 @@ func (p *pager) PageNumber() int { return p.number } -// Url returns the url to the current page. -func (p *pager) Url() template.HTML { +// URL returns the URL to the current page. +func (p *pager) URL() template.HTML { return template.HTML(p.paginationURLFactory(p.PageNumber())) } +// Url is deprecated. Will be removed in 0.15. +func (p *pager) Url() template.HTML { + helpers.Deprecated("Paginator", ".Url", ".URL") + return p.URL() +} + // Pages returns the elements on this page. func (p *pager) Pages() Pages { if len(p.paginatedPages) == 0 { @@ -142,7 +148,7 @@ func (n *Node) Paginator() (*pager, error) { return } - pagers, err := paginatePages(n.Data["Pages"], n.Url) + pagers, err := paginatePages(n.Data["Pages"], n.URL) if err != nil { initError = err @@ -184,7 +190,7 @@ func (n *Node) Paginate(seq interface{}) (*pager, error) { if n.paginator != nil { return } - pagers, err := paginatePages(seq, n.Url) + pagers, err := paginatePages(seq, n.URL) if err != nil { initError = err diff --git a/hugolib/pagination_test.go b/hugolib/pagination_test.go index 7d3b2ae72..b41cf4c35 100644 --- a/hugolib/pagination_test.go +++ b/hugolib/pagination_test.go @@ -43,7 +43,7 @@ func TestPager(t *testing.T) { assert.Equal(t, 5, paginator.TotalPages()) first := paginatorPages[0] - assert.Equal(t, "page/1/", first.Url()) + assert.Equal(t, "page/1/", first.URL()) assert.Equal(t, first, first.First()) assert.True(t, first.HasNext()) assert.Equal(t, paginatorPages[1], first.Next()) @@ -58,7 +58,7 @@ func TestPager(t *testing.T) { assert.Equal(t, paginatorPages[1], third.Prev()) last := paginatorPages[4] - assert.Equal(t, "page/5/", last.Url()) + assert.Equal(t, "page/5/", last.URL()) assert.Equal(t, last, last.Last()) assert.False(t, last.HasNext()) assert.Nil(t, last.Next()) @@ -97,7 +97,7 @@ func TestPagerNoPages(t *testing.T) { } -func TestPaginationUrlFactory(t *testing.T) { +func TestPaginationURLFactory(t *testing.T) { viper.Set("PaginatePath", "zoo") unicode := newPaginationURLFactory("новости проекта") fooBar := newPaginationURLFactory("foo", "bar") @@ -197,12 +197,12 @@ func createTestPages(num int) Pages { for i := 0; i < num; i++ { pages[i] = &Page{ Node: Node{ - UrlPath: UrlPath{ + URLPath: URLPath{ Section: "z", - Url: fmt.Sprintf("http://base/x/y/p%d.html", num), + URL: fmt.Sprintf("http://base/x/y/p%d.html", num), }, Site: &SiteInfo{ - BaseUrl: "http://base/", + BaseURL: "http://base/", }, }, Source: Source{File: *source.NewFile(filepath.FromSlash(fmt.Sprintf("/x/y/p%d.md", num)))}, diff --git a/hugolib/permalinks.go b/hugolib/permalinks.go index fdae2b856..325fbfe4b 100644 --- a/hugolib/permalinks.go +++ b/hugolib/permalinks.go @@ -138,7 +138,7 @@ func pageToPermalinkDate(p *Page, dateField string) (string, error) { // pageToPermalinkTitle returns the URL-safe form of the title func pageToPermalinkTitle(p *Page, _ string) (string, error) { // Page contains Node which has Title - // (also contains UrlPath which has Slug, sometimes) + // (also contains URLPath which has Slug, sometimes) return helpers.URLize(p.Title), nil } @@ -166,7 +166,7 @@ func pageToPermalinkSlugElseTitle(p *Page, a 
string) (string, error) { } func pageToPermalinkSection(p *Page, _ string) (string, error) { - // Page contains Node contains UrlPath which has Section + // Page contains Node contains URLPath which has Section return p.Section(), nil } diff --git a/hugolib/site.go b/hugolib/site.go index 39fe888b5..e1f0fe991 100644 --- a/hugolib/site.go +++ b/hugolib/site.go @@ -93,15 +93,13 @@ type targetList struct { } type SiteInfo struct { - BaseUrl template.URL + BaseURL template.URL Taxonomies TaxonomyList Authors AuthorList Social SiteSocial - Indexes *TaxonomyList // legacy, should be identical to Taxonomies Sections Taxonomy Pages *Pages Files []*source.File - Recent *Pages // legacy, should be identical to Pages Menus *Menus Hugo *HugoInfo Title string @@ -133,6 +131,24 @@ type SiteInfo struct { // linkedin type SiteSocial map[string]string +// BaseUrl is deprecated. Will be removed in 0.15. +func (s *SiteInfo) BaseUrl() template.URL { + helpers.Deprecated("Site", ".BaseUrl", ".BaseURL") + return s.BaseURL +} + +// Recent is deprecated. Will be removed in 0.15. +func (s *SiteInfo) Recent() *Pages { + helpers.Deprecated("Site", ".Recent", ".Pages") + return s.Pages +} + +// Indexes is deprecated. Will be removed in 0.15. +func (s *SiteInfo) Indexes() *TaxonomyList { + helpers.Deprecated("Site", ".Indexes", ".Taxonomies") + return &s.Taxonomies +} + func (s *SiteInfo) GetParam(key string) interface{} { v := s.Params[strings.ToLower(key)] @@ -445,7 +461,7 @@ func (s *Site) initializeSiteInfo() { } s.Info = SiteInfo{ - BaseUrl: template.URL(helpers.SanitizeURLKeepTrailingSlash(viper.GetString("BaseURL"))), + BaseURL: template.URL(helpers.SanitizeURLKeepTrailingSlash(viper.GetString("BaseURL"))), Title: viper.GetString("Title"), Author: viper.GetStringMap("author"), LanguageCode: viper.GetString("languagecode"), @@ -454,7 +470,6 @@ func (s *Site) initializeSiteInfo() { BuildDrafts: viper.GetBool("BuildDrafts"), canonifyURLs: viper.GetBool("CanonifyURLs"), Pages: &s.Pages, - Recent: &s.Pages, Menus: &s.Menus, Params: params, Permalinks: permalinks, @@ -705,14 +720,14 @@ func (s *Site) getMenusFromConfig() Menus { menuEntry.MarshallMap(ime) - if strings.HasPrefix(menuEntry.Url, "/") { + if strings.HasPrefix(menuEntry.URL, "/") { // make it match the nodes - menuEntryURL := menuEntry.Url + menuEntryURL := menuEntry.URL menuEntryURL = helpers.URLizeAndPrep(menuEntryURL) if !s.Info.canonifyURLs { - menuEntryURL = helpers.AddContextRoot(string(s.Info.BaseUrl), menuEntryURL) + menuEntryURL = helpers.AddContextRoot(string(s.Info.BaseURL), menuEntryURL) } - menuEntry.Url = menuEntryURL + menuEntry.URL = menuEntryURL } if ret[name] == nil { @@ -764,8 +779,8 @@ func (s *Site) assembleMenus() { for p, childmenu := range children { _, ok := flat[twoD{p.MenuName, p.EntryName}] if !ok { - // if parent does not exist, create one without a url - flat[twoD{p.MenuName, p.EntryName}] = &MenuEntry{Name: p.EntryName, Url: ""} + // if parent does not exist, create one without a URL + flat[twoD{p.MenuName, p.EntryName}] = &MenuEntry{Name: p.EntryName, URL: ""} } flat[twoD{p.MenuName, p.EntryName}].Children = childmenu } @@ -818,7 +833,6 @@ func (s *Site) assembleTaxonomies() { } s.Info.Taxonomies = s.Taxonomies - s.Info.Indexes = &s.Taxonomies s.Info.Sections = s.Sections } @@ -1021,7 +1035,7 @@ func (s *Site) newTaxonomyNode(t taxRenderInfo) (*Node, string) { base := t.plural + "/" + t.key n := s.NewNode() n.Title = strings.Replace(strings.Title(t.key), "-", " ", -1) - s.setUrls(n, base) + s.setURLs(n, base) if 
len(t.pages) > 0 { n.Date = t.pages[0].Page.Date } @@ -1081,7 +1095,7 @@ func taxonomyRenderer(s *Site, taxes <-chan taxRenderInfo, results chan<- error, if !viper.GetBool("DisableRSS") { // XML Feed - n.Url = s.permalinkStr(base + "/index.xml") + n.URL = s.permalinkStr(base + "/index.xml") n.Permalink = s.permalink(base) rssLayouts := []string{"taxonomy/" + t.singular + ".rss.xml", "_default/rss.xml", "rss.xml", "_internal/_default/rss.xml"} @@ -1099,7 +1113,7 @@ func (s *Site) RenderListsOfTaxonomyTerms() (err error) { for singular, plural := range taxonomies { n := s.NewNode() n.Title = strings.Title(plural) - s.setUrls(n, plural) + s.setURLs(n, plural) n.Data["Singular"] = singular n.Data["Plural"] = plural n.Data["Terms"] = s.Taxonomies[plural] @@ -1125,7 +1139,7 @@ func (s *Site) newSectionListNode(section string, data WeightedPages) *Node { } else { n.Title = strings.Title(section) } - s.setUrls(n, section) + s.setURLs(n, section) n.Date = data[0].Page.Date n.Data["Pages"] = data.Pages() @@ -1175,7 +1189,7 @@ func (s *Site) RenderSectionLists() error { if !viper.GetBool("DisableRSS") && section != "" { // XML Feed - n.Url = s.permalinkStr(section + "/index.xml") + n.URL = s.permalinkStr(section + "/index.xml") n.Permalink = s.permalink(section) rssLayouts := []string{"section/" + section + ".rss.xml", "_default/rss.xml", "rss.xml", "_internal/_default/rss.xml"} if err := s.renderAndWriteXML("section "+section+" rss", section+"/index.xml", n, s.appendThemeTemplates(rssLayouts)...); err != nil { @@ -1189,7 +1203,7 @@ func (s *Site) RenderSectionLists() error { func (s *Site) newHomeNode() *Node { n := s.NewNode() n.Title = n.Site.Title - s.setUrls(n, "/") + s.setURLs(n, "/") n.Data["Pages"] = s.Pages return n } @@ -1232,7 +1246,7 @@ func (s *Site) RenderHomePage() error { if !viper.GetBool("DisableRSS") { // XML Feed - n.Url = s.permalinkStr("index.xml") + n.URL = s.permalinkStr("index.xml") n.Title = "" high := 50 if len(s.Pages) < high { @@ -1250,7 +1264,7 @@ func (s *Site) RenderHomePage() error { } } - n.Url = helpers.URLize("404.html") + n.URL = helpers.URLize("404.html") n.Title = "404 Page not found" n.Permalink = s.permalink("404.html") @@ -1277,7 +1291,7 @@ func (s *Site) RenderSitemap() error { page := &Page{} page.Date = s.Info.LastChange page.Site = &s.Info - page.Url = "/" + page.URL = "/" pages = append(pages, page) pages = append(pages, s.Pages...) @@ -1315,9 +1329,9 @@ func (s *Site) Stats() { } } -func (s *Site) setUrls(n *Node, in string) { - n.Url = helpers.URLizeAndPrep(in) - n.Permalink = s.permalink(n.Url) +func (s *Site) setURLs(n *Node, in string) { + n.URL = helpers.URLizeAndPrep(in) + n.Permalink = s.permalink(n.URL) n.RSSLink = s.permalink(in + ".xml") } diff --git a/target/page_test.go b/target/page_test.go index 250a3e3d1..20f5649ed 100644 --- a/target/page_test.go +++ b/target/page_test.go @@ -62,7 +62,7 @@ func TestPageTranslatorBase(t *testing.T) { } } -func TestTranslateUglyUrls(t *testing.T) { +func TestTranslateUglyURLs(t *testing.T) { tests := []struct { content string expected string diff --git a/tpl/template.go b/tpl/template.go index 499e83186..dedb94f8e 100644 --- a/tpl/template.go +++ b/tpl/template.go @@ -938,7 +938,7 @@ func SafeHTML(text string) template.HTML { return template.HTML(text) } -// "safeHtmlAttr" is currently disabled, pending further discussion +// "safeHTMLAttr" is currently disabled, pending further discussion // on its use case. 
2015-01-19 func SafeHTMLAttr(text string) template.HTMLAttr { return template.HTMLAttr(text) @@ -1308,11 +1308,8 @@ func init() { "isset": IsSet, "echoParam": ReturnWhenSet, "safeHTML": SafeHTML, - "safeHtml": SafeHTML, "safeCSS": SafeCSS, - "safeCss": SafeCSS, "safeURL": SafeURL, - "safeUrl": SafeURL, "markdownify": Markdownify, "first": First, "where": Where, @@ -1337,11 +1334,35 @@ func init() { "trim": Trim, "dateFormat": DateFormat, "getJSON": GetJSON, - "getJson": GetJSON, "getCSV": GetCSV, - "getCsv": GetCSV, "seq": helpers.Seq, "getenv": func(varName string) string { return os.Getenv(varName) }, + + // "getJson" is deprecated. Will be removed in 0.15. + "getJson": func(urlParts ...string) interface{} { + helpers.Deprecated("Template", "getJson", "getJSON") + return GetJSON(urlParts...) + }, + // "getJson" is deprecated. Will be removed in 0.15. + "getCsv": func(sep string, urlParts ...string) [][]string { + helpers.Deprecated("Template", "getCsv", "getCSV") + return GetCSV(sep, urlParts...) + }, + // "safeHtml" is deprecated. Will be removed in 0.15. + "safeHtml": func(text string) template.HTML { + helpers.Deprecated("Template", "safeHtml", "safeHTML") + return SafeHTML(text) + }, + // "safeCss" is deprecated. Will be removed in 0.15. + "safeCss": func(text string) template.CSS { + helpers.Deprecated("Template", "safeCss", "safeCSS") + return SafeCSS(text) + }, + // "safeUrl" is deprecated. Will be removed in 0.15. + "safeUrl": func(text string) template.URL { + helpers.Deprecated("Template", "safeUrl", "safeURL") + return SafeURL(text) + }, } } diff --git a/tpl/template_embedded.go b/tpl/template_embedded.go index 420ca20cb..94d54cab4 100644 --- a/tpl/template_embedded.go +++ b/tpl/template_embedded.go @@ -55,13 +55,13 @@ func (t *GoHTMLTemplate) EmbedTemplates() { <managingEditor>{{.}}{{ with $.Site.Author.name }} ({{.}}){{end}}</managingEditor>{{end}}{{ with .Site.Author.email }} <webMaster>{{.}}{{ with $.Site.Author.name }} ({{.}}){{end}}</webMaster>{{end}}{{ with .Site.Copyright }} <copyright>{{.}}</copyright>{{end}}{{ if not .Date.IsZero }} - <lastBuildDate>{{ .Date.Format "Mon, 02 Jan 2006 15:04:05 -0700" | safeHtml }}</lastBuildDate>{{ end }} - <atom:link href="{{.Url}}" rel="self" type="application/rss+xml" /> + <lastBuildDate>{{ .Date.Format "Mon, 02 Jan 2006 15:04:05 -0700" | safeHTML }}</lastBuildDate>{{ end }} + <atom:link href="{{.URL}}" rel="self" type="application/rss+xml" /> {{ range first 15 .Data.Pages }} <item> <title>{{ .Title }}</title> <link>{{ .Permalink }}</link> - <pubDate>{{ .Date.Format "Mon, 02 Jan 2006 15:04:05 -0700" | safeHtml }}</pubDate> + <pubDate>{{ .Date.Format "Mon, 02 Jan 2006 15:04:05 -0700" | safeHTML }}</pubDate> {{ with .Site.Author.email }}<author>{{.}}{{ with $.Site.Author.name }} ({{.}}){{end}}</author>{{end}} <guid>{{ .Permalink }}</guid> <description>{{ .Content | html }}</description> @@ -74,7 +74,7 @@ func (t *GoHTMLTemplate) EmbedTemplates() { {{ range .Data.Pages }} <url> <loc>{{ .Permalink }}</loc>{{ if not .Date.IsZero }} - <lastmod>{{ safeHtml ( .Date.Format "2006-01-02T15:04:05-07:00" ) }}</lastmod>{{ end }}{{ with .Sitemap.ChangeFreq }} + <lastmod>{{ safeHTML ( .Date.Format "2006-01-02T15:04:05-07:00" ) }}</lastmod>{{ end }}{{ with .Sitemap.ChangeFreq }} <changefreq>{{ . 
}}</changefreq>{{ end }}{{ if ge .Sitemap.Priority 0.0 }} <priority>{{ .Sitemap.Priority }}</priority>{{ end }} </url> @@ -86,24 +86,24 @@ func (t *GoHTMLTemplate) EmbedTemplates() { <ul class="pagination"> {{ with $pag.First }} <li> - <a href="{{ .Url }}" aria-label="First"><span aria-hidden="true">««</span></a> + <a href="{{ .URL }}" aria-label="First"><span aria-hidden="true">««</span></a> </li> {{ end }} <li {{ if not $pag.HasPrev }}class="disabled"{{ end }}> - <a href="{{ if $pag.HasPrev }}{{ $pag.Prev.Url }}{{ end }}" aria-label="Previous"><span aria-hidden="true">«</span></a> + <a href="{{ if $pag.HasPrev }}{{ $pag.Prev.URL }}{{ end }}" aria-label="Previous"><span aria-hidden="true">«</span></a> </li> {{ range $pag.Pagers }} <li - {{ if eq . $pag }}class="active"{{ end }}><a href="{{ .Url }}">{{ .PageNumber }}</a></li> + {{ if eq . $pag }}class="active"{{ end }}><a href="{{ .URL }}">{{ .PageNumber }}</a></li> {{ end }} <li {{ if not $pag.HasNext }}class="disabled"{{ end }}> - <a href="{{ if $pag.HasNext }}{{ $pag.Next.Url }}{{ end }}" aria-label="Next"><span aria-hidden="true">»</span></a> + <a href="{{ if $pag.HasNext }}{{ $pag.Next.URL }}{{ end }}" aria-label="Next"><span aria-hidden="true">»</span></a> </li> {{ with $pag.Last }} <li> - <a href="{{ .Url }}" aria-label="Last"><span aria-hidden="true">»»</span></a> + <a href="{{ .URL }}" aria-label="Last"><span aria-hidden="true">»»</span></a> </li> {{ end }} </ul> @@ -134,7 +134,7 @@ func (t *GoHTMLTemplate) EmbedTemplates() { <meta property="og:image" content="{{ . }}" /> {{ end }}{{ end }} -{{ if not .Date.IsZero }}<meta property="og:updated_time" content="{{ .Date.Format "2006-01-02T15:04:05-07:00" | safeHtml }}"/>{{ end }}{{ with .Params.audio }} +{{ if not .Date.IsZero }}<meta property="og:updated_time" content="{{ .Date.Format "2006-01-02T15:04:05-07:00" | safeHTML }}"/>{{ end }}{{ with .Params.audio }} <meta property="og:audio" content="{{ . }}" />{{ end }}{{ with .Params.locale }} <meta property="og:locale" content="{{ . }}" />{{ end }}{{ with .Site.Params.title }} <meta property="og:site_name" content="{{ . }}" />{{ end }}{{ with .Params.videos }} @@ -193,8 +193,8 @@ func (t *GoHTMLTemplate) EmbedTemplates() { <meta itemprop="description" content="{{ with .Description }}{{ . }}{{ else }}{{if .IsPage}}{{ .Summary }}{{ else }}{{ with .Site.Params.description }}{{ . }}{{ end }}{{ end }}{{ end }}"> {{if .IsPage}}{{ $ISO8601 := "2006-01-02T15:04:05-07:00" }}{{ if not .PublishDate.IsZero }} -<meta itemprop="datePublished" content="{{ .PublishDate.Format $ISO8601 | safeHtml }}" />{{ end }} -{{ if not .Date.IsZero }}<meta itemprop="dateModified" content="{{ .Date.Format $ISO8601 | safeHtml }}" />{{ end }} +<meta itemprop="datePublished" content="{{ .PublishDate.Format $ISO8601 | safeHTML }}" />{{ end }} +{{ if not .Date.IsZero }}<meta itemprop="dateModified" content="{{ .Date.Format $ISO8601 | safeHTML }}" />{{ end }} <meta itemprop="wordCount" content="{{ .WordCount }}"> {{ with .Params.images }}{{ range first 6 . }} <meta itemprop="image" content="{{ . 
}}"> diff --git a/tpl/template_resources.go b/tpl/template_resources.go index 96f0b69ab..9c485b04f 100644 --- a/tpl/template_resources.go +++ b/tpl/template_resources.go @@ -38,7 +38,7 @@ type remoteLock struct { m map[string]*sync.Mutex } -// resLock locks an URL during download +// URLLock locks an URL during download func (l *remoteLock) URLLock(url string) { l.Lock() if _, ok := l.m[url]; !ok { @@ -48,7 +48,7 @@ func (l *remoteLock) URLLock(url string) { l.m[url].Lock() } -// resUnlock unlocks an URL when the download has been finished. Use only in defer calls. +// URLUnlock unlocks an URL when the download has been finished. Use only in defer calls. func (l *remoteLock) URLUnlock(url string) { l.RLock() defer l.RUnlock() @@ -57,7 +57,7 @@ func (l *remoteLock) URLUnlock(url string) { } } -// getFileID returns the cache ID for a string +// getCacheFileID returns the cache ID for a string func getCacheFileID(id string) string { return viper.GetString("CacheDir") + url.QueryEscape(id) } @@ -173,9 +173,9 @@ func resGetResource(url string) ([]byte, error) { return resGetLocal(url, hugofs.SourceFs) } -// GetJson expects one or n-parts of a URL to a resource which can either be a local or a remote one. +// GetJSON expects one or n-parts of a URL to a resource which can either be a local or a remote one. // If you provide multiple parts they will be joined together to the final URL. -// GetJson returns nil or parsed JSON to use in a short code. +// GetJSON returns nil or parsed JSON to use in a short code. func GetJSON(urlParts ...string) interface{} { url := strings.Join(urlParts, "") c, err := resGetResource(url) @@ -193,7 +193,7 @@ func GetJSON(urlParts ...string) interface{} { return v } -// parseCsv parses bytes of csv data into a slice slice string or an error +// parseCSV parses bytes of CSV data into a slice slice string or an error func parseCSV(c []byte, sep string) ([][]string, error) { if len(sep) != 1 { return nil, errors.New("Incorrect length of csv separator: " + sep) @@ -206,11 +206,11 @@ func parseCSV(c []byte, sep string) ([][]string, error) { return r.ReadAll() } -// GetCsv expects a data separator and one or n-parts of a URL to a resource which +// GetCSV expects a data separator and one or n-parts of a URL to a resource which // can either be a local or a remote one. // The data separator can be a comma, semi-colon, pipe, etc, but only one character. // If you provide multiple parts for the URL they will be joined together to the final URL. -// GetCsv returns nil or a slice slice to use in a short code. +// GetCSV returns nil or a slice slice to use in a short code. 
func GetCSV(sep string, urlParts ...string) [][]string { url := strings.Join(urlParts, "") c, err := resGetResource(url) diff --git a/tpl/template_test.go b/tpl/template_test.go index f8eff1f11..1197fa9bf 100644 --- a/tpl/template_test.go +++ b/tpl/template_test.go @@ -999,10 +999,10 @@ func TestSafeHTML(t *testing.T) { buf.Reset() err = tmpl.Execute(buf, SafeHTML(this.str)) if err != nil { - t.Errorf("[%d] execute template with an escaped string value by SafeHtml returns unexpected error: %s", i, err) + t.Errorf("[%d] execute template with an escaped string value by SafeHTML returns unexpected error: %s", i, err) } if buf.String() != this.expectWithEscape { - t.Errorf("[%d] execute template with an escaped string value by SafeHtml, got %v but expected %v", i, buf.String(), this.expectWithEscape) + t.Errorf("[%d] execute template with an escaped string value by SafeHTML, got %v but expected %v", i, buf.String(), this.expectWithEscape) } } } @@ -1034,10 +1034,10 @@ func TestSafeHTMLAttr(t *testing.T) { buf.Reset() err = tmpl.Execute(buf, SafeHTMLAttr(this.str)) if err != nil { - t.Errorf("[%d] execute template with an escaped string value by SafeHtmlAttr returns unexpected error: %s", i, err) + t.Errorf("[%d] execute template with an escaped string value by SafeHTMLAttr returns unexpected error: %s", i, err) } if buf.String() != this.expectWithEscape { - t.Errorf("[%d] execute template with an escaped string value by SafeHtmlAttr, got %v but expected %v", i, buf.String(), this.expectWithEscape) + t.Errorf("[%d] execute template with an escaped string value by SafeHTMLAttr, got %v but expected %v", i, buf.String(), this.expectWithEscape) } } } @@ -1069,10 +1069,10 @@ func TestSafeCSS(t *testing.T) { buf.Reset() err = tmpl.Execute(buf, SafeCSS(this.str)) if err != nil { - t.Errorf("[%d] execute template with an escaped string value by SafeCss returns unexpected error: %s", i, err) + t.Errorf("[%d] execute template with an escaped string value by SafeCSS returns unexpected error: %s", i, err) } if buf.String() != this.expectWithEscape { - t.Errorf("[%d] execute template with an escaped string value by SafeCss, got %v but expected %v", i, buf.String(), this.expectWithEscape) + t.Errorf("[%d] execute template with an escaped string value by SafeCSS, got %v but expected %v", i, buf.String(), this.expectWithEscape) } } } @@ -1104,10 +1104,10 @@ func TestSafeURL(t *testing.T) { buf.Reset() err = tmpl.Execute(buf, SafeURL(this.str)) if err != nil { - t.Errorf("[%d] execute template with an escaped string value by SafeUrl returns unexpected error: %s", i, err) + t.Errorf("[%d] execute template with an escaped string value by SafeURL returns unexpected error: %s", i, err) } if buf.String() != this.expectWithEscape { - t.Errorf("[%d] execute template with an escaped string value by SafeUrl, got %v but expected %v", i, buf.String(), this.expectWithEscape) + t.Errorf("[%d] execute template with an escaped string value by SafeURL, got %v but expected %v", i, buf.String(), this.expectWithEscape) } } } diff --git a/transform/absurl.go b/transform/absurl.go index 12f616eea..6fc62adcf 100644 --- a/transform/absurl.go +++ b/transform/absurl.go @@ -7,15 +7,15 @@ import ( var absURLInit sync.Once var ar *absURLReplacer -// for performance reasons, we reuse the first baseUrl given -func initAbsurlReplacer(baseURL string) { +// for performance reasons, we reuse the first baseURL given +func initAbsURLReplacer(baseURL string) { absURLInit.Do(func() { - ar = newAbsurlReplacer(baseURL) + ar = 
newAbsURLReplacer(baseURL) }) } func AbsURL(absURL string) (trs []link, err error) { - initAbsurlReplacer(absURL) + initAbsURLReplacer(absURL) trs = append(trs, func(content []byte) []byte { return ar.replaceInHTML(content) @@ -24,7 +24,7 @@ func AbsURL(absURL string) (trs []link, err error) { } func AbsURLInXML(absURL string) (trs []link, err error) { - initAbsurlReplacer(absURL) + initAbsURLReplacer(absURL) trs = append(trs, func(content []byte) []byte { return ar.replaceInXML(content) diff --git a/transform/absurlreplacer.go b/transform/absurlreplacer.go index a3c68eac9..2f2a5bd53 100644 --- a/transform/absurlreplacer.go +++ b/transform/absurlreplacer.go @@ -120,7 +120,7 @@ func checkCandidate(l *contentlexer) { } if bytes.HasPrefix(l.content[l.pos:], m.match) { - // check for schemaless urls + // check for schemaless URLs posAfter := l.pos + len(m.match) if int(posAfter) >= len(l.content) { return @@ -196,7 +196,7 @@ type absURLReplacer struct { xmlMatchers []absURLMatcher } -func newAbsurlReplacer(baseURL string) *absURLReplacer { +func newAbsURLReplacer(baseURL string) *absURLReplacer { u, _ := url.Parse(baseURL) base := strings.TrimRight(u.String(), "/") |